applied-ai-018 commited on
Commit
adee62a
·
verified ·
1 Parent(s): f0fcc45

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. llmeval-env/lib/python3.10/site-packages/numpy/core/_simd.cpython-310-x86_64-linux-gnu.so +3 -0
  3. llmeval-env/lib/python3.10/site-packages/numpy/core/tests/data/astype_copy.pkl +3 -0
  4. llmeval-env/lib/python3.10/site-packages/numpy/core/tests/data/numpy_2_0_array.pkl +3 -0
  5. llmeval-env/lib/python3.10/site-packages/numpy/distutils/__init__.py +64 -0
  6. llmeval-env/lib/python3.10/site-packages/numpy/distutils/__init__.pyi +4 -0
  7. llmeval-env/lib/python3.10/site-packages/numpy/distutils/armccompiler.py +26 -0
  8. llmeval-env/lib/python3.10/site-packages/numpy/distutils/ccompiler.py +826 -0
  9. llmeval-env/lib/python3.10/site-packages/numpy/distutils/conv_template.py +329 -0
  10. llmeval-env/lib/python3.10/site-packages/numpy/distutils/cpuinfo.py +683 -0
  11. llmeval-env/lib/python3.10/site-packages/numpy/distutils/extension.py +107 -0
  12. llmeval-env/lib/python3.10/site-packages/numpy/distutils/from_template.py +261 -0
  13. llmeval-env/lib/python3.10/site-packages/numpy/distutils/fujitsuccompiler.py +28 -0
  14. llmeval-env/lib/python3.10/site-packages/numpy/distutils/intelccompiler.py +111 -0
  15. llmeval-env/lib/python3.10/site-packages/numpy/distutils/lib2def.py +116 -0
  16. llmeval-env/lib/python3.10/site-packages/numpy/distutils/log.py +111 -0
  17. llmeval-env/lib/python3.10/site-packages/numpy/distutils/mingw32ccompiler.py +591 -0
  18. llmeval-env/lib/python3.10/site-packages/numpy/distutils/misc_util.py +2493 -0
  19. llmeval-env/lib/python3.10/site-packages/numpy/distutils/msvc9compiler.py +63 -0
  20. llmeval-env/lib/python3.10/site-packages/numpy/distutils/msvccompiler.py +76 -0
  21. llmeval-env/lib/python3.10/site-packages/numpy/distutils/npy_pkg_config.py +437 -0
  22. llmeval-env/lib/python3.10/site-packages/numpy/distutils/setup.py +17 -0
  23. llmeval-env/lib/python3.10/site-packages/numpy/distutils/system_info.py +0 -0
  24. llmeval-env/lib/python3.10/site-packages/numpy/distutils/unixccompiler.py +141 -0
  25. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__init__.py +0 -0
  26. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_hermite.cpython-310.pyc +0 -0
  27. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_hermite_e.cpython-310.pyc +0 -0
  28. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_polynomial.cpython-310.pyc +0 -0
  29. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_polyutils.cpython-310.pyc +0 -0
  30. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_hermite.py +555 -0
  31. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_legendre.py +568 -0
  32. llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_printing.py +530 -0
  33. llmeval-env/lib/python3.10/site-packages/numpy/random/__pycache__/__init__.cpython-310.pyc +0 -0
  34. llmeval-env/lib/python3.10/site-packages/numpy/random/__pycache__/_pickle.cpython-310.pyc +0 -0
  35. llmeval-env/lib/python3.10/site-packages/numpy/random/_common.pxd +106 -0
  36. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/__pycache__/extending.cpython-310.pyc +0 -0
  37. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/__pycache__/parse.cpython-310.pyc +0 -0
  38. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/extending.py +40 -0
  39. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/parse.py +54 -0
  40. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/extending.pyx +78 -0
  41. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/extending_distributions.pyx +117 -0
  42. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/meson.build +45 -0
  43. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/__pycache__/extending.cpython-310.pyc +0 -0
  44. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/__pycache__/extending_distributions.cpython-310.pyc +0 -0
  45. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/extending.py +84 -0
  46. llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/extending_distributions.py +67 -0
  47. llmeval-env/lib/python3.10/site-packages/numpy/random/_sfc64.pyi +28 -0
  48. llmeval-env/lib/python3.10/site-packages/numpy/random/lib/libnpyrandom.a +0 -0
  49. llmeval-env/lib/python3.10/site-packages/numpy/random/tests/__init__.py +0 -0
  50. llmeval-env/lib/python3.10/site-packages/numpy/random/tests/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -90,3 +90,4 @@ llmeval-env/lib/python3.10/site-packages/triton/_C/libtriton.so filter=lfs diff=
90
  llmeval-env/lib/python3.10/site-packages/yaml/_yaml.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
91
  llmeval-env/lib/python3.10/site-packages/numpy/core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
92
  llmeval-env/lib/python3.10/site-packages/sklearn/_loss/_loss.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
90
  llmeval-env/lib/python3.10/site-packages/yaml/_yaml.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
91
  llmeval-env/lib/python3.10/site-packages/numpy/core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
92
  llmeval-env/lib/python3.10/site-packages/sklearn/_loss/_loss.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
93
+ llmeval-env/lib/python3.10/site-packages/numpy/core/_simd.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
llmeval-env/lib/python3.10/site-packages/numpy/core/_simd.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b2c96bec20e3c7a59f8f78b30e7fd5142d015e42f2cbd27223c3e862c53e4113
3
+ size 3527040
llmeval-env/lib/python3.10/site-packages/numpy/core/tests/data/astype_copy.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9564b309cbf3441ff0a6e4468fddaca46230fab34f15c77d87025a455bdf59d9
3
+ size 716
llmeval-env/lib/python3.10/site-packages/numpy/core/tests/data/numpy_2_0_array.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:561d36b5dc82ca96bc35be10cdd5619c30225c43b6590adcc1cbce7430c5179c
3
+ size 718
llmeval-env/lib/python3.10/site-packages/numpy/distutils/__init__.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ An enhanced distutils, providing support for Fortran compilers, for BLAS,
3
+ LAPACK and other common libraries for numerical computing, and more.
4
+
5
+ Public submodules are::
6
+
7
+ misc_util
8
+ system_info
9
+ cpu_info
10
+ log
11
+ exec_command
12
+
13
+ For details, please see the *Packaging* and *NumPy Distutils User Guide*
14
+ sections of the NumPy Reference Guide.
15
+
16
+ For configuring the preference for and location of libraries like BLAS and
17
+ LAPACK, and for setting include paths and similar build options, please see
18
+ ``site.cfg.example`` in the root of the NumPy repository or sdist.
19
+
20
+ """
21
+
22
+ import warnings
23
+
24
+ # Must import local ccompiler ASAP in order to get
25
+ # customized CCompiler.spawn effective.
26
+ from . import ccompiler
27
+ from . import unixccompiler
28
+
29
+ from .npy_pkg_config import *
30
+
31
+ warnings.warn("\n\n"
32
+ " `numpy.distutils` is deprecated since NumPy 1.23.0, as a result\n"
33
+ " of the deprecation of `distutils` itself. It will be removed for\n"
34
+ " Python >= 3.12. For older Python versions it will remain present.\n"
35
+ " It is recommended to use `setuptools < 60.0` for those Python versions.\n"
36
+ " For more details, see:\n"
37
+ " https://numpy.org/devdocs/reference/distutils_status_migration.html \n\n",
38
+ DeprecationWarning, stacklevel=2
39
+ )
40
+ del warnings
41
+
42
+ # If numpy is installed, add distutils.test()
43
+ try:
44
+ from . import __config__
45
+ # Normally numpy is installed if the above import works, but an interrupted
46
+ # in-place build could also have left a __config__.py. In that case the
47
+ # next import may still fail, so keep it inside the try block.
48
+ from numpy._pytesttester import PytestTester
49
+ test = PytestTester(__name__)
50
+ del PytestTester
51
+ except ImportError:
52
+ pass
53
+
54
+
55
+ def customized_fcompiler(plat=None, compiler=None):
56
+ from numpy.distutils.fcompiler import new_fcompiler
57
+ c = new_fcompiler(plat=plat, compiler=compiler)
58
+ c.customize()
59
+ return c
60
+
61
+ def customized_ccompiler(plat=None, compiler=None, verbose=1):
62
+ c = ccompiler.new_compiler(plat=plat, compiler=compiler, verbose=verbose)
63
+ c.customize('')
64
+ return c
llmeval-env/lib/python3.10/site-packages/numpy/distutils/__init__.pyi ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ from typing import Any
2
+
3
+ # TODO: remove when the full numpy namespace is defined
4
+ def __getattr__(name: str) -> Any: ...
llmeval-env/lib/python3.10/site-packages/numpy/distutils/armccompiler.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from distutils.unixccompiler import UnixCCompiler
2
+
3
+ class ArmCCompiler(UnixCCompiler):
4
+
5
+ """
6
+ Arm compiler.
7
+ """
8
+
9
+ compiler_type = 'arm'
10
+ cc_exe = 'armclang'
11
+ cxx_exe = 'armclang++'
12
+
13
+ def __init__(self, verbose=0, dry_run=0, force=0):
14
+ UnixCCompiler.__init__(self, verbose, dry_run, force)
15
+ cc_compiler = self.cc_exe
16
+ cxx_compiler = self.cxx_exe
17
+ self.set_executables(compiler=cc_compiler +
18
+ ' -O3 -fPIC',
19
+ compiler_so=cc_compiler +
20
+ ' -O3 -fPIC',
21
+ compiler_cxx=cxx_compiler +
22
+ ' -O3 -fPIC',
23
+ linker_exe=cc_compiler +
24
+ ' -lamath',
25
+ linker_so=cc_compiler +
26
+ ' -lamath -shared')
llmeval-env/lib/python3.10/site-packages/numpy/distutils/ccompiler.py ADDED
@@ -0,0 +1,826 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+ import sys
4
+ import platform
5
+ import shlex
6
+ import time
7
+ import subprocess
8
+ from copy import copy
9
+ from pathlib import Path
10
+ from distutils import ccompiler
11
+ from distutils.ccompiler import (
12
+ compiler_class, gen_lib_options, get_default_compiler, new_compiler,
13
+ CCompiler
14
+ )
15
+ from distutils.errors import (
16
+ DistutilsExecError, DistutilsModuleError, DistutilsPlatformError,
17
+ CompileError, UnknownFileError
18
+ )
19
+ from distutils.sysconfig import customize_compiler
20
+ from distutils.version import LooseVersion
21
+
22
+ from numpy.distutils import log
23
+ from numpy.distutils.exec_command import (
24
+ filepath_from_subprocess_output, forward_bytes_to_stdout
25
+ )
26
+ from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \
27
+ get_num_build_jobs, \
28
+ _commandline_dep_string, \
29
+ sanitize_cxx_flags
30
+
31
+ # globals for parallel build management
32
+ import threading
33
+
34
+ _job_semaphore = None
35
+ _global_lock = threading.Lock()
36
+ _processing_files = set()
37
+
38
+
39
+ def _needs_build(obj, cc_args, extra_postargs, pp_opts):
40
+ """
41
+ Check if an objects needs to be rebuild based on its dependencies
42
+
43
+ Parameters
44
+ ----------
45
+ obj : str
46
+ object file
47
+
48
+ Returns
49
+ -------
50
+ bool
51
+ """
52
+ # defined in unixcompiler.py
53
+ dep_file = obj + '.d'
54
+ if not os.path.exists(dep_file):
55
+ return True
56
+
57
+ # dep_file is a makefile containing 'object: dependencies'
58
+ # formatted like posix shell (spaces escaped, \ line continuations)
59
+ # the last line contains the compiler commandline arguments as some
60
+ # projects may compile an extension multiple times with different
61
+ # arguments
62
+ with open(dep_file) as f:
63
+ lines = f.readlines()
64
+
65
+ cmdline =_commandline_dep_string(cc_args, extra_postargs, pp_opts)
66
+ last_cmdline = lines[-1]
67
+ if last_cmdline != cmdline:
68
+ return True
69
+
70
+ contents = ''.join(lines[:-1])
71
+ deps = [x for x in shlex.split(contents, posix=True)
72
+ if x != "\n" and not x.endswith(":")]
73
+
74
+ try:
75
+ t_obj = os.stat(obj).st_mtime
76
+
77
+ # check if any of the dependencies is newer than the object
78
+ # the dependencies includes the source used to create the object
79
+ for f in deps:
80
+ if os.stat(f).st_mtime > t_obj:
81
+ return True
82
+ except OSError:
83
+ # no object counts as newer (shouldn't happen if dep_file exists)
84
+ return True
85
+
86
+ return False
87
+
88
+
89
+ def replace_method(klass, method_name, func):
90
+ # Py3k does not have unbound method anymore, MethodType does not work
91
+ m = lambda self, *args, **kw: func(self, *args, **kw)
92
+ setattr(klass, method_name, m)
93
+
94
+
95
+ ######################################################################
96
+ ## Method that subclasses may redefine. But don't call this method,
97
+ ## it i private to CCompiler class and may return unexpected
98
+ ## results if used elsewhere. So, you have been warned..
99
+
100
+ def CCompiler_find_executables(self):
101
+ """
102
+ Does nothing here, but is called by the get_version method and can be
103
+ overridden by subclasses. In particular it is redefined in the `FCompiler`
104
+ class where more documentation can be found.
105
+
106
+ """
107
+ pass
108
+
109
+
110
+ replace_method(CCompiler, 'find_executables', CCompiler_find_executables)
111
+
112
+
113
+ # Using customized CCompiler.spawn.
114
+ def CCompiler_spawn(self, cmd, display=None, env=None):
115
+ """
116
+ Execute a command in a sub-process.
117
+
118
+ Parameters
119
+ ----------
120
+ cmd : str
121
+ The command to execute.
122
+ display : str or sequence of str, optional
123
+ The text to add to the log file kept by `numpy.distutils`.
124
+ If not given, `display` is equal to `cmd`.
125
+ env : a dictionary for environment variables, optional
126
+
127
+ Returns
128
+ -------
129
+ None
130
+
131
+ Raises
132
+ ------
133
+ DistutilsExecError
134
+ If the command failed, i.e. the exit status was not 0.
135
+
136
+ """
137
+ env = env if env is not None else dict(os.environ)
138
+ if display is None:
139
+ display = cmd
140
+ if is_sequence(display):
141
+ display = ' '.join(list(display))
142
+ log.info(display)
143
+ try:
144
+ if self.verbose:
145
+ subprocess.check_output(cmd, env=env)
146
+ else:
147
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env)
148
+ except subprocess.CalledProcessError as exc:
149
+ o = exc.output
150
+ s = exc.returncode
151
+ except OSError as e:
152
+ # OSError doesn't have the same hooks for the exception
153
+ # output, but exec_command() historically would use an
154
+ # empty string for EnvironmentError (base class for
155
+ # OSError)
156
+ # o = b''
157
+ # still that would make the end-user lost in translation!
158
+ o = f"\n\n{e}\n\n\n"
159
+ try:
160
+ o = o.encode(sys.stdout.encoding)
161
+ except AttributeError:
162
+ o = o.encode('utf8')
163
+ # status previously used by exec_command() for parent
164
+ # of OSError
165
+ s = 127
166
+ else:
167
+ # use a convenience return here so that any kind of
168
+ # caught exception will execute the default code after the
169
+ # try / except block, which handles various exceptions
170
+ return None
171
+
172
+ if is_sequence(cmd):
173
+ cmd = ' '.join(list(cmd))
174
+
175
+ if self.verbose:
176
+ forward_bytes_to_stdout(o)
177
+
178
+ if re.search(b'Too many open files', o):
179
+ msg = '\nTry rerunning setup command until build succeeds.'
180
+ else:
181
+ msg = ''
182
+ raise DistutilsExecError('Command "%s" failed with exit status %d%s' %
183
+ (cmd, s, msg))
184
+
185
+ replace_method(CCompiler, 'spawn', CCompiler_spawn)
186
+
187
+ def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
188
+ """
189
+ Return the name of the object files for the given source files.
190
+
191
+ Parameters
192
+ ----------
193
+ source_filenames : list of str
194
+ The list of paths to source files. Paths can be either relative or
195
+ absolute, this is handled transparently.
196
+ strip_dir : bool, optional
197
+ Whether to strip the directory from the returned paths. If True,
198
+ the file name prepended by `output_dir` is returned. Default is False.
199
+ output_dir : str, optional
200
+ If given, this path is prepended to the returned paths to the
201
+ object files.
202
+
203
+ Returns
204
+ -------
205
+ obj_names : list of str
206
+ The list of paths to the object files corresponding to the source
207
+ files in `source_filenames`.
208
+
209
+ """
210
+ if output_dir is None:
211
+ output_dir = ''
212
+ obj_names = []
213
+ for src_name in source_filenames:
214
+ base, ext = os.path.splitext(os.path.normpath(src_name))
215
+ base = os.path.splitdrive(base)[1] # Chop off the drive
216
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
217
+ if base.startswith('..'):
218
+ # Resolve starting relative path components, middle ones
219
+ # (if any) have been handled by os.path.normpath above.
220
+ i = base.rfind('..')+2
221
+ d = base[:i]
222
+ d = os.path.basename(os.path.abspath(d))
223
+ base = d + base[i:]
224
+ if ext not in self.src_extensions:
225
+ raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
226
+ if strip_dir:
227
+ base = os.path.basename(base)
228
+ obj_name = os.path.join(output_dir, base + self.obj_extension)
229
+ obj_names.append(obj_name)
230
+ return obj_names
231
+
232
+ replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames)
233
+
234
+ def CCompiler_compile(self, sources, output_dir=None, macros=None,
235
+ include_dirs=None, debug=0, extra_preargs=None,
236
+ extra_postargs=None, depends=None):
237
+ """
238
+ Compile one or more source files.
239
+
240
+ Please refer to the Python distutils API reference for more details.
241
+
242
+ Parameters
243
+ ----------
244
+ sources : list of str
245
+ A list of filenames
246
+ output_dir : str, optional
247
+ Path to the output directory.
248
+ macros : list of tuples
249
+ A list of macro definitions.
250
+ include_dirs : list of str, optional
251
+ The directories to add to the default include file search path for
252
+ this compilation only.
253
+ debug : bool, optional
254
+ Whether or not to output debug symbols in or alongside the object
255
+ file(s).
256
+ extra_preargs, extra_postargs : ?
257
+ Extra pre- and post-arguments.
258
+ depends : list of str, optional
259
+ A list of file names that all targets depend on.
260
+
261
+ Returns
262
+ -------
263
+ objects : list of str
264
+ A list of object file names, one per source file `sources`.
265
+
266
+ Raises
267
+ ------
268
+ CompileError
269
+ If compilation fails.
270
+
271
+ """
272
+ global _job_semaphore
273
+
274
+ jobs = get_num_build_jobs()
275
+
276
+ # setup semaphore to not exceed number of compile jobs when parallelized at
277
+ # extension level (python >= 3.5)
278
+ with _global_lock:
279
+ if _job_semaphore is None:
280
+ _job_semaphore = threading.Semaphore(jobs)
281
+
282
+ if not sources:
283
+ return []
284
+ from numpy.distutils.fcompiler import (FCompiler,
285
+ FORTRAN_COMMON_FIXED_EXTENSIONS,
286
+ has_f90_header)
287
+ if isinstance(self, FCompiler):
288
+ display = []
289
+ for fc in ['f77', 'f90', 'fix']:
290
+ fcomp = getattr(self, 'compiler_'+fc)
291
+ if fcomp is None:
292
+ continue
293
+ display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp)))
294
+ display = '\n'.join(display)
295
+ else:
296
+ ccomp = self.compiler_so
297
+ display = "C compiler: %s\n" % (' '.join(ccomp),)
298
+ log.info(display)
299
+ macros, objects, extra_postargs, pp_opts, build = \
300
+ self._setup_compile(output_dir, macros, include_dirs, sources,
301
+ depends, extra_postargs)
302
+ cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
303
+ display = "compile options: '%s'" % (' '.join(cc_args))
304
+ if extra_postargs:
305
+ display += "\nextra options: '%s'" % (' '.join(extra_postargs))
306
+ log.info(display)
307
+
308
+ def single_compile(args):
309
+ obj, (src, ext) = args
310
+ if not _needs_build(obj, cc_args, extra_postargs, pp_opts):
311
+ return
312
+
313
+ # check if we are currently already processing the same object
314
+ # happens when using the same source in multiple extensions
315
+ while True:
316
+ # need explicit lock as there is no atomic check and add with GIL
317
+ with _global_lock:
318
+ # file not being worked on, start working
319
+ if obj not in _processing_files:
320
+ _processing_files.add(obj)
321
+ break
322
+ # wait for the processing to end
323
+ time.sleep(0.1)
324
+
325
+ try:
326
+ # retrieve slot from our #job semaphore and build
327
+ with _job_semaphore:
328
+ self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
329
+ finally:
330
+ # register being done processing
331
+ with _global_lock:
332
+ _processing_files.remove(obj)
333
+
334
+
335
+ if isinstance(self, FCompiler):
336
+ objects_to_build = list(build.keys())
337
+ f77_objects, other_objects = [], []
338
+ for obj in objects:
339
+ if obj in objects_to_build:
340
+ src, ext = build[obj]
341
+ if self.compiler_type=='absoft':
342
+ obj = cyg2win32(obj)
343
+ src = cyg2win32(src)
344
+ if Path(src).suffix.lower() in FORTRAN_COMMON_FIXED_EXTENSIONS \
345
+ and not has_f90_header(src):
346
+ f77_objects.append((obj, (src, ext)))
347
+ else:
348
+ other_objects.append((obj, (src, ext)))
349
+
350
+ # f77 objects can be built in parallel
351
+ build_items = f77_objects
352
+ # build f90 modules serial, module files are generated during
353
+ # compilation and may be used by files later in the list so the
354
+ # ordering is important
355
+ for o in other_objects:
356
+ single_compile(o)
357
+ else:
358
+ build_items = build.items()
359
+
360
+ if len(build) > 1 and jobs > 1:
361
+ # build parallel
362
+ from concurrent.futures import ThreadPoolExecutor
363
+ with ThreadPoolExecutor(jobs) as pool:
364
+ res = pool.map(single_compile, build_items)
365
+ list(res) # access result to raise errors
366
+ else:
367
+ # build serial
368
+ for o in build_items:
369
+ single_compile(o)
370
+
371
+ # Return *all* object filenames, not just the ones we just built.
372
+ return objects
373
+
374
+ replace_method(CCompiler, 'compile', CCompiler_compile)
375
+
376
+ def CCompiler_customize_cmd(self, cmd, ignore=()):
377
+ """
378
+ Customize compiler using distutils command.
379
+
380
+ Parameters
381
+ ----------
382
+ cmd : class instance
383
+ An instance inheriting from `distutils.cmd.Command`.
384
+ ignore : sequence of str, optional
385
+ List of `CCompiler` commands (without ``'set_'``) that should not be
386
+ altered. Strings that are checked for are:
387
+ ``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs',
388
+ 'rpath', 'link_objects')``.
389
+
390
+ Returns
391
+ -------
392
+ None
393
+
394
+ """
395
+ log.info('customize %s using %s' % (self.__class__.__name__,
396
+ cmd.__class__.__name__))
397
+
398
+ if (
399
+ hasattr(self, 'compiler') and
400
+ 'clang' in self.compiler[0] and
401
+ not (platform.machine() == 'arm64' and sys.platform == 'darwin')
402
+ ):
403
+ # clang defaults to a non-strict floating error point model.
404
+ # However, '-ftrapping-math' is not currently supported (2023-04-08)
405
+ # for macosx_arm64.
406
+ # Since NumPy and most Python libs give warnings for these, override:
407
+ self.compiler.append('-ftrapping-math')
408
+ self.compiler_so.append('-ftrapping-math')
409
+
410
+ def allow(attr):
411
+ return getattr(cmd, attr, None) is not None and attr not in ignore
412
+
413
+ if allow('include_dirs'):
414
+ self.set_include_dirs(cmd.include_dirs)
415
+ if allow('define'):
416
+ for (name, value) in cmd.define:
417
+ self.define_macro(name, value)
418
+ if allow('undef'):
419
+ for macro in cmd.undef:
420
+ self.undefine_macro(macro)
421
+ if allow('libraries'):
422
+ self.set_libraries(self.libraries + cmd.libraries)
423
+ if allow('library_dirs'):
424
+ self.set_library_dirs(self.library_dirs + cmd.library_dirs)
425
+ if allow('rpath'):
426
+ self.set_runtime_library_dirs(cmd.rpath)
427
+ if allow('link_objects'):
428
+ self.set_link_objects(cmd.link_objects)
429
+
430
+ replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd)
431
+
432
+ def _compiler_to_string(compiler):
433
+ props = []
434
+ mx = 0
435
+ keys = list(compiler.executables.keys())
436
+ for key in ['version', 'libraries', 'library_dirs',
437
+ 'object_switch', 'compile_switch',
438
+ 'include_dirs', 'define', 'undef', 'rpath', 'link_objects']:
439
+ if key not in keys:
440
+ keys.append(key)
441
+ for key in keys:
442
+ if hasattr(compiler, key):
443
+ v = getattr(compiler, key)
444
+ mx = max(mx, len(key))
445
+ props.append((key, repr(v)))
446
+ fmt = '%-' + repr(mx+1) + 's = %s'
447
+ lines = [fmt % prop for prop in props]
448
+ return '\n'.join(lines)
449
+
450
+ def CCompiler_show_customization(self):
451
+ """
452
+ Print the compiler customizations to stdout.
453
+
454
+ Parameters
455
+ ----------
456
+ None
457
+
458
+ Returns
459
+ -------
460
+ None
461
+
462
+ Notes
463
+ -----
464
+ Printing is only done if the distutils log threshold is < 2.
465
+
466
+ """
467
+ try:
468
+ self.get_version()
469
+ except Exception:
470
+ pass
471
+ if log._global_log.threshold<2:
472
+ print('*'*80)
473
+ print(self.__class__)
474
+ print(_compiler_to_string(self))
475
+ print('*'*80)
476
+
477
+ replace_method(CCompiler, 'show_customization', CCompiler_show_customization)
478
+
479
+ def CCompiler_customize(self, dist, need_cxx=0):
480
+ """
481
+ Do any platform-specific customization of a compiler instance.
482
+
483
+ This method calls `distutils.sysconfig.customize_compiler` for
484
+ platform-specific customization, as well as optionally remove a flag
485
+ to suppress spurious warnings in case C++ code is being compiled.
486
+
487
+ Parameters
488
+ ----------
489
+ dist : object
490
+ This parameter is not used for anything.
491
+ need_cxx : bool, optional
492
+ Whether or not C++ has to be compiled. If so (True), the
493
+ ``"-Wstrict-prototypes"`` option is removed to prevent spurious
494
+ warnings. Default is False.
495
+
496
+ Returns
497
+ -------
498
+ None
499
+
500
+ Notes
501
+ -----
502
+ All the default options used by distutils can be extracted with::
503
+
504
+ from distutils import sysconfig
505
+ sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
506
+ 'CCSHARED', 'LDSHARED', 'SO')
507
+
508
+ """
509
+ # See FCompiler.customize for suggested usage.
510
+ log.info('customize %s' % (self.__class__.__name__))
511
+ customize_compiler(self)
512
+ if need_cxx:
513
+ # In general, distutils uses -Wstrict-prototypes, but this option is
514
+ # not valid for C++ code, only for C. Remove it if it's there to
515
+ # avoid a spurious warning on every compilation.
516
+ try:
517
+ self.compiler_so.remove('-Wstrict-prototypes')
518
+ except (AttributeError, ValueError):
519
+ pass
520
+
521
+ if hasattr(self, 'compiler') and 'cc' in self.compiler[0]:
522
+ if not self.compiler_cxx:
523
+ if self.compiler[0].startswith('gcc'):
524
+ a, b = 'gcc', 'g++'
525
+ else:
526
+ a, b = 'cc', 'c++'
527
+ self.compiler_cxx = [self.compiler[0].replace(a, b)]\
528
+ + self.compiler[1:]
529
+ else:
530
+ if hasattr(self, 'compiler'):
531
+ log.warn("#### %s #######" % (self.compiler,))
532
+ if not hasattr(self, 'compiler_cxx'):
533
+ log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__)
534
+
535
+
536
+ # check if compiler supports gcc style automatic dependencies
537
+ # run on every extension so skip for known good compilers
538
+ if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or
539
+ 'g++' in self.compiler[0] or
540
+ 'clang' in self.compiler[0]):
541
+ self._auto_depends = True
542
+ elif os.name == 'posix':
543
+ import tempfile
544
+ import shutil
545
+ tmpdir = tempfile.mkdtemp()
546
+ try:
547
+ fn = os.path.join(tmpdir, "file.c")
548
+ with open(fn, "w") as f:
549
+ f.write("int a;\n")
550
+ self.compile([fn], output_dir=tmpdir,
551
+ extra_preargs=['-MMD', '-MF', fn + '.d'])
552
+ self._auto_depends = True
553
+ except CompileError:
554
+ self._auto_depends = False
555
+ finally:
556
+ shutil.rmtree(tmpdir)
557
+
558
+ return
559
+
560
+ replace_method(CCompiler, 'customize', CCompiler_customize)
561
+
562
+ def simple_version_match(pat=r'[-.\d]+', ignore='', start=''):
563
+ """
564
+ Simple matching of version numbers, for use in CCompiler and FCompiler.
565
+
566
+ Parameters
567
+ ----------
568
+ pat : str, optional
569
+ A regular expression matching version numbers.
570
+ Default is ``r'[-.\\d]+'``.
571
+ ignore : str, optional
572
+ A regular expression matching patterns to skip.
573
+ Default is ``''``, in which case nothing is skipped.
574
+ start : str, optional
575
+ A regular expression matching the start of where to start looking
576
+ for version numbers.
577
+ Default is ``''``, in which case searching is started at the
578
+ beginning of the version string given to `matcher`.
579
+
580
+ Returns
581
+ -------
582
+ matcher : callable
583
+ A function that is appropriate to use as the ``.version_match``
584
+ attribute of a `CCompiler` class. `matcher` takes a single parameter,
585
+ a version string.
586
+
587
+ """
588
+ def matcher(self, version_string):
589
+ # version string may appear in the second line, so getting rid
590
+ # of new lines:
591
+ version_string = version_string.replace('\n', ' ')
592
+ pos = 0
593
+ if start:
594
+ m = re.match(start, version_string)
595
+ if not m:
596
+ return None
597
+ pos = m.end()
598
+ while True:
599
+ m = re.search(pat, version_string[pos:])
600
+ if not m:
601
+ return None
602
+ if ignore and re.match(ignore, m.group(0)):
603
+ pos = m.end()
604
+ continue
605
+ break
606
+ return m.group(0)
607
+ return matcher
608
+
609
def CCompiler_get_version(self, force=False, ok_status=[0]):
    """
    Return compiler version, or None if compiler is not available.

    Parameters
    ----------
    force : bool, optional
        If True, force a new determination of the version, even if the
        compiler already has a version attribute. Default is False.
    ok_status : list of int, optional
        The list of status values returned by the version look-up process
        for which a version string is returned. If the status value is not
        in `ok_status`, None is returned. Default is ``[0]``.

    Returns
    -------
    version : str or None
        Version string, in the format of `distutils.version.LooseVersion`.

    """
    # NOTE: ok_status is a mutable default argument, but it is only read,
    # never mutated, so the shared-default pitfall does not apply here.
    if not force and hasattr(self, 'version'):
        return self.version
    self.find_executables()
    try:
        version_cmd = self.version_cmd
    except AttributeError:
        return None
    if not version_cmd or not version_cmd[0]:
        return None
    try:
        matcher = self.version_match
    except AttributeError:
        # Fall back to a matcher built from `version_pattern`; the pattern
        # must define a named group called 'version'.
        try:
            pat = self.version_pattern
        except AttributeError:
            return None
        def matcher(version_string):
            m = re.match(pat, version_string)
            if not m:
                return None
            version = m.group('version')
            return version

    try:
        output = subprocess.check_output(version_cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        output = exc.output
        status = exc.returncode
    except OSError:
        # match the historical returns for a parent
        # exception class caught by exec_command()
        status = 127
        output = b''
    else:
        # output isn't actually a filepath but we do this
        # for now to match previous distutils behavior
        output = filepath_from_subprocess_output(output)
        status = 0

    version = None
    if status in ok_status:
        version = matcher(output)
        if version:
            version = LooseVersion(version)
    # Cache the result (possibly None) so later calls are cheap.
    self.version = version
    return version

replace_method(CCompiler, 'get_version', CCompiler_get_version)
677
+
678
def CCompiler_cxx_compiler(self):
    """
    Return the C++ compiler.

    Parameters
    ----------
    None

    Returns
    -------
    cxx : class instance
        The C++ compiler, as a `CCompiler` instance.

    """
    # MSVC-style drivers compile both C and C++; nothing to adjust.
    if self.compiler_type in ('msvc', 'intelw', 'intelemw'):
        return self

    cxx = copy(self)
    cxx.compiler_cxx = cxx.compiler_cxx  # no-op, kept for historical reasons
    # Swap the C driver for the C++ one and drop flags invalid for C++.
    cxx.compiler_so = [cxx.compiler_cxx[0]] + \
                      sanitize_cxx_flags(cxx.compiler_so[1:])
    if (sys.platform.startswith(('aix', 'os400')) and
            'ld_so_aix' in cxx.linker_so[0]):
        # AIX needs the ld_so_aix script included with Python
        cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \
                        + cxx.linker_so[2:]
        if sys.platform.startswith('os400'):
            # This is required by IBM i 7.4 and previous for PRId64 in printf() call.
            cxx.compiler_so.append('-D__STDC_FORMAT_MACROS')
            # This works around a bug of gcc 10.3, which fails to handle
            # the TLS init.
            cxx.compiler_so.append('-fno-extern-tls-init')
            cxx.linker_so.append('-fno-extern-tls-init')
    else:
        cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:]
    return cxx

replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler)
715
+
716
# Register the extra C compilers that numpy.distutils provides beyond the
# ones shipped with distutils itself; values are
# (module_name, class_name, description).
compiler_class['intel'] = ('intelccompiler', 'IntelCCompiler',
                           "Intel C Compiler for 32-bit applications")
compiler_class['intele'] = ('intelccompiler', 'IntelItaniumCCompiler',
                            "Intel C Itanium Compiler for Itanium-based applications")
compiler_class['intelem'] = ('intelccompiler', 'IntelEM64TCCompiler',
                             "Intel C Compiler for 64-bit applications")
compiler_class['intelw'] = ('intelccompiler', 'IntelCCompilerW',
                            "Intel C Compiler for 32-bit applications on Windows")
compiler_class['intelemw'] = ('intelccompiler', 'IntelEM64TCCompilerW',
                              "Intel C Compiler for 64-bit applications on Windows")
compiler_class['pathcc'] = ('pathccompiler', 'PathScaleCCompiler',
                            "PathScale Compiler for SiCortex-based applications")
compiler_class['arm'] = ('armccompiler', 'ArmCCompiler',
                         "Arm C Compiler")
compiler_class['fujitsu'] = ('fujitsuccompiler', 'FujitsuCCompiler',
                             "Fujitsu C Compiler")

# Make the extra compilers selectable by platform in distutils' lookup.
ccompiler._default_compilers += (('linux.*', 'intel'),
                                 ('linux.*', 'intele'),
                                 ('linux.*', 'intelem'),
                                 ('linux.*', 'pathcc'),
                                 ('nt', 'intelw'),
                                 ('nt', 'intelemw'))

if sys.platform == 'win32':
    compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
                                 "Mingw32 port of GNU C Compiler for Win32"\
                                 "(for MSC built Python)")
    if mingw32():
        # On windows platforms, we want to default to mingw32 (gcc)
        # because msvc can't build blitz stuff.
        log.info('Setting mingw32 as default compiler for nt.')
        ccompiler._default_compilers = (('nt', 'mingw32'),) \
                                       + ccompiler._default_compilers
750
+
751
+
752
# Keep a handle on the stock implementation before shadowing it.
_distutils_new_compiler = new_compiler
def new_compiler (plat=None,
                  compiler=None,
                  verbose=None,
                  dry_run=0,
                  force=0):
    """Instantiate a CCompiler, preferring numpy.distutils compiler classes
    and falling back to the plain distutils module of the same name.

    Drop-in replacement for ``distutils.ccompiler.new_compiler``.
    """
    # Try first C compilers from numpy.distutils.
    if verbose is None:
        # Be verbose whenever the global log threshold lets INFO through.
        verbose = log.get_threshold() <= log.INFO
    if plat is None:
        plat = os.name
    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)
    module_name = "numpy.distutils." + module_name
    try:
        __import__ (module_name)
    except ImportError as e:
        # Not provided by numpy.distutils; retry with the distutils module
        # name (strip the leading "numpy." prefix, 6 characters).
        msg = str(e)
        log.info('%s in numpy.distutils; trying from distutils',
                 str(msg))
        module_name = module_name[6:]
        try:
            __import__(module_name)
        except ImportError as e:
            msg = str(e)
            raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \
                  module_name)
    try:
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except KeyError:
        raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " +
                                    "in module '%s'") % (class_name, module_name))
    compiler = klass(None, dry_run, force)
    compiler.verbose = verbose
    log.debug('new_compiler returns %s' % (klass))
    return compiler

ccompiler.new_compiler = new_compiler
798
+
799
# Preserve the stock distutils implementation before wrapping it.
_distutils_gen_lib_options = gen_lib_options
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Flattening wrapper around distutils' ``gen_lib_options``.

    The CPython version only unpacks list results coming from
    ``compiler.runtime_library_dir_option``; this wrapper extends that
    behavior to ``library_dir_option``, ``library_option`` and
    ``find_library_file`` as well, so any of them may return a list.
    """
    raw_opts = _distutils_gen_lib_options(compiler, library_dirs,
                                          runtime_library_dirs, libraries)
    flattened = []
    for opt in raw_opts:
        if is_sequence(opt):
            flattened.extend(list(opt))
        else:
            flattened.append(opt)
    return flattened
ccompiler.gen_lib_options = gen_lib_options

# Also fix up the various compiler modules that did
#   from distutils.ccompiler import gen_lib_options
# at import time.  mwerks is skipped: Classic Mac is not supported.
for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']:
    _m = sys.modules.get('distutils.' + _cc + 'compiler')
    if _m is not None:
        setattr(_m, 'gen_lib_options', gen_lib_options)
826
+
llmeval-env/lib/python3.10/site-packages/numpy/distutils/conv_template.py ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ takes templated file .xxx.src and produces .xxx file where .xxx is
4
+ .i or .c or .h, using the following template rules
5
+
6
+ /**begin repeat -- on a line by itself marks the start of a repeated code
7
+ segment
8
+ /**end repeat**/ -- on a line by itself marks it's end
9
+
10
+ After the /**begin repeat and before the */, all the named templates are placed
11
+ these should all have the same number of replacements
12
+
13
+ Repeat blocks can be nested, with each nested block labeled with its depth,
14
+ i.e.
15
+ /**begin repeat1
16
+ *....
17
+ */
18
+ /**end repeat1**/
19
+
20
+ When using nested loops, you can optionally exclude particular
21
+ combinations of the variables using (inside the comment portion of the inner loop):
22
+
23
+ :exclude: var1=value1, var2=value2, ...
24
+
25
+ This will exclude the pattern where var1 is value1 and var2 is value2 when
26
+ the result is being generated.
27
+
28
+
29
+ In the main body each replace will use one entry from the list of named replacements
30
+
31
+ Note that all #..# forms in a block must have the same number of
32
+ comma-separated entries.
33
+
34
+ Example:
35
+
36
+ An input file containing
37
+
38
+ /**begin repeat
39
+ * #a = 1,2,3#
40
+ * #b = 1,2,3#
41
+ */
42
+
43
+ /**begin repeat1
44
+ * #c = ted, jim#
45
+ */
46
+ @a@, @b@, @c@
47
+ /**end repeat1**/
48
+
49
+ /**end repeat**/
50
+
51
+ produces
52
+
53
+ line 1 "template.c.src"
54
+
55
+ /*
56
+ *********************************************************************
57
+ ** This file was autogenerated from a template DO NOT EDIT!!**
58
+ ** Changes should be made to the original source (.src) file **
59
+ *********************************************************************
60
+ */
61
+
62
+ #line 9
63
+ 1, 1, ted
64
+
65
+ #line 9
66
+ 1, 1, jim
67
+
68
+ #line 9
69
+ 2, 2, ted
70
+
71
+ #line 9
72
+ 2, 2, jim
73
+
74
+ #line 9
75
+ 3, 3, ted
76
+
77
+ #line 9
78
+ 3, 3, jim
79
+
80
+ """
81
+
82
+ __all__ = ['process_str', 'process_file']
83
+
84
+ import os
85
+ import sys
86
+ import re
87
+
88
# Names for replacement that are already global (shared by every template
# expansion); currently empty.
global_names = {}

# Header placed at the front of each processed file, warning readers that
# the file is generated output.
header =\
"""
/*
*****************************************************************************
** This file was autogenerated from a template DO NOT EDIT!!!! **
** Changes should be made to the original source (.src) file **
*****************************************************************************
*/

"""
102
# Parse string for repeat loops
def parse_structure(astr, level):
    """Locate the repeat blocks of the given nesting `level` in `astr`.

    Each block is reported as a tuple
    ``(header_start, body_start, body_end, block_end, line)`` where the
    line number counts from the beginning of the string, starting at
    zero.  Returns an empty list if no loops are found.
    """
    if level == 0:
        open_tag = "/**begin repeat"
        close_tag = "/**end repeat**/"
    else:
        open_tag = "/**begin repeat%d" % level
        close_tag = "/**end repeat%d**/" % level

    spans = []
    pos = 0
    lineno = 0
    while True:
        beg = astr.find(open_tag, pos)
        if beg == -1:
            break
        # The loop header ends at the first newline after its closing '*/'.
        head_end = astr.find("\n", astr.find("*/", beg))
        body_end = astr.find(close_tag, head_end)
        block_end = astr.find("\n", body_end)
        lineno += astr.count("\n", pos, head_end + 1)
        spans.append((beg, head_end + 1, body_end, block_end + 1, lineno))
        lineno += astr.count("\n", head_end + 1, block_end)
        pos = block_end
    spans.sort()
    return spans
133
+
134
+
135
def paren_repl(obj):
    """Expand a '(body)*N' regex match into N comma-joined copies of body."""
    body = obj.group(1)
    count = int(obj.group(2))
    return ','.join([body] * count)

parenrep = re.compile(r"\(([^)]*)\)\*(\d+)")
plainrep = re.compile(r"([^*]+)\*(\d+)")
def parse_values(astr):
    """Expand repetition shorthands in `astr` and split on commas.

    '(a,b,c)*4' becomes four copies of 'a,b,c'; a bare 'xxx*3' becomes
    three copies of 'xxx'.  Empty parentheses generate empty values,
    i.e. '()*4' yields ',,,'.  The expanded string is split at ',' and
    the resulting list of values is returned.
    """
    expanded = parenrep.sub(paren_repl, astr)
    pieces = [plainrep.sub(paren_repl, piece.strip())
              for piece in expanded.split(',')]
    return ','.join(pieces).split(',')
152
+
153
+
154
# Continuation-line and header-field patterns used by parse_loop_header.
stripast = re.compile(r"\n\s*\*?")
named_re = re.compile(r"#\s*(\w*)\s*=([^#]*)#")
exclude_vars_re = re.compile(r"(\w*)=(\w*)")
exclude_re = re.compile(":exclude:")
def parse_loop_header(loophead) :
    """Find all named replacements in the header

    Returns a list of dictionaries, one for each loop iteration,
    where each key is a name to be substituted and the corresponding
    value is the replacement string.

    Also return a list of exclusions.  The exclusions are dictionaries
    of key value pairs. There can be more than one exclusion.
    [{'var1':'value1', 'var2', 'value2'[,...]}, ...]

    """
    # Strip out '\n' and leading '*', if any, in continuation lines.
    # This should not effect code previous to this change as
    # continuation lines were not allowed.
    loophead = stripast.sub("", loophead)
    # parse out the names and lists of values
    names = []
    reps = named_re.findall(loophead)
    nsub = None
    for rep in reps:
        name = rep[0]
        vals = parse_values(rep[1])
        size = len(vals)
        # every named variable must provide the same number of values
        if nsub is None :
            nsub = size
        elif nsub != size :
            msg = "Mismatch in number of values, %d != %d\n%s = %s"
            raise ValueError(msg % (nsub, size, name, vals))
        names.append((name, vals))


    # Find any exclude variables
    excludes = []

    for obj in exclude_re.finditer(loophead):
        span = obj.span()
        # find next newline
        endline = loophead.find('\n', span[1])
        substr = loophead[span[1]:endline]
        ex_names = exclude_vars_re.findall(substr)
        excludes.append(dict(ex_names))
    # NOTE(review): `excludes` is collected but never returned or consulted
    # here, despite the docstring; exclusion filtering presumably happens
    # in a caller -- confirm before relying on it.

    # generate list of dictionaries, one for each template iteration
    dlist = []
    if nsub is None :
        raise ValueError("No substitution variables found")
    for i in range(nsub):
        tmp = {name: vals[i] for name, vals in names}
        dlist.append(tmp)
    return dlist
209
+
210
# Matches '@name@' substitution markers in the template body.
replace_re = re.compile(r"@(\w+)@")
def parse_string(astr, env, level, line) :
    """Expand one nesting level of repeat blocks in `astr` using `env`.

    Returns the expanded text as a single string; a ``#line`` directive is
    emitted first so C compiler diagnostics point back at the template.
    """
    lineno = "#line %d\n" % line

    # local function for string replacement, uses env
    def replace(match):
        name = match.group(1)
        try :
            val = env[name]
        except KeyError:
            msg = 'line %d: no definition of key "%s"'%(line, name)
            raise ValueError(msg) from None
        return val

    code = [lineno]
    struct = parse_structure(astr, level)
    if struct :
        # recurse over inner loops
        oldend = 0
        newlevel = level + 1
        for sub in struct:
            pref = astr[oldend:sub[0]]
            head = astr[sub[0]:sub[1]]
            text = astr[sub[1]:sub[2]]
            oldend = sub[3]
            newline = line + sub[4]
            code.append(replace_re.sub(replace, pref))
            try :
                envlist = parse_loop_header(head)
            except ValueError as e:
                msg = "line %d: %s" % (newline, e)
                raise ValueError(msg)
            for newenv in envlist :
                newenv.update(env)
                # NOTE: parse_string returns a *string*; list.extend adds it
                # character by character.  Harmless (the final ''.join
                # flattens it) but explains why `code` may hold single chars.
                newcode = parse_string(text, newenv, newlevel, newline)
                code.extend(newcode)
        suff = astr[oldend:]
        code.append(replace_re.sub(replace, suff))
    else :
        # replace keys
        code.append(replace_re.sub(replace, astr))
        code.append('\n')
    return ''.join(code)
253
+
254
def process_str(astr):
    """Expand every repeat template in `astr` and prepend the
    autogenerated-file warning header.

    Raises ValueError when the template contains a malformed loop header
    or an undefined substitution key.
    """
    # The previous implementation did ``code = [header]`` followed by
    # ``code.extend(parse_string(...))`` -- extending a list with a string
    # appends it one *character* at a time before re-joining.  Plain
    # concatenation is byte-for-byte equivalent and avoids that overhead.
    return header + parse_string(astr, global_names, 0, 1)
258
+
259
+
260
# Matches a line-initial  #include "something.src"  directive
# (case-insensitive).
include_src_re = re.compile(r"(\n|\A)#include\s*['\"]"
                            r"(?P<name>[\w\d./\\]+[.]src)['\"]", re.I)

def resolve_includes(source):
    """Return the lines of `source` with ``#include "*.src"`` directives
    recursively replaced by the referenced file's resolved contents.

    Relative include paths are interpreted relative to `source`'s
    directory; directives whose target does not exist on disk are kept
    verbatim.
    """
    d = os.path.dirname(source)
    with open(source) as fid:
        lines = []
        for line in fid:
            m = include_src_re.match(line)
            if m:
                fn = m.group('name')
                if not os.path.isabs(fn):
                    fn = os.path.join(d, fn)
                if os.path.isfile(fn):
                    lines.extend(resolve_includes(fn))
                else:
                    lines.append(line)
            else:
                lines.append(line)
    return lines
280
+
281
def process_file(source):
    """Expand the template file `source` and return the generated text,
    prefixed with a ``#line`` directive naming the original file."""
    lines = resolve_includes(source)
    # Normalize the name and escape backslashes so it is safe inside a
    # C string / #line directive (matters on Windows paths).
    sourcefile = os.path.normcase(source).replace("\\", "\\\\")
    try:
        code = process_str(''.join(lines))
    except ValueError as e:
        raise ValueError('In "%s" loop at %s' % (sourcefile, e)) from None
    return '#line 1 "%s"\n%s' % (sourcefile, code)
289
+
290
+
291
def unique_key(adict):
    """
    Return a new key that does not collide with the keys of `adict`.

    The candidate is built by concatenating the first ``n`` characters of
    every existing key, with ``n`` growing until the candidate is unique.
    Not particularly quick, but the dictionaries involved are small.

    Bug fix: the original loop never terminated when growing ``n`` could
    no longer change the candidate -- e.g. a lone one-character key
    ``'x'`` yields the candidate ``'x'`` for every ``n``.  Once ``n``
    reaches the length of the longest key, we disambiguate by appending
    underscores instead.
    """
    allkeys = list(adict.keys())
    maxlen = max((len(k) for k in allkeys), default=0)
    n = 1
    while True:
        newkey = "".join([k[:n] for k in allkeys])
        if newkey not in allkeys:
            return newkey
        if n >= maxlen:
            # Longer prefixes cannot change newkey any more; fall back to
            # explicit disambiguation to guarantee termination.
            while newkey in allkeys:
                newkey += "_"
            return newkey
        n += 1
306
+
307
+
308
def main():
    """Command-line entry point: expand ``argv[1]`` (or stdin) and write
    the result to the same path minus its final extension (or stdout)."""
    try:
        file = sys.argv[1]
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
    else:
        fid = open(file, 'r')
        (base, ext) = os.path.splitext(file)
        # e.g. foo.c.src -> foo.c
        newname = base
        outfile = open(newname, 'w')

    allstr = fid.read()
    try:
        writestr = process_str(allstr)
    except ValueError as e:
        # NOTE(review): `file` is unbound here when reading from stdin and
        # process_str raises -- would surface as NameError; confirm intent.
        raise ValueError("In %s loop at %s" % (file, e)) from None

    # NOTE(review): fid/outfile are never closed explicitly; acceptable
    # for a short-lived script, worth fixing if reused as a library.
    outfile.write(writestr)

if __name__ == "__main__":
    main()
llmeval-env/lib/python3.10/site-packages/numpy/distutils/cpuinfo.py ADDED
@@ -0,0 +1,683 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ cpuinfo
4
+
5
+ Copyright 2002 Pearu Peterson all rights reserved,
6
+ Pearu Peterson <[email protected]>
7
+ Permission to use, modify, and distribute this software is given under the
8
+ terms of the NumPy (BSD style) license. See LICENSE.txt that came with
9
+ this distribution for specifics.
10
+
11
+ NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
12
+ Pearu Peterson
13
+
14
+ """
15
+ __all__ = ['cpu']
16
+
17
+ import os
18
+ import platform
19
+ import re
20
+ import sys
21
+ import types
22
+ import warnings
23
+
24
+ from subprocess import getstatusoutput
25
+
26
+
27
def getoutput(cmd, successful_status=(0,), stacklevel=1):
    """Run `cmd` through the shell and return ``(success, output)``.

    `success` is True only when the process exited normally with a status
    listed in `successful_status`.  An OSError while spawning the command
    is reported as a UserWarning and treated as failure with empty output.
    """
    try:
        status, output = getstatusoutput(cmd)
    except OSError as e:
        warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
        return False, ""
    succeeded = (os.WIFEXITED(status)
                 and os.WEXITSTATUS(status) in successful_status)
    return succeeded, output
36
+
37
def command_info(successful_status=(0,), stacklevel=1, **kw):
    """Run each keyword's shell command; return a dict mapping the keyword
    to the stripped output of every command that succeeded."""
    results = {}
    for name, cmd in kw.items():
        ok, output = getoutput(cmd, successful_status=successful_status,
                               stacklevel=stacklevel + 1)
        if ok:
            results[name] = output.strip()
    return results
45
+
46
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
    """Yield the stripped output lines of `cmd`; yield nothing on failure."""
    ok, output = getoutput(cmd, successful_status=successful_status,
                           stacklevel=stacklevel + 1)
    if not ok:
        return
    yield from (line.strip() for line in output.splitlines())
53
+
54
def key_value_from_command(cmd, sep, successful_status=(0,),
                           stacklevel=1):
    """Parse `cmd`'s output lines of the form ``key<sep>value`` into a
    dict; lines without the separator are ignored."""
    parsed = {}
    for line in command_by_line(cmd, successful_status=successful_status,
                                stacklevel=stacklevel + 1):
        fields = [field.strip() for field in line.split(sep, 1)]
        if len(fields) == 2:
            parsed[fields[0]] = fields[1]
    return parsed
63
+
64
class CPUInfoBase:
    """Holds CPU information and provides methods for requiring
    the availability of various CPU features.
    """

    def _try_call(self, func):
        # Call func(), swallowing any exception; returns None on failure.
        try:
            return func()
        except Exception:
            pass

    def __getattr__(self, name):
        # Expose every private probe method ``_<name>`` as a public
        # zero-argument callable ``<name>`` that never raises (failures
        # are converted to a None return by _try_call).
        if not name.startswith('_'):
            if hasattr(self, '_'+name):
                attr = getattr(self, '_'+name)
                if isinstance(attr, types.MethodType):
                    # Bind via default arguments so the lambda captures the
                    # current objects rather than late-bound names.
                    return lambda func=self._try_call,attr=attr : func(attr)
                else:
                    return lambda : None
        raise AttributeError(name)

    def _getNCPUs(self):
        # Conservative default; platform subclasses override this.
        return 1

    def __get_nbits(self):
        # Pointer width of the running Python interpreter ('32' or '64'),
        # not necessarily of the hardware itself.
        abits = platform.architecture()[0]
        nbits = re.compile(r'(\d+)bit').search(abits).group(1)
        return nbits

    def _is_32bit(self):
        return self.__get_nbits() == '32'

    def _is_64bit(self):
        return self.__get_nbits() == '64'
98
+
99
class LinuxCPUInfo(CPUInfoBase):
    """CPU information backed by /proc/cpuinfo and ``uname -m``."""

    # Parsed /proc/cpuinfo: a list with one dict per processor entry
    # (info[0] additionally holds 'uname_m').  Stored on the class so the
    # file is parsed at most once per process.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        info = [ {} ]
        ok, output = getoutput('uname -m')
        if ok:
            info[0]['uname_m'] = output.strip()
        try:
            fo = open('/proc/cpuinfo')
        except OSError as e:
            warnings.warn(str(e), UserWarning, stacklevel=2)
        else:
            for line in fo:
                name_value = [s.strip() for s in line.split(':', 1)]
                if len(name_value) != 2:
                    continue
                name, value = name_value
                # A repeated field name marks the start of the next
                # processor's stanza.
                if not info or name in info[-1]: # next processor
                    info.append({})
                info[-1][name] = value
            fo.close()
        self.__class__.info = info

    def _not_impl(self): pass

    # Athlon

    def _is_AMD(self):
        return self.info[0]['vendor_id']=='AuthenticAMD'

    def _is_AthlonK6_2(self):
        return self._is_AMD() and self.info[0]['model'] == '2'

    def _is_AthlonK6_3(self):
        return self._is_AMD() and self.info[0]['model'] == '3'

    def _is_AthlonK6(self):
        return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None

    def _is_AthlonK7(self):
        return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None

    def _is_AthlonMP(self):
        return re.match(r'.*?Athlon\(tm\) MP\b',
                        self.info[0]['model name']) is not None

    def _is_AMD64(self):
        # Uses the public is_AMD wrapper synthesized by
        # CPUInfoBase.__getattr__, so a missing 'vendor_id' yields None
        # instead of raising.
        return self.is_AMD() and self.info[0]['family'] == '15'

    def _is_Athlon64(self):
        return re.match(r'.*?Athlon\(tm\) 64\b',
                        self.info[0]['model name']) is not None

    def _is_AthlonHX(self):
        return re.match(r'.*?Athlon HX\b',
                        self.info[0]['model name']) is not None

    def _is_Opteron(self):
        return re.match(r'.*?Opteron\b',
                        self.info[0]['model name']) is not None

    def _is_Hammer(self):
        return re.match(r'.*?Hammer\b',
                        self.info[0]['model name']) is not None

    # Alpha

    def _is_Alpha(self):
        return self.info[0]['cpu']=='Alpha'

    def _is_EV4(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'

    def _is_EV5(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'

    def _is_EV56(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'

    def _is_PCA56(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'

    # Intel

    #XXX
    _is_i386 = _not_impl

    def _is_Intel(self):
        return self.info[0]['vendor_id']=='GenuineIntel'

    def _is_i486(self):
        return self.info[0]['cpu']=='i486'

    def _is_i586(self):
        return self.is_Intel() and self.info[0]['cpu family'] == '5'

    def _is_i686(self):
        return self.is_Intel() and self.info[0]['cpu family'] == '6'

    def _is_Celeron(self):
        return re.match(r'.*?Celeron',
                        self.info[0]['model name']) is not None

    def _is_Pentium(self):
        return re.match(r'.*?Pentium',
                        self.info[0]['model name']) is not None

    def _is_PentiumII(self):
        return re.match(r'.*?Pentium.*?II\b',
                        self.info[0]['model name']) is not None

    def _is_PentiumPro(self):
        return re.match(r'.*?PentiumPro\b',
                        self.info[0]['model name']) is not None

    def _is_PentiumMMX(self):
        return re.match(r'.*?Pentium.*?MMX\b',
                        self.info[0]['model name']) is not None

    def _is_PentiumIII(self):
        return re.match(r'.*?Pentium.*?III\b',
                        self.info[0]['model name']) is not None

    def _is_PentiumIV(self):
        return re.match(r'.*?Pentium.*?(IV|4)\b',
                        self.info[0]['model name']) is not None

    def _is_PentiumM(self):
        return re.match(r'.*?Pentium.*?M\b',
                        self.info[0]['model name']) is not None

    def _is_Prescott(self):
        return self.is_PentiumIV() and self.has_sse3()

    def _is_Nocona(self):
        return (self.is_Intel()
                and (self.info[0]['cpu family'] == '6'
                     or self.info[0]['cpu family'] == '15')
                and (self.has_sse3() and not self.has_ssse3())
                and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None)

    def _is_Core2(self):
        return (self.is_64bit() and self.is_Intel() and
                re.match(r'.*?Core\(TM\)2\b',
                         self.info[0]['model name']) is not None)

    def _is_Itanium(self):
        return re.match(r'.*?Itanium\b',
                        self.info[0]['family']) is not None

    def _is_XEON(self):
        return re.match(r'.*?XEON\b',
                        self.info[0]['model name'], re.IGNORECASE) is not None

    _is_Xeon = _is_XEON

    # Varia

    def _is_singleCPU(self):
        return len(self.info) == 1

    def _getNCPUs(self):
        return len(self.info)

    def _has_fdiv_bug(self):
        return self.info[0]['fdiv_bug']=='yes'

    def _has_f00f_bug(self):
        return self.info[0]['f00f_bug']=='yes'

    def _has_mmx(self):
        return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None

    def _has_sse(self):
        return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None

    def _has_sse2(self):
        return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None

    def _has_sse3(self):
        # The kernel reports SSE3 as the 'pni' (Prescott New Instructions)
        # flag in /proc/cpuinfo.
        return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None

    def _has_ssse3(self):
        return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None

    def _has_3dnow(self):
        return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None

    def _has_3dnowext(self):
        return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
293
+
294
class IRIXCPUInfo(CPUInfoBase):
    """CPU information for IRIX, backed by the ``sysconf`` command."""

    # Key/value output of `sysconf`, cached on the class.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        info = key_value_from_command('sysconf', sep=' ',
                                      successful_status=(0, 1))
        self.__class__.info = info

    def _not_impl(self): pass

    def _is_singleCPU(self):
        return self.info.get('NUM_PROCESSORS') == '1'

    def _getNCPUs(self):
        return int(self.info.get('NUM_PROCESSORS', 1))

    def __cputype(self, n):
        # True when the first PROCESSORS token equals e.g. 'r4000'.
        return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
    def _is_r2000(self): return self.__cputype(2000)
    def _is_r3000(self): return self.__cputype(3000)
    def _is_r3900(self): return self.__cputype(3900)
    def _is_r4000(self): return self.__cputype(4000)
    def _is_r4100(self): return self.__cputype(4100)
    def _is_r4300(self): return self.__cputype(4300)
    def _is_r4400(self): return self.__cputype(4400)
    def _is_r4600(self): return self.__cputype(4600)
    def _is_r4650(self): return self.__cputype(4650)
    def _is_r5000(self): return self.__cputype(5000)
    def _is_r6000(self): return self.__cputype(6000)
    def _is_r8000(self): return self.__cputype(8000)
    def _is_r10000(self): return self.__cputype(10000)
    def _is_r12000(self): return self.__cputype(12000)
    def _is_rorion(self): return self.__cputype('orion')

    def get_ip(self):
        # MACHINE is the SGI 'IP' board designation (e.g. 'IP27').
        try: return self.info.get('MACHINE')
        except Exception: pass
    def __machine(self, n):
        return self.info.get('MACHINE').lower() == 'ip%s' % (n)
    def _is_IP19(self): return self.__machine(19)
    def _is_IP20(self): return self.__machine(20)
    def _is_IP21(self): return self.__machine(21)
    def _is_IP22(self): return self.__machine(22)
    def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
    def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
    def _is_IP24(self): return self.__machine(24)
    def _is_IP25(self): return self.__machine(25)
    def _is_IP26(self): return self.__machine(26)
    def _is_IP27(self): return self.__machine(27)
    def _is_IP28(self): return self.__machine(28)
    def _is_IP30(self): return self.__machine(30)
    def _is_IP32(self): return self.__machine(32)
    def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
    def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
350
+
351
+
352
class DarwinCPUInfo(CPUInfoBase):
    """CPU information for macOS, backed by ``arch``, ``machine`` and
    ``sysctl hw``."""

    # Command outputs plus the parsed 'sysctl hw' table, cached on the class.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        info = command_info(arch='arch',
                            machine='machine')
        info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
        self.__class__.info = info

    def _not_impl(self): pass

    def _getNCPUs(self):
        return int(self.info['sysctl_hw'].get('hw.ncpu', 1))

    def _is_Power_Macintosh(self):
        return self.info['sysctl_hw']['hw.machine']=='Power Macintosh'

    def _is_i386(self):
        return self.info['arch']=='i386'
    def _is_ppc(self):
        return self.info['arch']=='ppc'

    def __machine(self, n):
        # True when `machine` reported e.g. 'ppc7450'.
        return self.info['machine'] == 'ppc%s'%n
    def _is_ppc601(self): return self.__machine(601)
    def _is_ppc602(self): return self.__machine(602)
    def _is_ppc603(self): return self.__machine(603)
    def _is_ppc603e(self): return self.__machine('603e')
    def _is_ppc604(self): return self.__machine(604)
    def _is_ppc604e(self): return self.__machine('604e')
    def _is_ppc620(self): return self.__machine(620)
    def _is_ppc630(self): return self.__machine(630)
    def _is_ppc740(self): return self.__machine(740)
    def _is_ppc7400(self): return self.__machine(7400)
    def _is_ppc7450(self): return self.__machine(7450)
    def _is_ppc750(self): return self.__machine(750)
    def _is_ppc403(self): return self.__machine(403)
    def _is_ppc505(self): return self.__machine(505)
    def _is_ppc801(self): return self.__machine(801)
    def _is_ppc821(self): return self.__machine(821)
    def _is_ppc823(self): return self.__machine(823)
    def _is_ppc860(self): return self.__machine(860)
396
+
397
+
398
+ class SunOSCPUInfo(CPUInfoBase):
399
+
400
+ info = None
401
+
402
+ def __init__(self):
403
+ if self.info is not None:
404
+ return
405
+ info = command_info(arch='arch',
406
+ mach='mach',
407
+ uname_i='uname_i',
408
+ isainfo_b='isainfo -b',
409
+ isainfo_n='isainfo -n',
410
+ )
411
+ info['uname_X'] = key_value_from_command('uname -X', sep='=')
412
+ for line in command_by_line('psrinfo -v 0'):
413
+ m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
414
+ if m:
415
+ info['processor'] = m.group('p')
416
+ break
417
+ self.__class__.info = info
418
+
419
+ def _not_impl(self): pass
420
+
421
+ def _is_i386(self):
422
+ return self.info['isainfo_n']=='i386'
423
+ def _is_sparc(self):
424
+ return self.info['isainfo_n']=='sparc'
425
+ def _is_sparcv9(self):
426
+ return self.info['isainfo_n']=='sparcv9'
427
+
428
+ def _getNCPUs(self):
429
+ return int(self.info['uname_X'].get('NumCPU', 1))
430
+
431
+ def _is_sun4(self):
432
+ return self.info['arch']=='sun4'
433
+
434
+ def _is_SUNW(self):
435
+ return re.match(r'SUNW', self.info['uname_i']) is not None
436
+ def _is_sparcstation5(self):
437
+ return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
438
+ def _is_ultra1(self):
439
+ return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
440
+ def _is_ultra250(self):
441
+ return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
442
+ def _is_ultra2(self):
443
+ return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
444
+ def _is_ultra30(self):
445
+ return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
446
+ def _is_ultra4(self):
447
+ return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
448
+ def _is_ultra5_10(self):
449
+ return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
450
+ def _is_ultra5(self):
451
+ return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
452
+ def _is_ultra60(self):
453
+ return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
454
+ def _is_ultra80(self):
455
+ return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
456
+ def _is_ultraenterprice(self):
457
+ return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
458
+ def _is_ultraenterprice10k(self):
459
+ return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
460
+ def _is_sunfire(self):
461
+ return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
462
+ def _is_ultra(self):
463
+ return re.match(r'.*Ultra', self.info['uname_i']) is not None
464
+
465
+ def _is_cpusparcv7(self):
466
+ return self.info['processor']=='sparcv7'
467
+ def _is_cpusparcv8(self):
468
+ return self.info['processor']=='sparcv8'
469
+ def _is_cpusparcv9(self):
470
+ return self.info['processor']=='sparcv9'
471
+
472
+ class Win32CPUInfo(CPUInfoBase):
473
+
474
+ info = None
475
+ pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
476
+ # XXX: what does the value of
477
+ # HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
478
+ # mean?
479
+
480
+ def __init__(self):
481
+ if self.info is not None:
482
+ return
483
+ info = []
484
+ try:
485
+ #XXX: Bad style to use so long `try:...except:...`. Fix it!
486
+ import winreg
487
+
488
+ prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
489
+ r"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
490
+ chnd=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey)
491
+ pnum=0
492
+ while True:
493
+ try:
494
+ proc=winreg.EnumKey(chnd, pnum)
495
+ except winreg.error:
496
+ break
497
+ else:
498
+ pnum+=1
499
+ info.append({"Processor":proc})
500
+ phnd=winreg.OpenKey(chnd, proc)
501
+ pidx=0
502
+ while True:
503
+ try:
504
+ name, value, vtpe=winreg.EnumValue(phnd, pidx)
505
+ except winreg.error:
506
+ break
507
+ else:
508
+ pidx=pidx+1
509
+ info[-1][name]=value
510
+ if name=="Identifier":
511
+ srch=prgx.search(value)
512
+ if srch:
513
+ info[-1]["Family"]=int(srch.group("FML"))
514
+ info[-1]["Model"]=int(srch.group("MDL"))
515
+ info[-1]["Stepping"]=int(srch.group("STP"))
516
+ except Exception as e:
517
+ print(e, '(ignoring)')
518
+ self.__class__.info = info
519
+
520
+ def _not_impl(self): pass
521
+
522
+ # Athlon
523
+
524
+ def _is_AMD(self):
525
+ return self.info[0]['VendorIdentifier']=='AuthenticAMD'
526
+
527
+ def _is_Am486(self):
528
+ return self.is_AMD() and self.info[0]['Family']==4
529
+
530
+ def _is_Am5x86(self):
531
+ return self.is_AMD() and self.info[0]['Family']==4
532
+
533
+ def _is_AMDK5(self):
534
+ return self.is_AMD() and self.info[0]['Family']==5 \
535
+ and self.info[0]['Model'] in [0, 1, 2, 3]
536
+
537
+ def _is_AMDK6(self):
538
+ return self.is_AMD() and self.info[0]['Family']==5 \
539
+ and self.info[0]['Model'] in [6, 7]
540
+
541
+ def _is_AMDK6_2(self):
542
+ return self.is_AMD() and self.info[0]['Family']==5 \
543
+ and self.info[0]['Model']==8
544
+
545
+ def _is_AMDK6_3(self):
546
+ return self.is_AMD() and self.info[0]['Family']==5 \
547
+ and self.info[0]['Model']==9
548
+
549
+ def _is_AMDK7(self):
550
+ return self.is_AMD() and self.info[0]['Family'] == 6
551
+
552
+ # To reliably distinguish between the different types of AMD64 chips
553
+ # (Athlon64, Operton, Athlon64 X2, Semperon, Turion 64, etc.) would
554
+ # require looking at the 'brand' from cpuid
555
+
556
+ def _is_AMD64(self):
557
+ return self.is_AMD() and self.info[0]['Family'] == 15
558
+
559
+ # Intel
560
+
561
+ def _is_Intel(self):
562
+ return self.info[0]['VendorIdentifier']=='GenuineIntel'
563
+
564
+ def _is_i386(self):
565
+ return self.info[0]['Family']==3
566
+
567
+ def _is_i486(self):
568
+ return self.info[0]['Family']==4
569
+
570
+ def _is_i586(self):
571
+ return self.is_Intel() and self.info[0]['Family']==5
572
+
573
+ def _is_i686(self):
574
+ return self.is_Intel() and self.info[0]['Family']==6
575
+
576
+ def _is_Pentium(self):
577
+ return self.is_Intel() and self.info[0]['Family']==5
578
+
579
+ def _is_PentiumMMX(self):
580
+ return self.is_Intel() and self.info[0]['Family']==5 \
581
+ and self.info[0]['Model']==4
582
+
583
+ def _is_PentiumPro(self):
584
+ return self.is_Intel() and self.info[0]['Family']==6 \
585
+ and self.info[0]['Model']==1
586
+
587
+ def _is_PentiumII(self):
588
+ return self.is_Intel() and self.info[0]['Family']==6 \
589
+ and self.info[0]['Model'] in [3, 5, 6]
590
+
591
+ def _is_PentiumIII(self):
592
+ return self.is_Intel() and self.info[0]['Family']==6 \
593
+ and self.info[0]['Model'] in [7, 8, 9, 10, 11]
594
+
595
+ def _is_PentiumIV(self):
596
+ return self.is_Intel() and self.info[0]['Family']==15
597
+
598
+ def _is_PentiumM(self):
599
+ return self.is_Intel() and self.info[0]['Family'] == 6 \
600
+ and self.info[0]['Model'] in [9, 13, 14]
601
+
602
+ def _is_Core2(self):
603
+ return self.is_Intel() and self.info[0]['Family'] == 6 \
604
+ and self.info[0]['Model'] in [15, 16, 17]
605
+
606
+ # Varia
607
+
608
+ def _is_singleCPU(self):
609
+ return len(self.info) == 1
610
+
611
+ def _getNCPUs(self):
612
+ return len(self.info)
613
+
614
+ def _has_mmx(self):
615
+ if self.is_Intel():
616
+ return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
617
+ or (self.info[0]['Family'] in [6, 15])
618
+ elif self.is_AMD():
619
+ return self.info[0]['Family'] in [5, 6, 15]
620
+ else:
621
+ return False
622
+
623
+ def _has_sse(self):
624
+ if self.is_Intel():
625
+ return ((self.info[0]['Family']==6 and
626
+ self.info[0]['Model'] in [7, 8, 9, 10, 11])
627
+ or self.info[0]['Family']==15)
628
+ elif self.is_AMD():
629
+ return ((self.info[0]['Family']==6 and
630
+ self.info[0]['Model'] in [6, 7, 8, 10])
631
+ or self.info[0]['Family']==15)
632
+ else:
633
+ return False
634
+
635
+ def _has_sse2(self):
636
+ if self.is_Intel():
637
+ return self.is_Pentium4() or self.is_PentiumM() \
638
+ or self.is_Core2()
639
+ elif self.is_AMD():
640
+ return self.is_AMD64()
641
+ else:
642
+ return False
643
+
644
+ def _has_3dnow(self):
645
+ return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]
646
+
647
+ def _has_3dnowext(self):
648
+ return self.is_AMD() and self.info[0]['Family'] in [6, 15]
649
+
650
+ if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
651
+ cpuinfo = LinuxCPUInfo
652
+ elif sys.platform.startswith('irix'):
653
+ cpuinfo = IRIXCPUInfo
654
+ elif sys.platform == 'darwin':
655
+ cpuinfo = DarwinCPUInfo
656
+ elif sys.platform.startswith('sunos'):
657
+ cpuinfo = SunOSCPUInfo
658
+ elif sys.platform.startswith('win32'):
659
+ cpuinfo = Win32CPUInfo
660
+ elif sys.platform.startswith('cygwin'):
661
+ cpuinfo = LinuxCPUInfo
662
+ #XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
663
+ else:
664
+ cpuinfo = CPUInfoBase
665
+
666
+ cpu = cpuinfo()
667
+
668
+ #if __name__ == "__main__":
669
+ #
670
+ # cpu.is_blaa()
671
+ # cpu.is_Intel()
672
+ # cpu.is_Alpha()
673
+ #
674
+ # print('CPU information:'),
675
+ # for name in dir(cpuinfo):
676
+ # if name[0]=='_' and name[1]!='_':
677
+ # r = getattr(cpu,name[1:])()
678
+ # if r:
679
+ # if r!=1:
680
+ # print('%s=%s' %(name[1:],r))
681
+ # else:
682
+ # print(name[1:]),
683
+ # print()
llmeval-env/lib/python3.10/site-packages/numpy/distutils/extension.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.extension
2
+
3
+ Provides the Extension class, used to describe C/C++ extension
4
+ modules in setup scripts.
5
+
6
+ Overridden to support f2py.
7
+
8
+ """
9
+ import re
10
+ from distutils.extension import Extension as old_Extension
11
+
12
+
13
+ cxx_ext_re = re.compile(r'.*\.(cpp|cxx|cc)\Z', re.I).match
14
+ fortran_pyf_ext_re = re.compile(r'.*\.(f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match
15
+
16
+
17
+ class Extension(old_Extension):
18
+ """
19
+ Parameters
20
+ ----------
21
+ name : str
22
+ Extension name.
23
+ sources : list of str
24
+ List of source file locations relative to the top directory of
25
+ the package.
26
+ extra_compile_args : list of str
27
+ Extra command line arguments to pass to the compiler.
28
+ extra_f77_compile_args : list of str
29
+ Extra command line arguments to pass to the fortran77 compiler.
30
+ extra_f90_compile_args : list of str
31
+ Extra command line arguments to pass to the fortran90 compiler.
32
+ """
33
+ def __init__(
34
+ self, name, sources,
35
+ include_dirs=None,
36
+ define_macros=None,
37
+ undef_macros=None,
38
+ library_dirs=None,
39
+ libraries=None,
40
+ runtime_library_dirs=None,
41
+ extra_objects=None,
42
+ extra_compile_args=None,
43
+ extra_link_args=None,
44
+ export_symbols=None,
45
+ swig_opts=None,
46
+ depends=None,
47
+ language=None,
48
+ f2py_options=None,
49
+ module_dirs=None,
50
+ extra_c_compile_args=None,
51
+ extra_cxx_compile_args=None,
52
+ extra_f77_compile_args=None,
53
+ extra_f90_compile_args=None,):
54
+
55
+ old_Extension.__init__(
56
+ self, name, [],
57
+ include_dirs=include_dirs,
58
+ define_macros=define_macros,
59
+ undef_macros=undef_macros,
60
+ library_dirs=library_dirs,
61
+ libraries=libraries,
62
+ runtime_library_dirs=runtime_library_dirs,
63
+ extra_objects=extra_objects,
64
+ extra_compile_args=extra_compile_args,
65
+ extra_link_args=extra_link_args,
66
+ export_symbols=export_symbols)
67
+
68
+ # Avoid assert statements checking that sources contains strings:
69
+ self.sources = sources
70
+
71
+ # Python 2.4 distutils new features
72
+ self.swig_opts = swig_opts or []
73
+ # swig_opts is assumed to be a list. Here we handle the case where it
74
+ # is specified as a string instead.
75
+ if isinstance(self.swig_opts, str):
76
+ import warnings
77
+ msg = "swig_opts is specified as a string instead of a list"
78
+ warnings.warn(msg, SyntaxWarning, stacklevel=2)
79
+ self.swig_opts = self.swig_opts.split()
80
+
81
+ # Python 2.3 distutils new features
82
+ self.depends = depends or []
83
+ self.language = language
84
+
85
+ # numpy_distutils features
86
+ self.f2py_options = f2py_options or []
87
+ self.module_dirs = module_dirs or []
88
+ self.extra_c_compile_args = extra_c_compile_args or []
89
+ self.extra_cxx_compile_args = extra_cxx_compile_args or []
90
+ self.extra_f77_compile_args = extra_f77_compile_args or []
91
+ self.extra_f90_compile_args = extra_f90_compile_args or []
92
+
93
+ return
94
+
95
+ def has_cxx_sources(self):
96
+ for source in self.sources:
97
+ if cxx_ext_re(str(source)):
98
+ return True
99
+ return False
100
+
101
+ def has_f2py_sources(self):
102
+ for source in self.sources:
103
+ if fortran_pyf_ext_re(source):
104
+ return True
105
+ return False
106
+
107
+ # class Extension
llmeval-env/lib/python3.10/site-packages/numpy/distutils/from_template.py ADDED
@@ -0,0 +1,261 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+
4
+ process_file(filename)
5
+
6
+ takes templated file .xxx.src and produces .xxx file where .xxx
7
+ is .pyf .f90 or .f using the following template rules:
8
+
9
+ '<..>' denotes a template.
10
+
11
+ All function and subroutine blocks in a source file with names that
12
+ contain '<..>' will be replicated according to the rules in '<..>'.
13
+
14
+ The number of comma-separated words in '<..>' will determine the number of
15
+ replicates.
16
+
17
+ '<..>' may have two different forms, named and short. For example,
18
+
19
+ named:
20
+ <p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
21
+ 'd', 's', 'z', and 'c' for each replicate of the block.
22
+
23
+ <_c> is already defined: <_c=s,d,c,z>
24
+ <_t> is already defined: <_t=real,double precision,complex,double complex>
25
+
26
+ short:
27
+ <s,d,c,z>, a short form of the named, useful when no <p> appears inside
28
+ a block.
29
+
30
+ In general, '<..>' contains a comma separated list of arbitrary
31
+ expressions. If these expression must contain a comma|leftarrow|rightarrow,
32
+ then prepend the comma|leftarrow|rightarrow with a backslash.
33
+
34
+ If an expression matches '\\<index>' then it will be replaced
35
+ by <index>-th expression.
36
+
37
+ Note that all '<..>' forms in a block must have the same number of
38
+ comma-separated entries.
39
+
40
+ Predefined named template rules:
41
+ <prefix=s,d,c,z>
42
+ <ftype=real,double precision,complex,double complex>
43
+ <ftypereal=real,double precision,\\0,\\1>
44
+ <ctype=float,double,complex_float,complex_double>
45
+ <ctypereal=float,double,\\0,\\1>
46
+
47
+ """
48
+ __all__ = ['process_str', 'process_file']
49
+
50
+ import os
51
+ import sys
52
+ import re
53
+
54
+ routine_start_re = re.compile(r'(\n|\A)(( (\$|\*))|)\s*(subroutine|function)\b', re.I)
55
+ routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)', re.I)
56
+ function_start_re = re.compile(r'\n (\$|\*)\s*function\b', re.I)
57
+
58
+ def parse_structure(astr):
59
+ """ Return a list of tuples for each function or subroutine each
60
+ tuple is the start and end of a subroutine or function to be
61
+ expanded.
62
+ """
63
+
64
+ spanlist = []
65
+ ind = 0
66
+ while True:
67
+ m = routine_start_re.search(astr, ind)
68
+ if m is None:
69
+ break
70
+ start = m.start()
71
+ if function_start_re.match(astr, start, m.end()):
72
+ while True:
73
+ i = astr.rfind('\n', ind, start)
74
+ if i==-1:
75
+ break
76
+ start = i
77
+ if astr[i:i+7]!='\n $':
78
+ break
79
+ start += 1
80
+ m = routine_end_re.search(astr, m.end())
81
+ ind = end = m and m.end()-1 or len(astr)
82
+ spanlist.append((start, end))
83
+ return spanlist
84
+
85
+ template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
86
+ named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
87
+ list_re = re.compile(r"<\s*((.*?))\s*>")
88
+
89
+ def find_repl_patterns(astr):
90
+ reps = named_re.findall(astr)
91
+ names = {}
92
+ for rep in reps:
93
+ name = rep[0].strip() or unique_key(names)
94
+ repl = rep[1].replace(r'\,', '@comma@')
95
+ thelist = conv(repl)
96
+ names[name] = thelist
97
+ return names
98
+
99
+ def find_and_remove_repl_patterns(astr):
100
+ names = find_repl_patterns(astr)
101
+ astr = re.subn(named_re, '', astr)[0]
102
+ return astr, names
103
+
104
+ item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
105
+ def conv(astr):
106
+ b = astr.split(',')
107
+ l = [x.strip() for x in b]
108
+ for i in range(len(l)):
109
+ m = item_re.match(l[i])
110
+ if m:
111
+ j = int(m.group('index'))
112
+ l[i] = l[j]
113
+ return ','.join(l)
114
+
115
+ def unique_key(adict):
116
+ """ Obtain a unique key given a dictionary."""
117
+ allkeys = list(adict.keys())
118
+ done = False
119
+ n = 1
120
+ while not done:
121
+ newkey = '__l%s' % (n)
122
+ if newkey in allkeys:
123
+ n += 1
124
+ else:
125
+ done = True
126
+ return newkey
127
+
128
+
129
+ template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
130
+ def expand_sub(substr, names):
131
+ substr = substr.replace(r'\>', '@rightarrow@')
132
+ substr = substr.replace(r'\<', '@leftarrow@')
133
+ lnames = find_repl_patterns(substr)
134
+ substr = named_re.sub(r"<\1>", substr) # get rid of definition templates
135
+
136
+ def listrepl(mobj):
137
+ thelist = conv(mobj.group(1).replace(r'\,', '@comma@'))
138
+ if template_name_re.match(thelist):
139
+ return "<%s>" % (thelist)
140
+ name = None
141
+ for key in lnames.keys(): # see if list is already in dictionary
142
+ if lnames[key] == thelist:
143
+ name = key
144
+ if name is None: # this list is not in the dictionary yet
145
+ name = unique_key(lnames)
146
+ lnames[name] = thelist
147
+ return "<%s>" % name
148
+
149
+ substr = list_re.sub(listrepl, substr) # convert all lists to named templates
150
+ # newnames are constructed as needed
151
+
152
+ numsubs = None
153
+ base_rule = None
154
+ rules = {}
155
+ for r in template_re.findall(substr):
156
+ if r not in rules:
157
+ thelist = lnames.get(r, names.get(r, None))
158
+ if thelist is None:
159
+ raise ValueError('No replicates found for <%s>' % (r))
160
+ if r not in names and not thelist.startswith('_'):
161
+ names[r] = thelist
162
+ rule = [i.replace('@comma@', ',') for i in thelist.split(',')]
163
+ num = len(rule)
164
+
165
+ if numsubs is None:
166
+ numsubs = num
167
+ rules[r] = rule
168
+ base_rule = r
169
+ elif num == numsubs:
170
+ rules[r] = rule
171
+ else:
172
+ print("Mismatch in number of replacements (base <%s=%s>)"
173
+ " for <%s=%s>. Ignoring." %
174
+ (base_rule, ','.join(rules[base_rule]), r, thelist))
175
+ if not rules:
176
+ return substr
177
+
178
+ def namerepl(mobj):
179
+ name = mobj.group(1)
180
+ return rules.get(name, (k+1)*[name])[k]
181
+
182
+ newstr = ''
183
+ for k in range(numsubs):
184
+ newstr += template_re.sub(namerepl, substr) + '\n\n'
185
+
186
+ newstr = newstr.replace('@rightarrow@', '>')
187
+ newstr = newstr.replace('@leftarrow@', '<')
188
+ return newstr
189
+
190
+ def process_str(allstr):
191
+ newstr = allstr
192
+ writestr = ''
193
+
194
+ struct = parse_structure(newstr)
195
+
196
+ oldend = 0
197
+ names = {}
198
+ names.update(_special_names)
199
+ for sub in struct:
200
+ cleanedstr, defs = find_and_remove_repl_patterns(newstr[oldend:sub[0]])
201
+ writestr += cleanedstr
202
+ names.update(defs)
203
+ writestr += expand_sub(newstr[sub[0]:sub[1]], names)
204
+ oldend = sub[1]
205
+ writestr += newstr[oldend:]
206
+
207
+ return writestr
208
+
209
+ include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+\.src)['\"]", re.I)
210
+
211
+ def resolve_includes(source):
212
+ d = os.path.dirname(source)
213
+ with open(source) as fid:
214
+ lines = []
215
+ for line in fid:
216
+ m = include_src_re.match(line)
217
+ if m:
218
+ fn = m.group('name')
219
+ if not os.path.isabs(fn):
220
+ fn = os.path.join(d, fn)
221
+ if os.path.isfile(fn):
222
+ lines.extend(resolve_includes(fn))
223
+ else:
224
+ lines.append(line)
225
+ else:
226
+ lines.append(line)
227
+ return lines
228
+
229
+ def process_file(source):
230
+ lines = resolve_includes(source)
231
+ return process_str(''.join(lines))
232
+
233
+ _special_names = find_repl_patterns('''
234
+ <_c=s,d,c,z>
235
+ <_t=real,double precision,complex,double complex>
236
+ <prefix=s,d,c,z>
237
+ <ftype=real,double precision,complex,double complex>
238
+ <ctype=float,double,complex_float,complex_double>
239
+ <ftypereal=real,double precision,\\0,\\1>
240
+ <ctypereal=float,double,\\0,\\1>
241
+ ''')
242
+
243
+ def main():
244
+ try:
245
+ file = sys.argv[1]
246
+ except IndexError:
247
+ fid = sys.stdin
248
+ outfile = sys.stdout
249
+ else:
250
+ fid = open(file, 'r')
251
+ (base, ext) = os.path.splitext(file)
252
+ newname = base
253
+ outfile = open(newname, 'w')
254
+
255
+ allstr = fid.read()
256
+ writestr = process_str(allstr)
257
+ outfile.write(writestr)
258
+
259
+
260
+ if __name__ == "__main__":
261
+ main()
llmeval-env/lib/python3.10/site-packages/numpy/distutils/fujitsuccompiler.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from distutils.unixccompiler import UnixCCompiler
2
+
3
+ class FujitsuCCompiler(UnixCCompiler):
4
+
5
+ """
6
+ Fujitsu compiler.
7
+ """
8
+
9
+ compiler_type = 'fujitsu'
10
+ cc_exe = 'fcc'
11
+ cxx_exe = 'FCC'
12
+
13
+ def __init__(self, verbose=0, dry_run=0, force=0):
14
+ UnixCCompiler.__init__(self, verbose, dry_run, force)
15
+ cc_compiler = self.cc_exe
16
+ cxx_compiler = self.cxx_exe
17
+ self.set_executables(
18
+ compiler=cc_compiler +
19
+ ' -O3 -Nclang -fPIC',
20
+ compiler_so=cc_compiler +
21
+ ' -O3 -Nclang -fPIC',
22
+ compiler_cxx=cxx_compiler +
23
+ ' -O3 -Nclang -fPIC',
24
+ linker_exe=cc_compiler +
25
+ ' -lfj90i -lfj90f -lfjsrcinfo -lelf -shared',
26
+ linker_so=cc_compiler +
27
+ ' -lfj90i -lfj90f -lfjsrcinfo -lelf -shared'
28
+ )
llmeval-env/lib/python3.10/site-packages/numpy/distutils/intelccompiler.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import platform
2
+
3
+ from distutils.unixccompiler import UnixCCompiler
4
+ from numpy.distutils.exec_command import find_executable
5
+ from numpy.distutils.ccompiler import simple_version_match
6
+ if platform.system() == 'Windows':
7
+ from numpy.distutils.msvc9compiler import MSVCCompiler
8
+
9
+
10
+ class IntelCCompiler(UnixCCompiler):
11
+ """A modified Intel compiler compatible with a GCC-built Python."""
12
+ compiler_type = 'intel'
13
+ cc_exe = 'icc'
14
+ cc_args = 'fPIC'
15
+
16
+ def __init__(self, verbose=0, dry_run=0, force=0):
17
+ UnixCCompiler.__init__(self, verbose, dry_run, force)
18
+
19
+ v = self.get_version()
20
+ mpopt = 'openmp' if v and v < '15' else 'qopenmp'
21
+ self.cc_exe = ('icc -fPIC -fp-model strict -O3 '
22
+ '-fomit-frame-pointer -{}').format(mpopt)
23
+ compiler = self.cc_exe
24
+
25
+ if platform.system() == 'Darwin':
26
+ shared_flag = '-Wl,-undefined,dynamic_lookup'
27
+ else:
28
+ shared_flag = '-shared'
29
+ self.set_executables(compiler=compiler,
30
+ compiler_so=compiler,
31
+ compiler_cxx=compiler,
32
+ archiver='xiar' + ' cru',
33
+ linker_exe=compiler + ' -shared-intel',
34
+ linker_so=compiler + ' ' + shared_flag +
35
+ ' -shared-intel')
36
+
37
+
38
+ class IntelItaniumCCompiler(IntelCCompiler):
39
+ compiler_type = 'intele'
40
+
41
+ # On Itanium, the Intel Compiler used to be called ecc, let's search for
42
+ # it (now it's also icc, so ecc is last in the search).
43
+ for cc_exe in map(find_executable, ['icc', 'ecc']):
44
+ if cc_exe:
45
+ break
46
+
47
+
48
+ class IntelEM64TCCompiler(UnixCCompiler):
49
+ """
50
+ A modified Intel x86_64 compiler compatible with a 64bit GCC-built Python.
51
+ """
52
+ compiler_type = 'intelem'
53
+ cc_exe = 'icc -m64'
54
+ cc_args = '-fPIC'
55
+
56
+ def __init__(self, verbose=0, dry_run=0, force=0):
57
+ UnixCCompiler.__init__(self, verbose, dry_run, force)
58
+
59
+ v = self.get_version()
60
+ mpopt = 'openmp' if v and v < '15' else 'qopenmp'
61
+ self.cc_exe = ('icc -std=c99 -m64 -fPIC -fp-model strict -O3 '
62
+ '-fomit-frame-pointer -{}').format(mpopt)
63
+ compiler = self.cc_exe
64
+
65
+ if platform.system() == 'Darwin':
66
+ shared_flag = '-Wl,-undefined,dynamic_lookup'
67
+ else:
68
+ shared_flag = '-shared'
69
+ self.set_executables(compiler=compiler,
70
+ compiler_so=compiler,
71
+ compiler_cxx=compiler,
72
+ archiver='xiar' + ' cru',
73
+ linker_exe=compiler + ' -shared-intel',
74
+ linker_so=compiler + ' ' + shared_flag +
75
+ ' -shared-intel')
76
+
77
+
78
+ if platform.system() == 'Windows':
79
+ class IntelCCompilerW(MSVCCompiler):
80
+ """
81
+ A modified Intel compiler compatible with an MSVC-built Python.
82
+ """
83
+ compiler_type = 'intelw'
84
+ compiler_cxx = 'icl'
85
+
86
+ def __init__(self, verbose=0, dry_run=0, force=0):
87
+ MSVCCompiler.__init__(self, verbose, dry_run, force)
88
+ version_match = simple_version_match(start=r'Intel\(R\).*?32,')
89
+ self.__version = version_match
90
+
91
+ def initialize(self, plat_name=None):
92
+ MSVCCompiler.initialize(self, plat_name)
93
+ self.cc = self.find_exe('icl.exe')
94
+ self.lib = self.find_exe('xilib')
95
+ self.linker = self.find_exe('xilink')
96
+ self.compile_options = ['/nologo', '/O3', '/MD', '/W3',
97
+ '/Qstd=c99']
98
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
99
+ '/Qstd=c99', '/Z7', '/D_DEBUG']
100
+
101
+ class IntelEM64TCCompilerW(IntelCCompilerW):
102
+ """
103
+ A modified Intel x86_64 compiler compatible with
104
+ a 64bit MSVC-built Python.
105
+ """
106
+ compiler_type = 'intelemw'
107
+
108
+ def __init__(self, verbose=0, dry_run=0, force=0):
109
+ MSVCCompiler.__init__(self, verbose, dry_run, force)
110
+ version_match = simple_version_match(start=r'Intel\(R\).*?64,')
111
+ self.__version = version_match
llmeval-env/lib/python3.10/site-packages/numpy/distutils/lib2def.py ADDED
@@ -0,0 +1,116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import sys
3
+ import subprocess
4
+
5
+ __doc__ = """This module generates a DEF file from the symbols in
6
+ an MSVC-compiled DLL import library. It correctly discriminates between
7
+ data and functions. The data is collected from the output of the program
8
+ nm(1).
9
+
10
+ Usage:
11
+ python lib2def.py [libname.lib] [output.def]
12
+ or
13
+ python lib2def.py [libname.lib] > output.def
14
+
15
+ libname.lib defaults to python<py_ver>.lib and output.def defaults to stdout
16
+
17
+ Author: Robert Kern <[email protected]>
18
+ Last Update: April 30, 1999
19
+ """
20
+
21
+ __version__ = '0.1a'
22
+
23
+ py_ver = "%d%d" % tuple(sys.version_info[:2])
24
+
25
+ DEFAULT_NM = ['nm', '-Cs']
26
+
27
+ DEF_HEADER = """LIBRARY python%s.dll
28
+ ;CODE PRELOAD MOVEABLE DISCARDABLE
29
+ ;DATA PRELOAD SINGLE
30
+
31
+ EXPORTS
32
+ """ % py_ver
33
+ # the header of the DEF file
34
+
35
+ FUNC_RE = re.compile(r"^(.*) in python%s\.dll" % py_ver, re.MULTILINE)
36
+ DATA_RE = re.compile(r"^_imp__(.*) in python%s\.dll" % py_ver, re.MULTILINE)
37
+
38
+ def parse_cmd():
39
+ """Parses the command-line arguments.
40
+
41
+ libfile, deffile = parse_cmd()"""
42
+ if len(sys.argv) == 3:
43
+ if sys.argv[1][-4:] == '.lib' and sys.argv[2][-4:] == '.def':
44
+ libfile, deffile = sys.argv[1:]
45
+ elif sys.argv[1][-4:] == '.def' and sys.argv[2][-4:] == '.lib':
46
+ deffile, libfile = sys.argv[1:]
47
+ else:
48
+ print("I'm assuming that your first argument is the library")
49
+ print("and the second is the DEF file.")
50
+ elif len(sys.argv) == 2:
51
+ if sys.argv[1][-4:] == '.def':
52
+ deffile = sys.argv[1]
53
+ libfile = 'python%s.lib' % py_ver
54
+ elif sys.argv[1][-4:] == '.lib':
55
+ deffile = None
56
+ libfile = sys.argv[1]
57
+ else:
58
+ libfile = 'python%s.lib' % py_ver
59
+ deffile = None
60
+ return libfile, deffile
61
+
62
+ def getnm(nm_cmd=['nm', '-Cs', 'python%s.lib' % py_ver], shell=True):
63
+ """Returns the output of nm_cmd via a pipe.
64
+
65
+ nm_output = getnm(nm_cmd = 'nm -Cs py_lib')"""
66
+ p = subprocess.Popen(nm_cmd, shell=shell, stdout=subprocess.PIPE,
67
+ stderr=subprocess.PIPE, text=True)
68
+ nm_output, nm_err = p.communicate()
69
+ if p.returncode != 0:
70
+ raise RuntimeError('failed to run "%s": "%s"' % (
71
+ ' '.join(nm_cmd), nm_err))
72
+ return nm_output
73
+
74
+ def parse_nm(nm_output):
75
+ """Returns a tuple of lists: dlist for the list of data
76
+ symbols and flist for the list of function symbols.
77
+
78
+ dlist, flist = parse_nm(nm_output)"""
79
+ data = DATA_RE.findall(nm_output)
80
+ func = FUNC_RE.findall(nm_output)
81
+
82
+ flist = []
83
+ for sym in data:
84
+ if sym in func and (sym[:2] == 'Py' or sym[:3] == '_Py' or sym[:4] == 'init'):
85
+ flist.append(sym)
86
+
87
+ dlist = []
88
+ for sym in data:
89
+ if sym not in flist and (sym[:2] == 'Py' or sym[:3] == '_Py'):
90
+ dlist.append(sym)
91
+
92
+ dlist.sort()
93
+ flist.sort()
94
+ return dlist, flist
95
+
96
+ def output_def(dlist, flist, header, file = sys.stdout):
97
+ """Outputs the final DEF file to a file defaulting to stdout.
98
+
99
+ output_def(dlist, flist, header, file = sys.stdout)"""
100
+ for data_sym in dlist:
101
+ header = header + '\t%s DATA\n' % data_sym
102
+ header = header + '\n' # blank line
103
+ for func_sym in flist:
104
+ header = header + '\t%s\n' % func_sym
105
+ file.write(header)
106
+
107
+ if __name__ == '__main__':
108
+ libfile, deffile = parse_cmd()
109
+ if deffile is None:
110
+ deffile = sys.stdout
111
+ else:
112
+ deffile = open(deffile, 'w')
113
+ nm_cmd = DEFAULT_NM + [str(libfile)]
114
+ nm_output = getnm(nm_cmd, shell=False)
115
+ dlist, flist = parse_nm(nm_output)
116
+ output_def(dlist, flist, DEF_HEADER, deffile)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/log.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Colored log
2
+ import sys
3
+ from distutils.log import * # noqa: F403
4
+ from distutils.log import Log as old_Log
5
+ from distutils.log import _global_log
6
+
7
+ from numpy.distutils.misc_util import (red_text, default_text, cyan_text,
8
+ green_text, is_sequence, is_string)
9
+
10
+
11
+ def _fix_args(args,flag=1):
12
+ if is_string(args):
13
+ return args.replace('%', '%%')
14
+ if flag and is_sequence(args):
15
+ return tuple([_fix_args(a, flag=0) for a in args])
16
+ return args
17
+
18
+
19
+ class Log(old_Log):
20
+ def _log(self, level, msg, args):
21
+ if level >= self.threshold:
22
+ if args:
23
+ msg = msg % _fix_args(args)
24
+ if 0:
25
+ if msg.startswith('copying ') and msg.find(' -> ') != -1:
26
+ return
27
+ if msg.startswith('byte-compiling '):
28
+ return
29
+ print(_global_color_map[level](msg))
30
+ sys.stdout.flush()
31
+
32
+ def good(self, msg, *args):
33
+ """
34
+ If we log WARN messages, log this message as a 'nice' anti-warn
35
+ message.
36
+
37
+ """
38
+ if WARN >= self.threshold:
39
+ if args:
40
+ print(green_text(msg % _fix_args(args)))
41
+ else:
42
+ print(green_text(msg))
43
+ sys.stdout.flush()
44
+
45
+
46
+ _global_log.__class__ = Log
47
+
48
+ good = _global_log.good
49
+
50
+ def set_threshold(level, force=False):
51
+ prev_level = _global_log.threshold
52
+ if prev_level > DEBUG or force:
53
+ # If we're running at DEBUG, don't change the threshold, as there's
54
+ # likely a good reason why we're running at this level.
55
+ _global_log.threshold = level
56
+ if level <= DEBUG:
57
+ info('set_threshold: setting threshold to DEBUG level,'
58
+ ' it can be changed only with force argument')
59
+ else:
60
+ info('set_threshold: not changing threshold from DEBUG level'
61
+ ' %s to %s' % (prev_level, level))
62
+ return prev_level
63
+
64
+ def get_threshold():
65
+ return _global_log.threshold
66
+
67
+ def set_verbosity(v, force=False):
68
+ prev_level = _global_log.threshold
69
+ if v < 0:
70
+ set_threshold(ERROR, force)
71
+ elif v == 0:
72
+ set_threshold(WARN, force)
73
+ elif v == 1:
74
+ set_threshold(INFO, force)
75
+ elif v >= 2:
76
+ set_threshold(DEBUG, force)
77
+ return {FATAL:-2,ERROR:-1,WARN:0,INFO:1,DEBUG:2}.get(prev_level, 1)
78
+
79
+
80
+ _global_color_map = {
81
+ DEBUG:cyan_text,
82
+ INFO:default_text,
83
+ WARN:red_text,
84
+ ERROR:red_text,
85
+ FATAL:red_text
86
+ }
87
+
88
+ # don't use INFO,.. flags in set_verbosity, these flags are for set_threshold.
89
+ set_verbosity(0, force=True)
90
+
91
+
92
+ _error = error
93
+ _warn = warn
94
+ _info = info
95
+ _debug = debug
96
+
97
+
98
+ def error(msg, *a, **kw):
99
+ _error(f"ERROR: {msg}", *a, **kw)
100
+
101
+
102
+ def warn(msg, *a, **kw):
103
+ _warn(f"WARN: {msg}", *a, **kw)
104
+
105
+
106
+ def info(msg, *a, **kw):
107
+ _info(f"INFO: {msg}", *a, **kw)
108
+
109
+
110
+ def debug(msg, *a, **kw):
111
+ _debug(f"DEBUG: {msg}", *a, **kw)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/mingw32ccompiler.py ADDED
@@ -0,0 +1,591 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Support code for building Python extensions on Windows.
3
+
4
+ # NT stuff
5
+ # 1. Make sure libpython<version>.a exists for gcc. If not, build it.
6
+ # 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
7
+ # 3. Force windows to use g77
8
+
9
+ """
10
+ import os
11
+ import sys
12
+ import subprocess
13
+ import re
14
+ import textwrap
15
+
16
+ # Overwrite certain distutils.ccompiler functions:
17
+ import numpy.distutils.ccompiler # noqa: F401
18
+ from numpy.distutils import log
19
+ # NT stuff
20
+ # 1. Make sure libpython<version>.a exists for gcc. If not, build it.
21
+ # 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
22
+ # --> this is done in numpy/distutils/ccompiler.py
23
+ # 3. Force windows to use g77
24
+
25
+ import distutils.cygwinccompiler
26
+ from distutils.unixccompiler import UnixCCompiler
27
+ from distutils.msvccompiler import get_build_version as get_build_msvc_version
28
+ from distutils.errors import UnknownFileError
29
+ from numpy.distutils.misc_util import (msvc_runtime_library,
30
+ msvc_runtime_version,
31
+ msvc_runtime_major,
32
+ get_build_architecture)
33
+
34
def get_msvcr_replacement():
    """Replacement for outdated version of get_msvcr from cygwinccompiler.

    Returns a list with the MSVC runtime library name, or an empty list
    when no MSVC runtime is associated with this Python build.
    """
    runtime = msvc_runtime_library()
    if runtime is None:
        return []
    return [runtime]
38
+
39
+
40
# Useful to generate table of symbols from a dll: _START locates the export
# table header in `objdump -p` output, _TABLE matches one table row.
# NOTE(review): the first group's character class '[\s*[0-9]*' contains a
# literal '[' and '*'; it matches the ordinal digits (plus stray spaces) --
# looks intentional for objdump's format, but verify before tightening.
_START = re.compile(r'\[Ordinal/Name Pointer\] Table')
_TABLE = re.compile(r'^\s+\[([\s*[0-9]*)\] ([a-zA-Z0-9_]*)')
43
+
44
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(distutils.cygwinccompiler.CygwinCCompiler):
    """ A modified MingW32 compiler compatible with an MSVC built Python.

    """

    compiler_type = 'mingw32'

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        # Initialize the underlying cygwin compiler machinery first.
        distutils.cygwinccompiler.CygwinCCompiler.__init__ (self, verbose,
                                                            dry_run, force)

        # **changes: eric jones 4/11/01
        # 1. Check for import library on Windows. Build if it doesn't exist.

        build_import_library()

        # Check for custom msvc runtime library on Windows. Build if it doesn't exist.
        msvcr_success = build_msvcr_library()
        msvcr_dbg_success = build_msvcr_library(debug=True)
        if msvcr_success or msvcr_dbg_success:
            # add preprocessor statement for using customized msvcr lib
            self.define_macro('NPY_MINGW_USE_CUSTOM_MSVCR')

        # Define the MSVC version as hint for MinGW
        msvcr_version = msvc_runtime_version()
        if msvcr_version:
            self.define_macro('__MSVCRT_VERSION__', '0x%04i' % msvcr_version)

        # MS_WIN64 should be defined when building for amd64 on windows,
        # but python headers define it only for MS compilers, which has all
        # kind of bad consequences, like using Py_ModuleInit4 instead of
        # Py_ModuleInit4_64, etc... So we add it here
        if get_build_architecture() == 'AMD64':
            self.set_executables(
                compiler='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall',
                compiler_so='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall '
                            '-Wstrict-prototypes',
                linker_exe='gcc -g',
                linker_so='gcc -g -shared')
        else:
            self.set_executables(
                compiler='gcc -O2 -Wall',
                compiler_so='gcc -O2 -Wall -Wstrict-prototypes',
                linker_exe='g++ ',
                linker_so='g++ -shared')
        # added for python2.3 support
        # we can't pass it through set_executables because pre 2.2 would fail
        self.compiler_cxx = ['g++']

        # Maybe we should also append -mthreads, but then the finished dlls
        # need another dll (mingwm10.dll see Mingw32 docs) (-mthreads: Support
        # thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        #self.dll_libraries=[]
        return

    # __init__ ()

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir,
             libraries,
             library_dirs,
             runtime_library_dirs,
             export_symbols = None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC >= 7.0 (MinGW standard is msvcrt)
        runtime_library = msvc_runtime_library()
        if runtime_library:
            if not libraries:
                libraries = []
            libraries.append(runtime_library)
        args = (self,
                target_desc,
                objects,
                output_filename,
                output_dir,
                libraries,
                library_dirs,
                runtime_library_dirs,
                None, #export_symbols, we do this in our def-file
                debug,
                extra_preargs,
                extra_postargs,
                build_temp,
                target_lang)
        # Delegate to UnixCCompiler.link, truncating the argument tuple to
        # however many parameters that implementation actually accepts
        # (co_argcount), so this works across distutils versions.
        func = UnixCCompiler.link
        func(*args[:func.__code__.co_argcount])
        return

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source file names to object file names under *output_dir*,
        with special handling for Windows .rc/.res resource files."""
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))

            # added these lines to strip off windows drive letters
            # without it, .o files are placed next to .c files
            # instead of the build directory
            drv, base = os.path.splitdrive(base)
            if drv:
                base = base[1:]

            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError(
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res' or ext == '.rc':
                # these need to be compiled to object files; keep the
                # original extension in the object name to avoid clashes
                obj_names.append (os.path.join (output_dir,
                                                base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()
180
+
181
+
182
def find_python_dll():
    """Locate the Python runtime DLL and return its full path.

    Searches the virtualenv prefix, the base installation prefix (when in
    a virtualenv), and the Windows system directory.  Raises ValueError
    when the DLL cannot be found anywhere.
    """
    # Candidate roots: the current prefix and, when different, the base
    # installation a virtualenv was created from.
    roots = [sys.prefix]
    if sys.base_prefix != sys.prefix:
        roots.append(sys.base_prefix)

    # Combine every root with the sub-directories the DLL may live in.
    lib_dirs = [os.path.join(root, sub)
                for root in roots
                for sub in ('', 'lib', 'bin')]

    # The Windows system directory is a further candidate.
    systemroot = os.environ.get('SYSTEMROOT')
    if systemroot is not None:
        lib_dirs.append(os.path.join(systemroot, 'System32'))

    # Derive the expected DLL name from the running interpreter.
    major_version, minor_version = tuple(sys.version_info[:2])
    implementation = sys.implementation.name
    if implementation == 'cpython':
        dllname = f'python{major_version}{minor_version}.dll'
    elif implementation == 'pypy':
        dllname = f'libpypy{major_version}.{minor_version}-c.dll'
    else:
        dllname = f'Unknown platform {implementation}'
    print("Looking for %s" % dllname)
    for folder in lib_dirs:
        candidate = os.path.join(folder, dllname)
        if os.path.exists(candidate):
            return candidate

    raise ValueError("%s not found in %s" % (dllname, lib_dirs))
219
+
220
def dump_table(dll):
    """Return the raw `objdump -p` output for *dll* as a list of byte lines."""
    output = subprocess.check_output(["objdump.exe", "-p", dll])
    return output.split(b'\n')
223
+
224
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file.

    The .def file will be overwritten"""
    # Scan the objdump output for the header of the export table; the
    # for/else raises when no such header exists.
    dump = dump_table(dll)
    for i in range(len(dump)):
        if _START.match(dump[i].decode()):
            break
    else:
        raise ValueError("Symbol table not found")

    # Collect (ordinal, name) pairs from the lines following the header,
    # stopping at the first line that no longer matches the table format.
    syms = []
    for j in range(i+1, len(dump)):
        m = _TABLE.match(dump[j].decode())
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break

    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)

    # Write the .def file; only symbol names are exported -- ordinals are
    # deliberately omitted (see the commented-out line below).
    with open(dfile, 'w') as d:
        d.write('LIBRARY %s\n' % os.path.basename(dll))
        d.write(';CODE PRELOAD MOVEABLE DISCARDABLE\n')
        d.write(';DATA PRELOAD SINGLE\n')
        d.write('\nEXPORTS\n')
        for s in syms:
            #d.write('@%d %s\n' % (s[0], s[1]))
            d.write('%s\n' % s[1])
255
+
256
def find_dll(dll_name):
    """Search for *dll_name* in the WinSxS store, then in sys.prefix and
    PATH; return its path, or None when not found."""

    # architecture tag used inside WinSxS directory names
    arch = {'AMD64' : 'amd64',
            'Intel' : 'x86'}[get_build_architecture()]

    def _find_dll_in_winsxs(dll_name):
        # Walk through the WinSxS directory to find the dll.
        winsxs_path = os.path.join(os.environ.get('WINDIR', r'C:\WINDOWS'),
                                   'winsxs')
        if not os.path.exists(winsxs_path):
            return None
        for root, dirs, files in os.walk(winsxs_path):
            # the directory name encodes the architecture; require a match
            if dll_name in files and arch in root:
                return os.path.join(root, dll_name)
        return None

    def _find_dll_in_path(dll_name):
        # First, look in the Python directory, then scan PATH for
        # the given dll name.  Returns None implicitly when not found.
        for path in [sys.prefix] + os.environ['PATH'].split(';'):
            filepath = os.path.join(path, dll_name)
            if os.path.exists(filepath):
                return os.path.abspath(filepath)

    return _find_dll_in_winsxs(dll_name) or _find_dll_in_path(dll_name)
281
+
282
def build_msvcr_library(debug=False):
    """Build a MinGW import library for the MSVC runtime, if needed.

    Returns True when a usable custom msvcr import library exists (either
    already present or freshly built), False otherwise.  No-op on
    non-Windows platforms.
    """
    if os.name != 'nt':
        return False

    # If the version number is None, then we couldn't find the MSVC runtime at
    # all, because we are running on a Python distribution which is customed
    # compiled; trust that the compiler is the same as the one available to us
    # now, and that it is capable of linking with the correct runtime without
    # any extra options.
    msvcr_ver = msvc_runtime_major()
    if msvcr_ver is None:
        log.debug('Skip building import library: '
                  'Runtime is not compiled with MSVC')
        return False

    # Skip using a custom library for versions < MSVC 8.0
    if msvcr_ver < 80:
        log.debug('Skip building msvcr library:'
                  ' custom functionality not present')
        return False

    msvcr_name = msvc_runtime_library()
    if debug:
        # debug runtimes carry a 'd' suffix (e.g. msvcr90d)
        msvcr_name += 'd'

    # Skip if custom library already exists
    out_name = "lib%s.a" % msvcr_name
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building msvcr library: "%s" exists' %
                  (out_file,))
        return True

    # Find the msvcr dll
    msvcr_dll_name = msvcr_name + '.dll'
    dll_file = find_dll(msvcr_dll_name)
    if not dll_file:
        log.warn('Cannot build msvcr library: "%s" not found' %
                 msvcr_dll_name)
        return False

    def_name = "lib%s.def" % msvcr_name
    def_file = os.path.join(sys.prefix, 'libs', def_name)

    log.info('Building msvcr library: "%s" (from %s)' \
             % (out_file, dll_file))

    # Generate a symbol definition file from the msvcr dll
    generate_def(dll_file, def_file)

    # Create a custom mingw library for the given symbol definitions
    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    retcode = subprocess.call(cmd)

    # Clean up symbol definitions
    os.remove(def_file)

    # dlltool returns 0 on success, so invert to get a boolean
    return (not retcode)
340
+
341
def build_import_library():
    """Build the gcc import library for the Python DLL, dispatching on the
    build architecture.  No-op on non-Windows platforms."""
    if os.name != 'nt':
        return

    arch = get_build_architecture()
    if arch == 'AMD64':
        return _build_import_library_amd64()
    if arch == 'Intel':
        return _build_import_library_x86()
    raise ValueError("Unhandled arch %s" % arch)
352
+
353
+ def _check_for_import_lib():
354
+ """Check if an import library for the Python runtime already exists."""
355
+ major_version, minor_version = tuple(sys.version_info[:2])
356
+
357
+ # patterns for the file name of the library itself
358
+ patterns = ['libpython%d%d.a',
359
+ 'libpython%d%d.dll.a',
360
+ 'libpython%d.%d.dll.a']
361
+
362
+ # directory trees that may contain the library
363
+ stems = [sys.prefix]
364
+ if hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix:
365
+ stems.append(sys.base_prefix)
366
+ elif hasattr(sys, 'real_prefix') and sys.real_prefix != sys.prefix:
367
+ stems.append(sys.real_prefix)
368
+
369
+ # possible subdirectories within those trees where it is placed
370
+ sub_dirs = ['libs', 'lib']
371
+
372
+ # generate a list of candidate locations
373
+ candidates = []
374
+ for pat in patterns:
375
+ filename = pat % (major_version, minor_version)
376
+ for stem_dir in stems:
377
+ for folder in sub_dirs:
378
+ candidates.append(os.path.join(stem_dir, folder, filename))
379
+
380
+ # test the filesystem to see if we can find any of these
381
+ for fullname in candidates:
382
+ if os.path.isfile(fullname):
383
+ # already exists, in location given
384
+ return (True, fullname)
385
+
386
+ # needs to be built, preferred location given first
387
+ return (False, candidates[0])
388
+
389
def _build_import_library_amd64():
    """Create the AMD64 gcc import library for the Python DLL if missing."""
    out_exists, out_file = _check_for_import_lib()
    if out_exists:
        log.debug('Skip building import library: "%s" exists', out_file)
        return

    # locate the runtime DLL the import library will refer to
    dll_file = find_python_dll()
    log.info('Building import library (arch=AMD64): "%s" (from %s)' %
             (out_file, dll_file))

    # dump the DLL's exported symbols into a .def file ...
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    generate_def(dll_file, def_file)

    # ... and turn that symbol list into an import library
    subprocess.check_call(['dlltool', '-d', def_file, '-l', out_file])
408
+
409
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows
    """
    out_exists, out_file = _check_for_import_lib()
    if out_exists:
        log.debug('Skip building import library: "%s" exists', out_file)
        return

    # the MSVC-style .lib we derive the symbol list from
    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix, 'libs', lib_name)
    if not os.path.isfile(lib_file):
        # didn't find library file in virtualenv, try base distribution, too,
        # and use that instead if found there. for Python 2.7 venvs, the base
        # directory is in attribute real_prefix instead of base_prefix.
        if hasattr(sys, 'base_prefix'):
            base_lib = os.path.join(sys.base_prefix, 'libs', lib_name)
        elif hasattr(sys, 'real_prefix'):
            base_lib = os.path.join(sys.real_prefix, 'libs', lib_name)
        else:
            base_lib = ''  # os.path.isfile('') == False

        if os.path.isfile(base_lib):
            lib_file = base_lib
        else:
            log.warn('Cannot build import library: "%s" not found', lib_file)
            return
    log.info('Building import library (ARCH=x86): "%s"', out_file)

    from numpy.distutils import lib2def

    # dump the .lib's symbols with nm and convert them into a .def file
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    nm_output = lib2def.getnm(
            lib2def.DEFAULT_NM + [lib_file], shell=False)
    dlist, flist = lib2def.parse_nm(nm_output)
    with open(def_file, 'w') as fid:
        lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, fid)

    dll_name = find_python_dll ()

    # build the import library with dlltool
    cmd = ["dlltool",
           "--dllname", dll_name,
           "--def", def_file,
           "--output-lib", out_file]
    # NOTE(review): check_output returns captured *stdout*, not an exit
    # status -- a dlltool failure raises CalledProcessError above, so this
    # warning actually fires whenever dlltool prints anything.  Confirm
    # intent before changing.
    status = subprocess.check_output(cmd)
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    return
457
+
458
+ #=====================================
459
+ # Dealing with Visual Studio MANIFESTS
460
+ #=====================================
461
+
462
+ # Functions to deal with visual studio manifests. Manifest are a mechanism to
463
+ # enforce strong DLL versioning on windows, and has nothing to do with
464
+ # distutils MANIFEST. manifests are XML files with version info, and used by
465
+ # the OS loader; they are necessary when linking against a DLL not in the
466
+ # system path; in particular, official python 2.6 binary is built against the
467
+ # MS runtime 9 (the one from VS 2008), which is not available on most windows
468
+ # systems; python 2.6 installer does install it in the Win SxS (Side by side)
469
+ # directory, but this requires the manifest for this to work. This is a big
470
+ # mess, thanks MS for a wonderful system.
471
+
472
+ # XXX: ideally, we should use exactly the same version as used by python. I
473
+ # submitted a patch to get this version, but it was only included for python
474
+ # 2.6.1 and above. So for versions below, we use a "best guess".
475
# Map of MSVC runtime "majmin" strings (e.g. '90') to full assembly version
# strings, used when generating manifests.  Only populated on Windows.
_MSVCRVER_TO_FULLVER = {}
if sys.platform == 'win32':
    try:
        import msvcrt
        # I took one version in my SxS directory: no idea if it is the good
        # one, and we can't retrieve it from python
        _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
        _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
        # Value from msvcrt.CRT_ASSEMBLY_VERSION under Python 3.3.0
        # on Windows XP:
        _MSVCRVER_TO_FULLVER['100'] = "10.0.30319.460"
        crt_ver = getattr(msvcrt, 'CRT_ASSEMBLY_VERSION', None)
        if crt_ver is not None:  # Available at least back to Python 3.3
            maj, min = re.match(r'(\d+)\.(\d)', crt_ver).groups()
            _MSVCRVER_TO_FULLVER[maj + min] = crt_ver
            del maj, min
        del crt_ver
    except ImportError:
        # If we are here, means python was not built with MSVC. Not sure what
        # to do in that case: manifest building will fail, but it should not be
        # used in that case anyway
        log.warn('Cannot import msvcrt: using manifest will not be possible')
497
+
498
def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file.

    Raises ValueError when the (maj, min) pair has no known full assembly
    version in _MSVCRVER_TO_FULLVER.
    """
    try:
        # keys are 'majmin' strings, e.g. (9, 0) -> '90'
        fullver = _MSVCRVER_TO_FULLVER[str(maj * 10 + min)]
    except KeyError:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" %
                         (maj, min)) from None
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignment constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
    # strings.exe from mingw on python.exe).
    template = textwrap.dedent("""\
        <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
          <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
            <security>
              <requestedPrivileges>
                <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
              </requestedPrivileges>
            </security>
          </trustInfo>
          <dependency>
            <dependentAssembly>
              <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
            </dependentAssembly>
          </dependency>
        </assembly>""")

    return template % {'fullver': fullver, 'maj': maj, 'min': min}
529
+
530
def manifest_rc(name, type='dll'):
    """Return the rc file used to generate the res file which will be embedded
    as manifest for given manifest file name, of given type ('dll' or
    'exe').

    Parameters
    ----------
    name : str
        name of the manifest file to embed
    type : str {'dll', 'exe'}
        type of the binary which will embed the manifest

    """
    # RT_MANIFEST resource id: 2 for DLLs, 1 for executables
    rc_ids = {'dll': 2, 'exe': 1}
    if type not in rc_ids:
        raise ValueError("Type %s not supported" % type)
    rctype = rc_ids[type]

    return """\
#include "winuser.h"
%d RT_MANIFEST %s""" % (rctype, name)
553
+
554
def check_embedded_msvcr_match_linked(msver):
    """Raise ValueError when the MSVC runtime major version linked against
    does not match *msver*, the runtime version about to be embedded in
    the MANIFEST.

    Parameters
    ----------
    msver : int or float
        MS runtime version used for the MANIFEST.
    """
    # check msvcr major version are the same for linking and
    # embedding
    maj = msvc_runtime_major()
    if maj:
        if not maj == int(msver):
            # BUG FIX: the message previously interpolated the two versions
            # in swapped order -- the linked runtime is `maj` and the one
            # about to be embedded is `msver`.
            raise ValueError(
                  "Discrepancy between linked msvcr " \
                  "(%d) and the one about to be embedded " \
                  "(%d)" % (maj, int(msver)))
565
+
566
def configtest_name(config):
    """Return the base name (no extension) of the configtest source file
    generated by *config*."""
    source = config._gen_temp_sourcefile("yo", [], "c")
    return os.path.splitext(os.path.basename(source))[0]
569
+
570
def manifest_name(config):
    """Return the manifest file name for the configtest executable."""
    # configtest base name + executable suffix + ".manifest"
    base = configtest_name(config)
    exe_suffix = config.compiler.exe_extension
    return base + exe_suffix + ".manifest"
575
+
576
def rc_name(config):
    """Return the .rc resource file name for the configtest."""
    return configtest_name(config) + ".rc"
580
+
581
def generate_manifest(config):
    """Write a Visual Studio manifest for the configtest binary when the
    build uses MSVC >= 8, registering it among config's temp files."""
    msver = get_build_msvc_version()
    if msver is not None:
        if msver >= 8:
            # refuse to embed a runtime version different from the linked one
            check_embedded_msvcr_match_linked(msver)
            ma_str, mi_str = str(msver).split('.')
            # Write the manifest file
            manxml = msvc_manifest_xml(int(ma_str), int(mi_str))
            with open(manifest_name(config), "w") as man:
                config.temp_files.append(manifest_name(config))
                man.write(manxml)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/misc_util.py ADDED
@@ -0,0 +1,2493 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+ import sys
4
+ import copy
5
+ import glob
6
+ import atexit
7
+ import tempfile
8
+ import subprocess
9
+ import shutil
10
+ import multiprocessing
11
+ import textwrap
12
+ import importlib.util
13
+ from threading import local as tlocal
14
+ from functools import reduce
15
+
16
+ import distutils
17
+ from distutils.errors import DistutilsError
18
+
19
# stores temporary directory of each thread to only create one per thread
_tdata = tlocal()

# store all created temporary directories so they can be deleted on exit
_tmpdirs = []
def clean_up_temporary_directory():
    # The "is not None" guard protects against interpreter shutdown, where
    # module globals may already have been cleared.
    if _tmpdirs is not None:
        for d in _tmpdirs:
            try:
                shutil.rmtree(d)
            except OSError:
                # best-effort cleanup; the directory may already be gone
                pass

atexit.register(clean_up_temporary_directory)
33
+
34
# Explicit public API of this module.
__all__ = ['Configuration', 'get_numpy_include_dirs', 'default_config_dict',
           'dict_append', 'appendpath', 'generate_config_py',
           'get_cmd', 'allpath', 'get_mathlibs',
           'terminal_has_colors', 'red_text', 'green_text', 'yellow_text',
           'blue_text', 'cyan_text', 'cyg2win32', 'mingw32', 'all_strings',
           'has_f_sources', 'has_cxx_sources', 'filter_sources',
           'get_dependencies', 'is_local_src_dir', 'get_ext_source_files',
           'get_script_files', 'get_lib_source_files', 'get_data_files',
           'dot_join', 'get_frame', 'minrelpath', 'njoin',
           'is_sequence', 'is_string', 'as_list', 'gpaths', 'get_language',
           'get_build_architecture', 'get_info', 'get_pkg_info',
           'get_num_build_jobs', 'sanitize_cxx_flags',
           'exec_mod_from_location']
47
+
48
class InstallableLib:
    """
    Container to hold information on an installable library.

    Parameters
    ----------
    name : str
        Name of the installed library.
    build_info : dict
        Dictionary holding build information.
    target_dir : str
        Absolute path specifying where to install the library.

    See Also
    --------
    Configuration.add_installed_library

    Notes
    -----
    The three parameters are stored as attributes with the same names.

    """

    def __init__(self, name, build_info, target_dir):
        # plain record: keep the constructor arguments as attributes
        self.name = name
        self.build_info = build_info
        self.target_dir = target_dir
74
+
75
+
76
def get_num_build_jobs():
    """
    Get number of parallel build jobs set by the --parallel command line
    argument of setup.py
    If the command did not receive a setting the environment variable
    NPY_NUM_BUILD_JOBS is checked. If that is unset, return the number of
    processors on the system, with a maximum of 8 (to prevent
    overloading the system if there a lot of CPUs).

    Returns
    -------
    out : int
        number of parallel jobs that can be run

    """
    from numpy.distutils.core import get_distribution
    # default: available CPUs capped at 8, overridable via the environment
    try:
        n_cpus = len(os.sched_getaffinity(0))
    except AttributeError:
        n_cpus = multiprocessing.cpu_count()
    envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", min(n_cpus, 8)))

    dist = get_distribution()
    # may be None during configuration
    if dist is None:
        return envjobs

    # any of the three build commands may carry the setting; take the largest
    parallel_values = [
        getattr(dist.get_command_obj(cmd), 'parallel', None)
        for cmd in ('build', 'build_ext', 'build_clib')]
    explicit = [v for v in parallel_values if v is not None]
    if not explicit:
        return envjobs
    return max(explicit)
111
+
112
def quote_args(args):
    """Quote list of arguments.

    .. deprecated:: 1.22.
    """
    import warnings
    warnings.warn('"quote_args" is deprecated.',
                  DeprecationWarning, stacklevel=2)
    # Unlike _nt_quote_args, leave arguments alone when they already start
    # with a quote character.
    quoted = []
    for arg in args:
        if ' ' in arg and arg[0] not in '"\'':
            quoted.append('"%s"' % (arg))
        else:
            quoted.append(arg)
    return quoted
128
+
129
def allpath(name):
    "Convert a /-separated pathname to one using the OS's path separator."
    return os.path.join(*name.split('/'))
133
+
134
def rel_path(path, parent_path):
    """Return path relative to parent_path."""
    # Use realpath to avoid issues with symlinked dirs (see gh-7707)
    parent = os.path.realpath(os.path.abspath(parent_path))
    child = os.path.realpath(os.path.abspath(path))
    if len(child) < len(parent):
        # path lies outside parent_path; return it unchanged
        return path
    if child == parent:
        return ''
    if child.startswith(parent):
        assert child[len(parent)] in [os.sep], repr((path, child[len(parent)]))
        return child[len(parent) + 1:]
    return path
148
def get_path_from_frame(frame, parent_path=None):
    """Return path of the module given a frame object from the call stack.

    Returned path is relative to parent_path when given,
    otherwise it is absolute path.
    """

    # First, try to find if the file name is in the frame.
    try:
        # eval rather than a plain lookup so a missing name raises NameError.
        caller_file = eval('__file__', frame.f_globals, frame.f_locals)
        d = os.path.dirname(os.path.abspath(caller_file))
    except NameError:
        # __file__ is not defined, so let's try __name__. We try this second
        # because setuptools spoofs __name__ to be '__main__' even though
        # sys.modules['__main__'] might be something else, like easy_install(1).
        caller_name = eval('__name__', frame.f_globals, frame.f_locals)
        __import__(caller_name)
        mod = sys.modules[caller_name]
        if hasattr(mod, '__file__'):
            d = os.path.dirname(os.path.abspath(mod.__file__))
        else:
            # we're probably running setup.py as execfile("setup.py")
            # (likely we're building an egg)
            d = os.path.abspath('.')

    if parent_path is not None:
        d = rel_path(d, parent_path)

    # Never return an empty string; '' means "current directory" here.
    return d or '.'
178
def njoin(*path):
    """Join two or more pathname components +
    - convert a /-separated pathname to one using the OS's path separator.
    - resolve `..` and `.` from path.

    Either passing n arguments as in njoin('a','b'), or a sequence
    of n names as in njoin(['a','b']) is handled, or a mixture of such arguments.
    """
    flattened = []
    for part in path:
        if is_sequence(part):
            # nested sequence, e.g. njoin(['a', 'b'], 'c'): join it first
            flattened.append(njoin(*part))
        else:
            assert is_string(part)
            flattened.append(part)
    if flattened:
        joined = os.path.join(*flattened)
    else:
        # njoin() with no arguments
        joined = ''
    if os.path.sep != '/':
        joined = joined.replace('/', os.path.sep)
    return minrelpath(joined)
205
def get_mathlibs(path=None):
    """Return the MATHLIB line from numpyconfig.h
    """
    if path is not None:
        config_file = os.path.join(path, '_numpyconfig.h')
    else:
        # Look for the file in each of the numpy include directories.
        dirs = get_numpy_include_dirs()
        for d in dirs:
            candidate = os.path.join(d, '_numpyconfig.h')
            if os.path.exists(candidate):
                config_file = candidate
                break
        else:
            raise DistutilsError('_numpyconfig.h not found in numpy include '
                'dirs %r' % (dirs,))

    prefix = '#define MATHLIB'
    mathlibs = []
    with open(config_file) as fid:
        for line in fid:
            if line.startswith(prefix):
                value = line[len(prefix):].strip()
                if value:
                    # value is a comma-separated library list
                    mathlibs.extend(value.split(','))
    return mathlibs
232
def minrelpath(path):
    """Resolve `..` and '.' from path.
    """
    # Non-strings pass through untouched.
    if not is_string(path):
        return path
    if '.' not in path:
        return path
    l = path.split(os.sep)
    # Drop '.' components; index(…, 1) keeps a leading lone '.' intact.
    while l:
        try:
            i = l.index('.', 1)
        except ValueError:
            break
        del l[i]
    j = 1
    # Collapse 'name/..' pairs; consecutive leading '..' are preserved.
    while l:
        try:
            i = l.index('..', j)
        except ValueError:
            break
        if l[i-1]=='..':
            # '..' preceded by '..' cannot be collapsed; search past it.
            j += 1
        else:
            del l[i], l[i-1]
            j = 1
    if not l:
        return ''
    return os.sep.join(l)
261
def sorted_glob(fileglob):
    """sorts output of python glob for https://bugs.python.org/issue30461
    to allow extensions to have reproducible build results"""
    matches = glob.glob(fileglob)
    matches.sort()
    return matches
266
def _fix_paths(paths, local_path, include_non_existing):
    """Expand glob patterns in ``paths`` and resolve entries against
    ``local_path``; worker behind `gpaths`. Returns a new list."""
    assert is_sequence(paths), repr(type(paths))
    new_paths = []
    assert not is_string(paths), repr(paths)
    for n in paths:
        if is_string(n):
            if '*' in n or '?' in n:
                # Glob pattern: matches under local_path take precedence
                # over matches relative to the current directory.
                p = sorted_glob(n)
                p2 = sorted_glob(njoin(local_path, n))
                if p2:
                    new_paths.extend(p2)
                elif p:
                    new_paths.extend(p)
                else:
                    if include_non_existing:
                        new_paths.append(n)
                    print('could not resolve pattern in %r: %r' %
                            (local_path, n))
            else:
                # Plain path: prefer the local_path-relative location
                # when it exists on disk.
                n2 = njoin(local_path, n)
                if os.path.exists(n2):
                    new_paths.append(n2)
                else:
                    if os.path.exists(n):
                        new_paths.append(n)
                    elif include_non_existing:
                        new_paths.append(n)
                    if not os.path.exists(n):
                        print('non-existing path in %r: %r' %
                                (local_path, n))

        elif is_sequence(n):
            # Nested sequences are flattened recursively.
            new_paths.extend(_fix_paths(n, local_path, include_non_existing))
        else:
            # Non-string, non-sequence entries pass through untouched.
            new_paths.append(n)
    return [minrelpath(p) for p in new_paths]
303
def gpaths(paths, local_path='', include_non_existing=True):
    """Apply glob to paths and prepend local_path if needed.
    """
    # A lone string is treated as a one-element sequence.
    if is_string(paths):
        paths = (paths,)
    return _fix_paths(paths, local_path, include_non_existing)
310
def make_temp_file(suffix='', prefix='', text=True):
    """Create a named temporary file; return (open file object, path)."""
    # Lazily create one temp directory per thread; _tdata is presumably a
    # threading.local and _tmpdirs collects dirs for cleanup at exit --
    # both defined earlier in this module (TODO confirm).
    if not hasattr(_tdata, 'tempdir'):
        _tdata.tempdir = tempfile.mkdtemp()
        _tmpdirs.append(_tdata.tempdir)
    fid, name = tempfile.mkstemp(suffix=suffix,
                                 prefix=prefix,
                                 dir=_tdata.tempdir,
                                 text=text)
    # Caller is responsible for closing the returned file object.
    fo = os.fdopen(fid, 'w')
    return fo, name
321
# Hooks for colored terminal output.
# See also https://web.archive.org/web/20100314204946/http://www.livinglogic.de/Python/ansistyle
def terminal_has_colors():
    """Return 1 when stdout appears to be a colour-capable tty, else 0."""
    if sys.platform == 'cygwin' and 'USE_COLOR' not in os.environ:
        # Avoid importing curses that causes illegal operation
        # with a message:
        #  PYTHON2 caused an invalid page fault in
        #  module CYGNURSES7.DLL as 015f:18bbfc28
        # Details: Python 2.3.3 [GCC 3.3.1 (cygming special)]
        #          ssh to Win32 machine from debian
        #          curses.version is 2.2
        #          CYGWIN_98-4.10, release 1.5.7(0.109/3/2))
        return 0
    if not (hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()):
        return 0
    try:
        import curses
        curses.setupterm()
        has_counts = (curses.tigetnum("colors") >= 0
                      and curses.tigetnum("pairs") >= 0)
        has_caps = ((curses.tigetstr("setf") is not None
                     and curses.tigetstr("setb") is not None)
                    or (curses.tigetstr("setaf") is not None
                        and curses.tigetstr("setab") is not None)
                    or curses.tigetstr("scp") is not None)
        if has_counts and has_caps:
            return 1
    except Exception:
        pass
    return 0
350
if terminal_has_colors():
    _colour_codes = dict(black=0, red=1, green=2, yellow=3,
                         blue=4, magenta=5, cyan=6, white=7, default=9)

    def colour_text(s, fg=None, bg=None, bold=False):
        """Wrap ``s`` in ANSI SGR escape codes for the requested colours."""
        seq = []
        if bold:
            seq.append('1')
        if fg:
            # Unknown colour names fall back to black (code 30).
            fgcode = 30 + _colour_codes.get(fg.lower(), 0)
            seq.append(str(fgcode))
        if bg:
            # Unknown background names fall back to white (code 47).
            bgcode = 40 + _colour_codes.get(bg.lower(), 7)
            seq.append(str(bgcode))
        if seq:
            return '\x1b[%sm%s\x1b[0m' % (';'.join(seq), s)
        else:
            return s
else:
    def colour_text(s, fg=None, bg=None, bold=False):
        """No-colour fallback: return ``s`` unchanged.

        Accepts the same signature as the colour-capable variant above so
        that callers passing ``bold`` do not get a TypeError when the
        terminal has no colour support.
        """
        return s
371
def default_text(s):
    """Render ``s`` in the terminal's default colour."""
    return colour_text(s, fg='default')

def red_text(s):
    """Render ``s`` in red."""
    return colour_text(s, fg='red')

def green_text(s):
    """Render ``s`` in green."""
    return colour_text(s, fg='green')

def yellow_text(s):
    """Render ``s`` in yellow."""
    return colour_text(s, fg='yellow')

def cyan_text(s):
    """Render ``s`` in cyan."""
    return colour_text(s, fg='cyan')

def blue_text(s):
    """Render ``s`` in blue."""
    return colour_text(s, fg='blue')
384
+ #########################
385
+
386
def cyg2win32(path: str) -> str:
    """Convert a path from Cygwin-native to Windows-native.

    Uses the cygpath utility (part of the Base install) to do the
    actual conversion.  Falls back to returning the original path if
    this fails.

    Handles the default ``/cygdrive`` mount prefix as well as the
    ``/proc/cygdrive`` portable prefix, custom cygdrive prefixes such
    as ``/`` or ``/mnt``, and absolute paths such as ``/usr/src/`` or
    ``/home/username``

    Parameters
    ----------
    path : str
       The path to convert

    Returns
    -------
    converted_path : str
        The converted path

    Notes
    -----
    Documentation for cygpath utility:
    https://cygwin.com/cygwin-ug-net/cygpath.html
    Documentation for the C function it wraps:
    https://cygwin.com/cygwin-api/func-cygwin-conv-path.html

    """
    if sys.platform != "cygwin":
        return path
    try:
        return subprocess.check_output(
            ["/usr/bin/cygpath", "--windows", path], text=True
        )
    except (OSError, subprocess.CalledProcessError):
        # cygpath missing or failed: fall back to the original path,
        # as documented above.
        return path
422
+
423
def mingw32():
    """Return true when using mingw32 environment.
    """
    if sys.platform != 'win32':
        return False
    env = os.environ
    if env.get('OSTYPE', '') == 'msys' or env.get('MSYSTEM', '') == 'MINGW32':
        return True
    return False
433
def msvc_runtime_version():
    "Return version of MSVC runtime library, as defined by __MSC_VER__ macro"
    # CPython built with MSVC embeds e.g. 'MSC v.1900' in sys.version.
    pos = sys.version.find('MSC v.')
    if pos == -1:
        return None
    return int(sys.version[pos + 6:pos + 10])
442
def msvc_runtime_library():
    "Return name of MSVC runtime library if Python was built with MSVC >= 7"
    ver = msvc_runtime_major()
    if not ver:
        return None
    # MSVC 14+ renamed the runtime DLL from msvcrNNN to vcruntimeNNN.
    template = "msvcr%i" if ver < 140 else "vcruntime%i"
    return template % ver
453
def msvc_runtime_major():
    "Return major version of MSVC runtime coded like get_build_msvc_version"
    # Map _MSC_VER values to the runtime's major version number.
    version_map = {
        1300: 70,   # MSVC 7.0
        1310: 71,   # MSVC 7.1
        1400: 80,   # MSVC 8
        1500: 90,   # MSVC 9 (aka 2008)
        1600: 100,  # MSVC 10 (aka 2010)
        1900: 140,  # MSVC 14 (aka 2015)
    }
    return version_map.get(msvc_runtime_version(), None)
464
+ #########################
465
+
466
#XXX need support for .C that is also C++
# Matchers classifying source files by extension (case-insensitive).
cxx_ext_match = re.compile(r'.*\.(cpp|cxx|cc)\Z', re.I).match
fortran_ext_match = re.compile(r'.*\.(f90|f95|f77|for|ftn|f)\Z', re.I).match
f90_ext_match = re.compile(r'.*\.(f90|f95)\Z', re.I).match
# Matches a Fortran 'module <name>' statement at the start of a line.
f90_module_name_match = re.compile(r'\s*module\s*(?P<name>[\w_]+)', re.I).match
def _get_f90_modules(source):
    """Return a list of Fortran f90 module names that
    given source file defines.
    """
    if not f90_ext_match(source):
        # Not a free-form Fortran 90/95 file, so no modules.
        return []
    modules = []
    with open(source) as f:
        for line in f:
            m = f90_module_name_match(line)
            if m:
                name = m.group('name')
                modules.append(name)
                # break  # XXX can we assume that there is one module per file?
    return modules
487
def is_string(s):
    """Return True when *s* is a ``str`` instance, False otherwise."""
    return isinstance(s, str)
490
def all_strings(lst):
    """Return True if all items in lst are string objects. """
    # all() short-circuits on the first non-string item, exactly like
    # the explicit loop it replaces; vacuously True for an empty lst.
    return all(isinstance(item, str) for item in lst)
497
def is_sequence(seq):
    """Return True for sized objects (anything supporting len), except strings."""
    if isinstance(seq, str):
        return False
    try:
        len(seq)
    except Exception:
        return False
    return True
506
def is_glob_pattern(s):
    """Return True when *s* is a string containing shell glob wildcards."""
    return isinstance(s, str) and ('*' in s or '?' in s)
509
def as_list(seq):
    """Return *seq* as a list: copy sequences, wrap scalars."""
    return list(seq) if is_sequence(seq) else [seq]
515
def get_language(sources):
    # not used in numpy/scipy packages, use build_ext.detect_language instead
    """Determine language value (c,f77,f90) from sources """
    language = None
    for source in sources:
        if not isinstance(source, str):
            continue
        if f90_ext_match(source):
            # f90 wins immediately over any earlier f77 classification
            return 'f90'
        if fortran_ext_match(source):
            language = 'f77'
    return language
528
def has_f_sources(sources):
    """Return True if sources contains Fortran files """
    return any(fortran_ext_match(source) for source in sources)
535
def has_cxx_sources(sources):
    """Return True if sources contains C++ files """
    return any(cxx_ext_match(source) for source in sources)
542
def filter_sources(sources):
    """Return four lists of filenames containing
    C, C++, Fortran, and Fortran 90 module sources,
    respectively.
    """
    c_sources = []
    cxx_sources = []
    f_sources = []
    fmodule_sources = []
    for source in sources:
        if fortran_ext_match(source):
            # Fortran files defining modules go in their own bucket.
            bucket = fmodule_sources if _get_f90_modules(source) else f_sources
        elif cxx_ext_match(source):
            bucket = cxx_sources
        else:
            # Everything else is treated as C.
            bucket = c_sources
        bucket.append(source)
    return c_sources, cxx_sources, f_sources, fmodule_sources
564
+
565
def _get_headers(directory_list):
    """Collect *.h files from each directory in *directory_list*."""
    #XXX: *.hpp files??
    headers = []
    for directory in directory_list:
        headers.extend(sorted_glob(os.path.join(directory, "*.h")))
    return headers
573
+ def _get_directories(list_of_sources):
574
+ # get unique directories from list of sources.
575
+ direcs = []
576
+ for f in list_of_sources:
577
+ d = os.path.split(f)
578
+ if d[0] != '' and not d[0] in direcs:
579
+ direcs.append(d[0])
580
+ return direcs
581
+
582
+ def _commandline_dep_string(cc_args, extra_postargs, pp_opts):
583
+ """
584
+ Return commandline representation used to determine if a file needs
585
+ to be recompiled
586
+ """
587
+ cmdline = 'commandline: '
588
+ cmdline += ' '.join(cc_args)
589
+ cmdline += ' '.join(extra_postargs)
590
+ cmdline += ' '.join(pp_opts) + '\n'
591
+ return cmdline
592
+
593
+
594
def get_dependencies(sources):
    #XXX scan sources for include statements
    # Approximates dependencies as every *.h header living in the same
    # directories as the sources themselves.
    return _get_headers(_get_directories(sources))
598
def is_local_src_dir(directory):
    """Return true if directory is local directory.
    """
    if not isinstance(directory, str):
        return False
    abs_dir = os.path.abspath(directory)
    common = os.path.commonprefix([os.getcwd(), abs_dir])
    parts = abs_dir[len(common):].split(os.sep)
    if parts and not parts[0]:
        parts = parts[1:]
    if parts and parts[0] == 'build':
        # build trees are never considered local sources
        return False
    return os.path.isdir(os.sep.join(parts))
613
def general_source_files(top_path):
    """Yield source files under top_path, skipping VCS/build directories
    and editor/object droppings."""
    pruned_directories = {'CVS': 1, '.svn': 1, 'build': 1}
    prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
    for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
        # prune in place so os.walk does not descend into pruned dirs
        dirnames[:] = [d for d in dirnames if d not in pruned_directories]
        for fname in filenames:
            if not prune_file_pat.search(fname):
                yield os.path.join(dirpath, fname)
623
def general_source_directories_files(top_path):
    """Return a directory name relative to top_path and
    files contained.
    """
    pruned_directories = ['CVS', '.svn', 'build']
    prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
    for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
        # Prune in place so os.walk skips the unwanted directories.
        pruned = [ d for d in dirnames if d not in pruned_directories ]
        dirnames[:] = pruned
        for d in dirnames:
            dpath = os.path.join(dirpath, d)
            rpath = rel_path(dpath, top_path)
            files = []
            for f in os.listdir(dpath):
                fn = os.path.join(dpath, f)
                if os.path.isfile(fn) and not prune_file_pat.search(fn):
                    files.append(fn)
            yield rpath, files
    # Finally yield the files sitting directly in top_path itself.
    dpath = top_path
    rpath = rel_path(dpath, top_path)
    filenames = [os.path.join(dpath, f) for f in os.listdir(dpath) \
                 if not prune_file_pat.search(f)]
    files = [f for f in filenames if os.path.isfile(f)]
    yield rpath, files
648
+
649
def get_ext_source_files(ext):
    """Return the string sources of *ext*, nearby headers, and depends files."""
    # Get sources and any include files in the same directory.
    filenames = []
    sources = [src for src in ext.sources if is_string(src)]
    filenames.extend(sources)
    filenames.extend(get_dependencies(sources))
    for dep in ext.depends:
        if is_local_src_dir(dep):
            filenames.extend(list(general_source_files(dep)))
        elif os.path.isfile(dep):
            filenames.append(dep)
    return filenames
662
def get_script_files(scripts):
    """Return only the entries of *scripts* that are strings."""
    return [script for script in scripts if isinstance(script, str)]
666
def get_lib_source_files(lib):
    """Return string sources of library tuple *lib*, plus headers and depends."""
    build_info = lib[1]
    sources = [src for src in build_info.get('sources', []) if is_string(src)]
    filenames = list(sources)
    filenames.extend(get_dependencies(sources))
    for dep in build_info.get('depends', []):
        if is_local_src_dir(dep):
            filenames.extend(list(general_source_files(dep)))
        elif os.path.isfile(dep):
            filenames.append(dep)
    return filenames
680
+ def get_shared_lib_extension(is_python_ext=False):
681
+ """Return the correct file extension for shared libraries.
682
+
683
+ Parameters
684
+ ----------
685
+ is_python_ext : bool, optional
686
+ Whether the shared library is a Python extension. Default is False.
687
+
688
+ Returns
689
+ -------
690
+ so_ext : str
691
+ The shared library extension.
692
+
693
+ Notes
694
+ -----
695
+ For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X,
696
+ and '.pyd' on Windows. For Python >= 3.2 `so_ext` has a tag prepended on
697
+ POSIX systems according to PEP 3149.
698
+
699
+ """
700
+ confvars = distutils.sysconfig.get_config_vars()
701
+ so_ext = confvars.get('EXT_SUFFIX', '')
702
+
703
+ if not is_python_ext:
704
+ # hardcode known values, config vars (including SHLIB_SUFFIX) are
705
+ # unreliable (see #3182)
706
+ # darwin, windows and debug linux are wrong in 3.3.1 and older
707
+ if (sys.platform.startswith('linux') or
708
+ sys.platform.startswith('gnukfreebsd')):
709
+ so_ext = '.so'
710
+ elif sys.platform.startswith('darwin'):
711
+ so_ext = '.dylib'
712
+ elif sys.platform.startswith('win'):
713
+ so_ext = '.dll'
714
+ else:
715
+ # fall back to config vars for unknown platforms
716
+ # fix long extension for Python >=3.2, see PEP 3149.
717
+ if 'SOABI' in confvars:
718
+ # Does nothing unless SOABI config var exists
719
+ so_ext = so_ext.replace('.' + confvars.get('SOABI'), '', 1)
720
+
721
+ return so_ext
722
+
723
def get_data_files(data):
    """Expand a data_files entry into a flat list of existing filenames."""
    if is_string(data):
        return [data]
    filenames = []
    for source in data[1]:
        if hasattr(source, '__call__'):
            # callables are resolved later; skip here
            continue
        if is_local_src_dir(source):
            filenames.extend(list(general_source_files(source)))
        elif is_string(source):
            if os.path.isfile(source):
                filenames.append(source)
            else:
                print('Not existing data file:', source)
        else:
            raise TypeError(repr(source))
    return filenames
742
def dot_join(*args):
    """Join the truthy components of *args* with dots."""
    return '.'.join(arg for arg in args if arg)
745
def get_frame(level=0):
    """Return frame object from call stack with given level.
    """
    try:
        # +1 skips get_frame's own frame so level 0 is the caller.
        return sys._getframe(level+1)
    except AttributeError:
        # Interpreters without sys._getframe: raise and walk the
        # traceback's frame chain instead.
        frame = sys.exc_info()[2].tb_frame
        for _ in range(level+1):
            frame = frame.f_back
        return frame
756
+
757
+ ######################
758
+
759
class Configuration:

    # Attribute names whose values are list-valued configuration data
    # (copied per instance and extended in __init__).
    _list_keys = ['packages', 'ext_modules', 'data_files', 'include_dirs',
                  'libraries', 'headers', 'scripts', 'py_modules',
                  'installed_libraries', 'define_macros']
    # Attribute names whose values are dictionaries.
    _dict_keys = ['package_dir', 'installed_pkg_config']
    # Names of scalar-valued attributes exported by todict().
    _extra_keys = ['name', 'version']

    # NOTE(review): class-level shared list; appears to cache numpy's
    # include directories -- confirm against the rest of the module.
    numpy_include_dirs = []
769
    def __init__(self,
                 package_name=None,
                 parent_name=None,
                 top_path=None,
                 package_path=None,
                 caller_level=1,
                 setup_name='setup.py',
                 **attrs):
        """Construct configuration instance of a package.

        package_name -- name of the package
                        Ex.: 'distutils'
        parent_name -- name of the parent package
                       Ex.: 'numpy'
        top_path -- directory of the toplevel package
                    Ex.: the directory where the numpy package source sits
        package_path -- directory of package. Will be computed by magic from the
                        directory of the caller module if not specified
                        Ex.: the directory where numpy.distutils is
        caller_level -- frame level to caller namespace, internal parameter.
        """
        self.name = dot_join(parent_name, package_name)
        self.version = None

        caller_frame = get_frame(caller_level)
        # local_path -- directory of a file (usually setup.py) that
        #               defines a configuration() function.
        self.local_path = get_path_from_frame(caller_frame, top_path)
        if top_path is None:
            top_path = self.local_path
            self.local_path = ''
        if package_path is None:
            package_path = self.local_path
        elif os.path.isdir(njoin(self.local_path, package_path)):
            # relative package_path is resolved against local_path
            package_path = njoin(self.local_path, package_path)
        if not os.path.isdir(package_path or '.'):
            raise ValueError("%r is not a directory" % (package_path,))
        self.top_path = top_path
        self.package_path = package_path
        # this is the relative path in the installed package
        self.path_in_package = os.path.join(*self.name.split('.'))

        self.list_keys = self._list_keys[:]
        self.dict_keys = self._dict_keys[:]

        # Seed list/dict-valued attributes from the keyword arguments.
        for n in self.list_keys:
            v = copy.copy(attrs.get(n, []))
            setattr(self, n, as_list(v))

        for n in self.dict_keys:
            v = copy.copy(attrs.get(n, {}))
            setattr(self, n, v)

        # Remaining keyword arguments become extra attributes, classified
        # by value type (list / dict / scalar).
        known_keys = self.list_keys + self.dict_keys
        self.extra_keys = self._extra_keys[:]
        for n in attrs.keys():
            if n in known_keys:
                continue
            a = attrs[n]
            setattr(self, n, a)
            if isinstance(a, list):
                self.list_keys.append(n)
            elif isinstance(a, dict):
                self.dict_keys.append(n)
            else:
                self.extra_keys.append(n)

        # Register this package itself when it is a real python package.
        if os.path.exists(njoin(package_path, '__init__.py')):
            self.packages.append(self.name)
            self.package_dir[self.name] = package_path

        self.options = dict(
            ignore_setup_xxx_py = False,
            assume_default_configuration = False,
            delegate_options_to_subpackages = False,
            quiet = False,
            )

        # Look up the stack for a parent Configuration constructing us
        # (its local name 'self'), to inherit options when it opted in
        # to delegate_options_to_subpackages.
        caller_instance = None
        for i in range(1, 3):
            try:
                f = get_frame(i)
            except ValueError:
                break
            try:
                caller_instance = eval('self', f.f_globals, f.f_locals)
                break
            except NameError:
                pass
        if isinstance(caller_instance, self.__class__):
            if caller_instance.options['delegate_options_to_subpackages']:
                self.set_options(**caller_instance.options)

        self.setup_name = setup_name
866
+ def todict(self):
867
+ """
868
+ Return a dictionary compatible with the keyword arguments of distutils
869
+ setup function.
870
+
871
+ Examples
872
+ --------
873
+ >>> setup(**config.todict()) #doctest: +SKIP
874
+ """
875
+
876
+ self._optimize_data_files()
877
+ d = {}
878
+ known_keys = self.list_keys + self.dict_keys + self.extra_keys
879
+ for n in known_keys:
880
+ a = getattr(self, n)
881
+ if a:
882
+ d[n] = a
883
+ return d
884
+
885
+ def info(self, message):
886
+ if not self.options['quiet']:
887
+ print(message)
888
+
889
+ def warn(self, message):
890
+ sys.stderr.write('Warning: %s\n' % (message,))
891
+
892
+ def set_options(self, **options):
893
+ """
894
+ Configure Configuration instance.
895
+
896
+ The following options are available:
897
+ - ignore_setup_xxx_py
898
+ - assume_default_configuration
899
+ - delegate_options_to_subpackages
900
+ - quiet
901
+
902
+ """
903
+ for key, value in options.items():
904
+ if key in self.options:
905
+ self.options[key] = value
906
+ else:
907
+ raise ValueError('Unknown option: '+key)
908
+
909
    def get_distribution(self):
        """Return the distutils distribution object for self."""
        # Imported here rather than at module level, presumably to avoid a
        # circular import with numpy.distutils.core -- confirm.
        from numpy.distutils.core import get_distribution
        return get_distribution()
914
    def _wildcard_get_subpackage(self, subpackage_name,
                                 parent_name,
                                 caller_level = 1):
        """Expand a '*' wildcard in subpackage_name against the filesystem
        and return the configurations of every matching package."""
        l = subpackage_name.split('.')
        subpackage_path = njoin([self.local_path]+l)
        dirs = [_m for _m in sorted_glob(subpackage_path) if os.path.isdir(_m)]
        config_list = []
        for d in dirs:
            if not os.path.isfile(njoin(d, '__init__.py')):
                # not a python package; skip
                continue
            if 'build' in d.split(os.sep):
                # never pick up packages from a build tree
                continue
            # Reconstruct the dotted name from the trailing path components.
            n = '.'.join(d.split(os.sep)[-len(l):])
            c = self.get_subpackage(n,
                                    parent_name = parent_name,
                                    caller_level = caller_level+1)
            config_list.extend(c)
        return config_list
933
    def _get_configuration_from_setup_py(self, setup_py,
                                         subpackage_name,
                                         subpackage_path,
                                         parent_name,
                                         caller_level = 1):
        """Execute *setup_py* and obtain the subpackage Configuration from
        its configuration() function, or build a default Configuration
        when the module does not define one."""
        # In case setup_py imports local modules:
        sys.path.insert(0, os.path.dirname(setup_py))
        try:
            setup_name = os.path.splitext(os.path.basename(setup_py))[0]
            n = dot_join(self.name, subpackage_name, setup_name)
            setup_module = exec_mod_from_location(
                                '_'.join(n.split('.')), setup_py)
            if not hasattr(setup_module, 'configuration'):
                if not self.options['assume_default_configuration']:
                    self.warn('Assuming default configuration '\
                              '(%s does not define configuration())'\
                              % (setup_module))
                config = Configuration(subpackage_name, parent_name,
                                       self.top_path, subpackage_path,
                                       caller_level = caller_level + 1)
            else:
                # Parent name passed to configuration() excludes the last
                # component of a dotted subpackage_name.
                pn = dot_join(*([parent_name] + subpackage_name.split('.')[:-1]))
                args = (pn,)
                # Two-argument configuration() signatures also take top_path.
                if setup_module.configuration.__code__.co_argcount > 1:
                    args = args + (self.top_path,)
                config = setup_module.configuration(*args)
            if config.name!=dot_join(parent_name, subpackage_name):
                self.warn('Subpackage %r configuration returned as %r' % \
                          (dot_join(parent_name, subpackage_name), config.name))
        finally:
            # Undo the sys.path manipulation whatever happened above.
            del sys.path[0]
        return config
966
    def get_subpackage(self,subpackage_name,
                       subpackage_path=None,
                       parent_name=None,
                       caller_level = 1):
        """Return list of subpackage configurations.

        Parameters
        ----------
        subpackage_name : str or None
            Name of the subpackage to get the configuration. '*' in
            subpackage_name is handled as a wildcard.
        subpackage_path : str
            If None, then the path is assumed to be the local path plus the
            subpackage_name. If a setup.py file is not found in the
            subpackage_path, then a default configuration is used.
        parent_name : str
            Parent name.
        """
        if subpackage_name is None:
            if subpackage_path is None:
                raise ValueError(
                    "either subpackage_name or subpackage_path must be specified")
            # Derive the name from the last path component.
            subpackage_name = os.path.basename(subpackage_path)

        # handle wildcards
        l = subpackage_name.split('.')
        if subpackage_path is None and '*' in subpackage_name:
            return self._wildcard_get_subpackage(subpackage_name,
                                                 parent_name,
                                                 caller_level = caller_level+1)
        assert '*' not in subpackage_name, repr((subpackage_name, subpackage_path, parent_name))
        if subpackage_path is None:
            subpackage_path = njoin([self.local_path] + l)
        else:
            subpackage_path = njoin([subpackage_path] + l[:-1])
            subpackage_path = self.paths([subpackage_path])[0]
        setup_py = njoin(subpackage_path, self.setup_name)
        if not self.options['ignore_setup_xxx_py']:
            if not os.path.isfile(setup_py):
                # Also accept a setup_<name>.py variant.
                setup_py = njoin(subpackage_path,
                                 'setup_%s.py' % (subpackage_name))
        if not os.path.isfile(setup_py):
            if not self.options['assume_default_configuration']:
                self.warn('Assuming default configuration '\
                          '(%s/{setup_%s,setup}.py was not found)' \
                          % (os.path.dirname(setup_py), subpackage_name))
            config = Configuration(subpackage_name, parent_name,
                                   self.top_path, subpackage_path,
                                   caller_level = caller_level+1)
        else:
            config = self._get_configuration_from_setup_py(
                setup_py,
                subpackage_name,
                subpackage_path,
                parent_name,
                caller_level = caller_level + 1)
        if config:
            return [config]
        else:
            return []
1027
    def add_subpackage(self,subpackage_name,
                       subpackage_path=None,
                       standalone = False):
        """Add a sub-package to the current Configuration instance.

        This is useful in a setup.py script for adding sub-packages to a
        package.

        Parameters
        ----------
        subpackage_name : str
            name of the subpackage
        subpackage_path : str
            if given, the subpackage path such as the subpackage is in
            subpackage_path / subpackage_name. If None,the subpackage is
            assumed to be located in the local path / subpackage_name.
        standalone : bool
        """

        if standalone:
            # Standalone subpackages do not take this package as parent.
            parent_name = None
        else:
            parent_name = self.name
        config_list = self.get_subpackage(subpackage_name, subpackage_path,
                                          parent_name = parent_name,
                                          caller_level = 2)
        if not config_list:
            self.warn('No configuration returned, assuming unavailable.')
        for config in config_list:
            d = config
            if isinstance(config, Configuration):
                d = config.todict()
            assert isinstance(d, dict), repr(type(d))

            self.info('Appending %s configuration to %s' \
                      % (d.get('name'), self.name))
            self.dict_append(**d)

        dist = self.get_distribution()
        if dist is not None:
            # distutils may already have consumed this configuration.
            self.warn('distutils distribution has been initialized,'\
                      ' it may be too late to add a subpackage '+ subpackage_name)
1070
def add_data_dir(self, data_path):
    """Recursively add files under data_path to data_files list.

    Recursively add files under data_path to the list of data_files to be
    installed (and distributed). The data_path can be either a relative
    path-name, or an absolute path-name, or a 2-tuple where the first
    argument shows where in the install directory the data directory
    should be installed to.

    Parameters
    ----------
    data_path : seq or str
        Argument can be either

        * 2-sequence (<datadir suffix>, <path to data directory>)
        * path to data directory where python datadir suffix defaults
          to package dir.

    Notes
    -----
    Rules for installation paths::

        foo/bar -> (foo/bar, foo/bar) -> parent/foo/bar
        (gun, foo/bar) -> parent/gun
        foo/* -> (foo/a, foo/a), (foo/b, foo/b) -> parent/foo/a, parent/foo/b
        (gun, foo/*) -> (gun, foo/a), (gun, foo/b) -> gun
        (gun/*, foo/*) -> parent/gun/a, parent/gun/b
        /foo/bar -> (bar, /foo/bar) -> parent/bar
        (gun, /foo/bar) -> parent/gun
        (fun/*/gun/*, sun/foo/bar) -> parent/fun/foo/gun/bar

    For example, ``self.add_data_dir('fun')`` installs everything below
    the source directory ``fun/`` to ``<package install directory>/fun/``,
    preserving the sub-directory layout; ``self.add_data_dir(('sun',
    'fun'))`` installs the same tree under ``sun/`` instead.
    """
    # Normalize a 2-sequence (suffix, path) into d + data_path.
    if is_sequence(data_path):
        d, data_path = data_path
    else:
        d = None
    # A sequence of paths: recurse once per path with the same suffix.
    if is_sequence(data_path):
        [self.add_data_dir((d, p)) for p in data_path]
        return
    if not is_string(data_path):
        raise TypeError("not a string: %r" % (data_path,))
    if d is None:
        # No explicit suffix: absolute paths keep only their basename,
        # relative paths install under the same relative path.
        if os.path.isabs(data_path):
            return self.add_data_dir((os.path.basename(data_path), data_path))
        return self.add_data_dir((data_path, data_path))
    paths = self.paths(data_path, include_non_existing=False)
    if is_glob_pattern(data_path):
        if is_glob_pattern(d):
            # Both suffix and path contain wildcards: fill each '*' in
            # the suffix from the matching component of each real path.
            pattern_list = allpath(d).split(os.sep)
            pattern_list.reverse()
            # /a/*//b/ -> /a/*/b  (drop empty components)
            rl = list(range(len(pattern_list)-1)); rl.reverse()
            for i in rl:
                if not pattern_list[i]:
                    del pattern_list[i]
            #
            for path in paths:
                if not os.path.isdir(path):
                    # NOTE(review): uses print rather than self.warn —
                    # presumably historical; confirm before changing.
                    print('Not a directory, skipping', path)
                    continue
                rpath = rel_path(path, self.local_path)
                path_list = rpath.split(os.sep)
                path_list.reverse()
                target_list = []
                i = 0
                # Walk components right-to-left, substituting glob
                # components with pieces of the actual matched path.
                for s in pattern_list:
                    if is_glob_pattern(s):
                        if i>=len(path_list):
                            raise ValueError('cannot fill pattern %r with %r' \
                                  % (d, path))
                        target_list.append(path_list[i])
                    else:
                        assert s==path_list[i], repr((s, path_list[i], data_path, d, path, rpath))
                        target_list.append(s)
                    i += 1
                if path_list[i:]:
                    self.warn('mismatch of pattern_list=%s and path_list=%s'\
                              % (pattern_list, path_list))
                target_list.reverse()
                self.add_data_dir((os.sep.join(target_list), path))
        else:
            # Literal suffix with a glob path: install every match under d.
            for path in paths:
                self.add_data_dir((d, path))
        return
    assert not is_glob_pattern(d), repr(d)

    # Prefer the distribution's data_files once setup() is running.
    dist = self.get_distribution()
    if dist is not None and dist.data_files is not None:
        data_files = dist.data_files
    else:
        data_files = self.data_files

    # Add every file below each matched directory, preserving layout.
    for path in paths:
        for d1, f in list(general_source_directories_files(path)):
            target_path = os.path.join(self.path_in_package, d, d1)
            data_files.append((target_path, f))
1190
+
1191
def _optimize_data_files(self):
    """Collapse self.data_files in place.

    Merges entries that share the same install target path and removes
    duplicate file names within each target (ordering of the resulting
    file lists is not preserved, since sets are used internally).
    """
    data_dict = {}
    for p, files in self.data_files:
        if p not in data_dict:
            data_dict[p] = set()
        for f in files:
            data_dict[p].add(f)
    self.data_files[:] = [(p, list(files)) for p, files in data_dict.items()]
1199
+
1200
def add_data_files(self,*files):
    """Add data files to configuration data_files.

    Parameters
    ----------
    files : sequence
        Argument(s) can be either

        * 2-sequence (<datadir prefix>,<path to data file(s)>)
        * paths to data files where python datadir prefix defaults
          to package dir.

    Notes
    -----
    A plain filename is installed into the package install directory; a
    relative path keeps its whole relative path under the package
    directory; an absolute path is installed directly under the package
    directory.  A 2-tuple ``(prefix, files)`` installs the given files
    under ``prefix`` inside the package install directory (absolute
    paths in the sequence still land at the top level of the prefix).

    Rules for installation paths:

    #. file.txt -> (., file.txt)-> parent/file.txt
    #. foo/file.txt -> (foo, foo/file.txt) -> parent/foo/file.txt
    #. /foo/bar/file.txt -> (., /foo/bar/file.txt) -> parent/file.txt
    #. ``*``.txt -> parent/a.txt, parent/b.txt
    #. foo/``*``.txt`` -> parent/foo/a.txt, parent/foo/b.txt
    #. ``*/*.txt`` -> (``*``, ``*``/``*``.txt) -> parent/c/a.txt, parent/d/b.txt
    #. (sun, file.txt) -> parent/sun/file.txt
    #. (sun, bar/file.txt) -> parent/sun/file.txt
    #. (sun, /foo/bar/file.txt) -> parent/sun/file.txt
    #. (sun, ``*``.txt) -> parent/sun/a.txt, parent/sun/b.txt
    #. (sun, bar/``*``.txt) -> parent/sun/a.txt, parent/sun/b.txt
    #. (sun/``*``, ``*``/``*``.txt) -> parent/sun/c/a.txt, parent/d/b.txt

    An additional feature is that the path to a data-file can actually be
    a function that takes no arguments and returns the actual path(s) to
    the data-files. This is useful when the data files are generated
    while building the package.

    Examples
    --------
    >>> self.add_data_files('foo.dat',
    ...     ('fun', ['gun.dat', 'nun/pun.dat', '/tmp/sun.dat']),
    ...     'bar/cat.dat',
    ...     '/full/path/to/can.dat') #doctest: +SKIP

    installs ``foo.dat``, ``fun/gun.dat``, ``fun/nun/pun.dat``,
    ``fun/sun.dat``, ``bar/cat.dat`` and ``can.dat`` under the package
    install directory.
    """

    # Multiple arguments: handle each one independently.
    if len(files)>1:
        for f in files:
            self.add_data_files(f)
        return
    assert len(files)==1
    # Normalize a (prefix, files) pair into d + files.
    if is_sequence(files[0]):
        d, files = files[0]
    else:
        d = None
    if is_string(files):
        filepat = files
    elif is_sequence(files):
        if len(files)==1:
            filepat = files[0]
        else:
            for f in files:
                self.add_data_files((d, f))
            return
    else:
        raise TypeError(repr(type(files)))

    if d is None:
        # Derive the install prefix from the file path itself:
        # callables and absolute paths land at the package root,
        # relative paths keep their directory part.
        if hasattr(filepat, '__call__'):
            d = ''
        elif os.path.isabs(filepat):
            d = ''
        else:
            d = os.path.dirname(filepat)
        self.add_data_files((d, files))
        return

    paths = self.paths(filepat, include_non_existing=False)
    if is_glob_pattern(filepat):
        if is_glob_pattern(d):
            # Glob prefix and glob path: fill each '*' in the prefix
            # with the matching directory component of each real path.
            pattern_list = d.split(os.sep)
            pattern_list.reverse()
            for path in paths:
                path_list = path.split(os.sep)
                path_list.reverse()
                path_list.pop() # filename
                target_list = []
                i = 0
                for s in pattern_list:
                    if is_glob_pattern(s):
                        target_list.append(path_list[i])
                        i += 1
                    else:
                        target_list.append(s)
                target_list.reverse()
                self.add_data_files((os.sep.join(target_list), path))
        else:
            # Literal prefix with glob path: install all matches under d.
            self.add_data_files((d, paths))
        return
    assert not is_glob_pattern(d), repr((d, filepat))

    # Prefer the distribution's data_files once setup() is running.
    dist = self.get_distribution()
    if dist is not None and dist.data_files is not None:
        data_files = dist.data_files
    else:
        data_files = self.data_files

    data_files.append((os.path.join(self.path_in_package, d), paths))
1350
+
1351
+ ### XXX Implement add_py_modules
1352
+
1353
def add_define_macros(self, macros):
    """Add preprocessor macro definitions to the configuration.

    The given sequence of ``(name, value)`` pairs is appended to the
    ``define_macros`` list, making the macros visible to all extension
    modules of the current package.  If the distutils distribution has
    already been created, the macros are attached to it instead.
    """
    dist = self.get_distribution()
    if dist is None:
        self.define_macros.extend(macros)
        return
    # The distribution object may not carry a define_macros attribute
    # yet; create it lazily before extending.
    if not hasattr(dist, 'define_macros'):
        dist.define_macros = []
    dist.define_macros.extend(macros)
1367
+
1368
+
1369
def add_include_dirs(self, *paths):
    """Add paths to configuration include directories.

    The given paths are resolved through self.paths() (glob expansion,
    local_path prefixing) and appended to the include_dirs list, which
    is visible to all extension modules of the current package.  If the
    distutils distribution already exists, its include_dirs is extended
    instead.
    """
    resolved = self.paths(paths)
    dist = self.get_distribution()
    if dist is None:
        self.include_dirs.extend(resolved)
        return
    # The distribution initializes include_dirs to None; normalize to a
    # list before extending.
    if dist.include_dirs is None:
        dist.include_dirs = []
    dist.include_dirs.extend(resolved)
1384
+
1385
def add_headers(self,*files):
    """Add installable headers to configuration.

    Add the given sequence of files to the headers list.
    By default, headers will be installed under <python-
    include>/<self.name.replace('.','/')>/ directory. If an item of files
    is a tuple, then its first argument specifies the actual installation
    location relative to the <python-include> path.

    Parameters
    ----------
    files : str or seq
        Argument(s) can be either:

        * 2-sequence (<includedir suffix>,<path to header file(s)>)
        * path(s) to header file(s) where python includedir suffix will
          default to package name.
    """
    headers = []
    for path in files:
        if is_string(path):
            # Bare path: install under the package's own include dir.
            [headers.append((self.name, p)) for p in self.paths(path)]
        else:
            if not isinstance(path, (tuple, list)) or len(path) != 2:
                raise TypeError(repr(path))
            # (suffix, path): install under the explicit suffix.
            [headers.append((path[0], p)) for p in self.paths(path[1])]
    # Once setup() is running, register with the distribution instead.
    dist = self.get_distribution()
    if dist is not None:
        if dist.headers is None:
            dist.headers = []
        dist.headers.extend(headers)
    else:
        self.headers.extend(headers)
1418
+
1419
def paths(self,*paths,**kws):
    """Apply glob to paths and prepend local_path if needed.

    Applies glob.glob(...) to each path in the sequence (if needed) and
    pre-pends the local_path if needed. Because this is called on all
    source lists, this allows wildcard characters to be specified in lists
    of sources for extension modules and libraries and scripts and allows
    path-names be relative to the source directory.

    Parameters
    ----------
    *paths : str or sequence
        Path names, possibly containing glob wildcards.
    **kws
        include_non_existing : bool, optional
            When True (the default), paths matching no existing file
            are kept in the result.
    """
    include_non_existing = kws.get('include_non_existing', True)
    # gpaths (module-level helper) performs the actual glob expansion.
    return gpaths(paths,
                  local_path = self.local_path,
                  include_non_existing=include_non_existing)
1433
+
1434
def _fix_paths_dict(self, kw):
    """Normalize, in place, all path-valued entries of ``kw``.

    Each recognized key's value is passed through self.paths() so that
    glob patterns are expanded and relative names are prefixed with
    local_path.  Other keys are left untouched.
    """
    for k in kw.keys():
        v = kw[k]
        if k in ['sources', 'depends', 'include_dirs', 'library_dirs',
                 'module_dirs', 'extra_objects']:
            new_v = self.paths(v)
            kw[k] = new_v
1441
+
1442
def add_extension(self,name,sources,**kw):
    """Add extension to configuration.

    Create and add an Extension instance to the ext_modules list. This
    method also takes the following optional keyword arguments that are
    passed on to the Extension constructor.

    Parameters
    ----------
    name : str
        name of the extension
    sources : seq
        list of the sources. The list of sources may contain functions
        (called source generators) which must take an extension instance
        and a build directory as inputs and return a source file or list of
        source files or None. If None is returned then no sources are
        generated. If the Extension instance has no sources after
        processing all source generators, then no extension module is
        built.
    include_dirs, define_macros, undef_macros, library_dirs, libraries, \
    runtime_library_dirs, extra_objects, extra_compile_args, \
    extra_link_args, extra_f77_compile_args, extra_f90_compile_args, \
    export_symbols, swig_opts, language, f2py_options, module_dirs :
        Passed through to the Extension constructor.
    depends :
        The depends list contains paths to files or directories that the
        sources of the extension module depend on. If any path in the
        depends list is newer than the extension module, then the module
        will be rebuilt.
    extra_info : dict or list
        dict or list of dict of keywords to be appended to keywords.

    Notes
    -----
    The self.paths(...) method is applied to all lists that may contain
    paths.
    """
    ext_args = copy.copy(kw)
    ext_args['name'] = dot_join(self.name, name)
    ext_args['sources'] = sources

    # Merge any extra_info dict(s) into the extension arguments.
    if 'extra_info' in ext_args:
        extra_info = ext_args['extra_info']
        del ext_args['extra_info']
        if isinstance(extra_info, dict):
            extra_info = [extra_info]
        for info in extra_info:
            assert isinstance(info, dict), repr(info)
            dict_append(ext_args,**info)

    self._fix_paths_dict(ext_args)

    # Resolve out-of-tree dependencies
    libraries = ext_args.get('libraries', [])
    libnames = []
    ext_args['libraries'] = []
    for libname in libraries:
        if isinstance(libname, tuple):
            # (name, build_info) entry: fix paths inside build_info.
            self._fix_paths_dict(libname[1])

        # Handle library names of the form libname@relative/path/to/library
        if '@' in libname:
            lname, lpath = libname.split('@', 1)
            lpath = os.path.abspath(njoin(self.local_path, lpath))
            if os.path.isdir(lpath):
                c = self.get_subpackage(None, lpath,
                                        caller_level = 2)
                if isinstance(c, Configuration):
                    c = c.todict()
                # Look up the referenced library in the subpackage and
                # fold its build settings into this extension.
                for l in [l[0] for l in c.get('libraries', [])]:
                    llname = l.split('__OF__', 1)[0]
                    if llname == lname:
                        c.pop('name', None)
                        dict_append(ext_args,**c)
                        break
                continue
        libnames.append(libname)

    ext_args['libraries'] = libnames + ext_args['libraries']
    # Package-level macros always precede extension-specific ones.
    ext_args['define_macros'] = \
        self.define_macros + ext_args.get('define_macros', [])

    from numpy.distutils.core import Extension
    ext = Extension(**ext_args)
    self.ext_modules.append(ext)

    dist = self.get_distribution()
    if dist is not None:
        self.warn('distutils distribution has been initialized,'\
                  ' it may be too late to add an extension '+name)
    return ext
1544
+
1545
def add_library(self,name,sources,**build_info):
    """
    Add library to configuration.

    Parameters
    ----------
    name : str
        Name of the extension.
    sources : sequence
        List of the sources. The list of sources may contain functions
        (called source generators) which must take an extension instance
        and a build directory as inputs and return a source file or list
        of source files or None. If None is returned then no sources are
        generated. If the Extension instance has no sources after
        processing all source generators, then no extension module is
        built.
    build_info : dict, optional
        Allowed keys: depends, macros, include_dirs,
        extra_compiler_args, extra_f77_compile_args,
        extra_f90_compile_args, f2py_options, language.
    """
    self._add_library(name, sources, None, build_info)

    # Warn when setup() has already created the distribution: the
    # library may be registered too late to be built.
    dist = self.get_distribution()
    if dist is not None:
        self.warn('distutils distribution has been initialized,'
                  ' it may be too late to add a library '+ name)
1580
+
1581
def _add_library(self, name, sources, install_dir, build_info):
    """Common implementation for add_library and add_installed_library. Do
    not use directly.

    Registers (name, build_info) in self.libraries so that the library
    is built by the build_clib command.  Note: install_dir is not used
    here; add_installed_library records it separately.
    """
    build_info = copy.copy(build_info)
    build_info['sources'] = sources

    # Sometimes, depends is not set up to an empty list by default, and if
    # depends is not given to add_library, distutils barfs (#1134)
    if not 'depends' in build_info:
        build_info['depends'] = []

    # Expand globs / prefix local_path in all path-valued entries.
    self._fix_paths_dict(build_info)

    # Add to libraries list so that it is build with build_clib
    self.libraries.append((name, build_info))
1596
+
1597
def add_installed_library(self, name, sources, install_dir, build_info=None):
    """
    Similar to add_library, but the specified library is installed.

    Most C libraries used with `distutils` are only used to build python
    extensions, but libraries built through this method will be installed
    so that they can be reused by third-party packages.

    Parameters
    ----------
    name : str
        Name of the installed library.
    sources : sequence
        List of the library's source files. See `add_library` for details.
    install_dir : str
        Path to install the library, relative to the current sub-package.
    build_info : dict, optional
        The following keys are allowed:

        * depends
        * macros
        * include_dirs
        * extra_compiler_args
        * extra_f77_compile_args
        * extra_f90_compile_args
        * f2py_options
        * language

    Returns
    -------
    None

    See Also
    --------
    add_library, add_npy_pkg_config, get_info

    Notes
    -----
    The best way to encode the options required to link against the specified
    C libraries is to use a "libname.ini" file, and use `get_info` to
    retrieve the required options (see `add_npy_pkg_config` for more
    information).
    """
    if not build_info:
        build_info = {}

    # Anchor the install location at this sub-package's directory, then
    # register both the build (via _add_library) and the install record.
    install_dir = os.path.join(self.package_path, install_dir)
    self._add_library(name, sources, install_dir, build_info)
    self.installed_libraries.append(InstallableLib(name, build_info, install_dir))
1647
+
1648
def add_npy_pkg_config(self, template, install_dir, subst_dict=None):
    """
    Generate and install a npy-pkg config file from a template.

    The config file generated from `template` is installed in the
    given install directory, using `subst_dict` for variable
    substitution.

    Parameters
    ----------
    template : str
        The path of the template, relatively to the current package path.
    install_dir : str
        Where to install the npy-pkg config file, relatively to the
        current package path.
    subst_dict : dict, optional
        If given, any string of the form ``@key@`` will be replaced by
        ``subst_dict[key]`` in the template file when installed. The
        install prefix is always available through the variable
        ``@prefix@``, since the install prefix is not easy to get
        reliably from setup.py.

    See also
    --------
    add_installed_library, get_info

    Notes
    -----
    This works for both standard installs and in-place builds, i.e. the
    ``@prefix@`` refer to the source directory for in-place builds.

    For example, ``config.add_npy_pkg_config('foo.ini.in', 'lib',
    {'foo': bar})`` renders ``foo.ini.in`` (substituting ``@foo@`` with
    ``bar``) and installs the result as ``foo.ini`` under ``lib``.

    When cross-compiling with numpy distutils, the default/generated
    files would link against host libraries; set the
    NPY_PKG_CONFIG_PATH environment variable to a directory of modified
    .ini files (with a ``pkgdir`` value pointing at the target sysroot)
    to link against target libraries instead.
    """
    if subst_dict is None:
        subst_dict = {}
    template = os.path.join(self.package_path, template)
    # Queue the (template, install_dir, subst_dict) triple under this
    # package's name; rendering happens later at install time.
    entry = (template, install_dir, subst_dict)
    self.installed_pkg_config.setdefault(self.name, []).append(entry)
1753
+
1754
+
1755
def add_scripts(self,*files):
    """Add scripts to configuration.

    The given files (after glob expansion via self.paths) are appended
    to the scripts list.  Scripts will be installed under the
    <prefix>/bin/ directory.
    """
    scripts = self.paths(files)
    dist = self.get_distribution()
    if dist is None:
        self.scripts.extend(scripts)
        return
    # The distribution initializes scripts to None; normalize first.
    if dist.scripts is None:
        dist.scripts = []
    dist.scripts.extend(scripts)
1770
+
1771
def dict_append(self, **kw):
    """Merge the given keyword values into this configuration.

    Values for keys listed in self.list_keys are extended, values for
    keys in self.dict_keys are dict-updated.  Any other key is adopted
    as a new extra attribute (with a warning) unless it is already an
    extra key, in which case the attempt is logged and ignored.
    """
    for key in self.list_keys:
        getattr(self, key).extend(kw.get(key, []))
    for key in self.dict_keys:
        getattr(self, key).update(kw.get(key, {}))
    known_keys = self.list_keys + self.dict_keys + self.extra_keys
    for key, value in kw.items():
        if key not in known_keys:
            current = getattr(self, key, None)
            # Silently skip when the attribute already holds this value.
            if current and current == value:
                continue
            self.warn('Inheriting attribute %r=%r from %r'
                      % (key, value, kw.get('name', '?')))
            setattr(self, key, value)
            self.extra_keys.append(key)
        elif key in self.extra_keys:
            self.info('Ignoring attempt to set %r (from %r to %r)'
                      % (key, getattr(self, key), value))
        else:
            # key is one of list_keys/dict_keys — already merged above.
            pass
1795
+
1796
def __str__(self):
    """Return a human-readable dump of all non-empty configuration keys."""
    from pprint import pformat
    # known_keys is a fresh list, so sorting it does not mutate the
    # underlying key lists.
    known_keys = self.list_keys + self.dict_keys + self.extra_keys
    s = '<'+5*'-' + '\n'
    s += 'Configuration of '+self.name+':\n'
    known_keys.sort()
    for k in known_keys:
        a = getattr(self, k, None)
        if a:
            # Only keys with truthy (non-empty) values are shown.
            s += '%s = %s\n' % (k, pformat(a))
    s += 5*'-' + '>'
    return s
1808
+
1809
def get_config_cmd(self):
    """
    Returns the numpy.distutils config command instance.

    The command is finalized and silenced (dump_source/noisy off).
    """
    cmd = get_cmd('config')
    cmd.ensure_finalized()
    cmd.dump_source = 0
    cmd.noisy = 0
    # Prepend '.' to PATH so that freshly built config test binaries
    # can be executed from the current directory.
    # NOTE(review): os.environ['PATH'] is modified and never restored —
    # confirm this side effect is intended.
    old_path = os.environ.get('PATH')
    if old_path:
        path = os.pathsep.join(['.', old_path])
        os.environ['PATH'] = path
    return cmd
1822
+
1823
def get_build_temp_dir(self):
    """
    Return a path to a temporary directory where temporary files should be
    placed.

    The path is taken from the finalized distutils build command
    (its build_temp attribute).
    """
    cmd = get_cmd('build')
    cmd.ensure_finalized()
    return cmd.build_temp
1831
+
1832
def have_f77c(self):
    """Check for availability of Fortran 77 compiler.

    Use it inside source generating function to ensure that
    setup distribution instance has been initialized.

    Returns
    -------
    bool
        True if a trivial Fortran 77 subroutine could be compiled.
    """
    probe_source = '''
        subroutine simple
        end
        '''
    return self.get_config_cmd().try_compile(probe_source, lang='f77')
1850
+
1851
def have_f90c(self):
    """Check for availability of Fortran 90 compiler.

    Use it inside source generating function to ensure that
    setup distribution instance has been initialized.

    Returns
    -------
    bool
        True if a trivial Fortran 90 subroutine could be compiled.
    """
    probe_source = '''
        subroutine simple
        end
        '''
    return self.get_config_cmd().try_compile(probe_source, lang='f90')
1869
+
1870
def append_to(self, extlib):
    """Append libraries, include_dirs to extension or library item.

    Parameters
    ----------
    extlib : Extension or 2-sequence
        Either an Extension instance, or a ``(lib_name, build_info)``
        pair as stored in the libraries list; build_info is updated
        in place.
    """
    if is_sequence(extlib):
        lib_name, build_info = extlib
        dict_append(build_info,
                    libraries=self.libraries,
                    include_dirs=self.include_dirs)
    else:
        from numpy.distutils.core import Extension
        assert isinstance(extlib, Extension), repr(extlib)
        extlib.libraries.extend(self.libraries)
        extlib.include_dirs.extend(self.include_dirs)
1883
+
1884
def _get_svn_revision(self, path):
    """Return path's SVN revision number, or None if it cannot be
    determined.

    Tries the ``svnversion`` executable first, then falls back to
    parsing the ``.svn/entries`` file written by the svn client.
    """
    try:
        output = subprocess.check_output(['svnversion'], cwd=path)
    except (subprocess.CalledProcessError, OSError):
        pass
    else:
        m = re.match(rb'(?P<revision>\d+)', output)
        if m:
            return int(m.group('revision'))

    # Fallback: locate the entries file (Windows ASP.NET hack uses _svn
    # instead of .svn).
    if sys.platform=='win32' and os.environ.get('SVN_ASP_DOT_NET_HACK', None):
        entries = njoin(path, '_svn', 'entries')
    else:
        entries = njoin(path, '.svn', 'entries')
    if os.path.isfile(entries):
        with open(entries) as f:
            fstr = f.read()
        if fstr[:5] == '<?xml':  # pre 1.4
            m = re.search(r'revision="(?P<revision>\d+)"', fstr)
            if m:
                return int(m.group('revision'))
        else:  # non-xml entries file --- check to be sure that
            m = re.search(r'dir[\n\r]+(?P<revision>\d+)', fstr)
            if m:
                return int(m.group('revision'))
    return None
1912
+
1913
def _get_hg_revision(self, path):
    """Return path's Mercurial revision number, or None if it cannot
    be determined.

    First asks the ``hg`` executable; if that fails, falls back to
    parsing the ``.hg/branch`` and ``.hg/branch.cache`` files.
    """
    try:
        output = subprocess.check_output(
            ['hg', 'identify', '--num'], cwd=path)
    except (subprocess.CalledProcessError, OSError):
        pass
    else:
        m = re.match(rb'(?P<revision>\d+)', output)
        if m:
            return int(m.group('revision'))

    branch_fn = njoin(path, '.hg', 'branch')
    branch_cache_fn = njoin(path, '.hg', 'branch.cache')

    # Fallback: map the current branch (from .hg/branch) to a revision
    # number via the branch.cache file.  Guard against a missing cache
    # file — newer Mercurial versions do not write branch.cache, and
    # opening it unconditionally used to raise FileNotFoundError.
    if os.path.isfile(branch_fn) and os.path.isfile(branch_cache_fn):
        branch0 = None
        with open(branch_fn) as f:
            revision0 = f.read().strip()

        branch_map = {}
        with open(branch_cache_fn) as f:
            for line in f:
                branch1, revision1 = line.split()[:2]
                if revision1==revision0:
                    branch0 = branch1
                try:
                    revision1 = int(revision1)
                except ValueError:
                    continue
                branch_map[branch1] = revision1

        return branch_map.get(branch0)

    return None
1949
+
1950
+
1951
def get_version(self, version_file=None, version_variable=None):
    """Try to get version string of a package.

    Return a version string of the current package or None if the version
    information could not be detected.

    Parameters
    ----------
    version_file : str, optional
        Explicit file to scan instead of the default candidates.
    version_variable : str, optional
        Explicit variable name instead of the default candidates.

    Notes
    -----
    This method scans files named
    __version__.py, <packagename>_version.py, version.py, and
    __svn_version__.py for string variables version, __version__, and
    <packagename>_version, until a version number is found.
    """
    # A previously detected (or user-assigned) version wins.
    version = getattr(self, 'version', None)
    if version is not None:
        return version

    # Candidate files to scan for version information.
    if version_file is None:
        files = ['__version__.py',
                 self.name.split('.')[-1]+'_version.py',
                 'version.py',
                 '__svn_version__.py',
                 '__hg_version__.py']
    else:
        files = [version_file]
    # Candidate variable names inside those files.
    if version_variable is None:
        version_vars = ['version',
                        '__version__',
                        self.name.split('.')[-1]+'_version']
    else:
        version_vars = [version_variable]
    for f in files:
        fn = njoin(self.local_path, f)
        if os.path.isfile(fn):
            # (Removed unused `info = ('.py', 'U', 1)` — a leftover from
            # the removed `imp` module API.)
            name = os.path.splitext(os.path.basename(fn))[0]
            n = dot_join(self.name, name)
            try:
                version_module = exec_mod_from_location(
                    '_'.join(n.split('.')), fn)
            except ImportError as e:
                self.warn(str(e))
                version_module = None
            if version_module is None:
                continue

            for a in version_vars:
                version = getattr(version_module, a, None)
                if version is not None:
                    break

            # Try a versioneer-style get_versions() hook as well.
            try:
                version = version_module.get_versions()['version']
            except AttributeError:
                pass

            if version is not None:
                break

    if version is not None:
        self.version = version
        return version

    # Last resort: use the SVN or Mercurial revision number.
    revision = self._get_svn_revision(self.local_path)
    if revision is None:
        revision = self._get_hg_revision(self.local_path)

    if revision is not None:
        version = str(revision)
        self.version = version

    return version
2026
+
2027
def make_svn_version_py(self, delete=True):
    """Appends a data function to the data_files list that will generate
    __svn_version__.py file to the current package directory.

    Generate package __svn_version__.py file from SVN revision number,
    it will be removed after python exits but will be available
    when sdist, etc commands are executed.

    Parameters
    ----------
    delete : bool, optional
        If True (the default), schedule the generated file for removal
        at interpreter exit.

    Notes
    -----
    If __svn_version__.py existed before, nothing is done.

    This is
    intended for working with source directories that are in an SVN
    repository.
    """
    target = njoin(self.local_path, '__svn_version__.py')
    revision = self._get_svn_revision(self.local_path)
    if os.path.isfile(target) or revision is None:
        return
    else:
        def generate_svn_version_py():
            # Write the version file the first time this is invoked.
            if not os.path.isfile(target):
                version = str(revision)
                self.info('Creating %s (version=%r)' % (target, version))
                with open(target, 'w') as f:
                    f.write('version = %r\n' % (version))

            def rm_file(f=target,p=self.info):
                # Remove the generated file (and its compiled 'c'
                # variant) at interpreter exit when delete is set.
                if delete:
                    try: os.remove(f); p('removed '+f)
                    except OSError: pass
                    try: os.remove(f+'c'); p('removed '+f+'c')
                    except OSError: pass

            atexit.register(rm_file)

            return target

        self.add_data_files(('', generate_svn_version_py()))
2067
+
2068
    def make_hg_version_py(self, delete=True):
        """Appends a data function to the data_files list that will generate
        __hg_version__.py file to the current package directory.

        Generate package __hg_version__.py file from Mercurial revision,
        it will be removed after python exits but will be available
        when sdist, etc commands are executed.

        Notes
        -----
        If __hg_version__.py existed before, nothing is done.

        This is intended for working with source directories that are
        in an Mercurial repository.
        """
        target = njoin(self.local_path, '__hg_version__.py')
        revision = self._get_hg_revision(self.local_path)
        # Skip when the file already exists or we are not in an hg checkout.
        if os.path.isfile(target) or revision is None:
            return
        else:
            def generate_hg_version_py():
                # Deferred generator, mirrors make_svn_version_py above.
                if not os.path.isfile(target):
                    version = str(revision)
                    self.info('Creating %s (version=%r)' % (target, version))
                    with open(target, 'w') as f:
                        f.write('version = %r\n' % (version))

                # Defaults bind target/self.info for the atexit callback.
                def rm_file(f=target,p=self.info):
                    if delete:
                        # Best-effort cleanup; ignore missing files.
                        try: os.remove(f); p('removed '+f)
                        except OSError: pass
                        try: os.remove(f+'c'); p('removed '+f+'c')
                        except OSError: pass

                atexit.register(rm_file)

                return target

            self.add_data_files(('', generate_hg_version_py()))
2107
+
2108
+ def make_config_py(self,name='__config__'):
2109
+ """Generate package __config__.py file containing system_info
2110
+ information used during building the package.
2111
+
2112
+ This file is installed to the
2113
+ package installation directory.
2114
+
2115
+ """
2116
+ self.py_modules.append((self.name, name, generate_config_py))
2117
+
2118
+ def get_info(self,*names):
2119
+ """Get resources information.
2120
+
2121
+ Return information (from system_info.get_info) for all of the names in
2122
+ the argument list in a single dictionary.
2123
+ """
2124
+ from .system_info import get_info, dict_append
2125
+ info_dict = {}
2126
+ for a in names:
2127
+ dict_append(info_dict,**get_info(a))
2128
+ return info_dict
2129
+
2130
+
2131
def get_cmd(cmdname, _cache={}):
    """Return the distutils command object `cmdname` of the running setup.

    Raises DistutilsInternalError when no setup() distribution has been
    initialized yet.
    """
    # NOTE: the mutable default is intentional — it is a module-lifetime
    # cache of command objects keyed by command name.
    try:
        return _cache[cmdname]
    except KeyError:
        pass
    import distutils.core
    dist = distutils.core._setup_distribution
    if dist is None:
        from distutils.errors import DistutilsInternalError
        raise DistutilsInternalError(
            'setup distribution instance not initialized')
    cmd = dist.get_command_obj(cmdname)
    _cache[cmdname] = cmd
    return cmd
2142
+
2143
def get_numpy_include_dirs():
    """Return the list of NumPy include directories.

    Configuration.numpy_include_dirs is populated by numpy/core/setup.py
    while numpy itself is being built; outside of that build it is empty
    and we fall back to the installed numpy's headers.
    """
    include_dirs = list(Configuration.numpy_include_dirs)
    if include_dirs:
        # Running inside numpy/core/setup.py.
        return include_dirs
    import numpy
    return [numpy.get_include()]
2151
+
2152
def get_npy_pkg_dir():
    """Return the path where to find the npy-pkg-config directory.

    If the NPY_PKG_CONFIG_PATH environment variable is set, the value of that
    is returned. Otherwise, a path inside the location of the numpy module is
    returned.

    The NPY_PKG_CONFIG_PATH can be useful when cross-compiling, maintaining
    customized npy-pkg-config .ini files for the cross-compilation
    environment, and using them when cross-compiling.

    """
    env_dir = os.environ.get('NPY_PKG_CONFIG_PATH')
    if env_dir is not None:
        return env_dir
    # Locate the installed numpy package and use its bundled config dir.
    spec = importlib.util.find_spec('numpy')
    return os.path.join(os.path.dirname(spec.origin),
                        'core', 'lib', 'npy-pkg-config')
2171
+
2172
def get_pkg_info(pkgname, dirs=None):
    """
    Return library info for the given package.

    Parameters
    ----------
    pkgname : str
        Name of the package (should match the name of the .ini file, without
        the extension, e.g. foo for the file foo.ini).
    dirs : sequence, optional
        If given, should be a sequence of additional directories where to look
        for npy-pkg-config files. Those directories are searched prior to the
        NumPy directory.

    Returns
    -------
    pkginfo : class instance
        The `LibraryInfo` instance containing the build information.

    Raises
    ------
    PkgNotFound
        If the package is not found.

    See Also
    --------
    Configuration.add_npy_pkg_config, Configuration.add_installed_library,
    get_info

    """
    from numpy.distutils.npy_pkg_config import read_config

    # Search caller-supplied directories first, then the NumPy
    # npy-pkg-config directory.  Build a fresh list instead of appending to
    # the caller's `dirs` argument: the previous code mutated the caller's
    # list in place as a hidden side effect (and required it to be a list).
    if dirs:
        dirs = list(dirs) + [get_npy_pkg_dir()]
    else:
        dirs = [get_npy_pkg_dir()]
    return read_config(pkgname, dirs)
2209
+
2210
def get_info(pkgname, dirs=None):
    """
    Return an info dict for a given C library.

    The info dict contains the necessary options to use the C library.

    Parameters
    ----------
    pkgname : str
        Name of the package (should match the name of the .ini file, without
        the extension, e.g. foo for the file foo.ini).
    dirs : sequence, optional
        If given, should be a sequence of additional directories where to look
        for npy-pkg-config files. Those directories are searched prior to the
        NumPy directory.

    Returns
    -------
    info : dict
        The dictionary with build information.

    Raises
    ------
    PkgNotFound
        If the package is not found.

    See Also
    --------
    Configuration.add_npy_pkg_config, Configuration.add_installed_library,
    get_pkg_info

    Examples
    --------
    To get the necessary information for the npymath library from NumPy:

    >>> npymath_info = np.distutils.misc_util.get_info('npymath')
    >>> npymath_info #doctest: +SKIP
    {'define_macros': [], 'libraries': ['npymath'], 'library_dirs':
    ['.../numpy/core/lib'], 'include_dirs': ['.../numpy/core/include']}

    This info dict can then be used as input to a `Configuration` instance::

      config.add_extension('foo', sources=['foo.c'], extra_info=npymath_info)

    """
    from numpy.distutils.npy_pkg_config import parse_flags
    pkg_info = get_pkg_info(pkgname, dirs)

    # Translate the LibraryInfo instance into a build_info dict: start from
    # the compile flags and fold the link flags in.
    info = parse_flags(pkg_info.cflags())
    for key, values in parse_flags(pkg_info.libs()).items():
        info[key].extend(values)

    # add_extension's extra_info argument expects 'define_macros', and no
    # extraneous keys, so rename 'macros' and drop 'ignored'.
    info['define_macros'] = info.pop('macros')
    del info['ignored']

    return info
2269
+
2270
def is_bootstrapping():
    """Return True when numpy's own setup has set builtins.__NUMPY_SETUP__.

    The presence of the attribute (whatever its value) is the signal — this
    mirrors the flag numpy's setup.py installs while bootstrapping.
    """
    import builtins
    return hasattr(builtins, '__NUMPY_SETUP__')
2278
+
2279
+
2280
+ #########################
2281
+
2282
def default_config_dict(name=None, parent_name=None, local_path=None):
    """Return a configuration dictionary for usage in
    configuration() function defined in file setup_<name>.py.

    Deprecated: construct a Configuration directly instead.
    """
    import warnings
    msg = ('Use Configuration(%r,%r,top_path=%r) instead of '
           'deprecated default_config_dict(%r,%r,%r)'
           % (name, parent_name, local_path,
              name, parent_name, local_path,
              ))
    warnings.warn(msg, stacklevel=2)
    return Configuration(name, parent_name, local_path).todict()
2294
+
2295
+
2296
def dict_append(d, **kws):
    """Merge keyword values into dict `d` in place.

    New keys are inserted as-is; existing string values are replaced, while
    any other existing value is assumed to be list-like and extended.
    """
    for key, value in kws.items():
        if key not in d:
            d[key] = value
        elif isinstance(d[key], str):
            # String entries are replaced rather than concatenated.
            d[key] = value
        else:
            d[key].extend(value)
2306
+
2307
def appendpath(prefix, path):
    # Join `path` under `prefix`, stripping from `path` any leading
    # components it shares with the absolute form of `prefix`, so that an
    # absolute `path` is re-rooted below `prefix` instead of escaping it.
    if os.path.sep != '/':
        # Normalize to the platform separator before any comparisons.
        prefix = prefix.replace('/', os.path.sep)
        path = path.replace('/', os.path.sep)
    drive = ''
    if os.path.isabs(path):
        drive = os.path.splitdrive(prefix)[0]
        absprefix = os.path.splitdrive(os.path.abspath(prefix))[1]
        pathdrive, path = os.path.splitdrive(path)
        # Common leading characters of the two drive-less paths; may cut a
        # path component in half, which the check below detects.
        d = os.path.commonprefix([absprefix, path])
        if os.path.join(absprefix[:len(d)], absprefix[len(d):]) != absprefix \
           or os.path.join(path[:len(d)], path[len(d):]) != path:
            # Handle invalid paths
            d = os.path.dirname(d)
        subpath = path[len(d):]
        if os.path.isabs(subpath):
            # Drop the leading separator left over after slicing.
            subpath = subpath[1:]
    else:
        subpath = path
    return os.path.normpath(njoin(drive + prefix, subpath))
2327
+
2328
def generate_config_py(target):
    """Generate config.py file containing system_info information
    used during building the package.

    Usage:
        config['py_modules'].append((packagename, '__config__',generate_config_py))
    """
    from numpy.distutils.system_info import system_info
    from distutils.dir_util import mkpath
    # Make sure the destination directory exists before writing.
    mkpath(os.path.dirname(target))
    with open(target, 'w') as f:
        f.write('# This file is generated by numpy\'s %s\n' % (os.path.basename(sys.argv[0])))
        f.write('# It contains system_info results at the time of building this package.\n')
        f.write('__all__ = ["get_info","show"]\n\n')

        # For gfortran+msvc combination, extra shared libraries may exist
        f.write(textwrap.dedent("""
            import os
            import sys

            extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs')

            if sys.platform == 'win32' and os.path.isdir(extra_dll_dir):
                os.add_dll_directory(extra_dll_dir)

            """))

        # Dump every cached system_info result as a module-level dict.
        for k, i in system_info.saved_results.items():
            f.write('%s=%r\n' % (k, i))
        # The remainder of the generated module is emitted verbatim: a
        # get_info() accessor over the dumped dicts plus a show() reporter.
        f.write(textwrap.dedent(r'''
            def get_info(name):
                g = globals()
                return g.get(name, g.get(name + "_info", {}))

            def show():
                """
                Show libraries in the system on which NumPy was built.

                Print information about various resources (libraries, library
                directories, include directories, etc.) in the system on which
                NumPy was built.

                See Also
                --------
                get_include : Returns the directory containing NumPy C
                              header files.

                Notes
                -----
                1. Classes specifying the information to be printed are defined
                   in the `numpy.distutils.system_info` module.

                   Information may include:

                   * ``language``: language used to write the libraries (mostly
                     C or f77)
                   * ``libraries``: names of libraries found in the system
                   * ``library_dirs``: directories containing the libraries
                   * ``include_dirs``: directories containing library header files
                   * ``src_dirs``: directories containing library source files
                   * ``define_macros``: preprocessor macros used by
                     ``distutils.setup``
                   * ``baseline``: minimum CPU features required
                   * ``found``: dispatched features supported in the system
                   * ``not found``: dispatched features that are not supported
                     in the system

                2. NumPy BLAS/LAPACK Installation Notes

                   Installing a numpy wheel (``pip install numpy`` or force it
                   via ``pip install numpy --only-binary :numpy: numpy``) includes
                   an OpenBLAS implementation of the BLAS and LAPACK linear algebra
                   APIs. In this case, ``library_dirs`` reports the original build
                   time configuration as compiled with gcc/gfortran; at run time
                   the OpenBLAS library is in
                   ``site-packages/numpy.libs/`` (linux), or
                   ``site-packages/numpy/.dylibs/`` (macOS), or
                   ``site-packages/numpy/.libs/`` (windows).

                   Installing numpy from source
                   (``pip install numpy --no-binary numpy``) searches for BLAS and
                   LAPACK dynamic link libraries at build time as influenced by
                   environment variables NPY_BLAS_LIBS, NPY_CBLAS_LIBS, and
                   NPY_LAPACK_LIBS; or NPY_BLAS_ORDER and NPY_LAPACK_ORDER;
                   or the optional file ``~/.numpy-site.cfg``.
                   NumPy remembers those locations and expects to load the same
                   libraries at run-time.
                   In NumPy 1.21+ on macOS, 'accelerate' (Apple's Accelerate BLAS
                   library) is in the default build-time search order after
                   'openblas'.

                Examples
                --------
                >>> import numpy as np
                >>> np.show_config()
                blas_opt_info:
                    language = c
                    define_macros = [('HAVE_CBLAS', None)]
                    libraries = ['openblas', 'openblas']
                    library_dirs = ['/usr/local/lib']
                """
                from numpy.core._multiarray_umath import (
                    __cpu_features__, __cpu_baseline__, __cpu_dispatch__
                )
                for name,info_dict in globals().items():
                    if name[0] == "_" or type(info_dict) is not type({}): continue
                    print(name + ":")
                    if not info_dict:
                        print("  NOT AVAILABLE")
                    for k,v in info_dict.items():
                        v = str(v)
                        if k == "sources" and len(v) > 200:
                            v = v[:60] + " ...\n... " + v[-60:]
                        print("    %s = %s" % (k,v))

                features_found, features_not_found = [], []
                for feature in __cpu_dispatch__:
                    if __cpu_features__[feature]:
                        features_found.append(feature)
                    else:
                        features_not_found.append(feature)

                print("Supported SIMD extensions in this NumPy install:")
                print("    baseline = %s" % (','.join(__cpu_baseline__)))
                print("    found = %s" % (','.join(features_found)))
                print("    not found = %s" % (','.join(features_not_found)))

            '''))

    return target
2458
+
2459
def msvc_version(compiler):
    """Return version major and minor of compiler instance if it is
    MSVC, raise an exception otherwise."""
    if compiler.compiler_type != "msvc":
        raise ValueError("Compiler instance is not msvc (%s)"
                         % compiler.compiler_type)
    # Reach into the name-mangled private attribute set by MSVCCompiler.
    return compiler._MSVCCompiler__version
2466
+
2467
+ def get_build_architecture():
2468
+ # Importing distutils.msvccompiler triggers a warning on non-Windows
2469
+ # systems, so delay the import to here.
2470
+ from distutils.msvccompiler import get_build_architecture
2471
+ return get_build_architecture()
2472
+
2473
+
2474
# Flags that are valid for C but rejected by C++ compilers.
_cxx_ignore_flags = {'-Werror=implicit-function-declaration', '-std=c99'}


def sanitize_cxx_flags(cxxflags):
    '''
    Some flags are valid for C but not C++. Prune them.
    '''
    cleaned = []
    for flag in cxxflags:
        if flag not in _cxx_ignore_flags:
            cleaned.append(flag)
    return cleaned
2482
+
2483
+
2484
def exec_mod_from_location(modname, modfile):
    '''
    Use importlib machinery to import a module `modname` from the file
    `modfile`. Depending on the `spec.loader`, the module may not be
    registered in sys.modules.
    '''
    spec = importlib.util.spec_from_file_location(modname, modfile)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
llmeval-env/lib/python3.10/site-packages/numpy/distutils/msvc9compiler.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from distutils.msvc9compiler import MSVCCompiler as _MSVCCompiler
3
+
4
+ from .system_info import platform_bits
5
+
6
+
7
+ def _merge(old, new):
8
+ """Concatenate two environment paths avoiding repeats.
9
+
10
+ Here `old` is the environment string before the base class initialize
11
+ function is called and `new` is the string after the call. The new string
12
+ will be a fixed string if it is not obtained from the current environment,
13
+ or the same as the old string if obtained from the same environment. The aim
14
+ here is not to append the new string if it is already contained in the old
15
+ string so as to limit the growth of the environment string.
16
+
17
+ Parameters
18
+ ----------
19
+ old : string
20
+ Previous environment string.
21
+ new : string
22
+ New environment string.
23
+
24
+ Returns
25
+ -------
26
+ ret : string
27
+ Updated environment string.
28
+
29
+ """
30
+ if not old:
31
+ return new
32
+ if new in old:
33
+ return old
34
+
35
+ # Neither new nor old is empty. Give old priority.
36
+ return ';'.join([old, new])
37
+
38
+
39
class MSVCCompiler(_MSVCCompiler):
    # Thin wrapper around distutils' MSVC 9 compiler that preserves the
    # caller's 'lib'/'include' environment and forces SSE2 on 32-bit builds.
    def __init__(self, verbose=0, dry_run=0, force=0):
        _MSVCCompiler.__init__(self, verbose, dry_run, force)

    def initialize(self, plat_name=None):
        # The 'lib' and 'include' variables may be overwritten
        # by MSVCCompiler.initialize, so save them for later merge.
        # NOTE: os.getenv may return None here; _merge handles that.
        environ_lib = os.getenv('lib')
        environ_include = os.getenv('include')
        _MSVCCompiler.initialize(self, plat_name)

        # Merge current and previous values of 'lib' and 'include'
        os.environ['lib'] = _merge(environ_lib, os.environ['lib'])
        os.environ['include'] = _merge(environ_include, os.environ['include'])

        # msvc9 building for 32 bits requires SSE2 to work around a
        # compiler bug.
        if platform_bits == 32:
            self.compile_options += ['/arch:SSE2']
            self.compile_options_debug += ['/arch:SSE2']

    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
        # Always request a manifest before delegating to the base class.
        ld_args.append('/MANIFEST')
        _MSVCCompiler.manifest_setup_ldargs(self, output_filename,
                                            build_temp, ld_args)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/msvccompiler.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from distutils.msvccompiler import MSVCCompiler as _MSVCCompiler
3
+
4
+ from .system_info import platform_bits
5
+
6
+
7
+ def _merge(old, new):
8
+ """Concatenate two environment paths avoiding repeats.
9
+
10
+ Here `old` is the environment string before the base class initialize
11
+ function is called and `new` is the string after the call. The new string
12
+ will be a fixed string if it is not obtained from the current environment,
13
+ or the same as the old string if obtained from the same environment. The aim
14
+ here is not to append the new string if it is already contained in the old
15
+ string so as to limit the growth of the environment string.
16
+
17
+ Parameters
18
+ ----------
19
+ old : string
20
+ Previous environment string.
21
+ new : string
22
+ New environment string.
23
+
24
+ Returns
25
+ -------
26
+ ret : string
27
+ Updated environment string.
28
+
29
+ """
30
+ if new in old:
31
+ return old
32
+ if not old:
33
+ return new
34
+
35
+ # Neither new nor old is empty. Give old priority.
36
+ return ';'.join([old, new])
37
+
38
+
39
class MSVCCompiler(_MSVCCompiler):
    # Wrapper around distutils' modern MSVC compiler that preserves the
    # caller's 'lib'/'include' environment and forces SSE2 on 32-bit builds.
    def __init__(self, verbose=0, dry_run=0, force=0):
        _MSVCCompiler.__init__(self, verbose, dry_run, force)

    def initialize(self):
        # The 'lib' and 'include' variables may be overwritten
        # by MSVCCompiler.initialize, so save them for later merge.
        environ_lib = os.getenv('lib', '')
        environ_include = os.getenv('include', '')
        _MSVCCompiler.initialize(self)

        # Merge current and previous values of 'lib' and 'include'
        os.environ['lib'] = _merge(environ_lib, os.environ['lib'])
        os.environ['include'] = _merge(environ_include, os.environ['include'])

        # msvc9 building for 32 bits requires SSE2 to work around a
        # compiler bug.
        if platform_bits == 32:
            self.compile_options += ['/arch:SSE2']
            self.compile_options_debug += ['/arch:SSE2']
59
+
60
+
61
def lib_opts_if_msvc(build_cmd):
    """ Add flags if we are using MSVC compiler

    We can't see `build_cmd` in our scope, because we have not initialized
    the distutils build command, so use this deferred calculation to run
    when we are building the library.
    """
    compiler = build_cmd.compiler
    if compiler.compiler_type != 'msvc':
        return []
    # Explicitly disable whole-program optimization.
    opts = ['/GL-']
    # Disable voltbl section for vc142 to allow link using mingw-w64; see:
    # https://github.com/matthew-brett/dll_investigation/issues/1#issuecomment-1100468171
    if build_cmd.compiler_opt.cc_test_flags(['-d2VolatileMetadata-']):
        opts.append('-d2VolatileMetadata-')
    return opts
llmeval-env/lib/python3.10/site-packages/numpy/distutils/npy_pkg_config.py ADDED
@@ -0,0 +1,437 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import re
3
+ import os
4
+
5
+ from configparser import RawConfigParser
6
+
7
+ __all__ = ['FormatError', 'PkgNotFound', 'LibraryInfo', 'VariableSet',
8
+ 'read_config', 'parse_flags']
9
+
10
+ _VAR = re.compile(r'\$\{([a-zA-Z0-9_-]+)\}')
11
+
12
class FormatError(OSError):
    """
    Exception thrown when there is a problem parsing a configuration file.

    """
    # NOTE: msg is stored on the instance and OSError.__init__ is not
    # called, so e.args stays empty; __str__ reports msg directly.
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
22
+
23
class PkgNotFound(OSError):
    """Exception raised when a package can not be located."""
    # Same shape as FormatError: message kept on the instance only.
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
30
+
31
def parse_flags(line):
    """
    Parse a line from a config file containing compile flags.

    Parameters
    ----------
    line : str
        A single line containing one or more compile flags.

    Returns
    -------
    d : dict
        Dictionary of parsed flags, split into relevant categories.
        These categories are the keys of `d`:

        * 'include_dirs'
        * 'library_dirs'
        * 'libraries'
        * 'macros'
        * 'ignored'

    """
    d = {'include_dirs': [], 'library_dirs': [], 'libraries': [],
         'macros': [], 'ignored': []}

    # Prepending a space makes every flag (including the first) split on
    # ' -'; the leading empty chunk becomes a bare '-' in 'ignored'.
    for chunk in (' ' + line).split(' -'):
        flag = '-' + chunk
        prefix = flag[:2]
        if prefix == '-I':
            d['include_dirs'].append(flag[2:].strip())
        elif prefix == '-L':
            d['library_dirs'].append(flag[2:].strip())
        elif prefix == '-l':
            d['libraries'].append(flag[2:].strip())
        elif prefix == '-D':
            d['macros'].append(flag[2:].strip())
        else:
            d['ignored'].append(flag)

    return d
72
+
73
+ def _escape_backslash(val):
74
+ return val.replace('\\', '\\\\')
75
+
76
class LibraryInfo:
    """
    Object containing build information about a library.

    Parameters
    ----------
    name : str
        The library name.
    description : str
        Description of the library.
    version : str
        Version string.
    sections : dict
        The sections of the configuration file for the library. The keys are
        the section headers, the values the text under each header.
    vars : class instance
        A `VariableSet` instance, which contains ``(name, value)`` pairs for
        variables defined in the configuration file for the library.
    requires : sequence, optional
        The required libraries for the library to be installed.

    Notes
    -----
    All input parameters (except "sections" which is a method) are available as
    attributes of the same name.

    """
    def __init__(self, name, description, version, sections, vars, requires=None):
        self.name = name
        self.description = description
        if requires:
            self.requires = requires
        else:
            self.requires = []
        self.version = version
        self._sections = sections
        self.vars = vars

    def sections(self):
        """
        Return the section headers of the config file.

        Parameters
        ----------
        None

        Returns
        -------
        keys : list of str
            The list of section headers.

        """
        return list(self._sections.keys())

    def cflags(self, section="default"):
        # Interpolate config variables, then escape backslashes so the
        # result is safe to embed (Windows paths).
        val = self.vars.interpolate(self._sections[section]['cflags'])
        return _escape_backslash(val)

    def libs(self, section="default"):
        val = self.vars.interpolate(self._sections[section]['libs'])
        return _escape_backslash(val)

    def __str__(self):
        m = ['Name: %s' % self.name, 'Description: %s' % self.description]
        # BUGFIX: the branches were inverted — a non-empty requires list
        # printed a bare 'Requires:' and an empty one joined nothing.
        if self.requires:
            m.append('Requires: %s' % ",".join(self.requires))
        else:
            m.append('Requires:')
        m.append('Version: %s' % self.version)

        return "\n".join(m)
147
+
148
class VariableSet:
    """
    Container object for the variables defined in a config file.

    `VariableSet` can be used as a plain dictionary, with the variable names
    as keys.

    Parameters
    ----------
    d : dict
        Dict of items in the "variables" section of the configuration file.

    """
    def __init__(self, d):
        self._raw_data = dict(d.items())
        # Per-variable compiled pattern and replacement, kept in sync with
        # _raw_data by _init_parse_var.
        self._re = {}
        self._re_sub = {}
        self._init_parse()

    def _init_parse(self):
        for key, val in self._raw_data.items():
            self._init_parse_var(key, val)

    def _init_parse_var(self, name, value):
        self._re[name] = re.compile(r'\$\{%s\}' % name)
        self._re_sub[name] = value

    def interpolate(self, value):
        # Brute force: keep substituting known variables until no '${var}'
        # remains or a full pass changes nothing (unknown/cyclic vars).
        def _expand_once(text):
            for name, pattern in self._re.items():
                text = pattern.sub(self._re_sub[name], text)
            return text

        while _VAR.search(value):
            expanded = _expand_once(value)
            if expanded == value:
                break
            value = expanded

        return value

    def variables(self):
        """
        Return the list of variable names.

        Parameters
        ----------
        None

        Returns
        -------
        names : list of str
            The names of all variables in the `VariableSet` instance.

        """
        return list(self._raw_data)

    # Emulate a dict to set/get variables values
    def __getitem__(self, name):
        return self._raw_data[name]

    def __setitem__(self, name, value):
        self._raw_data[name] = value
        self._init_parse_var(name, value)
215
+
216
def parse_meta(config):
    """Return the [meta] section of `config` as a dict, validating that the
    mandatory name/description/version options are present."""
    if not config.has_section('meta'):
        raise FormatError("No meta section found !")

    d = dict(config.items('meta'))

    for field in ('name', 'description', 'version'):
        if field not in d:
            raise FormatError("Option %s (section [meta]) is mandatory, "
                              "but not found" % field)

    # Optional dependency list defaults to empty.
    d.setdefault('requires', [])

    return d
231
+
232
def parse_variables(config):
    """Return a VariableSet built from the [variables] section of `config`."""
    if not config.has_section('variables'):
        raise FormatError("No variables section found !")

    return VariableSet(dict(config.items("variables")))
242
+
243
def parse_sections(config):
    # NOTE(review): dead/broken code — `meta_d` and `r` are not defined
    # anywhere in this function, so calling it raises NameError. It looks
    # like a leftover from an earlier refactor (parse_config now returns
    # the sections). Verify nothing imports/calls this before removing.
    return meta_d, r
245
+
246
def pkg_to_filename(pkg_name):
    """Return the npy-pkg-config filename for package `pkg_name`."""
    return f"{pkg_name}.ini"
248
+
249
def parse_config(filename, dirs=None):
    """Read an npy-pkg-config file and return its parsed pieces as the
    tuple (meta, vars, sections, requires)."""
    if dirs:
        candidates = [os.path.join(d, filename) for d in dirs]
    else:
        candidates = [filename]

    config = RawConfigParser()

    found = config.read(candidates)
    if len(found) < 1:
        raise PkgNotFound("Could not find file(s) %s" % str(candidates))

    # Parse meta and variables sections
    meta = parse_meta(config)

    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)

    # Parse "normal" sections, collecting any per-section 'requires'.
    sections = {}
    requires = {}
    for section in config.sections():
        if section in ('meta', 'variables'):
            continue
        if config.has_option(section, "requires"):
            requires[section] = config.get(section, 'requires')
        sections[section] = dict(config.items(section))

    return meta, vars, sections, requires
284
+
285
def _read_config_imp(filenames, dirs=None):
    # Build a LibraryInfo from a config file, recursively folding in the
    # sections and variables of any required packages.
    def _read_config(f):
        meta, vars, sections, reqs = parse_config(f, dirs)
        # recursively add sections and variables of required libraries
        for rname, rvalue in reqs.items():
            nmeta, nvars, nsections, nreqs = _read_config(pkg_to_filename(rvalue))

            # Update var dict for variables not in 'top' config file
            for k, v in nvars.items():
                if not k in vars:
                    vars[k] = v

            # Update sec dict
            # NOTE(review): only the section named like the requiring section
            # (rname) is merged from the dependency — presumably intentional;
            # verify against the .ini layout.
            for oname, ovalue in nsections[rname].items():
                if ovalue:
                    sections[rname][oname] += ' %s' % ovalue

        return meta, vars, sections, reqs

    meta, vars, sections, reqs = _read_config(filenames)

    # FIXME: document this. If pkgname is defined in the variables section, and
    # there is no pkgdir variable defined, pkgdir is automatically defined to
    # the path of pkgname. This requires the package to be imported to work
    if not 'pkgdir' in vars and "pkgname" in vars:
        pkgname = vars["pkgname"]
        if not pkgname in sys.modules:
            raise ValueError("You should import %s to get information on %s" %
                             (pkgname, meta["name"]))

        mod = sys.modules[pkgname]
        vars["pkgdir"] = _escape_backslash(os.path.dirname(mod.__file__))

    return LibraryInfo(name=meta["name"], description=meta["description"],
            version=meta["version"], sections=sections, vars=VariableSet(vars))
320
+
321
# Trivial memoization of LibraryInfo creation, keyed by package name. To be
# really efficient the cache would live inside the parsing itself (the same
# file can be parsed several times outside LibraryInfo creation), but that
# is unlikely to matter in practice.
_CACHE = {}
def read_config(pkgname, dirs=None):
    """
    Return library info for a package from its configuration file.

    Parameters
    ----------
    pkgname : str
        Name of the package (should match the name of the .ini file, without
        the extension, e.g. foo for the file foo.ini).
    dirs : sequence, optional
        If given, should be a sequence of directories - usually including
        the NumPy base directory - where to look for npy-pkg-config files.

    Returns
    -------
    pkginfo : class instance
        The `LibraryInfo` instance containing the build information.

    Raises
    ------
    PkgNotFound
        If the package is not found.

    See Also
    --------
    misc_util.get_info, misc_util.get_pkg_info

    """
    if pkgname not in _CACHE:
        _CACHE[pkgname] = _read_config_imp(pkg_to_filename(pkgname), dirs)
    return _CACHE[pkgname]
371
+
372
# TODO:
#   - implements version comparison (modversion + atleast)

# pkg-config simple emulator - useful for debugging, and maybe later to query
# the system
if __name__ == '__main__':
    from optparse import OptionParser
    import glob

    parser = OptionParser()
    parser.add_option("--cflags", dest="cflags", action="store_true",
                      help="output all preprocessor and compiler flags")
    parser.add_option("--libs", dest="libs", action="store_true",
                      help="output all linker flags")
    parser.add_option("--use-section", dest="section",
                      help="use this section instead of default for options")
    parser.add_option("--version", dest="version", action="store_true",
                      help="output version")
    parser.add_option("--atleast-version", dest="min_version",
                      help="Minimal version")
    # NOTE(review): help text below looks copy-pasted from --atleast-version;
    # presumably it should describe listing all packages.
    parser.add_option("--list-all", dest="list_all", action="store_true",
                      help="Minimal version")
    parser.add_option("--define-variable", dest="define_variable",
                      help="Replace variable with the given value")

    (options, args) = parser.parse_args(sys.argv)

    if len(args) < 2:
        raise ValueError("Expect package name on the command line:")

    if options.list_all:
        files = glob.glob("*.ini")
        for f in files:
            # NOTE(review): read_config appends '.ini' again via
            # pkg_to_filename, so passing a glob result like 'foo.ini'
            # looks for 'foo.ini.ini' — confirm intent.
            info = read_config(f)
            print("%s\t%s - %s" % (info.name, info.name, info.description))

    pkg_name = args[1]
    # Honor NPY_PKG_CONFIG_PATH as an extra search directory when set.
    d = os.environ.get('NPY_PKG_CONFIG_PATH')
    if d:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.', d])
    else:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.'])

    if options.section:
        section = options.section
    else:
        section = "default"

    if options.define_variable:
        m = re.search(r'([\S]+)=([\S]+)', options.define_variable)
        if not m:
            raise ValueError("--define-variable option should be of "
                             "the form --define-variable=foo=bar")
        else:
            name = m.group(1)
            value = m.group(2)
            info.vars[name] = value

    if options.cflags:
        print(info.cflags(section))
    if options.libs:
        print(info.libs(section))
    if options.version:
        print(info.version)
    if options.min_version:
        # String comparison of version strings — lexicographic, not semantic.
        print(info.version >= options.min_version)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/setup.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3
def configuration(parent_package='', top_path=None):
    """Build the ``numpy.distutils`` package configuration.

    Registers the sub-packages shipped with numpy.distutils together with
    the auxiliary data files that must be installed alongside the code.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('distutils', parent_package, top_path)

    # Sub-packages bundled with numpy.distutils.
    for subpackage in ('command', 'fcompiler', 'tests'):
        config.add_subpackage(subpackage)

    # Non-Python files shipped with the package.
    config.add_data_files('site.cfg')
    config.add_data_files('mingw/gfortran_vs2003_hack.c')
    config.add_data_dir('checks')
    config.add_data_files('*.pyi')

    config.make_config_py()
    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)
llmeval-env/lib/python3.10/site-packages/numpy/distutils/system_info.py ADDED
The diff for this file is too large to render. See raw diff
 
llmeval-env/lib/python3.10/site-packages/numpy/distutils/unixccompiler.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ unixccompiler - can handle very long argument lists for ar.
3
+
4
+ """
5
+ import os
6
+ import sys
7
+ import subprocess
8
+ import shlex
9
+
10
+ from distutils.errors import CompileError, DistutilsExecError, LibError
11
+ from distutils.unixccompiler import UnixCCompiler
12
+ from numpy.distutils.ccompiler import replace_method
13
+ from numpy.distutils.misc_util import _commandline_dep_string
14
+ from numpy.distutils import log
15
+
16
+ # Note that UnixCCompiler._compile appeared in Python 2.3
17
+ def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
18
+ """Compile a single source files with a Unix-style compiler."""
19
+ # HP ad-hoc fix, see ticket 1383
20
+ ccomp = self.compiler_so
21
+ if ccomp[0] == 'aCC':
22
+ # remove flags that will trigger ANSI-C mode for aCC
23
+ if '-Ae' in ccomp:
24
+ ccomp.remove('-Ae')
25
+ if '-Aa' in ccomp:
26
+ ccomp.remove('-Aa')
27
+ # add flags for (almost) sane C++ handling
28
+ ccomp += ['-AA']
29
+ self.compiler_so = ccomp
30
+ # ensure OPT environment variable is read
31
+ if 'OPT' in os.environ:
32
+ # XXX who uses this?
33
+ from sysconfig import get_config_vars
34
+ opt = shlex.join(shlex.split(os.environ['OPT']))
35
+ gcv_opt = shlex.join(shlex.split(get_config_vars('OPT')[0]))
36
+ ccomp_s = shlex.join(self.compiler_so)
37
+ if opt not in ccomp_s:
38
+ ccomp_s = ccomp_s.replace(gcv_opt, opt)
39
+ self.compiler_so = shlex.split(ccomp_s)
40
+ llink_s = shlex.join(self.linker_so)
41
+ if opt not in llink_s:
42
+ self.linker_so = self.linker_so + shlex.split(opt)
43
+
44
+ display = '%s: %s' % (os.path.basename(self.compiler_so[0]), src)
45
+
46
+ # gcc style automatic dependencies, outputs a makefile (-MF) that lists
47
+ # all headers needed by a c file as a side effect of compilation (-MMD)
48
+ if getattr(self, '_auto_depends', False):
49
+ deps = ['-MMD', '-MF', obj + '.d']
50
+ else:
51
+ deps = []
52
+
53
+ try:
54
+ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + deps +
55
+ extra_postargs, display = display)
56
+ except DistutilsExecError as e:
57
+ msg = str(e)
58
+ raise CompileError(msg) from None
59
+
60
+ # add commandline flags to dependency file
61
+ if deps:
62
+ # After running the compiler, the file created will be in EBCDIC
63
+ # but will not be tagged as such. This tags it so the file does not
64
+ # have multiple different encodings being written to it
65
+ if sys.platform == 'zos':
66
+ subprocess.check_output(['chtag', '-tc', 'IBM1047', obj + '.d'])
67
+ with open(obj + '.d', 'a') as f:
68
+ f.write(_commandline_dep_string(cc_args, extra_postargs, pp_opts))
69
+
70
+ replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile)
71
+
72
+
73
def UnixCCompiler_create_static_lib(self, objects, output_libname,
                                    output_dir=None, debug=0, target_lang=None):
    """
    Build a static library in a separate sub-process.

    Objects are fed to the archiver in batches of at most 50 so that
    very long argument lists never exceed the OS command-line limit.

    Parameters
    ----------
    objects : list or tuple of str
        List of paths to object files used to build the static library.
    output_libname : str
        The library name as an absolute or relative (if `output_dir` is
        used) path.
    output_dir : str, optional
        The path to the output directory. Default is None, in which case
        the ``output_dir`` attribute of the UnixCCompiler instance.
    debug : bool, optional
        This parameter is not used.
    target_lang : str, optional
        This parameter is not used.

    Returns
    -------
    None

    """
    objects, output_dir = self._fix_object_args(objects, output_dir)

    output_filename = \
        self.library_filename(output_libname, output_dir=output_dir)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    try:
        # A stale .a may be corrupt, and ar on OS X cannot update
        # universal archives, so always remove it and start fresh.
        os.unlink(output_filename)
    except OSError:
        pass
    self.mkpath(os.path.dirname(output_filename))

    pending = objects + self.objects
    while pending:
        batch, pending = pending[:50], pending[50:]
        display = '%s: adding %d object files to %s' % (
            os.path.basename(self.archiver[0]),
            len(batch), output_filename)
        self.spawn(self.archiver + [output_filename] + batch,
                   display=display)

    # Not many Unices require ranlib anymore -- SunOS 4.x is, I think,
    # the only major Unix that does. Maybe we need some platform
    # intelligence here to skip ranlib if it's not needed -- or maybe
    # Python's configure script took care of it for us, hence the
    # check for leading colon.
    if self.ranlib:
        display = '%s:@ %s' % (os.path.basename(self.ranlib[0]),
                               output_filename)
        try:
            self.spawn(self.ranlib + [output_filename], display=display)
        except DistutilsExecError as e:
            raise LibError(str(e)) from None
    return
139
+
140
+ replace_method(UnixCCompiler, 'create_static_lib',
141
+ UnixCCompiler_create_static_lib)
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_hermite.cpython-310.pyc ADDED
Binary file (17.5 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_hermite_e.cpython-310.pyc ADDED
Binary file (17.6 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_polynomial.cpython-310.pyc ADDED
Binary file (18.6 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/__pycache__/test_polyutils.cpython-310.pyc ADDED
Binary file (3.88 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_hermite.py ADDED
@@ -0,0 +1,555 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for hermite module.
2
+
3
+ """
4
+ from functools import reduce
5
+
6
+ import numpy as np
7
+ import numpy.polynomial.hermite as herm
8
+ from numpy.polynomial.polynomial import polyval
9
+ from numpy.testing import (
10
+ assert_almost_equal, assert_raises, assert_equal, assert_,
11
+ )
12
+
13
+ H0 = np.array([1])
14
+ H1 = np.array([0, 2])
15
+ H2 = np.array([-2, 0, 4])
16
+ H3 = np.array([0, -12, 0, 8])
17
+ H4 = np.array([12, 0, -48, 0, 16])
18
+ H5 = np.array([0, 120, 0, -160, 0, 32])
19
+ H6 = np.array([-120, 0, 720, 0, -480, 0, 64])
20
+ H7 = np.array([0, -1680, 0, 3360, 0, -1344, 0, 128])
21
+ H8 = np.array([1680, 0, -13440, 0, 13440, 0, -3584, 0, 256])
22
+ H9 = np.array([0, 30240, 0, -80640, 0, 48384, 0, -9216, 0, 512])
23
+
24
+ Hlist = [H0, H1, H2, H3, H4, H5, H6, H7, H8, H9]
25
+
26
+
27
+ def trim(x):
28
+ return herm.hermtrim(x, tol=1e-6)
29
+
30
+
31
+ class TestConstants:
32
+
33
+ def test_hermdomain(self):
34
+ assert_equal(herm.hermdomain, [-1, 1])
35
+
36
+ def test_hermzero(self):
37
+ assert_equal(herm.hermzero, [0])
38
+
39
+ def test_hermone(self):
40
+ assert_equal(herm.hermone, [1])
41
+
42
+ def test_hermx(self):
43
+ assert_equal(herm.hermx, [0, .5])
44
+
45
+
46
+ class TestArithmetic:
47
+ x = np.linspace(-3, 3, 100)
48
+
49
+ def test_hermadd(self):
50
+ for i in range(5):
51
+ for j in range(5):
52
+ msg = f"At i={i}, j={j}"
53
+ tgt = np.zeros(max(i, j) + 1)
54
+ tgt[i] += 1
55
+ tgt[j] += 1
56
+ res = herm.hermadd([0]*i + [1], [0]*j + [1])
57
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
58
+
59
+ def test_hermsub(self):
60
+ for i in range(5):
61
+ for j in range(5):
62
+ msg = f"At i={i}, j={j}"
63
+ tgt = np.zeros(max(i, j) + 1)
64
+ tgt[i] += 1
65
+ tgt[j] -= 1
66
+ res = herm.hermsub([0]*i + [1], [0]*j + [1])
67
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
68
+
69
+ def test_hermmulx(self):
70
+ assert_equal(herm.hermmulx([0]), [0])
71
+ assert_equal(herm.hermmulx([1]), [0, .5])
72
+ for i in range(1, 5):
73
+ ser = [0]*i + [1]
74
+ tgt = [0]*(i - 1) + [i, 0, .5]
75
+ assert_equal(herm.hermmulx(ser), tgt)
76
+
77
+ def test_hermmul(self):
78
+ # check values of result
79
+ for i in range(5):
80
+ pol1 = [0]*i + [1]
81
+ val1 = herm.hermval(self.x, pol1)
82
+ for j in range(5):
83
+ msg = f"At i={i}, j={j}"
84
+ pol2 = [0]*j + [1]
85
+ val2 = herm.hermval(self.x, pol2)
86
+ pol3 = herm.hermmul(pol1, pol2)
87
+ val3 = herm.hermval(self.x, pol3)
88
+ assert_(len(pol3) == i + j + 1, msg)
89
+ assert_almost_equal(val3, val1*val2, err_msg=msg)
90
+
91
+ def test_hermdiv(self):
92
+ for i in range(5):
93
+ for j in range(5):
94
+ msg = f"At i={i}, j={j}"
95
+ ci = [0]*i + [1]
96
+ cj = [0]*j + [1]
97
+ tgt = herm.hermadd(ci, cj)
98
+ quo, rem = herm.hermdiv(tgt, ci)
99
+ res = herm.hermadd(herm.hermmul(quo, ci), rem)
100
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
101
+
102
+ def test_hermpow(self):
103
+ for i in range(5):
104
+ for j in range(5):
105
+ msg = f"At i={i}, j={j}"
106
+ c = np.arange(i + 1)
107
+ tgt = reduce(herm.hermmul, [c]*j, np.array([1]))
108
+ res = herm.hermpow(c, j)
109
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
110
+
111
+
112
+ class TestEvaluation:
113
+ # coefficients of 1 + 2*x + 3*x**2
114
+ c1d = np.array([2.5, 1., .75])
115
+ c2d = np.einsum('i,j->ij', c1d, c1d)
116
+ c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
117
+
118
+ # some random values in [-1, 1)
119
+ x = np.random.random((3, 5))*2 - 1
120
+ y = polyval(x, [1., 2., 3.])
121
+
122
+ def test_hermval(self):
123
+ #check empty input
124
+ assert_equal(herm.hermval([], [1]).size, 0)
125
+
126
+ #check normal input)
127
+ x = np.linspace(-1, 1)
128
+ y = [polyval(x, c) for c in Hlist]
129
+ for i in range(10):
130
+ msg = f"At i={i}"
131
+ tgt = y[i]
132
+ res = herm.hermval(x, [0]*i + [1])
133
+ assert_almost_equal(res, tgt, err_msg=msg)
134
+
135
+ #check that shape is preserved
136
+ for i in range(3):
137
+ dims = [2]*i
138
+ x = np.zeros(dims)
139
+ assert_equal(herm.hermval(x, [1]).shape, dims)
140
+ assert_equal(herm.hermval(x, [1, 0]).shape, dims)
141
+ assert_equal(herm.hermval(x, [1, 0, 0]).shape, dims)
142
+
143
+ def test_hermval2d(self):
144
+ x1, x2, x3 = self.x
145
+ y1, y2, y3 = self.y
146
+
147
+ #test exceptions
148
+ assert_raises(ValueError, herm.hermval2d, x1, x2[:2], self.c2d)
149
+
150
+ #test values
151
+ tgt = y1*y2
152
+ res = herm.hermval2d(x1, x2, self.c2d)
153
+ assert_almost_equal(res, tgt)
154
+
155
+ #test shape
156
+ z = np.ones((2, 3))
157
+ res = herm.hermval2d(z, z, self.c2d)
158
+ assert_(res.shape == (2, 3))
159
+
160
+ def test_hermval3d(self):
161
+ x1, x2, x3 = self.x
162
+ y1, y2, y3 = self.y
163
+
164
+ #test exceptions
165
+ assert_raises(ValueError, herm.hermval3d, x1, x2, x3[:2], self.c3d)
166
+
167
+ #test values
168
+ tgt = y1*y2*y3
169
+ res = herm.hermval3d(x1, x2, x3, self.c3d)
170
+ assert_almost_equal(res, tgt)
171
+
172
+ #test shape
173
+ z = np.ones((2, 3))
174
+ res = herm.hermval3d(z, z, z, self.c3d)
175
+ assert_(res.shape == (2, 3))
176
+
177
+ def test_hermgrid2d(self):
178
+ x1, x2, x3 = self.x
179
+ y1, y2, y3 = self.y
180
+
181
+ #test values
182
+ tgt = np.einsum('i,j->ij', y1, y2)
183
+ res = herm.hermgrid2d(x1, x2, self.c2d)
184
+ assert_almost_equal(res, tgt)
185
+
186
+ #test shape
187
+ z = np.ones((2, 3))
188
+ res = herm.hermgrid2d(z, z, self.c2d)
189
+ assert_(res.shape == (2, 3)*2)
190
+
191
+ def test_hermgrid3d(self):
192
+ x1, x2, x3 = self.x
193
+ y1, y2, y3 = self.y
194
+
195
+ #test values
196
+ tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
197
+ res = herm.hermgrid3d(x1, x2, x3, self.c3d)
198
+ assert_almost_equal(res, tgt)
199
+
200
+ #test shape
201
+ z = np.ones((2, 3))
202
+ res = herm.hermgrid3d(z, z, z, self.c3d)
203
+ assert_(res.shape == (2, 3)*3)
204
+
205
+
206
+ class TestIntegral:
207
+
208
+ def test_hermint(self):
209
+ # check exceptions
210
+ assert_raises(TypeError, herm.hermint, [0], .5)
211
+ assert_raises(ValueError, herm.hermint, [0], -1)
212
+ assert_raises(ValueError, herm.hermint, [0], 1, [0, 0])
213
+ assert_raises(ValueError, herm.hermint, [0], lbnd=[0])
214
+ assert_raises(ValueError, herm.hermint, [0], scl=[0])
215
+ assert_raises(TypeError, herm.hermint, [0], axis=.5)
216
+
217
+ # test integration of zero polynomial
218
+ for i in range(2, 5):
219
+ k = [0]*(i - 2) + [1]
220
+ res = herm.hermint([0], m=i, k=k)
221
+ assert_almost_equal(res, [0, .5])
222
+
223
+ # check single integration with integration constant
224
+ for i in range(5):
225
+ scl = i + 1
226
+ pol = [0]*i + [1]
227
+ tgt = [i] + [0]*i + [1/scl]
228
+ hermpol = herm.poly2herm(pol)
229
+ hermint = herm.hermint(hermpol, m=1, k=[i])
230
+ res = herm.herm2poly(hermint)
231
+ assert_almost_equal(trim(res), trim(tgt))
232
+
233
+ # check single integration with integration constant and lbnd
234
+ for i in range(5):
235
+ scl = i + 1
236
+ pol = [0]*i + [1]
237
+ hermpol = herm.poly2herm(pol)
238
+ hermint = herm.hermint(hermpol, m=1, k=[i], lbnd=-1)
239
+ assert_almost_equal(herm.hermval(-1, hermint), i)
240
+
241
+ # check single integration with integration constant and scaling
242
+ for i in range(5):
243
+ scl = i + 1
244
+ pol = [0]*i + [1]
245
+ tgt = [i] + [0]*i + [2/scl]
246
+ hermpol = herm.poly2herm(pol)
247
+ hermint = herm.hermint(hermpol, m=1, k=[i], scl=2)
248
+ res = herm.herm2poly(hermint)
249
+ assert_almost_equal(trim(res), trim(tgt))
250
+
251
+ # check multiple integrations with default k
252
+ for i in range(5):
253
+ for j in range(2, 5):
254
+ pol = [0]*i + [1]
255
+ tgt = pol[:]
256
+ for k in range(j):
257
+ tgt = herm.hermint(tgt, m=1)
258
+ res = herm.hermint(pol, m=j)
259
+ assert_almost_equal(trim(res), trim(tgt))
260
+
261
+ # check multiple integrations with defined k
262
+ for i in range(5):
263
+ for j in range(2, 5):
264
+ pol = [0]*i + [1]
265
+ tgt = pol[:]
266
+ for k in range(j):
267
+ tgt = herm.hermint(tgt, m=1, k=[k])
268
+ res = herm.hermint(pol, m=j, k=list(range(j)))
269
+ assert_almost_equal(trim(res), trim(tgt))
270
+
271
+ # check multiple integrations with lbnd
272
+ for i in range(5):
273
+ for j in range(2, 5):
274
+ pol = [0]*i + [1]
275
+ tgt = pol[:]
276
+ for k in range(j):
277
+ tgt = herm.hermint(tgt, m=1, k=[k], lbnd=-1)
278
+ res = herm.hermint(pol, m=j, k=list(range(j)), lbnd=-1)
279
+ assert_almost_equal(trim(res), trim(tgt))
280
+
281
+ # check multiple integrations with scaling
282
+ for i in range(5):
283
+ for j in range(2, 5):
284
+ pol = [0]*i + [1]
285
+ tgt = pol[:]
286
+ for k in range(j):
287
+ tgt = herm.hermint(tgt, m=1, k=[k], scl=2)
288
+ res = herm.hermint(pol, m=j, k=list(range(j)), scl=2)
289
+ assert_almost_equal(trim(res), trim(tgt))
290
+
291
+ def test_hermint_axis(self):
292
+ # check that axis keyword works
293
+ c2d = np.random.random((3, 4))
294
+
295
+ tgt = np.vstack([herm.hermint(c) for c in c2d.T]).T
296
+ res = herm.hermint(c2d, axis=0)
297
+ assert_almost_equal(res, tgt)
298
+
299
+ tgt = np.vstack([herm.hermint(c) for c in c2d])
300
+ res = herm.hermint(c2d, axis=1)
301
+ assert_almost_equal(res, tgt)
302
+
303
+ tgt = np.vstack([herm.hermint(c, k=3) for c in c2d])
304
+ res = herm.hermint(c2d, k=3, axis=1)
305
+ assert_almost_equal(res, tgt)
306
+
307
+
308
+ class TestDerivative:
309
+
310
+ def test_hermder(self):
311
+ # check exceptions
312
+ assert_raises(TypeError, herm.hermder, [0], .5)
313
+ assert_raises(ValueError, herm.hermder, [0], -1)
314
+
315
+ # check that zeroth derivative does nothing
316
+ for i in range(5):
317
+ tgt = [0]*i + [1]
318
+ res = herm.hermder(tgt, m=0)
319
+ assert_equal(trim(res), trim(tgt))
320
+
321
+ # check that derivation is the inverse of integration
322
+ for i in range(5):
323
+ for j in range(2, 5):
324
+ tgt = [0]*i + [1]
325
+ res = herm.hermder(herm.hermint(tgt, m=j), m=j)
326
+ assert_almost_equal(trim(res), trim(tgt))
327
+
328
+ # check derivation with scaling
329
+ for i in range(5):
330
+ for j in range(2, 5):
331
+ tgt = [0]*i + [1]
332
+ res = herm.hermder(herm.hermint(tgt, m=j, scl=2), m=j, scl=.5)
333
+ assert_almost_equal(trim(res), trim(tgt))
334
+
335
+ def test_hermder_axis(self):
336
+ # check that axis keyword works
337
+ c2d = np.random.random((3, 4))
338
+
339
+ tgt = np.vstack([herm.hermder(c) for c in c2d.T]).T
340
+ res = herm.hermder(c2d, axis=0)
341
+ assert_almost_equal(res, tgt)
342
+
343
+ tgt = np.vstack([herm.hermder(c) for c in c2d])
344
+ res = herm.hermder(c2d, axis=1)
345
+ assert_almost_equal(res, tgt)
346
+
347
+
348
+ class TestVander:
349
+ # some random values in [-1, 1)
350
+ x = np.random.random((3, 5))*2 - 1
351
+
352
+ def test_hermvander(self):
353
+ # check for 1d x
354
+ x = np.arange(3)
355
+ v = herm.hermvander(x, 3)
356
+ assert_(v.shape == (3, 4))
357
+ for i in range(4):
358
+ coef = [0]*i + [1]
359
+ assert_almost_equal(v[..., i], herm.hermval(x, coef))
360
+
361
+ # check for 2d x
362
+ x = np.array([[1, 2], [3, 4], [5, 6]])
363
+ v = herm.hermvander(x, 3)
364
+ assert_(v.shape == (3, 2, 4))
365
+ for i in range(4):
366
+ coef = [0]*i + [1]
367
+ assert_almost_equal(v[..., i], herm.hermval(x, coef))
368
+
369
+ def test_hermvander2d(self):
370
+ # also tests hermval2d for non-square coefficient array
371
+ x1, x2, x3 = self.x
372
+ c = np.random.random((2, 3))
373
+ van = herm.hermvander2d(x1, x2, [1, 2])
374
+ tgt = herm.hermval2d(x1, x2, c)
375
+ res = np.dot(van, c.flat)
376
+ assert_almost_equal(res, tgt)
377
+
378
+ # check shape
379
+ van = herm.hermvander2d([x1], [x2], [1, 2])
380
+ assert_(van.shape == (1, 5, 6))
381
+
382
+ def test_hermvander3d(self):
383
+ # also tests hermval3d for non-square coefficient array
384
+ x1, x2, x3 = self.x
385
+ c = np.random.random((2, 3, 4))
386
+ van = herm.hermvander3d(x1, x2, x3, [1, 2, 3])
387
+ tgt = herm.hermval3d(x1, x2, x3, c)
388
+ res = np.dot(van, c.flat)
389
+ assert_almost_equal(res, tgt)
390
+
391
+ # check shape
392
+ van = herm.hermvander3d([x1], [x2], [x3], [1, 2, 3])
393
+ assert_(van.shape == (1, 5, 24))
394
+
395
+
396
+ class TestFitting:
397
+
398
+ def test_hermfit(self):
399
+ def f(x):
400
+ return x*(x - 1)*(x - 2)
401
+
402
+ def f2(x):
403
+ return x**4 + x**2 + 1
404
+
405
+ # Test exceptions
406
+ assert_raises(ValueError, herm.hermfit, [1], [1], -1)
407
+ assert_raises(TypeError, herm.hermfit, [[1]], [1], 0)
408
+ assert_raises(TypeError, herm.hermfit, [], [1], 0)
409
+ assert_raises(TypeError, herm.hermfit, [1], [[[1]]], 0)
410
+ assert_raises(TypeError, herm.hermfit, [1, 2], [1], 0)
411
+ assert_raises(TypeError, herm.hermfit, [1], [1, 2], 0)
412
+ assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[[1]])
413
+ assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[1, 1])
414
+ assert_raises(ValueError, herm.hermfit, [1], [1], [-1,])
415
+ assert_raises(ValueError, herm.hermfit, [1], [1], [2, -1, 6])
416
+ assert_raises(TypeError, herm.hermfit, [1], [1], [])
417
+
418
+ # Test fit
419
+ x = np.linspace(0, 2)
420
+ y = f(x)
421
+ #
422
+ coef3 = herm.hermfit(x, y, 3)
423
+ assert_equal(len(coef3), 4)
424
+ assert_almost_equal(herm.hermval(x, coef3), y)
425
+ coef3 = herm.hermfit(x, y, [0, 1, 2, 3])
426
+ assert_equal(len(coef3), 4)
427
+ assert_almost_equal(herm.hermval(x, coef3), y)
428
+ #
429
+ coef4 = herm.hermfit(x, y, 4)
430
+ assert_equal(len(coef4), 5)
431
+ assert_almost_equal(herm.hermval(x, coef4), y)
432
+ coef4 = herm.hermfit(x, y, [0, 1, 2, 3, 4])
433
+ assert_equal(len(coef4), 5)
434
+ assert_almost_equal(herm.hermval(x, coef4), y)
435
+ # check things still work if deg is not in strict increasing
436
+ coef4 = herm.hermfit(x, y, [2, 3, 4, 1, 0])
437
+ assert_equal(len(coef4), 5)
438
+ assert_almost_equal(herm.hermval(x, coef4), y)
439
+ #
440
+ coef2d = herm.hermfit(x, np.array([y, y]).T, 3)
441
+ assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
442
+ coef2d = herm.hermfit(x, np.array([y, y]).T, [0, 1, 2, 3])
443
+ assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
444
+ # test weighting
445
+ w = np.zeros_like(x)
446
+ yw = y.copy()
447
+ w[1::2] = 1
448
+ y[0::2] = 0
449
+ wcoef3 = herm.hermfit(x, yw, 3, w=w)
450
+ assert_almost_equal(wcoef3, coef3)
451
+ wcoef3 = herm.hermfit(x, yw, [0, 1, 2, 3], w=w)
452
+ assert_almost_equal(wcoef3, coef3)
453
+ #
454
+ wcoef2d = herm.hermfit(x, np.array([yw, yw]).T, 3, w=w)
455
+ assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
456
+ wcoef2d = herm.hermfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w)
457
+ assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
458
+ # test scaling with complex values x points whose square
459
+ # is zero when summed.
460
+ x = [1, 1j, -1, -1j]
461
+ assert_almost_equal(herm.hermfit(x, x, 1), [0, .5])
462
+ assert_almost_equal(herm.hermfit(x, x, [0, 1]), [0, .5])
463
+ # test fitting only even Legendre polynomials
464
+ x = np.linspace(-1, 1)
465
+ y = f2(x)
466
+ coef1 = herm.hermfit(x, y, 4)
467
+ assert_almost_equal(herm.hermval(x, coef1), y)
468
+ coef2 = herm.hermfit(x, y, [0, 2, 4])
469
+ assert_almost_equal(herm.hermval(x, coef2), y)
470
+ assert_almost_equal(coef1, coef2)
471
+
472
+
473
+ class TestCompanion:
474
+
475
+ def test_raises(self):
476
+ assert_raises(ValueError, herm.hermcompanion, [])
477
+ assert_raises(ValueError, herm.hermcompanion, [1])
478
+
479
+ def test_dimensions(self):
480
+ for i in range(1, 5):
481
+ coef = [0]*i + [1]
482
+ assert_(herm.hermcompanion(coef).shape == (i, i))
483
+
484
+ def test_linear_root(self):
485
+ assert_(herm.hermcompanion([1, 2])[0, 0] == -.25)
486
+
487
+
488
+ class TestGauss:
489
+
490
+ def test_100(self):
491
+ x, w = herm.hermgauss(100)
492
+
493
+ # test orthogonality. Note that the results need to be normalized,
494
+ # otherwise the huge values that can arise from fast growing
495
+ # functions like Laguerre can be very confusing.
496
+ v = herm.hermvander(x, 99)
497
+ vv = np.dot(v.T * w, v)
498
+ vd = 1/np.sqrt(vv.diagonal())
499
+ vv = vd[:, None] * vv * vd
500
+ assert_almost_equal(vv, np.eye(100))
501
+
502
+ # check that the integral of 1 is correct
503
+ tgt = np.sqrt(np.pi)
504
+ assert_almost_equal(w.sum(), tgt)
505
+
506
+
507
+ class TestMisc:
508
+
509
+ def test_hermfromroots(self):
510
+ res = herm.hermfromroots([])
511
+ assert_almost_equal(trim(res), [1])
512
+ for i in range(1, 5):
513
+ roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
514
+ pol = herm.hermfromroots(roots)
515
+ res = herm.hermval(roots, pol)
516
+ tgt = 0
517
+ assert_(len(pol) == i + 1)
518
+ assert_almost_equal(herm.herm2poly(pol)[-1], 1)
519
+ assert_almost_equal(res, tgt)
520
+
521
+ def test_hermroots(self):
522
+ assert_almost_equal(herm.hermroots([1]), [])
523
+ assert_almost_equal(herm.hermroots([1, 1]), [-.5])
524
+ for i in range(2, 5):
525
+ tgt = np.linspace(-1, 1, i)
526
+ res = herm.hermroots(herm.hermfromroots(tgt))
527
+ assert_almost_equal(trim(res), trim(tgt))
528
+
529
+ def test_hermtrim(self):
530
+ coef = [2, -1, 1, 0]
531
+
532
+ # Test exceptions
533
+ assert_raises(ValueError, herm.hermtrim, coef, -1)
534
+
535
+ # Test results
536
+ assert_equal(herm.hermtrim(coef), coef[:-1])
537
+ assert_equal(herm.hermtrim(coef, 1), coef[:-3])
538
+ assert_equal(herm.hermtrim(coef, 2), [0])
539
+
540
+ def test_hermline(self):
541
+ assert_equal(herm.hermline(3, 4), [3, 2])
542
+
543
+ def test_herm2poly(self):
544
+ for i in range(10):
545
+ assert_almost_equal(herm.herm2poly([0]*i + [1]), Hlist[i])
546
+
547
+ def test_poly2herm(self):
548
+ for i in range(10):
549
+ assert_almost_equal(herm.poly2herm(Hlist[i]), [0]*i + [1])
550
+
551
+ def test_weight(self):
552
+ x = np.linspace(-5, 5, 11)
553
+ tgt = np.exp(-x**2)
554
+ res = herm.hermweight(x)
555
+ assert_almost_equal(res, tgt)
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_legendre.py ADDED
@@ -0,0 +1,568 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for legendre module.
2
+
3
+ """
4
+ from functools import reduce
5
+
6
+ import numpy as np
7
+ import numpy.polynomial.legendre as leg
8
+ from numpy.polynomial.polynomial import polyval
9
+ from numpy.testing import (
10
+ assert_almost_equal, assert_raises, assert_equal, assert_,
11
+ )
12
+
13
+ L0 = np.array([1])
14
+ L1 = np.array([0, 1])
15
+ L2 = np.array([-1, 0, 3])/2
16
+ L3 = np.array([0, -3, 0, 5])/2
17
+ L4 = np.array([3, 0, -30, 0, 35])/8
18
+ L5 = np.array([0, 15, 0, -70, 0, 63])/8
19
+ L6 = np.array([-5, 0, 105, 0, -315, 0, 231])/16
20
+ L7 = np.array([0, -35, 0, 315, 0, -693, 0, 429])/16
21
+ L8 = np.array([35, 0, -1260, 0, 6930, 0, -12012, 0, 6435])/128
22
+ L9 = np.array([0, 315, 0, -4620, 0, 18018, 0, -25740, 0, 12155])/128
23
+
24
+ Llist = [L0, L1, L2, L3, L4, L5, L6, L7, L8, L9]
25
+
26
+
27
def trim(x):
    """Drop trailing Legendre coefficients smaller than 1e-6."""
    return leg.legtrim(x, tol=1e-6)
29
+
30
+
31
class TestConstants:
    """Sanity checks for the module-level Legendre constant arrays."""

    def _check(self, actual, expected):
        # All four constants are tiny integer arrays.
        assert_equal(actual, expected)

    def test_legdomain(self):
        self._check(leg.legdomain, [-1, 1])

    def test_legzero(self):
        self._check(leg.legzero, [0])

    def test_legone(self):
        self._check(leg.legone, [1])

    def test_legx(self):
        # x is itself the first Legendre polynomial P1.
        self._check(leg.legx, [0, 1])
44
+
45
+
46
+ class TestArithmetic:
47
+ x = np.linspace(-1, 1, 100)
48
+
49
+ def test_legadd(self):
50
+ for i in range(5):
51
+ for j in range(5):
52
+ msg = f"At i={i}, j={j}"
53
+ tgt = np.zeros(max(i, j) + 1)
54
+ tgt[i] += 1
55
+ tgt[j] += 1
56
+ res = leg.legadd([0]*i + [1], [0]*j + [1])
57
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
58
+
59
+ def test_legsub(self):
60
+ for i in range(5):
61
+ for j in range(5):
62
+ msg = f"At i={i}, j={j}"
63
+ tgt = np.zeros(max(i, j) + 1)
64
+ tgt[i] += 1
65
+ tgt[j] -= 1
66
+ res = leg.legsub([0]*i + [1], [0]*j + [1])
67
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
68
+
69
+ def test_legmulx(self):
70
+ assert_equal(leg.legmulx([0]), [0])
71
+ assert_equal(leg.legmulx([1]), [0, 1])
72
+ for i in range(1, 5):
73
+ tmp = 2*i + 1
74
+ ser = [0]*i + [1]
75
+ tgt = [0]*(i - 1) + [i/tmp, 0, (i + 1)/tmp]
76
+ assert_equal(leg.legmulx(ser), tgt)
77
+
78
+ def test_legmul(self):
79
+ # check values of result
80
+ for i in range(5):
81
+ pol1 = [0]*i + [1]
82
+ val1 = leg.legval(self.x, pol1)
83
+ for j in range(5):
84
+ msg = f"At i={i}, j={j}"
85
+ pol2 = [0]*j + [1]
86
+ val2 = leg.legval(self.x, pol2)
87
+ pol3 = leg.legmul(pol1, pol2)
88
+ val3 = leg.legval(self.x, pol3)
89
+ assert_(len(pol3) == i + j + 1, msg)
90
+ assert_almost_equal(val3, val1*val2, err_msg=msg)
91
+
92
+ def test_legdiv(self):
93
+ for i in range(5):
94
+ for j in range(5):
95
+ msg = f"At i={i}, j={j}"
96
+ ci = [0]*i + [1]
97
+ cj = [0]*j + [1]
98
+ tgt = leg.legadd(ci, cj)
99
+ quo, rem = leg.legdiv(tgt, ci)
100
+ res = leg.legadd(leg.legmul(quo, ci), rem)
101
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
102
+
103
+ def test_legpow(self):
104
+ for i in range(5):
105
+ for j in range(5):
106
+ msg = f"At i={i}, j={j}"
107
+ c = np.arange(i + 1)
108
+ tgt = reduce(leg.legmul, [c]*j, np.array([1]))
109
+ res = leg.legpow(c, j)
110
+ assert_equal(trim(res), trim(tgt), err_msg=msg)
111
+
112
+
113
+ class TestEvaluation:
114
+ # coefficients of 1 + 2*x + 3*x**2
115
+ c1d = np.array([2., 2., 2.])
116
+ c2d = np.einsum('i,j->ij', c1d, c1d)
117
+ c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
118
+
119
+ # some random values in [-1, 1)
120
+ x = np.random.random((3, 5))*2 - 1
121
+ y = polyval(x, [1., 2., 3.])
122
+
123
+ def test_legval(self):
124
+ #check empty input
125
+ assert_equal(leg.legval([], [1]).size, 0)
126
+
127
+ #check normal input)
128
+ x = np.linspace(-1, 1)
129
+ y = [polyval(x, c) for c in Llist]
130
+ for i in range(10):
131
+ msg = f"At i={i}"
132
+ tgt = y[i]
133
+ res = leg.legval(x, [0]*i + [1])
134
+ assert_almost_equal(res, tgt, err_msg=msg)
135
+
136
+ #check that shape is preserved
137
+ for i in range(3):
138
+ dims = [2]*i
139
+ x = np.zeros(dims)
140
+ assert_equal(leg.legval(x, [1]).shape, dims)
141
+ assert_equal(leg.legval(x, [1, 0]).shape, dims)
142
+ assert_equal(leg.legval(x, [1, 0, 0]).shape, dims)
143
+
144
+ def test_legval2d(self):
145
+ x1, x2, x3 = self.x
146
+ y1, y2, y3 = self.y
147
+
148
+ #test exceptions
149
+ assert_raises(ValueError, leg.legval2d, x1, x2[:2], self.c2d)
150
+
151
+ #test values
152
+ tgt = y1*y2
153
+ res = leg.legval2d(x1, x2, self.c2d)
154
+ assert_almost_equal(res, tgt)
155
+
156
+ #test shape
157
+ z = np.ones((2, 3))
158
+ res = leg.legval2d(z, z, self.c2d)
159
+ assert_(res.shape == (2, 3))
160
+
161
+ def test_legval3d(self):
162
+ x1, x2, x3 = self.x
163
+ y1, y2, y3 = self.y
164
+
165
+ #test exceptions
166
+ assert_raises(ValueError, leg.legval3d, x1, x2, x3[:2], self.c3d)
167
+
168
+ #test values
169
+ tgt = y1*y2*y3
170
+ res = leg.legval3d(x1, x2, x3, self.c3d)
171
+ assert_almost_equal(res, tgt)
172
+
173
+ #test shape
174
+ z = np.ones((2, 3))
175
+ res = leg.legval3d(z, z, z, self.c3d)
176
+ assert_(res.shape == (2, 3))
177
+
178
+ def test_leggrid2d(self):
179
+ x1, x2, x3 = self.x
180
+ y1, y2, y3 = self.y
181
+
182
+ #test values
183
+ tgt = np.einsum('i,j->ij', y1, y2)
184
+ res = leg.leggrid2d(x1, x2, self.c2d)
185
+ assert_almost_equal(res, tgt)
186
+
187
+ #test shape
188
+ z = np.ones((2, 3))
189
+ res = leg.leggrid2d(z, z, self.c2d)
190
+ assert_(res.shape == (2, 3)*2)
191
+
192
+ def test_leggrid3d(self):
193
+ x1, x2, x3 = self.x
194
+ y1, y2, y3 = self.y
195
+
196
+ #test values
197
+ tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
198
+ res = leg.leggrid3d(x1, x2, x3, self.c3d)
199
+ assert_almost_equal(res, tgt)
200
+
201
+ #test shape
202
+ z = np.ones((2, 3))
203
+ res = leg.leggrid3d(z, z, z, self.c3d)
204
+ assert_(res.shape == (2, 3)*3)
205
+
206
+
207
+ class TestIntegral:
208
+
209
+ def test_legint(self):
210
+ # check exceptions
211
+ assert_raises(TypeError, leg.legint, [0], .5)
212
+ assert_raises(ValueError, leg.legint, [0], -1)
213
+ assert_raises(ValueError, leg.legint, [0], 1, [0, 0])
214
+ assert_raises(ValueError, leg.legint, [0], lbnd=[0])
215
+ assert_raises(ValueError, leg.legint, [0], scl=[0])
216
+ assert_raises(TypeError, leg.legint, [0], axis=.5)
217
+
218
+ # test integration of zero polynomial
219
+ for i in range(2, 5):
220
+ k = [0]*(i - 2) + [1]
221
+ res = leg.legint([0], m=i, k=k)
222
+ assert_almost_equal(res, [0, 1])
223
+
224
+ # check single integration with integration constant
225
+ for i in range(5):
226
+ scl = i + 1
227
+ pol = [0]*i + [1]
228
+ tgt = [i] + [0]*i + [1/scl]
229
+ legpol = leg.poly2leg(pol)
230
+ legint = leg.legint(legpol, m=1, k=[i])
231
+ res = leg.leg2poly(legint)
232
+ assert_almost_equal(trim(res), trim(tgt))
233
+
234
+ # check single integration with integration constant and lbnd
235
+ for i in range(5):
236
+ scl = i + 1
237
+ pol = [0]*i + [1]
238
+ legpol = leg.poly2leg(pol)
239
+ legint = leg.legint(legpol, m=1, k=[i], lbnd=-1)
240
+ assert_almost_equal(leg.legval(-1, legint), i)
241
+
242
+ # check single integration with integration constant and scaling
243
+ for i in range(5):
244
+ scl = i + 1
245
+ pol = [0]*i + [1]
246
+ tgt = [i] + [0]*i + [2/scl]
247
+ legpol = leg.poly2leg(pol)
248
+ legint = leg.legint(legpol, m=1, k=[i], scl=2)
249
+ res = leg.leg2poly(legint)
250
+ assert_almost_equal(trim(res), trim(tgt))
251
+
252
+ # check multiple integrations with default k
253
+ for i in range(5):
254
+ for j in range(2, 5):
255
+ pol = [0]*i + [1]
256
+ tgt = pol[:]
257
+ for k in range(j):
258
+ tgt = leg.legint(tgt, m=1)
259
+ res = leg.legint(pol, m=j)
260
+ assert_almost_equal(trim(res), trim(tgt))
261
+
262
+ # check multiple integrations with defined k
263
+ for i in range(5):
264
+ for j in range(2, 5):
265
+ pol = [0]*i + [1]
266
+ tgt = pol[:]
267
+ for k in range(j):
268
+ tgt = leg.legint(tgt, m=1, k=[k])
269
+ res = leg.legint(pol, m=j, k=list(range(j)))
270
+ assert_almost_equal(trim(res), trim(tgt))
271
+
272
+ # check multiple integrations with lbnd
273
+ for i in range(5):
274
+ for j in range(2, 5):
275
+ pol = [0]*i + [1]
276
+ tgt = pol[:]
277
+ for k in range(j):
278
+ tgt = leg.legint(tgt, m=1, k=[k], lbnd=-1)
279
+ res = leg.legint(pol, m=j, k=list(range(j)), lbnd=-1)
280
+ assert_almost_equal(trim(res), trim(tgt))
281
+
282
+ # check multiple integrations with scaling
283
+ for i in range(5):
284
+ for j in range(2, 5):
285
+ pol = [0]*i + [1]
286
+ tgt = pol[:]
287
+ for k in range(j):
288
+ tgt = leg.legint(tgt, m=1, k=[k], scl=2)
289
+ res = leg.legint(pol, m=j, k=list(range(j)), scl=2)
290
+ assert_almost_equal(trim(res), trim(tgt))
291
+
292
+ def test_legint_axis(self):
293
+ # check that axis keyword works
294
+ c2d = np.random.random((3, 4))
295
+
296
+ tgt = np.vstack([leg.legint(c) for c in c2d.T]).T
297
+ res = leg.legint(c2d, axis=0)
298
+ assert_almost_equal(res, tgt)
299
+
300
+ tgt = np.vstack([leg.legint(c) for c in c2d])
301
+ res = leg.legint(c2d, axis=1)
302
+ assert_almost_equal(res, tgt)
303
+
304
+ tgt = np.vstack([leg.legint(c, k=3) for c in c2d])
305
+ res = leg.legint(c2d, k=3, axis=1)
306
+ assert_almost_equal(res, tgt)
307
+
308
+ def test_legint_zerointord(self):
309
+ assert_equal(leg.legint((1, 2, 3), 0), (1, 2, 3))
310
+
311
+
312
+ class TestDerivative:
313
+
314
+ def test_legder(self):
315
+ # check exceptions
316
+ assert_raises(TypeError, leg.legder, [0], .5)
317
+ assert_raises(ValueError, leg.legder, [0], -1)
318
+
319
+ # check that zeroth derivative does nothing
320
+ for i in range(5):
321
+ tgt = [0]*i + [1]
322
+ res = leg.legder(tgt, m=0)
323
+ assert_equal(trim(res), trim(tgt))
324
+
325
+ # check that derivation is the inverse of integration
326
+ for i in range(5):
327
+ for j in range(2, 5):
328
+ tgt = [0]*i + [1]
329
+ res = leg.legder(leg.legint(tgt, m=j), m=j)
330
+ assert_almost_equal(trim(res), trim(tgt))
331
+
332
+ # check derivation with scaling
333
+ for i in range(5):
334
+ for j in range(2, 5):
335
+ tgt = [0]*i + [1]
336
+ res = leg.legder(leg.legint(tgt, m=j, scl=2), m=j, scl=.5)
337
+ assert_almost_equal(trim(res), trim(tgt))
338
+
339
+ def test_legder_axis(self):
340
+ # check that axis keyword works
341
+ c2d = np.random.random((3, 4))
342
+
343
+ tgt = np.vstack([leg.legder(c) for c in c2d.T]).T
344
+ res = leg.legder(c2d, axis=0)
345
+ assert_almost_equal(res, tgt)
346
+
347
+ tgt = np.vstack([leg.legder(c) for c in c2d])
348
+ res = leg.legder(c2d, axis=1)
349
+ assert_almost_equal(res, tgt)
350
+
351
+ def test_legder_orderhigherthancoeff(self):
352
+ c = (1, 2, 3, 4)
353
+ assert_equal(leg.legder(c, 4), [0])
354
+
355
+ class TestVander:
356
+ # some random values in [-1, 1)
357
+ x = np.random.random((3, 5))*2 - 1
358
+
359
+ def test_legvander(self):
360
+ # check for 1d x
361
+ x = np.arange(3)
362
+ v = leg.legvander(x, 3)
363
+ assert_(v.shape == (3, 4))
364
+ for i in range(4):
365
+ coef = [0]*i + [1]
366
+ assert_almost_equal(v[..., i], leg.legval(x, coef))
367
+
368
+ # check for 2d x
369
+ x = np.array([[1, 2], [3, 4], [5, 6]])
370
+ v = leg.legvander(x, 3)
371
+ assert_(v.shape == (3, 2, 4))
372
+ for i in range(4):
373
+ coef = [0]*i + [1]
374
+ assert_almost_equal(v[..., i], leg.legval(x, coef))
375
+
376
+ def test_legvander2d(self):
377
+ # also tests polyval2d for non-square coefficient array
378
+ x1, x2, x3 = self.x
379
+ c = np.random.random((2, 3))
380
+ van = leg.legvander2d(x1, x2, [1, 2])
381
+ tgt = leg.legval2d(x1, x2, c)
382
+ res = np.dot(van, c.flat)
383
+ assert_almost_equal(res, tgt)
384
+
385
+ # check shape
386
+ van = leg.legvander2d([x1], [x2], [1, 2])
387
+ assert_(van.shape == (1, 5, 6))
388
+
389
+ def test_legvander3d(self):
390
+ # also tests polyval3d for non-square coefficient array
391
+ x1, x2, x3 = self.x
392
+ c = np.random.random((2, 3, 4))
393
+ van = leg.legvander3d(x1, x2, x3, [1, 2, 3])
394
+ tgt = leg.legval3d(x1, x2, x3, c)
395
+ res = np.dot(van, c.flat)
396
+ assert_almost_equal(res, tgt)
397
+
398
+ # check shape
399
+ van = leg.legvander3d([x1], [x2], [x3], [1, 2, 3])
400
+ assert_(van.shape == (1, 5, 24))
401
+
402
+ def test_legvander_negdeg(self):
403
+ assert_raises(ValueError, leg.legvander, (1, 2, 3), -1)
404
+
405
+
406
+ class TestFitting:
407
+
408
+ def test_legfit(self):
409
+ def f(x):
410
+ return x*(x - 1)*(x - 2)
411
+
412
+ def f2(x):
413
+ return x**4 + x**2 + 1
414
+
415
+ # Test exceptions
416
+ assert_raises(ValueError, leg.legfit, [1], [1], -1)
417
+ assert_raises(TypeError, leg.legfit, [[1]], [1], 0)
418
+ assert_raises(TypeError, leg.legfit, [], [1], 0)
419
+ assert_raises(TypeError, leg.legfit, [1], [[[1]]], 0)
420
+ assert_raises(TypeError, leg.legfit, [1, 2], [1], 0)
421
+ assert_raises(TypeError, leg.legfit, [1], [1, 2], 0)
422
+ assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[[1]])
423
+ assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[1, 1])
424
+ assert_raises(ValueError, leg.legfit, [1], [1], [-1,])
425
+ assert_raises(ValueError, leg.legfit, [1], [1], [2, -1, 6])
426
+ assert_raises(TypeError, leg.legfit, [1], [1], [])
427
+
428
+ # Test fit
429
+ x = np.linspace(0, 2)
430
+ y = f(x)
431
+ #
432
+ coef3 = leg.legfit(x, y, 3)
433
+ assert_equal(len(coef3), 4)
434
+ assert_almost_equal(leg.legval(x, coef3), y)
435
+ coef3 = leg.legfit(x, y, [0, 1, 2, 3])
436
+ assert_equal(len(coef3), 4)
437
+ assert_almost_equal(leg.legval(x, coef3), y)
438
+ #
439
+ coef4 = leg.legfit(x, y, 4)
440
+ assert_equal(len(coef4), 5)
441
+ assert_almost_equal(leg.legval(x, coef4), y)
442
+ coef4 = leg.legfit(x, y, [0, 1, 2, 3, 4])
443
+ assert_equal(len(coef4), 5)
444
+ assert_almost_equal(leg.legval(x, coef4), y)
445
+ # check things still work if deg is not in strict increasing
446
+ coef4 = leg.legfit(x, y, [2, 3, 4, 1, 0])
447
+ assert_equal(len(coef4), 5)
448
+ assert_almost_equal(leg.legval(x, coef4), y)
449
+ #
450
+ coef2d = leg.legfit(x, np.array([y, y]).T, 3)
451
+ assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
452
+ coef2d = leg.legfit(x, np.array([y, y]).T, [0, 1, 2, 3])
453
+ assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
454
+ # test weighting
455
+ w = np.zeros_like(x)
456
+ yw = y.copy()
457
+ w[1::2] = 1
458
+ y[0::2] = 0
459
+ wcoef3 = leg.legfit(x, yw, 3, w=w)
460
+ assert_almost_equal(wcoef3, coef3)
461
+ wcoef3 = leg.legfit(x, yw, [0, 1, 2, 3], w=w)
462
+ assert_almost_equal(wcoef3, coef3)
463
+ #
464
+ wcoef2d = leg.legfit(x, np.array([yw, yw]).T, 3, w=w)
465
+ assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
466
+ wcoef2d = leg.legfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w)
467
+ assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
468
+ # test scaling with complex values x points whose square
469
+ # is zero when summed.
470
+ x = [1, 1j, -1, -1j]
471
+ assert_almost_equal(leg.legfit(x, x, 1), [0, 1])
472
+ assert_almost_equal(leg.legfit(x, x, [0, 1]), [0, 1])
473
+ # test fitting only even Legendre polynomials
474
+ x = np.linspace(-1, 1)
475
+ y = f2(x)
476
+ coef1 = leg.legfit(x, y, 4)
477
+ assert_almost_equal(leg.legval(x, coef1), y)
478
+ coef2 = leg.legfit(x, y, [0, 2, 4])
479
+ assert_almost_equal(leg.legval(x, coef2), y)
480
+ assert_almost_equal(coef1, coef2)
481
+
482
+
483
+ class TestCompanion:
484
+
485
+ def test_raises(self):
486
+ assert_raises(ValueError, leg.legcompanion, [])
487
+ assert_raises(ValueError, leg.legcompanion, [1])
488
+
489
+ def test_dimensions(self):
490
+ for i in range(1, 5):
491
+ coef = [0]*i + [1]
492
+ assert_(leg.legcompanion(coef).shape == (i, i))
493
+
494
+ def test_linear_root(self):
495
+ assert_(leg.legcompanion([1, 2])[0, 0] == -.5)
496
+
497
+
498
+ class TestGauss:
499
+
500
+ def test_100(self):
501
+ x, w = leg.leggauss(100)
502
+
503
+ # test orthogonality. Note that the results need to be normalized,
504
+ # otherwise the huge values that can arise from fast growing
505
+ # functions like Laguerre can be very confusing.
506
+ v = leg.legvander(x, 99)
507
+ vv = np.dot(v.T * w, v)
508
+ vd = 1/np.sqrt(vv.diagonal())
509
+ vv = vd[:, None] * vv * vd
510
+ assert_almost_equal(vv, np.eye(100))
511
+
512
+ # check that the integral of 1 is correct
513
+ tgt = 2.0
514
+ assert_almost_equal(w.sum(), tgt)
515
+
516
+
517
+ class TestMisc:
518
+
519
+ def test_legfromroots(self):
520
+ res = leg.legfromroots([])
521
+ assert_almost_equal(trim(res), [1])
522
+ for i in range(1, 5):
523
+ roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
524
+ pol = leg.legfromroots(roots)
525
+ res = leg.legval(roots, pol)
526
+ tgt = 0
527
+ assert_(len(pol) == i + 1)
528
+ assert_almost_equal(leg.leg2poly(pol)[-1], 1)
529
+ assert_almost_equal(res, tgt)
530
+
531
+ def test_legroots(self):
532
+ assert_almost_equal(leg.legroots([1]), [])
533
+ assert_almost_equal(leg.legroots([1, 2]), [-.5])
534
+ for i in range(2, 5):
535
+ tgt = np.linspace(-1, 1, i)
536
+ res = leg.legroots(leg.legfromroots(tgt))
537
+ assert_almost_equal(trim(res), trim(tgt))
538
+
539
+ def test_legtrim(self):
540
+ coef = [2, -1, 1, 0]
541
+
542
+ # Test exceptions
543
+ assert_raises(ValueError, leg.legtrim, coef, -1)
544
+
545
+ # Test results
546
+ assert_equal(leg.legtrim(coef), coef[:-1])
547
+ assert_equal(leg.legtrim(coef, 1), coef[:-3])
548
+ assert_equal(leg.legtrim(coef, 2), [0])
549
+
550
+ def test_legline(self):
551
+ assert_equal(leg.legline(3, 4), [3, 4])
552
+
553
+ def test_legline_zeroscl(self):
554
+ assert_equal(leg.legline(3, 0), [3])
555
+
556
+ def test_leg2poly(self):
557
+ for i in range(10):
558
+ assert_almost_equal(leg.leg2poly([0]*i + [1]), Llist[i])
559
+
560
+ def test_poly2leg(self):
561
+ for i in range(10):
562
+ assert_almost_equal(leg.poly2leg(Llist[i]), [0]*i + [1])
563
+
564
+ def test_weight(self):
565
+ x = np.linspace(-1, 1, 11)
566
+ tgt = 1.
567
+ res = leg.legweight(x)
568
+ assert_almost_equal(res, tgt)
llmeval-env/lib/python3.10/site-packages/numpy/polynomial/tests/test_printing.py ADDED
@@ -0,0 +1,530 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from math import nan, inf
2
+ import pytest
3
+ from numpy.core import array, arange, printoptions
4
+ import numpy.polynomial as poly
5
+ from numpy.testing import assert_equal, assert_
6
+
7
+ # For testing polynomial printing with object arrays
8
+ from fractions import Fraction
9
+ from decimal import Decimal
10
+
11
+
12
+ class TestStrUnicodeSuperSubscripts:
13
+
14
+ @pytest.fixture(scope='class', autouse=True)
15
+ def use_unicode(self):
16
+ poly.set_default_printstyle('unicode')
17
+
18
+ @pytest.mark.parametrize(('inp', 'tgt'), (
19
+ ([1, 2, 3], "1.0 + 2.0·x + 3.0·x²"),
20
+ ([-1, 0, 3, -1], "-1.0 + 0.0·x + 3.0·x² - 1.0·x³"),
21
+ (arange(12), ("0.0 + 1.0·x + 2.0·x² + 3.0·x³ + 4.0·x⁴ + 5.0·x⁵ + "
22
+ "6.0·x⁶ + 7.0·x⁷ +\n8.0·x⁸ + 9.0·x⁹ + 10.0·x¹⁰ + "
23
+ "11.0·x¹¹")),
24
+ ))
25
+ def test_polynomial_str(self, inp, tgt):
26
+ res = str(poly.Polynomial(inp))
27
+ assert_equal(res, tgt)
28
+
29
+ @pytest.mark.parametrize(('inp', 'tgt'), (
30
+ ([1, 2, 3], "1.0 + 2.0·T₁(x) + 3.0·T₂(x)"),
31
+ ([-1, 0, 3, -1], "-1.0 + 0.0·T₁(x) + 3.0·T₂(x) - 1.0·T₃(x)"),
32
+ (arange(12), ("0.0 + 1.0·T₁(x) + 2.0·T₂(x) + 3.0·T₃(x) + 4.0·T₄(x) + "
33
+ "5.0·T₅(x) +\n6.0·T₆(x) + 7.0·T₇(x) + 8.0·T₈(x) + "
34
+ "9.0·T₉(x) + 10.0·T₁₀(x) + 11.0·T₁₁(x)")),
35
+ ))
36
+ def test_chebyshev_str(self, inp, tgt):
37
+ res = str(poly.Chebyshev(inp))
38
+ assert_equal(res, tgt)
39
+
40
+ @pytest.mark.parametrize(('inp', 'tgt'), (
41
+ ([1, 2, 3], "1.0 + 2.0·P₁(x) + 3.0·P₂(x)"),
42
+ ([-1, 0, 3, -1], "-1.0 + 0.0·P₁(x) + 3.0·P₂(x) - 1.0·P₃(x)"),
43
+ (arange(12), ("0.0 + 1.0·P₁(x) + 2.0·P₂(x) + 3.0·P₃(x) + 4.0·P₄(x) + "
44
+ "5.0·P₅(x) +\n6.0·P₆(x) + 7.0·P₇(x) + 8.0·P₈(x) + "
45
+ "9.0·P₉(x) + 10.0·P₁₀(x) + 11.0·P₁₁(x)")),
46
+ ))
47
+ def test_legendre_str(self, inp, tgt):
48
+ res = str(poly.Legendre(inp))
49
+ assert_equal(res, tgt)
50
+
51
+ @pytest.mark.parametrize(('inp', 'tgt'), (
52
+ ([1, 2, 3], "1.0 + 2.0·H₁(x) + 3.0·H₂(x)"),
53
+ ([-1, 0, 3, -1], "-1.0 + 0.0·H₁(x) + 3.0·H₂(x) - 1.0·H₃(x)"),
54
+ (arange(12), ("0.0 + 1.0·H₁(x) + 2.0·H₂(x) + 3.0·H₃(x) + 4.0·H₄(x) + "
55
+ "5.0·H₅(x) +\n6.0·H₆(x) + 7.0·H₇(x) + 8.0·H₈(x) + "
56
+ "9.0·H₉(x) + 10.0·H₁₀(x) + 11.0·H₁₁(x)")),
57
+ ))
58
+ def test_hermite_str(self, inp, tgt):
59
+ res = str(poly.Hermite(inp))
60
+ assert_equal(res, tgt)
61
+
62
+ @pytest.mark.parametrize(('inp', 'tgt'), (
63
+ ([1, 2, 3], "1.0 + 2.0·He₁(x) + 3.0·He₂(x)"),
64
+ ([-1, 0, 3, -1], "-1.0 + 0.0·He₁(x) + 3.0·He₂(x) - 1.0·He₃(x)"),
65
+ (arange(12), ("0.0 + 1.0·He₁(x) + 2.0·He₂(x) + 3.0·He₃(x) + "
66
+ "4.0·He₄(x) + 5.0·He₅(x) +\n6.0·He₆(x) + 7.0·He₇(x) + "
67
+ "8.0·He₈(x) + 9.0·He₉(x) + 10.0·He₁₀(x) +\n"
68
+ "11.0·He₁₁(x)")),
69
+ ))
70
+ def test_hermiteE_str(self, inp, tgt):
71
+ res = str(poly.HermiteE(inp))
72
+ assert_equal(res, tgt)
73
+
74
+ @pytest.mark.parametrize(('inp', 'tgt'), (
75
+ ([1, 2, 3], "1.0 + 2.0·L₁(x) + 3.0·L₂(x)"),
76
+ ([-1, 0, 3, -1], "-1.0 + 0.0·L₁(x) + 3.0·L₂(x) - 1.0·L₃(x)"),
77
+ (arange(12), ("0.0 + 1.0·L₁(x) + 2.0·L₂(x) + 3.0·L₃(x) + 4.0·L₄(x) + "
78
+ "5.0·L₅(x) +\n6.0·L₆(x) + 7.0·L₇(x) + 8.0·L₈(x) + "
79
+ "9.0·L₉(x) + 10.0·L₁₀(x) + 11.0·L₁₁(x)")),
80
+ ))
81
+ def test_laguerre_str(self, inp, tgt):
82
+ res = str(poly.Laguerre(inp))
83
+ assert_equal(res, tgt)
84
+
85
+
86
+ class TestStrAscii:
87
+
88
+ @pytest.fixture(scope='class', autouse=True)
89
+ def use_ascii(self):
90
+ poly.set_default_printstyle('ascii')
91
+
92
+ @pytest.mark.parametrize(('inp', 'tgt'), (
93
+ ([1, 2, 3], "1.0 + 2.0 x + 3.0 x**2"),
94
+ ([-1, 0, 3, -1], "-1.0 + 0.0 x + 3.0 x**2 - 1.0 x**3"),
95
+ (arange(12), ("0.0 + 1.0 x + 2.0 x**2 + 3.0 x**3 + 4.0 x**4 + "
96
+ "5.0 x**5 + 6.0 x**6 +\n7.0 x**7 + 8.0 x**8 + "
97
+ "9.0 x**9 + 10.0 x**10 + 11.0 x**11")),
98
+ ))
99
+ def test_polynomial_str(self, inp, tgt):
100
+ res = str(poly.Polynomial(inp))
101
+ assert_equal(res, tgt)
102
+
103
+ @pytest.mark.parametrize(('inp', 'tgt'), (
104
+ ([1, 2, 3], "1.0 + 2.0 T_1(x) + 3.0 T_2(x)"),
105
+ ([-1, 0, 3, -1], "-1.0 + 0.0 T_1(x) + 3.0 T_2(x) - 1.0 T_3(x)"),
106
+ (arange(12), ("0.0 + 1.0 T_1(x) + 2.0 T_2(x) + 3.0 T_3(x) + "
107
+ "4.0 T_4(x) + 5.0 T_5(x) +\n6.0 T_6(x) + 7.0 T_7(x) + "
108
+ "8.0 T_8(x) + 9.0 T_9(x) + 10.0 T_10(x) +\n"
109
+ "11.0 T_11(x)")),
110
+ ))
111
+ def test_chebyshev_str(self, inp, tgt):
112
+ res = str(poly.Chebyshev(inp))
113
+ assert_equal(res, tgt)
114
+
115
+ @pytest.mark.parametrize(('inp', 'tgt'), (
116
+ ([1, 2, 3], "1.0 + 2.0 P_1(x) + 3.0 P_2(x)"),
117
+ ([-1, 0, 3, -1], "-1.0 + 0.0 P_1(x) + 3.0 P_2(x) - 1.0 P_3(x)"),
118
+ (arange(12), ("0.0 + 1.0 P_1(x) + 2.0 P_2(x) + 3.0 P_3(x) + "
119
+ "4.0 P_4(x) + 5.0 P_5(x) +\n6.0 P_6(x) + 7.0 P_7(x) + "
120
+ "8.0 P_8(x) + 9.0 P_9(x) + 10.0 P_10(x) +\n"
121
+ "11.0 P_11(x)")),
122
+ ))
123
+ def test_legendre_str(self, inp, tgt):
124
+ res = str(poly.Legendre(inp))
125
+ assert_equal(res, tgt)
126
+
127
+ @pytest.mark.parametrize(('inp', 'tgt'), (
128
+ ([1, 2, 3], "1.0 + 2.0 H_1(x) + 3.0 H_2(x)"),
129
+ ([-1, 0, 3, -1], "-1.0 + 0.0 H_1(x) + 3.0 H_2(x) - 1.0 H_3(x)"),
130
+ (arange(12), ("0.0 + 1.0 H_1(x) + 2.0 H_2(x) + 3.0 H_3(x) + "
131
+ "4.0 H_4(x) + 5.0 H_5(x) +\n6.0 H_6(x) + 7.0 H_7(x) + "
132
+ "8.0 H_8(x) + 9.0 H_9(x) + 10.0 H_10(x) +\n"
133
+ "11.0 H_11(x)")),
134
+ ))
135
+ def test_hermite_str(self, inp, tgt):
136
+ res = str(poly.Hermite(inp))
137
+ assert_equal(res, tgt)
138
+
139
+ @pytest.mark.parametrize(('inp', 'tgt'), (
140
+ ([1, 2, 3], "1.0 + 2.0 He_1(x) + 3.0 He_2(x)"),
141
+ ([-1, 0, 3, -1], "-1.0 + 0.0 He_1(x) + 3.0 He_2(x) - 1.0 He_3(x)"),
142
+ (arange(12), ("0.0 + 1.0 He_1(x) + 2.0 He_2(x) + 3.0 He_3(x) + "
143
+ "4.0 He_4(x) +\n5.0 He_5(x) + 6.0 He_6(x) + "
144
+ "7.0 He_7(x) + 8.0 He_8(x) + 9.0 He_9(x) +\n"
145
+ "10.0 He_10(x) + 11.0 He_11(x)")),
146
+ ))
147
+ def test_hermiteE_str(self, inp, tgt):
148
+ res = str(poly.HermiteE(inp))
149
+ assert_equal(res, tgt)
150
+
151
+ @pytest.mark.parametrize(('inp', 'tgt'), (
152
+ ([1, 2, 3], "1.0 + 2.0 L_1(x) + 3.0 L_2(x)"),
153
+ ([-1, 0, 3, -1], "-1.0 + 0.0 L_1(x) + 3.0 L_2(x) - 1.0 L_3(x)"),
154
+ (arange(12), ("0.0 + 1.0 L_1(x) + 2.0 L_2(x) + 3.0 L_3(x) + "
155
+ "4.0 L_4(x) + 5.0 L_5(x) +\n6.0 L_6(x) + 7.0 L_7(x) + "
156
+ "8.0 L_8(x) + 9.0 L_9(x) + 10.0 L_10(x) +\n"
157
+ "11.0 L_11(x)")),
158
+ ))
159
+ def test_laguerre_str(self, inp, tgt):
160
+ res = str(poly.Laguerre(inp))
161
+ assert_equal(res, tgt)
162
+
163
+
164
+ class TestLinebreaking:
165
+
166
+ @pytest.fixture(scope='class', autouse=True)
167
+ def use_ascii(self):
168
+ poly.set_default_printstyle('ascii')
169
+
170
+ def test_single_line_one_less(self):
171
+ # With 'ascii' style, len(str(p)) is default linewidth - 1 (i.e. 74)
172
+ p = poly.Polynomial([12345678, 12345678, 12345678, 12345678, 123])
173
+ assert_equal(len(str(p)), 74)
174
+ assert_equal(str(p), (
175
+ '12345678.0 + 12345678.0 x + 12345678.0 x**2 + '
176
+ '12345678.0 x**3 + 123.0 x**4'
177
+ ))
178
+
179
+ def test_num_chars_is_linewidth(self):
180
+ # len(str(p)) == default linewidth == 75
181
+ p = poly.Polynomial([12345678, 12345678, 12345678, 12345678, 1234])
182
+ assert_equal(len(str(p)), 75)
183
+ assert_equal(str(p), (
184
+ '12345678.0 + 12345678.0 x + 12345678.0 x**2 + '
185
+ '12345678.0 x**3 +\n1234.0 x**4'
186
+ ))
187
+
188
+ def test_first_linebreak_multiline_one_less_than_linewidth(self):
189
+ # Multiline str where len(first_line) + len(next_term) == lw - 1 == 74
190
+ p = poly.Polynomial(
191
+ [12345678, 12345678, 12345678, 12345678, 1, 12345678]
192
+ )
193
+ assert_equal(len(str(p).split('\n')[0]), 74)
194
+ assert_equal(str(p), (
195
+ '12345678.0 + 12345678.0 x + 12345678.0 x**2 + '
196
+ '12345678.0 x**3 + 1.0 x**4 +\n12345678.0 x**5'
197
+ ))
198
+
199
+ def test_first_linebreak_multiline_on_linewidth(self):
200
+ # First line is one character longer than previous test
201
+ p = poly.Polynomial(
202
+ [12345678, 12345678, 12345678, 12345678.12, 1, 12345678]
203
+ )
204
+ assert_equal(str(p), (
205
+ '12345678.0 + 12345678.0 x + 12345678.0 x**2 + '
206
+ '12345678.12 x**3 +\n1.0 x**4 + 12345678.0 x**5'
207
+ ))
208
+
209
+ @pytest.mark.parametrize(('lw', 'tgt'), (
210
+ (75, ('0.0 + 10.0 x + 200.0 x**2 + 3000.0 x**3 + 40000.0 x**4 + '
211
+ '500000.0 x**5 +\n600000.0 x**6 + 70000.0 x**7 + 8000.0 x**8 + '
212
+ '900.0 x**9')),
213
+ (45, ('0.0 + 10.0 x + 200.0 x**2 + 3000.0 x**3 +\n40000.0 x**4 + '
214
+ '500000.0 x**5 +\n600000.0 x**6 + 70000.0 x**7 + 8000.0 x**8 +\n'
215
+ '900.0 x**9')),
216
+ (132, ('0.0 + 10.0 x + 200.0 x**2 + 3000.0 x**3 + 40000.0 x**4 + '
217
+ '500000.0 x**5 + 600000.0 x**6 + 70000.0 x**7 + 8000.0 x**8 + '
218
+ '900.0 x**9')),
219
+ ))
220
+ def test_linewidth_printoption(self, lw, tgt):
221
+ p = poly.Polynomial(
222
+ [0, 10, 200, 3000, 40000, 500000, 600000, 70000, 8000, 900]
223
+ )
224
+ with printoptions(linewidth=lw):
225
+ assert_equal(str(p), tgt)
226
+ for line in str(p).split('\n'):
227
+ assert_(len(line) < lw)
228
+
229
+
230
+ def test_set_default_printoptions():
231
+ p = poly.Polynomial([1, 2, 3])
232
+ c = poly.Chebyshev([1, 2, 3])
233
+ poly.set_default_printstyle('ascii')
234
+ assert_equal(str(p), "1.0 + 2.0 x + 3.0 x**2")
235
+ assert_equal(str(c), "1.0 + 2.0 T_1(x) + 3.0 T_2(x)")
236
+ poly.set_default_printstyle('unicode')
237
+ assert_equal(str(p), "1.0 + 2.0·x + 3.0·x²")
238
+ assert_equal(str(c), "1.0 + 2.0·T₁(x) + 3.0·T₂(x)")
239
+ with pytest.raises(ValueError):
240
+ poly.set_default_printstyle('invalid_input')
241
+
242
+
243
+ def test_complex_coefficients():
244
+ """Test both numpy and built-in complex."""
245
+ coefs = [0+1j, 1+1j, -2+2j, 3+0j]
246
+ # numpy complex
247
+ p1 = poly.Polynomial(coefs)
248
+ # Python complex
249
+ p2 = poly.Polynomial(array(coefs, dtype=object))
250
+ poly.set_default_printstyle('unicode')
251
+ assert_equal(str(p1), "1j + (1+1j)·x - (2-2j)·x² + (3+0j)·x³")
252
+ assert_equal(str(p2), "1j + (1+1j)·x + (-2+2j)·x² + (3+0j)·x³")
253
+ poly.set_default_printstyle('ascii')
254
+ assert_equal(str(p1), "1j + (1+1j) x - (2-2j) x**2 + (3+0j) x**3")
255
+ assert_equal(str(p2), "1j + (1+1j) x + (-2+2j) x**2 + (3+0j) x**3")
256
+
257
+
258
+ @pytest.mark.parametrize(('coefs', 'tgt'), (
259
+ (array([Fraction(1, 2), Fraction(3, 4)], dtype=object), (
260
+ "1/2 + 3/4·x"
261
+ )),
262
+ (array([1, 2, Fraction(5, 7)], dtype=object), (
263
+ "1 + 2·x + 5/7·x²"
264
+ )),
265
+ (array([Decimal('1.00'), Decimal('2.2'), 3], dtype=object), (
266
+ "1.00 + 2.2·x + 3·x²"
267
+ )),
268
+ ))
269
+ def test_numeric_object_coefficients(coefs, tgt):
270
+ p = poly.Polynomial(coefs)
271
+ poly.set_default_printstyle('unicode')
272
+ assert_equal(str(p), tgt)
273
+
274
+
275
+ @pytest.mark.parametrize(('coefs', 'tgt'), (
276
+ (array([1, 2, 'f'], dtype=object), '1 + 2·x + f·x²'),
277
+ (array([1, 2, [3, 4]], dtype=object), '1 + 2·x + [3, 4]·x²'),
278
+ ))
279
+ def test_nonnumeric_object_coefficients(coefs, tgt):
280
+ """
281
+ Test coef fallback for object arrays of non-numeric coefficients.
282
+ """
283
+ p = poly.Polynomial(coefs)
284
+ poly.set_default_printstyle('unicode')
285
+ assert_equal(str(p), tgt)
286
+
287
+
288
+ class TestFormat:
289
+ def test_format_unicode(self):
290
+ poly.set_default_printstyle('ascii')
291
+ p = poly.Polynomial([1, 2, 0, -1])
292
+ assert_equal(format(p, 'unicode'), "1.0 + 2.0·x + 0.0·x² - 1.0·x³")
293
+
294
+ def test_format_ascii(self):
295
+ poly.set_default_printstyle('unicode')
296
+ p = poly.Polynomial([1, 2, 0, -1])
297
+ assert_equal(
298
+ format(p, 'ascii'), "1.0 + 2.0 x + 0.0 x**2 - 1.0 x**3"
299
+ )
300
+
301
+ def test_empty_formatstr(self):
302
+ poly.set_default_printstyle('ascii')
303
+ p = poly.Polynomial([1, 2, 3])
304
+ assert_equal(format(p), "1.0 + 2.0 x + 3.0 x**2")
305
+ assert_equal(f"{p}", "1.0 + 2.0 x + 3.0 x**2")
306
+
307
+ def test_bad_formatstr(self):
308
+ p = poly.Polynomial([1, 2, 0, -1])
309
+ with pytest.raises(ValueError):
310
+ format(p, '.2f')
311
+
312
+
313
+ @pytest.mark.parametrize(('poly', 'tgt'), (
314
+ (poly.Polynomial, '1.0 + 2.0·z + 3.0·z²'),
315
+ (poly.Chebyshev, '1.0 + 2.0·T₁(z) + 3.0·T₂(z)'),
316
+ (poly.Hermite, '1.0 + 2.0·H₁(z) + 3.0·H₂(z)'),
317
+ (poly.HermiteE, '1.0 + 2.0·He₁(z) + 3.0·He₂(z)'),
318
+ (poly.Laguerre, '1.0 + 2.0·L₁(z) + 3.0·L₂(z)'),
319
+ (poly.Legendre, '1.0 + 2.0·P₁(z) + 3.0·P₂(z)'),
320
+ ))
321
+ def test_symbol(poly, tgt):
322
+ p = poly([1, 2, 3], symbol='z')
323
+ assert_equal(f"{p:unicode}", tgt)
324
+
325
+
326
+ class TestRepr:
327
+ def test_polynomial_str(self):
328
+ res = repr(poly.Polynomial([0, 1]))
329
+ tgt = (
330
+ "Polynomial([0., 1.], domain=[-1, 1], window=[-1, 1], "
331
+ "symbol='x')"
332
+ )
333
+ assert_equal(res, tgt)
334
+
335
+ def test_chebyshev_str(self):
336
+ res = repr(poly.Chebyshev([0, 1]))
337
+ tgt = (
338
+ "Chebyshev([0., 1.], domain=[-1, 1], window=[-1, 1], "
339
+ "symbol='x')"
340
+ )
341
+ assert_equal(res, tgt)
342
+
343
+ def test_legendre_repr(self):
344
+ res = repr(poly.Legendre([0, 1]))
345
+ tgt = (
346
+ "Legendre([0., 1.], domain=[-1, 1], window=[-1, 1], "
347
+ "symbol='x')"
348
+ )
349
+ assert_equal(res, tgt)
350
+
351
+ def test_hermite_repr(self):
352
+ res = repr(poly.Hermite([0, 1]))
353
+ tgt = (
354
+ "Hermite([0., 1.], domain=[-1, 1], window=[-1, 1], "
355
+ "symbol='x')"
356
+ )
357
+ assert_equal(res, tgt)
358
+
359
+ def test_hermiteE_repr(self):
360
+ res = repr(poly.HermiteE([0, 1]))
361
+ tgt = (
362
+ "HermiteE([0., 1.], domain=[-1, 1], window=[-1, 1], "
363
+ "symbol='x')"
364
+ )
365
+ assert_equal(res, tgt)
366
+
367
+ def test_laguerre_repr(self):
368
+ res = repr(poly.Laguerre([0, 1]))
369
+ tgt = (
370
+ "Laguerre([0., 1.], domain=[0, 1], window=[0, 1], "
371
+ "symbol='x')"
372
+ )
373
+ assert_equal(res, tgt)
374
+
375
+
376
+ class TestLatexRepr:
377
+ """Test the latex repr used by Jupyter"""
378
+
379
+ def as_latex(self, obj):
380
+ # right now we ignore the formatting of scalars in our tests, since
381
+ # it makes them too verbose. Ideally, the formatting of scalars will
382
+ # be fixed such that tests below continue to pass
383
+ obj._repr_latex_scalar = lambda x, parens=False: str(x)
384
+ try:
385
+ return obj._repr_latex_()
386
+ finally:
387
+ del obj._repr_latex_scalar
388
+
389
+ def test_simple_polynomial(self):
390
+ # default input
391
+ p = poly.Polynomial([1, 2, 3])
392
+ assert_equal(self.as_latex(p),
393
+ r'$x \mapsto 1.0 + 2.0\,x + 3.0\,x^{2}$')
394
+
395
+ # translated input
396
+ p = poly.Polynomial([1, 2, 3], domain=[-2, 0])
397
+ assert_equal(self.as_latex(p),
398
+ r'$x \mapsto 1.0 + 2.0\,\left(1.0 + x\right) + 3.0\,\left(1.0 + x\right)^{2}$')
399
+
400
+ # scaled input
401
+ p = poly.Polynomial([1, 2, 3], domain=[-0.5, 0.5])
402
+ assert_equal(self.as_latex(p),
403
+ r'$x \mapsto 1.0 + 2.0\,\left(2.0x\right) + 3.0\,\left(2.0x\right)^{2}$')
404
+
405
+ # affine input
406
+ p = poly.Polynomial([1, 2, 3], domain=[-1, 0])
407
+ assert_equal(self.as_latex(p),
408
+ r'$x \mapsto 1.0 + 2.0\,\left(1.0 + 2.0x\right) + 3.0\,\left(1.0 + 2.0x\right)^{2}$')
409
+
410
+ def test_basis_func(self):
411
+ p = poly.Chebyshev([1, 2, 3])
412
+ assert_equal(self.as_latex(p),
413
+ r'$x \mapsto 1.0\,{T}_{0}(x) + 2.0\,{T}_{1}(x) + 3.0\,{T}_{2}(x)$')
414
+ # affine input - check no surplus parens are added
415
+ p = poly.Chebyshev([1, 2, 3], domain=[-1, 0])
416
+ assert_equal(self.as_latex(p),
417
+ r'$x \mapsto 1.0\,{T}_{0}(1.0 + 2.0x) + 2.0\,{T}_{1}(1.0 + 2.0x) + 3.0\,{T}_{2}(1.0 + 2.0x)$')
418
+
419
+ def test_multichar_basis_func(self):
420
+ p = poly.HermiteE([1, 2, 3])
421
+ assert_equal(self.as_latex(p),
422
+ r'$x \mapsto 1.0\,{He}_{0}(x) + 2.0\,{He}_{1}(x) + 3.0\,{He}_{2}(x)$')
423
+
424
+ def test_symbol_basic(self):
425
+ # default input
426
+ p = poly.Polynomial([1, 2, 3], symbol='z')
427
+ assert_equal(self.as_latex(p),
428
+ r'$z \mapsto 1.0 + 2.0\,z + 3.0\,z^{2}$')
429
+
430
+ # translated input
431
+ p = poly.Polynomial([1, 2, 3], domain=[-2, 0], symbol='z')
432
+ assert_equal(
433
+ self.as_latex(p),
434
+ (
435
+ r'$z \mapsto 1.0 + 2.0\,\left(1.0 + z\right) + 3.0\,'
436
+ r'\left(1.0 + z\right)^{2}$'
437
+ ),
438
+ )
439
+
440
+ # scaled input
441
+ p = poly.Polynomial([1, 2, 3], domain=[-0.5, 0.5], symbol='z')
442
+ assert_equal(
443
+ self.as_latex(p),
444
+ (
445
+ r'$z \mapsto 1.0 + 2.0\,\left(2.0z\right) + 3.0\,'
446
+ r'\left(2.0z\right)^{2}$'
447
+ ),
448
+ )
449
+
450
+ # affine input
451
+ p = poly.Polynomial([1, 2, 3], domain=[-1, 0], symbol='z')
452
+ assert_equal(
453
+ self.as_latex(p),
454
+ (
455
+ r'$z \mapsto 1.0 + 2.0\,\left(1.0 + 2.0z\right) + 3.0\,'
456
+ r'\left(1.0 + 2.0z\right)^{2}$'
457
+ ),
458
+ )
459
+
460
+
461
+ SWITCH_TO_EXP = (
462
+ '1.0 + (1.0e-01) x + (1.0e-02) x**2',
463
+ '1.2 + (1.2e-01) x + (1.2e-02) x**2',
464
+ '1.23 + 0.12 x + (1.23e-02) x**2 + (1.23e-03) x**3',
465
+ '1.235 + 0.123 x + (1.235e-02) x**2 + (1.235e-03) x**3',
466
+ '1.2346 + 0.1235 x + 0.0123 x**2 + (1.2346e-03) x**3 + (1.2346e-04) x**4',
467
+ '1.23457 + 0.12346 x + 0.01235 x**2 + (1.23457e-03) x**3 + '
468
+ '(1.23457e-04) x**4',
469
+ '1.234568 + 0.123457 x + 0.012346 x**2 + 0.001235 x**3 + '
470
+ '(1.234568e-04) x**4 + (1.234568e-05) x**5',
471
+ '1.2345679 + 0.1234568 x + 0.0123457 x**2 + 0.0012346 x**3 + '
472
+ '(1.2345679e-04) x**4 + (1.2345679e-05) x**5')
473
+
474
+ class TestPrintOptions:
475
+ """
476
+ Test the output is properly configured via printoptions.
477
+ The exponential notation is enabled automatically when the values
478
+ are too small or too large.
479
+ """
480
+
481
+ @pytest.fixture(scope='class', autouse=True)
482
+ def use_ascii(self):
483
+ poly.set_default_printstyle('ascii')
484
+
485
+ def test_str(self):
486
+ p = poly.Polynomial([1/2, 1/7, 1/7*10**8, 1/7*10**9])
487
+ assert_equal(str(p), '0.5 + 0.14285714 x + 14285714.28571429 x**2 '
488
+ '+ (1.42857143e+08) x**3')
489
+
490
+ with printoptions(precision=3):
491
+ assert_equal(str(p), '0.5 + 0.143 x + 14285714.286 x**2 '
492
+ '+ (1.429e+08) x**3')
493
+
494
+ def test_latex(self):
495
+ p = poly.Polynomial([1/2, 1/7, 1/7*10**8, 1/7*10**9])
496
+ assert_equal(p._repr_latex_(),
497
+ r'$x \mapsto \text{0.5} + \text{0.14285714}\,x + '
498
+ r'\text{14285714.28571429}\,x^{2} + '
499
+ r'\text{(1.42857143e+08)}\,x^{3}$')
500
+
501
+ with printoptions(precision=3):
502
+ assert_equal(p._repr_latex_(),
503
+ r'$x \mapsto \text{0.5} + \text{0.143}\,x + '
504
+ r'\text{14285714.286}\,x^{2} + \text{(1.429e+08)}\,x^{3}$')
505
+
506
+ def test_fixed(self):
507
+ p = poly.Polynomial([1/2])
508
+ assert_equal(str(p), '0.5')
509
+
510
+ with printoptions(floatmode='fixed'):
511
+ assert_equal(str(p), '0.50000000')
512
+
513
+ with printoptions(floatmode='fixed', precision=4):
514
+ assert_equal(str(p), '0.5000')
515
+
516
+ def test_switch_to_exp(self):
517
+ for i, s in enumerate(SWITCH_TO_EXP):
518
+ with printoptions(precision=i):
519
+ p = poly.Polynomial([1.23456789*10**-i
520
+ for i in range(i//2+3)])
521
+ assert str(p).replace('\n', ' ') == s
522
+
523
+ def test_non_finite(self):
524
+ p = poly.Polynomial([nan, inf])
525
+ assert str(p) == 'nan + inf x'
526
+ assert p._repr_latex_() == r'$x \mapsto \text{nan} + \text{inf}\,x$'
527
+ with printoptions(nanstr='NAN', infstr='INF'):
528
+ assert str(p) == 'NAN + INF x'
529
+ assert p._repr_latex_() == \
530
+ r'$x \mapsto \text{NAN} + \text{INF}\,x$'
llmeval-env/lib/python3.10/site-packages/numpy/random/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (7.44 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/__pycache__/_pickle.cpython-310.pyc ADDED
Binary file (2.24 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/_common.pxd ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #cython: language_level=3
2
+
3
+ from libc.stdint cimport uint32_t, uint64_t, int32_t, int64_t
4
+
5
+ import numpy as np
6
+ cimport numpy as np
7
+
8
+ from numpy.random cimport bitgen_t
9
+
10
+ cdef double POISSON_LAM_MAX
11
+ cdef double LEGACY_POISSON_LAM_MAX
12
+ cdef uint64_t MAXSIZE
13
+
14
+ cdef enum ConstraintType:
15
+ CONS_NONE
16
+ CONS_NON_NEGATIVE
17
+ CONS_POSITIVE
18
+ CONS_POSITIVE_NOT_NAN
19
+ CONS_BOUNDED_0_1
20
+ CONS_BOUNDED_GT_0_1
21
+ CONS_BOUNDED_LT_0_1
22
+ CONS_GT_1
23
+ CONS_GTE_1
24
+ CONS_POISSON
25
+ LEGACY_CONS_POISSON
26
+
27
+ ctypedef ConstraintType constraint_type
28
+
29
+ cdef object benchmark(bitgen_t *bitgen, object lock, Py_ssize_t cnt, object method)
30
+ cdef object random_raw(bitgen_t *bitgen, object lock, object size, object output)
31
+ cdef object prepare_cffi(bitgen_t *bitgen)
32
+ cdef object prepare_ctypes(bitgen_t *bitgen)
33
+ cdef int check_constraint(double val, object name, constraint_type cons) except -1
34
+ cdef int check_array_constraint(np.ndarray val, object name, constraint_type cons) except -1
35
+
36
+ cdef extern from "include/aligned_malloc.h":
37
+ cdef void *PyArray_realloc_aligned(void *p, size_t n)
38
+ cdef void *PyArray_malloc_aligned(size_t n)
39
+ cdef void *PyArray_calloc_aligned(size_t n, size_t s)
40
+ cdef void PyArray_free_aligned(void *p)
41
+
42
+ ctypedef void (*random_double_fill)(bitgen_t *state, np.npy_intp count, double* out) noexcept nogil
43
+ ctypedef double (*random_double_0)(void *state) noexcept nogil
44
+ ctypedef double (*random_double_1)(void *state, double a) noexcept nogil
45
+ ctypedef double (*random_double_2)(void *state, double a, double b) noexcept nogil
46
+ ctypedef double (*random_double_3)(void *state, double a, double b, double c) noexcept nogil
47
+
48
+ ctypedef void (*random_float_fill)(bitgen_t *state, np.npy_intp count, float* out) noexcept nogil
49
+ ctypedef float (*random_float_0)(bitgen_t *state) noexcept nogil
50
+ ctypedef float (*random_float_1)(bitgen_t *state, float a) noexcept nogil
51
+
52
+ ctypedef int64_t (*random_uint_0)(void *state) noexcept nogil
53
+ ctypedef int64_t (*random_uint_d)(void *state, double a) noexcept nogil
54
+ ctypedef int64_t (*random_uint_dd)(void *state, double a, double b) noexcept nogil
55
+ ctypedef int64_t (*random_uint_di)(void *state, double a, uint64_t b) noexcept nogil
56
+ ctypedef int64_t (*random_uint_i)(void *state, int64_t a) noexcept nogil
57
+ ctypedef int64_t (*random_uint_iii)(void *state, int64_t a, int64_t b, int64_t c) noexcept nogil
58
+
59
+ ctypedef uint32_t (*random_uint_0_32)(bitgen_t *state) noexcept nogil
60
+ ctypedef uint32_t (*random_uint_1_i_32)(bitgen_t *state, uint32_t a) noexcept nogil
61
+
62
+ ctypedef int32_t (*random_int_2_i_32)(bitgen_t *state, int32_t a, int32_t b) noexcept nogil
63
+ ctypedef int64_t (*random_int_2_i)(bitgen_t *state, int64_t a, int64_t b) noexcept nogil
64
+
65
+ cdef double kahan_sum(double *darr, np.npy_intp n) noexcept
66
+
67
+ cdef inline double uint64_to_double(uint64_t rnd) noexcept nogil:
68
+ return (rnd >> 11) * (1.0 / 9007199254740992.0)
69
+
70
+ cdef object double_fill(void *func, bitgen_t *state, object size, object lock, object out)
71
+
72
+ cdef object float_fill(void *func, bitgen_t *state, object size, object lock, object out)
73
+
74
+ cdef object float_fill_from_double(void *func, bitgen_t *state, object size, object lock, object out)
75
+
76
+ cdef object wrap_int(object val, object bits)
77
+
78
+ cdef np.ndarray int_to_array(object value, object name, object bits, object uint_size)
79
+
80
+ cdef validate_output_shape(iter_shape, np.ndarray output)
81
+
82
+ cdef object cont(void *func, void *state, object size, object lock, int narg,
83
+ object a, object a_name, constraint_type a_constraint,
84
+ object b, object b_name, constraint_type b_constraint,
85
+ object c, object c_name, constraint_type c_constraint,
86
+ object out)
87
+
88
+ cdef object disc(void *func, void *state, object size, object lock,
89
+ int narg_double, int narg_int64,
90
+ object a, object a_name, constraint_type a_constraint,
91
+ object b, object b_name, constraint_type b_constraint,
92
+ object c, object c_name, constraint_type c_constraint)
93
+
94
+ cdef object cont_f(void *func, bitgen_t *state, object size, object lock,
95
+ object a, object a_name, constraint_type a_constraint,
96
+ object out)
97
+
98
+ cdef object cont_broadcast_3(void *func, void *state, object size, object lock,
99
+ np.ndarray a_arr, object a_name, constraint_type a_constraint,
100
+ np.ndarray b_arr, object b_name, constraint_type b_constraint,
101
+ np.ndarray c_arr, object c_name, constraint_type c_constraint)
102
+
103
+ cdef object discrete_broadcast_iii(void *func, void *state, object size, object lock,
104
+ np.ndarray a_arr, object a_name, constraint_type a_constraint,
105
+ np.ndarray b_arr, object b_name, constraint_type b_constraint,
106
+ np.ndarray c_arr, object c_name, constraint_type c_constraint)
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/__pycache__/extending.cpython-310.pyc ADDED
Binary file (948 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/__pycache__/parse.cpython-310.pyc ADDED
Binary file (1.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/extending.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Use cffi to access any of the underlying C functions from distributions.h
3
+ """
4
+ import os
5
+ import numpy as np
6
+ import cffi
7
+ from .parse import parse_distributions_h
8
+ ffi = cffi.FFI()
9
+
10
+ inc_dir = os.path.join(np.get_include(), 'numpy')
11
+
12
+ # Basic numpy types
13
+ ffi.cdef('''
14
+ typedef intptr_t npy_intp;
15
+ typedef unsigned char npy_bool;
16
+
17
+ ''')
18
+
19
+ parse_distributions_h(ffi, inc_dir)
20
+
21
+ lib = ffi.dlopen(np.random._generator.__file__)
22
+
23
+ # Compare the distributions.h random_standard_normal_fill to
24
+ # Generator.standard_random
25
+ bit_gen = np.random.PCG64()
26
+ rng = np.random.Generator(bit_gen)
27
+ state = bit_gen.state
28
+
29
+ interface = rng.bit_generator.cffi
30
+ n = 100
31
+ vals_cffi = ffi.new('double[%d]' % n)
32
+ lib.random_standard_normal_fill(interface.bit_generator, n, vals_cffi)
33
+
34
+ # reset the state
35
+ bit_gen.state = state
36
+
37
+ vals = rng.standard_normal(n)
38
+
39
+ for i in range(n):
40
+ assert vals[i] == vals_cffi[i]
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cffi/parse.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+
3
+
4
+ def parse_distributions_h(ffi, inc_dir):
5
+ """
6
+ Parse distributions.h located in inc_dir for CFFI, filling in the ffi.cdef
7
+
8
+ Read the function declarations without the "#define ..." macros that will
9
+ be filled in when loading the library.
10
+ """
11
+
12
+ with open(os.path.join(inc_dir, 'random', 'bitgen.h')) as fid:
13
+ s = []
14
+ for line in fid:
15
+ # massage the include file
16
+ if line.strip().startswith('#'):
17
+ continue
18
+ s.append(line)
19
+ ffi.cdef('\n'.join(s))
20
+
21
+ with open(os.path.join(inc_dir, 'random', 'distributions.h')) as fid:
22
+ s = []
23
+ in_skip = 0
24
+ ignoring = False
25
+ for line in fid:
26
+ # check for and remove extern "C" guards
27
+ if ignoring:
28
+ if line.strip().startswith('#endif'):
29
+ ignoring = False
30
+ continue
31
+ if line.strip().startswith('#ifdef __cplusplus'):
32
+ ignoring = True
33
+
34
+ # massage the include file
35
+ if line.strip().startswith('#'):
36
+ continue
37
+
38
+ # skip any inlined function definition
39
+ # which starts with 'static inline xxx(...) {'
40
+ # and ends with a closing '}'
41
+ if line.strip().startswith('static inline'):
42
+ in_skip += line.count('{')
43
+ continue
44
+ elif in_skip > 0:
45
+ in_skip += line.count('{')
46
+ in_skip -= line.count('}')
47
+ continue
48
+
49
+ # replace defines with their value or remove them
50
+ line = line.replace('DECLDIR', '')
51
+ line = line.replace('RAND_INT_TYPE', 'int64_t')
52
+ s.append(line)
53
+ ffi.cdef('\n'.join(s))
54
+
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/extending.pyx ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ #cython: language_level=3
3
+
4
+ from libc.stdint cimport uint32_t
5
+ from cpython.pycapsule cimport PyCapsule_IsValid, PyCapsule_GetPointer
6
+
7
+ import numpy as np
8
+ cimport numpy as np
9
+ cimport cython
10
+
11
+ from numpy.random cimport bitgen_t
12
+ from numpy.random import PCG64
13
+
14
+ np.import_array()
15
+
16
+
17
+ @cython.boundscheck(False)
18
+ @cython.wraparound(False)
19
+ def uniform_mean(Py_ssize_t n):
20
+ cdef Py_ssize_t i
21
+ cdef bitgen_t *rng
22
+ cdef const char *capsule_name = "BitGenerator"
23
+ cdef double[::1] random_values
24
+ cdef np.ndarray randoms
25
+
26
+ x = PCG64()
27
+ capsule = x.capsule
28
+ if not PyCapsule_IsValid(capsule, capsule_name):
29
+ raise ValueError("Invalid pointer to anon_func_state")
30
+ rng = <bitgen_t *> PyCapsule_GetPointer(capsule, capsule_name)
31
+ random_values = np.empty(n)
32
+ # Best practice is to acquire the lock whenever generating random values.
33
+ # This prevents other threads from modifying the state. Acquiring the lock
34
+ # is only necessary if the GIL is also released, as in this example.
35
+ with x.lock, nogil:
36
+ for i in range(n):
37
+ random_values[i] = rng.next_double(rng.state)
38
+ randoms = np.asarray(random_values)
39
+ return randoms.mean()
40
+
41
+
42
+ # This function is declared nogil so it can be used without the GIL below
43
+ cdef uint32_t bounded_uint(uint32_t lb, uint32_t ub, bitgen_t *rng) nogil:
44
+ cdef uint32_t mask, delta, val
45
+ mask = delta = ub - lb
46
+ mask |= mask >> 1
47
+ mask |= mask >> 2
48
+ mask |= mask >> 4
49
+ mask |= mask >> 8
50
+ mask |= mask >> 16
51
+
52
+ val = rng.next_uint32(rng.state) & mask
53
+ while val > delta:
54
+ val = rng.next_uint32(rng.state) & mask
55
+
56
+ return lb + val
57
+
58
+
59
+ @cython.boundscheck(False)
60
+ @cython.wraparound(False)
61
+ def bounded_uints(uint32_t lb, uint32_t ub, Py_ssize_t n):
62
+ cdef Py_ssize_t i
63
+ cdef bitgen_t *rng
64
+ cdef uint32_t[::1] out
65
+ cdef const char *capsule_name = "BitGenerator"
66
+
67
+ x = PCG64()
68
+ out = np.empty(n, dtype=np.uint32)
69
+ capsule = x.capsule
70
+
71
+ if not PyCapsule_IsValid(capsule, capsule_name):
72
+ raise ValueError("Invalid pointer to anon_func_state")
73
+ rng = <bitgen_t *>PyCapsule_GetPointer(capsule, capsule_name)
74
+
75
+ with x.lock, nogil:
76
+ for i in range(n):
77
+ out[i] = bounded_uint(lb, ub, rng)
78
+ return np.asarray(out)
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/extending_distributions.pyx ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ #cython: language_level=3
3
+ """
4
+ This file shows how the to use a BitGenerator to create a distribution.
5
+ """
6
+ import numpy as np
7
+ cimport numpy as np
8
+ cimport cython
9
+ from cpython.pycapsule cimport PyCapsule_IsValid, PyCapsule_GetPointer
10
+ from libc.stdint cimport uint16_t, uint64_t
11
+ from numpy.random cimport bitgen_t
12
+ from numpy.random import PCG64
13
+ from numpy.random.c_distributions cimport (
14
+ random_standard_uniform_fill, random_standard_uniform_fill_f)
15
+
16
+
17
+ @cython.boundscheck(False)
18
+ @cython.wraparound(False)
19
+ def uniforms(Py_ssize_t n):
20
+ """
21
+ Create an array of `n` uniformly distributed doubles.
22
+ A 'real' distribution would want to process the values into
23
+ some non-uniform distribution
24
+ """
25
+ cdef Py_ssize_t i
26
+ cdef bitgen_t *rng
27
+ cdef const char *capsule_name = "BitGenerator"
28
+ cdef double[::1] random_values
29
+
30
+ x = PCG64()
31
+ capsule = x.capsule
32
+ # Optional check that the capsule if from a BitGenerator
33
+ if not PyCapsule_IsValid(capsule, capsule_name):
34
+ raise ValueError("Invalid pointer to anon_func_state")
35
+ # Cast the pointer
36
+ rng = <bitgen_t *> PyCapsule_GetPointer(capsule, capsule_name)
37
+ random_values = np.empty(n, dtype='float64')
38
+ with x.lock, nogil:
39
+ for i in range(n):
40
+ # Call the function
41
+ random_values[i] = rng.next_double(rng.state)
42
+ randoms = np.asarray(random_values)
43
+
44
+ return randoms
45
+
46
+ # cython example 2
47
+ @cython.boundscheck(False)
48
+ @cython.wraparound(False)
49
+ def uint10_uniforms(Py_ssize_t n):
50
+ """Uniform 10 bit integers stored as 16-bit unsigned integers"""
51
+ cdef Py_ssize_t i
52
+ cdef bitgen_t *rng
53
+ cdef const char *capsule_name = "BitGenerator"
54
+ cdef uint16_t[::1] random_values
55
+ cdef int bits_remaining
56
+ cdef int width = 10
57
+ cdef uint64_t buff, mask = 0x3FF
58
+
59
+ x = PCG64()
60
+ capsule = x.capsule
61
+ if not PyCapsule_IsValid(capsule, capsule_name):
62
+ raise ValueError("Invalid pointer to anon_func_state")
63
+ rng = <bitgen_t *> PyCapsule_GetPointer(capsule, capsule_name)
64
+ random_values = np.empty(n, dtype='uint16')
65
+ # Best practice is to release GIL and acquire the lock
66
+ bits_remaining = 0
67
+ with x.lock, nogil:
68
+ for i in range(n):
69
+ if bits_remaining < width:
70
+ buff = rng.next_uint64(rng.state)
71
+ random_values[i] = buff & mask
72
+ buff >>= width
73
+
74
+ randoms = np.asarray(random_values)
75
+ return randoms
76
+
77
+ # cython example 3
78
+ def uniforms_ex(bit_generator, Py_ssize_t n, dtype=np.float64):
79
+ """
80
+ Create an array of `n` uniformly distributed doubles via a "fill" function.
81
+
82
+ A 'real' distribution would want to process the values into
83
+ some non-uniform distribution
84
+
85
+ Parameters
86
+ ----------
87
+ bit_generator: BitGenerator instance
88
+ n: int
89
+ Output vector length
90
+ dtype: {str, dtype}, optional
91
+ Desired dtype, either 'd' (or 'float64') or 'f' (or 'float32'). The
92
+ default dtype value is 'd'
93
+ """
94
+ cdef Py_ssize_t i
95
+ cdef bitgen_t *rng
96
+ cdef const char *capsule_name = "BitGenerator"
97
+ cdef np.ndarray randoms
98
+
99
+ capsule = bit_generator.capsule
100
+ # Optional check that the capsule if from a BitGenerator
101
+ if not PyCapsule_IsValid(capsule, capsule_name):
102
+ raise ValueError("Invalid pointer to anon_func_state")
103
+ # Cast the pointer
104
+ rng = <bitgen_t *> PyCapsule_GetPointer(capsule, capsule_name)
105
+
106
+ _dtype = np.dtype(dtype)
107
+ randoms = np.empty(n, dtype=_dtype)
108
+ if _dtype == np.float32:
109
+ with bit_generator.lock:
110
+ random_standard_uniform_fill_f(rng, n, <float*>np.PyArray_DATA(randoms))
111
+ elif _dtype == np.float64:
112
+ with bit_generator.lock:
113
+ random_standard_uniform_fill(rng, n, <double*>np.PyArray_DATA(randoms))
114
+ else:
115
+ raise TypeError('Unsupported dtype %r for random' % _dtype)
116
+ return randoms
117
+
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/cython/meson.build ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ project('random-build-examples', 'c', 'cpp', 'cython')
2
+
3
+ py_mod = import('python')
4
+ py3 = py_mod.find_installation(pure: false)
5
+
6
+ cc = meson.get_compiler('c')
7
+ cy = meson.get_compiler('cython')
8
+
9
+ if not cy.version().version_compare('>=0.29.35')
10
+ error('tests requires Cython >= 0.29.35')
11
+ endif
12
+
13
+ _numpy_abs = run_command(py3, ['-c',
14
+ 'import os; os.chdir(".."); import numpy; print(os.path.abspath(numpy.get_include() + "../../.."))'],
15
+ check: true).stdout().strip()
16
+
17
+ npymath_path = _numpy_abs / 'core' / 'lib'
18
+ npy_include_path = _numpy_abs / 'core' / 'include'
19
+ npyrandom_path = _numpy_abs / 'random' / 'lib'
20
+ npymath_lib = cc.find_library('npymath', dirs: npymath_path)
21
+ npyrandom_lib = cc.find_library('npyrandom', dirs: npyrandom_path)
22
+
23
+ py3.extension_module(
24
+ 'extending_distributions',
25
+ 'extending_distributions.pyx',
26
+ install: false,
27
+ include_directories: [npy_include_path],
28
+ dependencies: [npyrandom_lib, npymath_lib],
29
+ )
30
+ py3.extension_module(
31
+ 'extending',
32
+ 'extending.pyx',
33
+ install: false,
34
+ include_directories: [npy_include_path],
35
+ dependencies: [npyrandom_lib, npymath_lib],
36
+ )
37
+ py3.extension_module(
38
+ 'extending_cpp',
39
+ 'extending_distributions.pyx',
40
+ install: false,
41
+ override_options : ['cython_language=cpp'],
42
+ cython_args: ['--module-name', 'extending_cpp'],
43
+ include_directories: [npy_include_path],
44
+ dependencies: [npyrandom_lib, npymath_lib],
45
+ )
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/__pycache__/extending.cpython-310.pyc ADDED
Binary file (2.18 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/__pycache__/extending_distributions.cpython-310.pyc ADDED
Binary file (2.11 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/extending.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import numba as nb
3
+
4
+ from numpy.random import PCG64
5
+ from timeit import timeit
6
+
7
+ bit_gen = PCG64()
8
+ next_d = bit_gen.cffi.next_double
9
+ state_addr = bit_gen.cffi.state_address
10
+
11
+ def normals(n, state):
12
+ out = np.empty(n)
13
+ for i in range((n + 1) // 2):
14
+ x1 = 2.0 * next_d(state) - 1.0
15
+ x2 = 2.0 * next_d(state) - 1.0
16
+ r2 = x1 * x1 + x2 * x2
17
+ while r2 >= 1.0 or r2 == 0.0:
18
+ x1 = 2.0 * next_d(state) - 1.0
19
+ x2 = 2.0 * next_d(state) - 1.0
20
+ r2 = x1 * x1 + x2 * x2
21
+ f = np.sqrt(-2.0 * np.log(r2) / r2)
22
+ out[2 * i] = f * x1
23
+ if 2 * i + 1 < n:
24
+ out[2 * i + 1] = f * x2
25
+ return out
26
+
27
+ # Compile using Numba
28
+ normalsj = nb.jit(normals, nopython=True)
29
+ # Must use state address not state with numba
30
+ n = 10000
31
+
32
+ def numbacall():
33
+ return normalsj(n, state_addr)
34
+
35
+ rg = np.random.Generator(PCG64())
36
+
37
+ def numpycall():
38
+ return rg.normal(size=n)
39
+
40
+ # Check that the functions work
41
+ r1 = numbacall()
42
+ r2 = numpycall()
43
+ assert r1.shape == (n,)
44
+ assert r1.shape == r2.shape
45
+
46
+ t1 = timeit(numbacall, number=1000)
47
+ print(f'{t1:.2f} secs for {n} PCG64 (Numba/PCG64) gaussian randoms')
48
+ t2 = timeit(numpycall, number=1000)
49
+ print(f'{t2:.2f} secs for {n} PCG64 (NumPy/PCG64) gaussian randoms')
50
+
51
+ # example 2
52
+
53
+ next_u32 = bit_gen.ctypes.next_uint32
54
+ ctypes_state = bit_gen.ctypes.state
55
+
56
+ @nb.jit(nopython=True)
57
+ def bounded_uint(lb, ub, state):
58
+ mask = delta = ub - lb
59
+ mask |= mask >> 1
60
+ mask |= mask >> 2
61
+ mask |= mask >> 4
62
+ mask |= mask >> 8
63
+ mask |= mask >> 16
64
+
65
+ val = next_u32(state) & mask
66
+ while val > delta:
67
+ val = next_u32(state) & mask
68
+
69
+ return lb + val
70
+
71
+
72
+ print(bounded_uint(323, 2394691, ctypes_state.value))
73
+
74
+
75
+ @nb.jit(nopython=True)
76
+ def bounded_uints(lb, ub, n, state):
77
+ out = np.empty(n, dtype=np.uint32)
78
+ for i in range(n):
79
+ out[i] = bounded_uint(lb, ub, state)
80
+
81
+
82
+ bounded_uints(323, 2394691, 10000000, ctypes_state.value)
83
+
84
+
llmeval-env/lib/python3.10/site-packages/numpy/random/_examples/numba/extending_distributions.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ r"""
2
+ Building the required library in this example requires a source distribution
3
+ of NumPy or clone of the NumPy git repository since distributions.c is not
4
+ included in binary distributions.
5
+
6
+ On *nix, execute in numpy/random/src/distributions
7
+
8
+ export ${PYTHON_VERSION}=3.8 # Python version
9
+ export PYTHON_INCLUDE=#path to Python's include folder, usually \
10
+ ${PYTHON_HOME}/include/python${PYTHON_VERSION}m
11
+ export NUMPY_INCLUDE=#path to numpy's include folder, usually \
12
+ ${PYTHON_HOME}/lib/python${PYTHON_VERSION}/site-packages/numpy/core/include
13
+ gcc -shared -o libdistributions.so -fPIC distributions.c \
14
+ -I${NUMPY_INCLUDE} -I${PYTHON_INCLUDE}
15
+ mv libdistributions.so ../../_examples/numba/
16
+
17
+ On Windows
18
+
19
+ rem PYTHON_HOME and PYTHON_VERSION are setup dependent, this is an example
20
+ set PYTHON_HOME=c:\Anaconda
21
+ set PYTHON_VERSION=38
22
+ cl.exe /LD .\distributions.c -DDLL_EXPORT \
23
+ -I%PYTHON_HOME%\lib\site-packages\numpy\core\include \
24
+ -I%PYTHON_HOME%\include %PYTHON_HOME%\libs\python%PYTHON_VERSION%.lib
25
+ move distributions.dll ../../_examples/numba/
26
+ """
27
+ import os
28
+
29
+ import numba as nb
30
+ import numpy as np
31
+ from cffi import FFI
32
+
33
+ from numpy.random import PCG64
34
+
35
+ ffi = FFI()
36
+ if os.path.exists('./distributions.dll'):
37
+ lib = ffi.dlopen('./distributions.dll')
38
+ elif os.path.exists('./libdistributions.so'):
39
+ lib = ffi.dlopen('./libdistributions.so')
40
+ else:
41
+ raise RuntimeError('Required DLL/so file was not found.')
42
+
43
+ ffi.cdef("""
44
+ double random_standard_normal(void *bitgen_state);
45
+ """)
46
+ x = PCG64()
47
+ xffi = x.cffi
48
+ bit_generator = xffi.bit_generator
49
+
50
+ random_standard_normal = lib.random_standard_normal
51
+
52
+
53
+ def normals(n, bit_generator):
54
+ out = np.empty(n)
55
+ for i in range(n):
56
+ out[i] = random_standard_normal(bit_generator)
57
+ return out
58
+
59
+
60
+ normalsj = nb.jit(normals, nopython=True)
61
+
62
+ # Numba requires a memory address for void *
63
+ # Can also get address from x.ctypes.bit_generator.value
64
+ bit_generator_address = int(ffi.cast('uintptr_t', bit_generator))
65
+
66
+ norm = normalsj(1000, bit_generator_address)
67
+ print(norm[:12])
llmeval-env/lib/python3.10/site-packages/numpy/random/_sfc64.pyi ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, TypedDict
2
+
3
+ from numpy import dtype as dtype
4
+ from numpy import ndarray as ndarray
5
+ from numpy import uint64
6
+ from numpy.random.bit_generator import BitGenerator, SeedSequence
7
+ from numpy._typing import _ArrayLikeInt_co
8
+
9
+ class _SFC64Internal(TypedDict):
10
+ state: ndarray[Any, dtype[uint64]]
11
+
12
+ class _SFC64State(TypedDict):
13
+ bit_generator: str
14
+ state: _SFC64Internal
15
+ has_uint32: int
16
+ uinteger: int
17
+
18
+ class SFC64(BitGenerator):
19
+ def __init__(self, seed: None | _ArrayLikeInt_co | SeedSequence = ...) -> None: ...
20
+ @property
21
+ def state(
22
+ self,
23
+ ) -> _SFC64State: ...
24
+ @state.setter
25
+ def state(
26
+ self,
27
+ value: _SFC64State,
28
+ ) -> None: ...
llmeval-env/lib/python3.10/site-packages/numpy/random/lib/libnpyrandom.a ADDED
Binary file (71.9 kB). View file
 
llmeval-env/lib/python3.10/site-packages/numpy/random/tests/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/numpy/random/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (191 Bytes). View file