applied-ai-018 commited on
Commit
b62d97e
·
verified ·
1 Parent(s): d2716a8

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_asimddp.c +16 -0
  2. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_asimdfhm.c +19 -0
  3. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_avx512_spr.c +26 -0
  4. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_fma4.c +13 -0
  5. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_sse42.c +20 -0
  6. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_vsx2.c +13 -0
  7. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_vxe.c +25 -0
  8. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_avx512dq_mask.c +16 -0
  9. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_avx512f_reduce.c +41 -0
  10. env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_vsx_asm.c +36 -0
  11. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__init__.py +41 -0
  12. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/__init__.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/autodist.cpython-310.pyc +0 -0
  14. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc +0 -0
  15. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build.cpython-310.pyc +0 -0
  16. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_clib.cpython-310.pyc +0 -0
  17. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_ext.cpython-310.pyc +0 -0
  18. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_py.cpython-310.pyc +0 -0
  19. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_scripts.cpython-310.pyc +0 -0
  20. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_src.cpython-310.pyc +0 -0
  21. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/config.cpython-310.pyc +0 -0
  22. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/config_compiler.cpython-310.pyc +0 -0
  23. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/develop.cpython-310.pyc +0 -0
  24. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/egg_info.cpython-310.pyc +0 -0
  25. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_clib.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_data.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_headers.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/sdist.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/autodist.py +148 -0
  31. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/bdist_rpm.py +22 -0
  32. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build.py +62 -0
  33. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_clib.py +469 -0
  34. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_ext.py +752 -0
  35. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_py.py +31 -0
  36. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_scripts.py +49 -0
  37. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_src.py +773 -0
  38. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/config.py +516 -0
  39. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/config_compiler.py +126 -0
  40. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/develop.py +15 -0
  41. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/egg_info.py +25 -0
  42. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install.py +79 -0
  43. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_clib.py +40 -0
  44. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_data.py +24 -0
  45. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_headers.py +25 -0
  46. env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/sdist.py +27 -0
  47. env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/__init__.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/absoft.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/arm.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/compaq.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_asimddp.c ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifdef _MSC_VER
2
+ #include <Intrin.h>
3
+ #endif
4
+ #include <arm_neon.h>
5
+
6
+ int main(int argc, char **argv)
7
+ {
8
+ unsigned char *src = (unsigned char*)argv[argc-1];
9
+ uint8x16_t v1 = vdupq_n_u8(src[0]), v2 = vdupq_n_u8(src[1]);
10
+ uint32x4_t va = vdupq_n_u32(3);
11
+ int ret = (int)vgetq_lane_u32(vdotq_u32(va, v1, v2), 0);
12
+ #ifdef __aarch64__
13
+ ret += (int)vgetq_lane_u32(vdotq_laneq_u32(va, v1, v2, 0), 0);
14
+ #endif
15
+ return ret;
16
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_asimdfhm.c ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifdef _MSC_VER
2
+ #include <Intrin.h>
3
+ #endif
4
+ #include <arm_neon.h>
5
+
6
+ int main(int argc, char **argv)
7
+ {
8
+ float16_t *src = (float16_t*)argv[argc-1];
9
+ float *src2 = (float*)argv[argc-2];
10
+ float16x8_t vhp = vdupq_n_f16(src[0]);
11
+ float16x4_t vlhp = vdup_n_f16(src[1]);
12
+ float32x4_t vf = vdupq_n_f32(src2[0]);
13
+ float32x2_t vlf = vdup_n_f32(src2[1]);
14
+
15
+ int ret = (int)vget_lane_f32(vfmlal_low_f16(vlf, vlhp, vlhp), 0);
16
+ ret += (int)vgetq_lane_f32(vfmlslq_high_f16(vf, vhp, vhp), 0);
17
+
18
+ return ret;
19
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_avx512_spr.c ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #if defined(DETECT_FEATURES) && defined(__INTEL_COMPILER)
2
+ /*
3
+ * Unlike GCC and CLANG, Intel Compiler exposes all supported intrinsics,
4
+ * whether or not the build options for those features are specified.
5
+ * Therefore, we must test #definitions of CPU features when option native/host
6
+ * is enabled via `--cpu-baseline` or through env var `CFLAGS` otherwise
7
+ * the test will be broken and leads to enable all possible features.
8
+ */
9
+ #if !defined(__AVX512FP16__)
10
+ #error "HOST/ARCH doesn't support Sapphire Rapids AVX512FP16 features"
11
+ #endif
12
+ #endif
13
+
14
+ #include <immintrin.h>
15
+
16
+ int main(int argc, char **argv)
17
+ {
18
+ /* clang has a bug regarding our spr coode, see gh-23730. */
19
+ #if __clang__
20
+ #error
21
+ #endif
22
+ __m512h a = _mm512_loadu_ph((void*)argv[argc-1]);
23
+ __m512h temp = _mm512_fmadd_ph(a, a, a);
24
+ _mm512_storeu_ph((void*)(argv[argc-1]), temp);
25
+ return 0;
26
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_fma4.c ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include <immintrin.h>
2
+ #ifdef _MSC_VER
3
+ #include <ammintrin.h>
4
+ #else
5
+ #include <x86intrin.h>
6
+ #endif
7
+
8
+ int main(int argc, char **argv)
9
+ {
10
+ __m256 a = _mm256_loadu_ps((const float*)argv[argc-1]);
11
+ a = _mm256_macc_ps(a, a, a);
12
+ return (int)_mm_cvtss_f32(_mm256_castps256_ps128(a));
13
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_sse42.c ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #if defined(DETECT_FEATURES) && defined(__INTEL_COMPILER)
2
+ /*
3
+ * Unlike GCC and CLANG, Intel Compiler exposes all supported intrinsics,
4
+ * whether or not the build options for those features are specified.
5
+ * Therefore, we must test #definitions of CPU features when option native/host
6
+ * is enabled via `--cpu-baseline` or through env var `CFLAGS` otherwise
7
+ * the test will be broken and leads to enable all possible features.
8
+ */
9
+ #ifndef __SSE4_2__
10
+ #error "HOST/ARCH doesn't support SSE42"
11
+ #endif
12
+ #endif
13
+
14
+ #include <smmintrin.h>
15
+
16
+ int main(void)
17
+ {
18
+ __m128 a = _mm_hadd_ps(_mm_setzero_ps(), _mm_setzero_ps());
19
+ return (int)_mm_cvtss_f32(a);
20
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_vsx2.c ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifndef __VSX__
2
+ #error "VSX is not supported"
3
+ #endif
4
+ #include <altivec.h>
5
+
6
+ typedef __vector unsigned long long v_uint64x2;
7
+
8
+ int main(void)
9
+ {
10
+ v_uint64x2 z2 = (v_uint64x2){0, 0};
11
+ z2 = (v_uint64x2)vec_cmpeq(z2, z2);
12
+ return (int)vec_extract(z2, 0);
13
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/cpu_vxe.c ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #if (__VEC__ < 10302) || (__ARCH__ < 12)
2
+ #error VXE not supported
3
+ #endif
4
+
5
+ #include <vecintrin.h>
6
+ int main(int argc, char **argv)
7
+ {
8
+ __vector float x = vec_nabs(vec_xl(argc, (float*)argv));
9
+ __vector float y = vec_load_len((float*)argv, (unsigned int)argc);
10
+
11
+ x = vec_round(vec_ceil(x) + vec_floor(y));
12
+ __vector bool int m = vec_cmpge(x, y);
13
+ x = vec_sel(x, y, m);
14
+
15
+ // need to test the existence of intrin "vflls" since vec_doublee
16
+ // is vec_doublee maps to wrong intrin "vfll".
17
+ // see https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100871
18
+ #if defined(__GNUC__) && !defined(__clang__)
19
+ __vector long long i = vec_signed(__builtin_s390_vflls(x));
20
+ #else
21
+ __vector long long i = vec_signed(vec_doublee(x));
22
+ #endif
23
+
24
+ return (int)vec_extract(i, 0);
25
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_avx512dq_mask.c ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include <immintrin.h>
2
+ /**
3
+ * Test DQ mask operations due to:
4
+ * - MSVC has supported it since vs2019 see,
5
+ * https://developercommunity.visualstudio.com/content/problem/518298/missing-avx512bw-mask-intrinsics.html
6
+ * - Clang >= v8.0
7
+ * - GCC >= v7.1
8
+ */
9
+ int main(void)
10
+ {
11
+ __mmask8 m8 = _mm512_cmpeq_epi64_mask(_mm512_set1_epi64(1), _mm512_set1_epi64(1));
12
+ m8 = _kor_mask8(m8, m8);
13
+ m8 = _kxor_mask8(m8, m8);
14
+ m8 = _cvtu32_mask8(_cvtmask8_u32(m8));
15
+ return (int)_cvtmask8_u32(m8);
16
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_avx512f_reduce.c ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include <immintrin.h>
2
+ /**
3
+ * The following intrinsics don't have direct native support but compilers
4
+ * tend to emulate them.
5
+ * They're usually supported by gcc >= 7.1, clang >= 4 and icc >= 19
6
+ */
7
+ int main(void)
8
+ {
9
+ __m512 one_ps = _mm512_set1_ps(1.0f);
10
+ __m512d one_pd = _mm512_set1_pd(1.0);
11
+ __m512i one_i64 = _mm512_set1_epi64(1);
12
+ // add
13
+ float sum_ps = _mm512_reduce_add_ps(one_ps);
14
+ double sum_pd = _mm512_reduce_add_pd(one_pd);
15
+ int sum_int = (int)_mm512_reduce_add_epi64(one_i64);
16
+ sum_int += (int)_mm512_reduce_add_epi32(one_i64);
17
+ // mul
18
+ sum_ps += _mm512_reduce_mul_ps(one_ps);
19
+ sum_pd += _mm512_reduce_mul_pd(one_pd);
20
+ sum_int += (int)_mm512_reduce_mul_epi64(one_i64);
21
+ sum_int += (int)_mm512_reduce_mul_epi32(one_i64);
22
+ // min
23
+ sum_ps += _mm512_reduce_min_ps(one_ps);
24
+ sum_pd += _mm512_reduce_min_pd(one_pd);
25
+ sum_int += (int)_mm512_reduce_min_epi32(one_i64);
26
+ sum_int += (int)_mm512_reduce_min_epu32(one_i64);
27
+ sum_int += (int)_mm512_reduce_min_epi64(one_i64);
28
+ // max
29
+ sum_ps += _mm512_reduce_max_ps(one_ps);
30
+ sum_pd += _mm512_reduce_max_pd(one_pd);
31
+ sum_int += (int)_mm512_reduce_max_epi32(one_i64);
32
+ sum_int += (int)_mm512_reduce_max_epu32(one_i64);
33
+ sum_int += (int)_mm512_reduce_max_epi64(one_i64);
34
+ // and
35
+ sum_int += (int)_mm512_reduce_and_epi32(one_i64);
36
+ sum_int += (int)_mm512_reduce_and_epi64(one_i64);
37
+ // or
38
+ sum_int += (int)_mm512_reduce_or_epi32(one_i64);
39
+ sum_int += (int)_mm512_reduce_or_epi64(one_i64);
40
+ return (int)sum_ps + (int)sum_pd + sum_int;
41
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/checks/extra_vsx_asm.c ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Testing ASM VSX register number fixer '%x<n>'
3
+ *
4
+ * old versions of CLANG doesn't support %x<n> in the inline asm template
5
+ * which fixes register number when using any of the register constraints wa, wd, wf.
6
+ *
7
+ * xref:
8
+ * - https://bugs.llvm.org/show_bug.cgi?id=31837
9
+ * - https://gcc.gnu.org/onlinedocs/gcc/Machine-Constraints.html
10
+ */
11
+ #ifndef __VSX__
12
+ #error "VSX is not supported"
13
+ #endif
14
+ #include <altivec.h>
15
+
16
+ #if (defined(__GNUC__) && !defined(vec_xl)) || (defined(__clang__) && !defined(__IBMC__))
17
+ #define vsx_ld vec_vsx_ld
18
+ #define vsx_st vec_vsx_st
19
+ #else
20
+ #define vsx_ld vec_xl
21
+ #define vsx_st vec_xst
22
+ #endif
23
+
24
+ int main(void)
25
+ {
26
+ float z4[] = {0, 0, 0, 0};
27
+ signed int zout[] = {0, 0, 0, 0};
28
+
29
+ __vector float vz4 = vsx_ld(0, z4);
30
+ __vector signed int asm_ret = vsx_ld(0, zout);
31
+
32
+ __asm__ ("xvcvspsxws %x0,%x1" : "=wa" (vz4) : "wa" (asm_ret));
33
+
34
+ vsx_st(asm_ret, 0, zout);
35
+ return zout[0];
36
+ }
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__init__.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.command
2
+
3
+ Package containing implementation of all the standard Distutils
4
+ commands.
5
+
6
+ """
7
+ def test_na_writable_attributes_deletion():
8
+ a = np.NA(2)
9
+ attr = ['payload', 'dtype']
10
+ for s in attr:
11
+ assert_raises(AttributeError, delattr, a, s)
12
+
13
+
14
+ __revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $"
15
+
16
+ distutils_all = [ #'build_py',
17
+ 'clean',
18
+ 'install_clib',
19
+ 'install_scripts',
20
+ 'bdist',
21
+ 'bdist_dumb',
22
+ 'bdist_wininst',
23
+ ]
24
+
25
+ __import__('distutils.command', globals(), locals(), distutils_all)
26
+
27
+ __all__ = ['build',
28
+ 'config_compiler',
29
+ 'config',
30
+ 'build_src',
31
+ 'build_py',
32
+ 'build_ext',
33
+ 'build_clib',
34
+ 'build_scripts',
35
+ 'install',
36
+ 'install_data',
37
+ 'install_headers',
38
+ 'install_lib',
39
+ 'bdist_rpm',
40
+ 'sdist',
41
+ ] + distutils_all
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (984 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/autodist.cpython-310.pyc ADDED
Binary file (3.68 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc ADDED
Binary file (860 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build.cpython-310.pyc ADDED
Binary file (2.29 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_clib.cpython-310.pyc ADDED
Binary file (10.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_ext.cpython-310.pyc ADDED
Binary file (15.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_py.cpython-310.pyc ADDED
Binary file (1.39 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_scripts.cpython-310.pyc ADDED
Binary file (1.67 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/build_src.cpython-310.pyc ADDED
Binary file (18.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/config.cpython-310.pyc ADDED
Binary file (13.9 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/config_compiler.cpython-310.pyc ADDED
Binary file (3.67 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/develop.cpython-310.pyc ADDED
Binary file (887 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/egg_info.cpython-310.pyc ADDED
Binary file (1.12 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install.cpython-310.pyc ADDED
Binary file (2.13 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_clib.cpython-310.pyc ADDED
Binary file (1.66 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_data.cpython-310.pyc ADDED
Binary file (921 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/install_headers.cpython-310.pyc ADDED
Binary file (983 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/__pycache__/sdist.cpython-310.pyc ADDED
Binary file (965 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/autodist.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """This module implements additional tests ala autoconf which can be useful.
2
+
3
+ """
4
+ import textwrap
5
+
6
+ # We put them here since they could be easily reused outside numpy.distutils
7
+
8
+ def check_inline(cmd):
9
+ """Return the inline identifier (may be empty)."""
10
+ cmd._check_compiler()
11
+ body = textwrap.dedent("""
12
+ #ifndef __cplusplus
13
+ static %(inline)s int static_func (void)
14
+ {
15
+ return 0;
16
+ }
17
+ %(inline)s int nostatic_func (void)
18
+ {
19
+ return 0;
20
+ }
21
+ #endif""")
22
+
23
+ for kw in ['inline', '__inline__', '__inline']:
24
+ st = cmd.try_compile(body % {'inline': kw}, None, None)
25
+ if st:
26
+ return kw
27
+
28
+ return ''
29
+
30
+
31
+ def check_restrict(cmd):
32
+ """Return the restrict identifier (may be empty)."""
33
+ cmd._check_compiler()
34
+ body = textwrap.dedent("""
35
+ static int static_func (char * %(restrict)s a)
36
+ {
37
+ return 0;
38
+ }
39
+ """)
40
+
41
+ for kw in ['restrict', '__restrict__', '__restrict']:
42
+ st = cmd.try_compile(body % {'restrict': kw}, None, None)
43
+ if st:
44
+ return kw
45
+
46
+ return ''
47
+
48
+
49
+ def check_compiler_gcc(cmd):
50
+ """Check if the compiler is GCC."""
51
+
52
+ cmd._check_compiler()
53
+ body = textwrap.dedent("""
54
+ int
55
+ main()
56
+ {
57
+ #if (! defined __GNUC__)
58
+ #error gcc required
59
+ #endif
60
+ return 0;
61
+ }
62
+ """)
63
+ return cmd.try_compile(body, None, None)
64
+
65
+
66
+ def check_gcc_version_at_least(cmd, major, minor=0, patchlevel=0):
67
+ """
68
+ Check that the gcc version is at least the specified version."""
69
+
70
+ cmd._check_compiler()
71
+ version = '.'.join([str(major), str(minor), str(patchlevel)])
72
+ body = textwrap.dedent("""
73
+ int
74
+ main()
75
+ {
76
+ #if (! defined __GNUC__) || (__GNUC__ < %(major)d) || \\
77
+ (__GNUC_MINOR__ < %(minor)d) || \\
78
+ (__GNUC_PATCHLEVEL__ < %(patchlevel)d)
79
+ #error gcc >= %(version)s required
80
+ #endif
81
+ return 0;
82
+ }
83
+ """)
84
+ kw = {'version': version, 'major': major, 'minor': minor,
85
+ 'patchlevel': patchlevel}
86
+
87
+ return cmd.try_compile(body % kw, None, None)
88
+
89
+
90
+ def check_gcc_function_attribute(cmd, attribute, name):
91
+ """Return True if the given function attribute is supported."""
92
+ cmd._check_compiler()
93
+ body = textwrap.dedent("""
94
+ #pragma GCC diagnostic error "-Wattributes"
95
+ #pragma clang diagnostic error "-Wattributes"
96
+
97
+ int %s %s(void* unused)
98
+ {
99
+ return 0;
100
+ }
101
+
102
+ int
103
+ main()
104
+ {
105
+ return 0;
106
+ }
107
+ """) % (attribute, name)
108
+ return cmd.try_compile(body, None, None) != 0
109
+
110
+
111
+ def check_gcc_function_attribute_with_intrinsics(cmd, attribute, name, code,
112
+ include):
113
+ """Return True if the given function attribute is supported with
114
+ intrinsics."""
115
+ cmd._check_compiler()
116
+ body = textwrap.dedent("""
117
+ #include<%s>
118
+ int %s %s(void)
119
+ {
120
+ %s;
121
+ return 0;
122
+ }
123
+
124
+ int
125
+ main()
126
+ {
127
+ return 0;
128
+ }
129
+ """) % (include, attribute, name, code)
130
+ return cmd.try_compile(body, None, None) != 0
131
+
132
+
133
+ def check_gcc_variable_attribute(cmd, attribute):
134
+ """Return True if the given variable attribute is supported."""
135
+ cmd._check_compiler()
136
+ body = textwrap.dedent("""
137
+ #pragma GCC diagnostic error "-Wattributes"
138
+ #pragma clang diagnostic error "-Wattributes"
139
+
140
+ int %s foo;
141
+
142
+ int
143
+ main()
144
+ {
145
+ return 0;
146
+ }
147
+ """) % (attribute, )
148
+ return cmd.try_compile(body, None, None) != 0
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/bdist_rpm.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ if 'setuptools' in sys.modules:
4
+ from setuptools.command.bdist_rpm import bdist_rpm as old_bdist_rpm
5
+ else:
6
+ from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm
7
+
8
+ class bdist_rpm(old_bdist_rpm):
9
+
10
+ def _make_spec_file(self):
11
+ spec_file = old_bdist_rpm._make_spec_file(self)
12
+
13
+ # Replace hardcoded setup.py script name
14
+ # with the real setup script name.
15
+ setup_py = os.path.basename(sys.argv[0])
16
+ if setup_py == 'setup.py':
17
+ return spec_file
18
+ new_spec_file = []
19
+ for line in spec_file:
20
+ line = line.replace('setup.py', setup_py)
21
+ new_spec_file.append(line)
22
+ return new_spec_file
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ from distutils.command.build import build as old_build
4
+ from distutils.util import get_platform
5
+ from numpy.distutils.command.config_compiler import show_fortran_compilers
6
+
7
+ class build(old_build):
8
+
9
+ sub_commands = [('config_cc', lambda *args: True),
10
+ ('config_fc', lambda *args: True),
11
+ ('build_src', old_build.has_ext_modules),
12
+ ] + old_build.sub_commands
13
+
14
+ user_options = old_build.user_options + [
15
+ ('fcompiler=', None,
16
+ "specify the Fortran compiler type"),
17
+ ('warn-error', None,
18
+ "turn all warnings into errors (-Werror)"),
19
+ ('cpu-baseline=', None,
20
+ "specify a list of enabled baseline CPU optimizations"),
21
+ ('cpu-dispatch=', None,
22
+ "specify a list of dispatched CPU optimizations"),
23
+ ('disable-optimization', None,
24
+ "disable CPU optimized code(dispatch,simd,fast...)"),
25
+ ('simd-test=', None,
26
+ "specify a list of CPU optimizations to be tested against NumPy SIMD interface"),
27
+ ]
28
+
29
+ help_options = old_build.help_options + [
30
+ ('help-fcompiler', None, "list available Fortran compilers",
31
+ show_fortran_compilers),
32
+ ]
33
+
34
+ def initialize_options(self):
35
+ old_build.initialize_options(self)
36
+ self.fcompiler = None
37
+ self.warn_error = False
38
+ self.cpu_baseline = "min"
39
+ self.cpu_dispatch = "max -xop -fma4" # drop AMD legacy features by default
40
+ self.disable_optimization = False
41
+ """
42
+ the '_simd' module is a very large. Adding more dispatched features
43
+ will increase binary size and compile time. By default we minimize
44
+ the targeted features to those most commonly used by the NumPy SIMD interface(NPYV),
45
+ NOTE: any specified features will be ignored if they're:
46
+ - part of the baseline(--cpu-baseline)
47
+ - not part of dispatch-able features(--cpu-dispatch)
48
+ - not supported by compiler or platform
49
+ """
50
+ self.simd_test = "BASELINE SSE2 SSE42 XOP FMA4 (FMA3 AVX2) AVX512F " \
51
+ "AVX512_SKX VSX VSX2 VSX3 VSX4 NEON ASIMD VX VXE VXE2"
52
+
53
+ def finalize_options(self):
54
+ build_scripts = self.build_scripts
55
+ old_build.finalize_options(self)
56
+ plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2])
57
+ if build_scripts is None:
58
+ self.build_scripts = os.path.join(self.build_base,
59
+ 'scripts' + plat_specifier)
60
+
61
+ def run(self):
62
+ old_build.run(self)
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_clib.py ADDED
@@ -0,0 +1,469 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Modified version of build_clib that handles fortran source files.
2
+ """
3
+ import os
4
+ from glob import glob
5
+ import shutil
6
+ from distutils.command.build_clib import build_clib as old_build_clib
7
+ from distutils.errors import DistutilsSetupError, DistutilsError, \
8
+ DistutilsFileError
9
+
10
+ from numpy.distutils import log
11
+ from distutils.dep_util import newer_group
12
+ from numpy.distutils.misc_util import (
13
+ filter_sources, get_lib_source_files, get_numpy_include_dirs,
14
+ has_cxx_sources, has_f_sources, is_sequence
15
+ )
16
+ from numpy.distutils.ccompiler_opt import new_ccompiler_opt
17
+
18
+ # Fix Python distutils bug sf #1718574:
19
+ _l = old_build_clib.user_options
20
+ for _i in range(len(_l)):
21
+ if _l[_i][0] in ['build-clib', 'build-temp']:
22
+ _l[_i] = (_l[_i][0] + '=',) + _l[_i][1:]
23
+ #
24
+
25
+
26
+ class build_clib(old_build_clib):
27
+
28
+ description = "build C/C++/F libraries used by Python extensions"
29
+
30
+ user_options = old_build_clib.user_options + [
31
+ ('fcompiler=', None,
32
+ "specify the Fortran compiler type"),
33
+ ('inplace', 'i', 'Build in-place'),
34
+ ('parallel=', 'j',
35
+ "number of parallel jobs"),
36
+ ('warn-error', None,
37
+ "turn all warnings into errors (-Werror)"),
38
+ ('cpu-baseline=', None,
39
+ "specify a list of enabled baseline CPU optimizations"),
40
+ ('cpu-dispatch=', None,
41
+ "specify a list of dispatched CPU optimizations"),
42
+ ('disable-optimization', None,
43
+ "disable CPU optimized code(dispatch,simd,fast...)"),
44
+ ]
45
+
46
+ boolean_options = old_build_clib.boolean_options + \
47
+ ['inplace', 'warn-error', 'disable-optimization']
48
+
49
+ def initialize_options(self):
50
+ old_build_clib.initialize_options(self)
51
+ self.fcompiler = None
52
+ self.inplace = 0
53
+ self.parallel = None
54
+ self.warn_error = None
55
+ self.cpu_baseline = None
56
+ self.cpu_dispatch = None
57
+ self.disable_optimization = None
58
+
59
+
60
+ def finalize_options(self):
61
+ if self.parallel:
62
+ try:
63
+ self.parallel = int(self.parallel)
64
+ except ValueError as e:
65
+ raise ValueError("--parallel/-j argument must be an integer") from e
66
+ old_build_clib.finalize_options(self)
67
+ self.set_undefined_options('build',
68
+ ('parallel', 'parallel'),
69
+ ('warn_error', 'warn_error'),
70
+ ('cpu_baseline', 'cpu_baseline'),
71
+ ('cpu_dispatch', 'cpu_dispatch'),
72
+ ('disable_optimization', 'disable_optimization')
73
+ )
74
+
75
+ def have_f_sources(self):
76
+ for (lib_name, build_info) in self.libraries:
77
+ if has_f_sources(build_info.get('sources', [])):
78
+ return True
79
+ return False
80
+
81
+ def have_cxx_sources(self):
82
+ for (lib_name, build_info) in self.libraries:
83
+ if has_cxx_sources(build_info.get('sources', [])):
84
+ return True
85
+ return False
86
+
87
+ def run(self):
88
+ if not self.libraries:
89
+ return
90
+
91
+ # Make sure that library sources are complete.
92
+ languages = []
93
+
94
+ # Make sure that extension sources are complete.
95
+ self.run_command('build_src')
96
+
97
+ for (lib_name, build_info) in self.libraries:
98
+ l = build_info.get('language', None)
99
+ if l and l not in languages:
100
+ languages.append(l)
101
+
102
+ from distutils.ccompiler import new_compiler
103
+ self.compiler = new_compiler(compiler=self.compiler,
104
+ dry_run=self.dry_run,
105
+ force=self.force)
106
+ self.compiler.customize(self.distribution,
107
+ need_cxx=self.have_cxx_sources())
108
+
109
+ if self.warn_error:
110
+ self.compiler.compiler.append('-Werror')
111
+ self.compiler.compiler_so.append('-Werror')
112
+
113
+ libraries = self.libraries
114
+ self.libraries = None
115
+ self.compiler.customize_cmd(self)
116
+ self.libraries = libraries
117
+
118
+ self.compiler.show_customization()
119
+
120
+ if not self.disable_optimization:
121
+ dispatch_hpath = os.path.join("numpy", "distutils", "include", "npy_cpu_dispatch_config.h")
122
+ dispatch_hpath = os.path.join(self.get_finalized_command("build_src").build_src, dispatch_hpath)
123
+ opt_cache_path = os.path.abspath(
124
+ os.path.join(self.build_temp, 'ccompiler_opt_cache_clib.py')
125
+ )
126
+ if hasattr(self, "compiler_opt"):
127
+ # By default `CCompilerOpt` update the cache at the exit of
128
+ # the process, which may lead to duplicate building
129
+ # (see build_extension()/force_rebuild) if run() called
130
+ # multiple times within the same os process/thread without
131
+ # giving the chance the previous instances of `CCompilerOpt`
132
+ # to update the cache.
133
+ self.compiler_opt.cache_flush()
134
+
135
+ self.compiler_opt = new_ccompiler_opt(
136
+ compiler=self.compiler, dispatch_hpath=dispatch_hpath,
137
+ cpu_baseline=self.cpu_baseline, cpu_dispatch=self.cpu_dispatch,
138
+ cache_path=opt_cache_path
139
+ )
140
+ def report(copt):
141
+ log.info("\n########### CLIB COMPILER OPTIMIZATION ###########")
142
+ log.info(copt.report(full=True))
143
+
144
+ import atexit
145
+ atexit.register(report, self.compiler_opt)
146
+
147
+ if self.have_f_sources():
148
+ from numpy.distutils.fcompiler import new_fcompiler
149
+ self._f_compiler = new_fcompiler(compiler=self.fcompiler,
150
+ verbose=self.verbose,
151
+ dry_run=self.dry_run,
152
+ force=self.force,
153
+ requiref90='f90' in languages,
154
+ c_compiler=self.compiler)
155
+ if self._f_compiler is not None:
156
+ self._f_compiler.customize(self.distribution)
157
+
158
+ libraries = self.libraries
159
+ self.libraries = None
160
+ self._f_compiler.customize_cmd(self)
161
+ self.libraries = libraries
162
+
163
+ self._f_compiler.show_customization()
164
+ else:
165
+ self._f_compiler = None
166
+
167
+ self.build_libraries(self.libraries)
168
+
169
+ if self.inplace:
170
+ for l in self.distribution.installed_libraries:
171
+ libname = self.compiler.library_filename(l.name)
172
+ source = os.path.join(self.build_clib, libname)
173
+ target = os.path.join(l.target_dir, libname)
174
+ self.mkpath(l.target_dir)
175
+ shutil.copy(source, target)
176
+
177
+ def get_source_files(self):
178
+ self.check_library_list(self.libraries)
179
+ filenames = []
180
+ for lib in self.libraries:
181
+ filenames.extend(get_lib_source_files(lib))
182
+ return filenames
183
+
184
+ def build_libraries(self, libraries):
185
+ for (lib_name, build_info) in libraries:
186
+ self.build_a_library(build_info, lib_name, libraries)
187
+
188
+ def assemble_flags(self, in_flags):
189
+ """ Assemble flags from flag list
190
+
191
+ Parameters
192
+ ----------
193
+ in_flags : None or sequence
194
+ None corresponds to empty list. Sequence elements can be strings
195
+ or callables that return lists of strings. Callable takes `self` as
196
+ single parameter.
197
+
198
+ Returns
199
+ -------
200
+ out_flags : list
201
+ """
202
+ if in_flags is None:
203
+ return []
204
+ out_flags = []
205
+ for in_flag in in_flags:
206
+ if callable(in_flag):
207
+ out_flags += in_flag(self)
208
+ else:
209
+ out_flags.append(in_flag)
210
+ return out_flags
211
+
212
+ def build_a_library(self, build_info, lib_name, libraries):
213
+ # default compilers
214
+ compiler = self.compiler
215
+ fcompiler = self._f_compiler
216
+
217
+ sources = build_info.get('sources')
218
+ if sources is None or not is_sequence(sources):
219
+ raise DistutilsSetupError(("in 'libraries' option (library '%s'), " +
220
+ "'sources' must be present and must be " +
221
+ "a list of source filenames") % lib_name)
222
+ sources = list(sources)
223
+
224
+ c_sources, cxx_sources, f_sources, fmodule_sources \
225
+ = filter_sources(sources)
226
+ requiref90 = not not fmodule_sources or \
227
+ build_info.get('language', 'c') == 'f90'
228
+
229
+ # save source type information so that build_ext can use it.
230
+ source_languages = []
231
+ if c_sources:
232
+ source_languages.append('c')
233
+ if cxx_sources:
234
+ source_languages.append('c++')
235
+ if requiref90:
236
+ source_languages.append('f90')
237
+ elif f_sources:
238
+ source_languages.append('f77')
239
+ build_info['source_languages'] = source_languages
240
+
241
+ lib_file = compiler.library_filename(lib_name,
242
+ output_dir=self.build_clib)
243
+ depends = sources + build_info.get('depends', [])
244
+
245
+ force_rebuild = self.force
246
+ if not self.disable_optimization and not self.compiler_opt.is_cached():
247
+ log.debug("Detected changes on compiler optimizations")
248
+ force_rebuild = True
249
+ if not (force_rebuild or newer_group(depends, lib_file, 'newer')):
250
+ log.debug("skipping '%s' library (up-to-date)", lib_name)
251
+ return
252
+ else:
253
+ log.info("building '%s' library", lib_name)
254
+
255
+ config_fc = build_info.get('config_fc', {})
256
+ if fcompiler is not None and config_fc:
257
+ log.info('using additional config_fc from setup script '
258
+ 'for fortran compiler: %s'
259
+ % (config_fc,))
260
+ from numpy.distutils.fcompiler import new_fcompiler
261
+ fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
262
+ verbose=self.verbose,
263
+ dry_run=self.dry_run,
264
+ force=self.force,
265
+ requiref90=requiref90,
266
+ c_compiler=self.compiler)
267
+ if fcompiler is not None:
268
+ dist = self.distribution
269
+ base_config_fc = dist.get_option_dict('config_fc').copy()
270
+ base_config_fc.update(config_fc)
271
+ fcompiler.customize(base_config_fc)
272
+
273
+ # check availability of Fortran compilers
274
+ if (f_sources or fmodule_sources) and fcompiler is None:
275
+ raise DistutilsError("library %s has Fortran sources"
276
+ " but no Fortran compiler found" % (lib_name))
277
+
278
+ if fcompiler is not None:
279
+ fcompiler.extra_f77_compile_args = build_info.get(
280
+ 'extra_f77_compile_args') or []
281
+ fcompiler.extra_f90_compile_args = build_info.get(
282
+ 'extra_f90_compile_args') or []
283
+
284
+ macros = build_info.get('macros')
285
+ if macros is None:
286
+ macros = []
287
+ include_dirs = build_info.get('include_dirs')
288
+ if include_dirs is None:
289
+ include_dirs = []
290
+ # Flags can be strings, or callables that return a list of strings.
291
+ extra_postargs = self.assemble_flags(
292
+ build_info.get('extra_compiler_args'))
293
+ extra_cflags = self.assemble_flags(
294
+ build_info.get('extra_cflags'))
295
+ extra_cxxflags = self.assemble_flags(
296
+ build_info.get('extra_cxxflags'))
297
+
298
+ include_dirs.extend(get_numpy_include_dirs())
299
+ # where compiled F90 module files are:
300
+ module_dirs = build_info.get('module_dirs') or []
301
+ module_build_dir = os.path.dirname(lib_file)
302
+ if requiref90:
303
+ self.mkpath(module_build_dir)
304
+
305
+ if compiler.compiler_type == 'msvc':
306
+ # this hack works around the msvc compiler attributes
307
+ # problem, msvc uses its own convention :(
308
+ c_sources += cxx_sources
309
+ cxx_sources = []
310
+ extra_cflags += extra_cxxflags
311
+
312
+ # filtering C dispatch-table sources when optimization is not disabled,
313
+ # otherwise treated as normal sources.
314
+ copt_c_sources = []
315
+ copt_cxx_sources = []
316
+ copt_baseline_flags = []
317
+ copt_macros = []
318
+ if not self.disable_optimization:
319
+ bsrc_dir = self.get_finalized_command("build_src").build_src
320
+ dispatch_hpath = os.path.join("numpy", "distutils", "include")
321
+ dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
322
+ include_dirs.append(dispatch_hpath)
323
+ # copt_build_src = None if self.inplace else bsrc_dir
324
+ copt_build_src = bsrc_dir
325
+ for _srcs, _dst, _ext in (
326
+ ((c_sources,), copt_c_sources, ('.dispatch.c',)),
327
+ ((c_sources, cxx_sources), copt_cxx_sources,
328
+ ('.dispatch.cpp', '.dispatch.cxx'))
329
+ ):
330
+ for _src in _srcs:
331
+ _dst += [
332
+ _src.pop(_src.index(s))
333
+ for s in _src[:] if s.endswith(_ext)
334
+ ]
335
+ copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
336
+ else:
337
+ copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))
338
+
339
+ objects = []
340
+ if copt_cxx_sources:
341
+ log.info("compiling C++ dispatch-able sources")
342
+ objects += self.compiler_opt.try_dispatch(
343
+ copt_c_sources,
344
+ output_dir=self.build_temp,
345
+ src_dir=copt_build_src,
346
+ macros=macros + copt_macros,
347
+ include_dirs=include_dirs,
348
+ debug=self.debug,
349
+ extra_postargs=extra_postargs + extra_cxxflags,
350
+ ccompiler=cxx_compiler
351
+ )
352
+
353
+ if copt_c_sources:
354
+ log.info("compiling C dispatch-able sources")
355
+ objects += self.compiler_opt.try_dispatch(
356
+ copt_c_sources,
357
+ output_dir=self.build_temp,
358
+ src_dir=copt_build_src,
359
+ macros=macros + copt_macros,
360
+ include_dirs=include_dirs,
361
+ debug=self.debug,
362
+ extra_postargs=extra_postargs + extra_cflags)
363
+
364
+ if c_sources:
365
+ log.info("compiling C sources")
366
+ objects += compiler.compile(
367
+ c_sources,
368
+ output_dir=self.build_temp,
369
+ macros=macros + copt_macros,
370
+ include_dirs=include_dirs,
371
+ debug=self.debug,
372
+ extra_postargs=(extra_postargs +
373
+ copt_baseline_flags +
374
+ extra_cflags))
375
+
376
+ if cxx_sources:
377
+ log.info("compiling C++ sources")
378
+ cxx_compiler = compiler.cxx_compiler()
379
+ cxx_objects = cxx_compiler.compile(
380
+ cxx_sources,
381
+ output_dir=self.build_temp,
382
+ macros=macros + copt_macros,
383
+ include_dirs=include_dirs,
384
+ debug=self.debug,
385
+ extra_postargs=(extra_postargs +
386
+ copt_baseline_flags +
387
+ extra_cxxflags))
388
+ objects.extend(cxx_objects)
389
+
390
+ if f_sources or fmodule_sources:
391
+ extra_postargs = []
392
+ f_objects = []
393
+
394
+ if requiref90:
395
+ if fcompiler.module_dir_switch is None:
396
+ existing_modules = glob('*.mod')
397
+ extra_postargs += fcompiler.module_options(
398
+ module_dirs, module_build_dir)
399
+
400
+ if fmodule_sources:
401
+ log.info("compiling Fortran 90 module sources")
402
+ f_objects += fcompiler.compile(fmodule_sources,
403
+ output_dir=self.build_temp,
404
+ macros=macros,
405
+ include_dirs=include_dirs,
406
+ debug=self.debug,
407
+ extra_postargs=extra_postargs)
408
+
409
+ if requiref90 and self._f_compiler.module_dir_switch is None:
410
+ # move new compiled F90 module files to module_build_dir
411
+ for f in glob('*.mod'):
412
+ if f in existing_modules:
413
+ continue
414
+ t = os.path.join(module_build_dir, f)
415
+ if os.path.abspath(f) == os.path.abspath(t):
416
+ continue
417
+ if os.path.isfile(t):
418
+ os.remove(t)
419
+ try:
420
+ self.move_file(f, module_build_dir)
421
+ except DistutilsFileError:
422
+ log.warn('failed to move %r to %r'
423
+ % (f, module_build_dir))
424
+
425
+ if f_sources:
426
+ log.info("compiling Fortran sources")
427
+ f_objects += fcompiler.compile(f_sources,
428
+ output_dir=self.build_temp,
429
+ macros=macros,
430
+ include_dirs=include_dirs,
431
+ debug=self.debug,
432
+ extra_postargs=extra_postargs)
433
+ else:
434
+ f_objects = []
435
+
436
+ if f_objects and not fcompiler.can_ccompiler_link(compiler):
437
+ # Default linker cannot link Fortran object files, and results
438
+ # need to be wrapped later. Instead of creating a real static
439
+ # library, just keep track of the object files.
440
+ listfn = os.path.join(self.build_clib,
441
+ lib_name + '.fobjects')
442
+ with open(listfn, 'w') as f:
443
+ f.write("\n".join(os.path.abspath(obj) for obj in f_objects))
444
+
445
+ listfn = os.path.join(self.build_clib,
446
+ lib_name + '.cobjects')
447
+ with open(listfn, 'w') as f:
448
+ f.write("\n".join(os.path.abspath(obj) for obj in objects))
449
+
450
+ # create empty "library" file for dependency tracking
451
+ lib_fname = os.path.join(self.build_clib,
452
+ lib_name + compiler.static_lib_extension)
453
+ with open(lib_fname, 'wb') as f:
454
+ pass
455
+ else:
456
+ # assume that default linker is suitable for
457
+ # linking Fortran object files
458
+ objects.extend(f_objects)
459
+ compiler.create_static_lib(objects, lib_name,
460
+ output_dir=self.build_clib,
461
+ debug=self.debug)
462
+
463
+ # fix library dependencies
464
+ clib_libraries = build_info.get('libraries', [])
465
+ for lname, binfo in libraries:
466
+ if lname in clib_libraries:
467
+ clib_libraries.extend(binfo.get('libraries', []))
468
+ if clib_libraries:
469
+ build_info['libraries'] = clib_libraries
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_ext.py ADDED
@@ -0,0 +1,752 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Modified version of build_ext that handles fortran source files.
2
+
3
+ """
4
+ import os
5
+ import subprocess
6
+ from glob import glob
7
+
8
+ from distutils.dep_util import newer_group
9
+ from distutils.command.build_ext import build_ext as old_build_ext
10
+ from distutils.errors import DistutilsFileError, DistutilsSetupError,\
11
+ DistutilsError
12
+ from distutils.file_util import copy_file
13
+
14
+ from numpy.distutils import log
15
+ from numpy.distutils.exec_command import filepath_from_subprocess_output
16
+ from numpy.distutils.system_info import combine_paths
17
+ from numpy.distutils.misc_util import (
18
+ filter_sources, get_ext_source_files, get_numpy_include_dirs,
19
+ has_cxx_sources, has_f_sources, is_sequence
20
+ )
21
+ from numpy.distutils.command.config_compiler import show_fortran_compilers
22
+ from numpy.distutils.ccompiler_opt import new_ccompiler_opt, CCompilerOpt
23
+
24
+ class build_ext (old_build_ext):
25
+
26
+ description = "build C/C++/F extensions (compile/link to build directory)"
27
+
28
+ user_options = old_build_ext.user_options + [
29
+ ('fcompiler=', None,
30
+ "specify the Fortran compiler type"),
31
+ ('parallel=', 'j',
32
+ "number of parallel jobs"),
33
+ ('warn-error', None,
34
+ "turn all warnings into errors (-Werror)"),
35
+ ('cpu-baseline=', None,
36
+ "specify a list of enabled baseline CPU optimizations"),
37
+ ('cpu-dispatch=', None,
38
+ "specify a list of dispatched CPU optimizations"),
39
+ ('disable-optimization', None,
40
+ "disable CPU optimized code(dispatch,simd,fast...)"),
41
+ ('simd-test=', None,
42
+ "specify a list of CPU optimizations to be tested against NumPy SIMD interface"),
43
+ ]
44
+
45
+ help_options = old_build_ext.help_options + [
46
+ ('help-fcompiler', None, "list available Fortran compilers",
47
+ show_fortran_compilers),
48
+ ]
49
+
50
+ boolean_options = old_build_ext.boolean_options + ['warn-error', 'disable-optimization']
51
+
52
+ def initialize_options(self):
53
+ old_build_ext.initialize_options(self)
54
+ self.fcompiler = None
55
+ self.parallel = None
56
+ self.warn_error = None
57
+ self.cpu_baseline = None
58
+ self.cpu_dispatch = None
59
+ self.disable_optimization = None
60
+ self.simd_test = None
61
+
62
+ def finalize_options(self):
63
+ if self.parallel:
64
+ try:
65
+ self.parallel = int(self.parallel)
66
+ except ValueError as e:
67
+ raise ValueError("--parallel/-j argument must be an integer") from e
68
+
69
+ # Ensure that self.include_dirs and self.distribution.include_dirs
70
+ # refer to the same list object. finalize_options will modify
71
+ # self.include_dirs, but self.distribution.include_dirs is used
72
+ # during the actual build.
73
+ # self.include_dirs is None unless paths are specified with
74
+ # --include-dirs.
75
+ # The include paths will be passed to the compiler in the order:
76
+ # numpy paths, --include-dirs paths, Python include path.
77
+ if isinstance(self.include_dirs, str):
78
+ self.include_dirs = self.include_dirs.split(os.pathsep)
79
+ incl_dirs = self.include_dirs or []
80
+ if self.distribution.include_dirs is None:
81
+ self.distribution.include_dirs = []
82
+ self.include_dirs = self.distribution.include_dirs
83
+ self.include_dirs.extend(incl_dirs)
84
+
85
+ old_build_ext.finalize_options(self)
86
+ self.set_undefined_options('build',
87
+ ('parallel', 'parallel'),
88
+ ('warn_error', 'warn_error'),
89
+ ('cpu_baseline', 'cpu_baseline'),
90
+ ('cpu_dispatch', 'cpu_dispatch'),
91
+ ('disable_optimization', 'disable_optimization'),
92
+ ('simd_test', 'simd_test')
93
+ )
94
+ CCompilerOpt.conf_target_groups["simd_test"] = self.simd_test
95
+
96
+ def run(self):
97
+ if not self.extensions:
98
+ return
99
+
100
+ # Make sure that extension sources are complete.
101
+ self.run_command('build_src')
102
+
103
+ if self.distribution.has_c_libraries():
104
+ if self.inplace:
105
+ if self.distribution.have_run.get('build_clib'):
106
+ log.warn('build_clib already run, it is too late to '
107
+ 'ensure in-place build of build_clib')
108
+ build_clib = self.distribution.get_command_obj(
109
+ 'build_clib')
110
+ else:
111
+ build_clib = self.distribution.get_command_obj(
112
+ 'build_clib')
113
+ build_clib.inplace = 1
114
+ build_clib.ensure_finalized()
115
+ build_clib.run()
116
+ self.distribution.have_run['build_clib'] = 1
117
+
118
+ else:
119
+ self.run_command('build_clib')
120
+ build_clib = self.get_finalized_command('build_clib')
121
+ self.library_dirs.append(build_clib.build_clib)
122
+ else:
123
+ build_clib = None
124
+
125
+ # Not including C libraries to the list of
126
+ # extension libraries automatically to prevent
127
+ # bogus linking commands. Extensions must
128
+ # explicitly specify the C libraries that they use.
129
+
130
+ from distutils.ccompiler import new_compiler
131
+ from numpy.distutils.fcompiler import new_fcompiler
132
+
133
+ compiler_type = self.compiler
134
+ # Initialize C compiler:
135
+ self.compiler = new_compiler(compiler=compiler_type,
136
+ verbose=self.verbose,
137
+ dry_run=self.dry_run,
138
+ force=self.force)
139
+ self.compiler.customize(self.distribution)
140
+ self.compiler.customize_cmd(self)
141
+
142
+ if self.warn_error:
143
+ self.compiler.compiler.append('-Werror')
144
+ self.compiler.compiler_so.append('-Werror')
145
+
146
+ self.compiler.show_customization()
147
+
148
+ if not self.disable_optimization:
149
+ dispatch_hpath = os.path.join("numpy", "distutils", "include", "npy_cpu_dispatch_config.h")
150
+ dispatch_hpath = os.path.join(self.get_finalized_command("build_src").build_src, dispatch_hpath)
151
+ opt_cache_path = os.path.abspath(
152
+ os.path.join(self.build_temp, 'ccompiler_opt_cache_ext.py')
153
+ )
154
+ if hasattr(self, "compiler_opt"):
155
+ # By default `CCompilerOpt` update the cache at the exit of
156
+ # the process, which may lead to duplicate building
157
+ # (see build_extension()/force_rebuild) if run() called
158
+ # multiple times within the same os process/thread without
159
+ # giving the chance the previous instances of `CCompilerOpt`
160
+ # to update the cache.
161
+ self.compiler_opt.cache_flush()
162
+
163
+ self.compiler_opt = new_ccompiler_opt(
164
+ compiler=self.compiler, dispatch_hpath=dispatch_hpath,
165
+ cpu_baseline=self.cpu_baseline, cpu_dispatch=self.cpu_dispatch,
166
+ cache_path=opt_cache_path
167
+ )
168
+ def report(copt):
169
+ log.info("\n########### EXT COMPILER OPTIMIZATION ###########")
170
+ log.info(copt.report(full=True))
171
+
172
+ import atexit
173
+ atexit.register(report, self.compiler_opt)
174
+
175
+ # Setup directory for storing generated extra DLL files on Windows
176
+ self.extra_dll_dir = os.path.join(self.build_temp, '.libs')
177
+ if not os.path.isdir(self.extra_dll_dir):
178
+ os.makedirs(self.extra_dll_dir)
179
+
180
+ # Create mapping of libraries built by build_clib:
181
+ clibs = {}
182
+ if build_clib is not None:
183
+ for libname, build_info in build_clib.libraries or []:
184
+ if libname in clibs and clibs[libname] != build_info:
185
+ log.warn('library %r defined more than once,'
186
+ ' overwriting build_info\n%s... \nwith\n%s...'
187
+ % (libname, repr(clibs[libname])[:300], repr(build_info)[:300]))
188
+ clibs[libname] = build_info
189
+ # .. and distribution libraries:
190
+ for libname, build_info in self.distribution.libraries or []:
191
+ if libname in clibs:
192
+ # build_clib libraries have a precedence before distribution ones
193
+ continue
194
+ clibs[libname] = build_info
195
+
196
+ # Determine if C++/Fortran 77/Fortran 90 compilers are needed.
197
+ # Update extension libraries, library_dirs, and macros.
198
+ all_languages = set()
199
+ for ext in self.extensions:
200
+ ext_languages = set()
201
+ c_libs = []
202
+ c_lib_dirs = []
203
+ macros = []
204
+ for libname in ext.libraries:
205
+ if libname in clibs:
206
+ binfo = clibs[libname]
207
+ c_libs += binfo.get('libraries', [])
208
+ c_lib_dirs += binfo.get('library_dirs', [])
209
+ for m in binfo.get('macros', []):
210
+ if m not in macros:
211
+ macros.append(m)
212
+
213
+ for l in clibs.get(libname, {}).get('source_languages', []):
214
+ ext_languages.add(l)
215
+ if c_libs:
216
+ new_c_libs = ext.libraries + c_libs
217
+ log.info('updating extension %r libraries from %r to %r'
218
+ % (ext.name, ext.libraries, new_c_libs))
219
+ ext.libraries = new_c_libs
220
+ ext.library_dirs = ext.library_dirs + c_lib_dirs
221
+ if macros:
222
+ log.info('extending extension %r defined_macros with %r'
223
+ % (ext.name, macros))
224
+ ext.define_macros = ext.define_macros + macros
225
+
226
+ # determine extension languages
227
+ if has_f_sources(ext.sources):
228
+ ext_languages.add('f77')
229
+ if has_cxx_sources(ext.sources):
230
+ ext_languages.add('c++')
231
+ l = ext.language or self.compiler.detect_language(ext.sources)
232
+ if l:
233
+ ext_languages.add(l)
234
+
235
+ # reset language attribute for choosing proper linker
236
+ #
237
+ # When we build extensions with multiple languages, we have to
238
+ # choose a linker. The rules here are:
239
+ # 1. if there is Fortran code, always prefer the Fortran linker,
240
+ # 2. otherwise prefer C++ over C,
241
+ # 3. Users can force a particular linker by using
242
+ # `language='c'` # or 'c++', 'f90', 'f77'
243
+ # in their config.add_extension() calls.
244
+ if 'c++' in ext_languages:
245
+ ext_language = 'c++'
246
+ else:
247
+ ext_language = 'c' # default
248
+
249
+ has_fortran = False
250
+ if 'f90' in ext_languages:
251
+ ext_language = 'f90'
252
+ has_fortran = True
253
+ elif 'f77' in ext_languages:
254
+ ext_language = 'f77'
255
+ has_fortran = True
256
+
257
+ if not ext.language or has_fortran:
258
+ if l and l != ext_language and ext.language:
259
+ log.warn('resetting extension %r language from %r to %r.' %
260
+ (ext.name, l, ext_language))
261
+
262
+ ext.language = ext_language
263
+
264
+ # global language
265
+ all_languages.update(ext_languages)
266
+
267
+ need_f90_compiler = 'f90' in all_languages
268
+ need_f77_compiler = 'f77' in all_languages
269
+ need_cxx_compiler = 'c++' in all_languages
270
+
271
+ # Initialize C++ compiler:
272
+ if need_cxx_compiler:
273
+ self._cxx_compiler = new_compiler(compiler=compiler_type,
274
+ verbose=self.verbose,
275
+ dry_run=self.dry_run,
276
+ force=self.force)
277
+ compiler = self._cxx_compiler
278
+ compiler.customize(self.distribution, need_cxx=need_cxx_compiler)
279
+ compiler.customize_cmd(self)
280
+ compiler.show_customization()
281
+ self._cxx_compiler = compiler.cxx_compiler()
282
+ else:
283
+ self._cxx_compiler = None
284
+
285
+ # Initialize Fortran 77 compiler:
286
+ if need_f77_compiler:
287
+ ctype = self.fcompiler
288
+ self._f77_compiler = new_fcompiler(compiler=self.fcompiler,
289
+ verbose=self.verbose,
290
+ dry_run=self.dry_run,
291
+ force=self.force,
292
+ requiref90=False,
293
+ c_compiler=self.compiler)
294
+ fcompiler = self._f77_compiler
295
+ if fcompiler:
296
+ ctype = fcompiler.compiler_type
297
+ fcompiler.customize(self.distribution)
298
+ if fcompiler and fcompiler.get_version():
299
+ fcompiler.customize_cmd(self)
300
+ fcompiler.show_customization()
301
+ else:
302
+ self.warn('f77_compiler=%s is not available.' %
303
+ (ctype))
304
+ self._f77_compiler = None
305
+ else:
306
+ self._f77_compiler = None
307
+
308
+ # Initialize Fortran 90 compiler:
309
+ if need_f90_compiler:
310
+ ctype = self.fcompiler
311
+ self._f90_compiler = new_fcompiler(compiler=self.fcompiler,
312
+ verbose=self.verbose,
313
+ dry_run=self.dry_run,
314
+ force=self.force,
315
+ requiref90=True,
316
+ c_compiler=self.compiler)
317
+ fcompiler = self._f90_compiler
318
+ if fcompiler:
319
+ ctype = fcompiler.compiler_type
320
+ fcompiler.customize(self.distribution)
321
+ if fcompiler and fcompiler.get_version():
322
+ fcompiler.customize_cmd(self)
323
+ fcompiler.show_customization()
324
+ else:
325
+ self.warn('f90_compiler=%s is not available.' %
326
+ (ctype))
327
+ self._f90_compiler = None
328
+ else:
329
+ self._f90_compiler = None
330
+
331
+ # Build extensions
332
+ self.build_extensions()
333
+
334
+ # Copy over any extra DLL files
335
+ # FIXME: In the case where there are more than two packages,
336
+ # we blindly assume that both packages need all of the libraries,
337
+ # resulting in a larger wheel than is required. This should be fixed,
338
+ # but it's so rare that I won't bother to handle it.
339
+ pkg_roots = {
340
+ self.get_ext_fullname(ext.name).split('.')[0]
341
+ for ext in self.extensions
342
+ }
343
+ for pkg_root in pkg_roots:
344
+ shared_lib_dir = os.path.join(pkg_root, '.libs')
345
+ if not self.inplace:
346
+ shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir)
347
+ for fn in os.listdir(self.extra_dll_dir):
348
+ if not os.path.isdir(shared_lib_dir):
349
+ os.makedirs(shared_lib_dir)
350
+ if not fn.lower().endswith('.dll'):
351
+ continue
352
+ runtime_lib = os.path.join(self.extra_dll_dir, fn)
353
+ copy_file(runtime_lib, shared_lib_dir)
354
+
355
+ def swig_sources(self, sources, extensions=None):
356
+ # Do nothing. Swig sources have been handled in build_src command.
357
+ return sources
358
+
359
+ def build_extension(self, ext):
360
+ sources = ext.sources
361
+ if sources is None or not is_sequence(sources):
362
+ raise DistutilsSetupError(
363
+ ("in 'ext_modules' option (extension '%s'), " +
364
+ "'sources' must be present and must be " +
365
+ "a list of source filenames") % ext.name)
366
+ sources = list(sources)
367
+
368
+ if not sources:
369
+ return
370
+
371
+ fullname = self.get_ext_fullname(ext.name)
372
+ if self.inplace:
373
+ modpath = fullname.split('.')
374
+ package = '.'.join(modpath[0:-1])
375
+ base = modpath[-1]
376
+ build_py = self.get_finalized_command('build_py')
377
+ package_dir = build_py.get_package_dir(package)
378
+ ext_filename = os.path.join(package_dir,
379
+ self.get_ext_filename(base))
380
+ else:
381
+ ext_filename = os.path.join(self.build_lib,
382
+ self.get_ext_filename(fullname))
383
+ depends = sources + ext.depends
384
+
385
+ force_rebuild = self.force
386
+ if not self.disable_optimization and not self.compiler_opt.is_cached():
387
+ log.debug("Detected changes on compiler optimizations")
388
+ force_rebuild = True
389
+ if not (force_rebuild or newer_group(depends, ext_filename, 'newer')):
390
+ log.debug("skipping '%s' extension (up-to-date)", ext.name)
391
+ return
392
+ else:
393
+ log.info("building '%s' extension", ext.name)
394
+
395
+ extra_args = ext.extra_compile_args or []
396
+ extra_cflags = getattr(ext, 'extra_c_compile_args', None) or []
397
+ extra_cxxflags = getattr(ext, 'extra_cxx_compile_args', None) or []
398
+
399
+ macros = ext.define_macros[:]
400
+ for undef in ext.undef_macros:
401
+ macros.append((undef,))
402
+
403
+ c_sources, cxx_sources, f_sources, fmodule_sources = \
404
+ filter_sources(ext.sources)
405
+
406
+ if self.compiler.compiler_type == 'msvc':
407
+ if cxx_sources:
408
+ # Needed to compile kiva.agg._agg extension.
409
+ extra_args.append('/Zm1000')
410
+ extra_cflags += extra_cxxflags
411
+ # this hack works around the msvc compiler attributes
412
+ # problem; msvc uses its own convention :(
413
+ c_sources += cxx_sources
414
+ cxx_sources = []
415
+
416
+ # Set Fortran/C++ compilers for compilation and linking.
417
+ if ext.language == 'f90':
418
+ fcompiler = self._f90_compiler
419
+ elif ext.language == 'f77':
420
+ fcompiler = self._f77_compiler
421
+ else: # in case ext.language is c++, for instance
422
+ fcompiler = self._f90_compiler or self._f77_compiler
423
+ if fcompiler is not None:
424
+ fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(
425
+ ext, 'extra_f77_compile_args') else []
426
+ fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(
427
+ ext, 'extra_f90_compile_args') else []
428
+ cxx_compiler = self._cxx_compiler
429
+
430
+ # check for the availability of required compilers
431
+ if cxx_sources and cxx_compiler is None:
432
+ raise DistutilsError("extension %r has C++ sources"
433
+ "but no C++ compiler found" % (ext.name))
434
+ if (f_sources or fmodule_sources) and fcompiler is None:
435
+ raise DistutilsError("extension %r has Fortran sources "
436
+ "but no Fortran compiler found" % (ext.name))
437
+ if ext.language in ['f77', 'f90'] and fcompiler is None:
438
+ self.warn("extension %r has Fortran libraries "
439
+ "but no Fortran linker found, using default linker" % (ext.name))
440
+ if ext.language == 'c++' and cxx_compiler is None:
441
+ self.warn("extension %r has C++ libraries "
442
+ "but no C++ linker found, using default linker" % (ext.name))
443
+
444
+ kws = {'depends': ext.depends}
445
+ output_dir = self.build_temp
446
+
447
+ include_dirs = ext.include_dirs + get_numpy_include_dirs()
448
+
449
+ # filter C dispatch-table sources when optimization is not disabled;
450
+ # otherwise they are treated as normal sources.
451
+ copt_c_sources = []
452
+ copt_cxx_sources = []
453
+ copt_baseline_flags = []
454
+ copt_macros = []
455
+ if not self.disable_optimization:
456
+ bsrc_dir = self.get_finalized_command("build_src").build_src
457
+ dispatch_hpath = os.path.join("numpy", "distutils", "include")
458
+ dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
459
+ include_dirs.append(dispatch_hpath)
460
+
461
+ # copt_build_src = None if self.inplace else bsrc_dir
462
+ # Always generate the generated config files and
463
+ # dispatch-able sources inside the build directory,
464
+ # even if the build option `inplace` is enabled.
465
+ # This approach prevents conflicts with Meson-generated
466
+ # config headers. Since `spin build --clean` will not remove
467
+ # these headers, they might overwrite the generated Meson headers,
468
+ # causing compatibility issues. Maintaining separate directories
469
+ # ensures compatibility between distutils dispatch config headers
470
+ # and Meson headers, avoiding build disruptions.
471
+ # See gh-24450 for more details.
472
+ copt_build_src = bsrc_dir
473
+ for _srcs, _dst, _ext in (
474
+ ((c_sources,), copt_c_sources, ('.dispatch.c',)),
475
+ ((c_sources, cxx_sources), copt_cxx_sources,
476
+ ('.dispatch.cpp', '.dispatch.cxx'))
477
+ ):
478
+ for _src in _srcs:
479
+ _dst += [
480
+ _src.pop(_src.index(s))
481
+ for s in _src[:] if s.endswith(_ext)
482
+ ]
483
+ copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
484
+ else:
485
+ copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))
486
+
487
+ c_objects = []
488
+ if copt_cxx_sources:
489
+ log.info("compiling C++ dispatch-able sources")
490
+ c_objects += self.compiler_opt.try_dispatch(
491
+ copt_cxx_sources,
492
+ output_dir=output_dir,
493
+ src_dir=copt_build_src,
494
+ macros=macros + copt_macros,
495
+ include_dirs=include_dirs,
496
+ debug=self.debug,
497
+ extra_postargs=extra_args + extra_cxxflags,
498
+ ccompiler=cxx_compiler,
499
+ **kws
500
+ )
501
+ if copt_c_sources:
502
+ log.info("compiling C dispatch-able sources")
503
+ c_objects += self.compiler_opt.try_dispatch(
504
+ copt_c_sources,
505
+ output_dir=output_dir,
506
+ src_dir=copt_build_src,
507
+ macros=macros + copt_macros,
508
+ include_dirs=include_dirs,
509
+ debug=self.debug,
510
+ extra_postargs=extra_args + extra_cflags,
511
+ **kws)
512
+ if c_sources:
513
+ log.info("compiling C sources")
514
+ c_objects += self.compiler.compile(
515
+ c_sources,
516
+ output_dir=output_dir,
517
+ macros=macros + copt_macros,
518
+ include_dirs=include_dirs,
519
+ debug=self.debug,
520
+ extra_postargs=(extra_args + copt_baseline_flags +
521
+ extra_cflags),
522
+ **kws)
523
+ if cxx_sources:
524
+ log.info("compiling C++ sources")
525
+ c_objects += cxx_compiler.compile(
526
+ cxx_sources,
527
+ output_dir=output_dir,
528
+ macros=macros + copt_macros,
529
+ include_dirs=include_dirs,
530
+ debug=self.debug,
531
+ extra_postargs=(extra_args + copt_baseline_flags +
532
+ extra_cxxflags),
533
+ **kws)
534
+
535
+ extra_postargs = []
536
+ f_objects = []
537
+ if fmodule_sources:
538
+ log.info("compiling Fortran 90 module sources")
539
+ module_dirs = ext.module_dirs[:]
540
+ module_build_dir = os.path.join(
541
+ self.build_temp, os.path.dirname(
542
+ self.get_ext_filename(fullname)))
543
+
544
+ self.mkpath(module_build_dir)
545
+ if fcompiler.module_dir_switch is None:
546
+ existing_modules = glob('*.mod')
547
+ extra_postargs += fcompiler.module_options(
548
+ module_dirs, module_build_dir)
549
+ f_objects += fcompiler.compile(fmodule_sources,
550
+ output_dir=self.build_temp,
551
+ macros=macros,
552
+ include_dirs=include_dirs,
553
+ debug=self.debug,
554
+ extra_postargs=extra_postargs,
555
+ depends=ext.depends)
556
+
557
+ if fcompiler.module_dir_switch is None:
558
+ for f in glob('*.mod'):
559
+ if f in existing_modules:
560
+ continue
561
+ t = os.path.join(module_build_dir, f)
562
+ if os.path.abspath(f) == os.path.abspath(t):
563
+ continue
564
+ if os.path.isfile(t):
565
+ os.remove(t)
566
+ try:
567
+ self.move_file(f, module_build_dir)
568
+ except DistutilsFileError:
569
+ log.warn('failed to move %r to %r' %
570
+ (f, module_build_dir))
571
+ if f_sources:
572
+ log.info("compiling Fortran sources")
573
+ f_objects += fcompiler.compile(f_sources,
574
+ output_dir=self.build_temp,
575
+ macros=macros,
576
+ include_dirs=include_dirs,
577
+ debug=self.debug,
578
+ extra_postargs=extra_postargs,
579
+ depends=ext.depends)
580
+
581
+ if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
582
+ unlinkable_fobjects = f_objects
583
+ objects = c_objects
584
+ else:
585
+ unlinkable_fobjects = []
586
+ objects = c_objects + f_objects
587
+
588
+ if ext.extra_objects:
589
+ objects.extend(ext.extra_objects)
590
+ extra_args = ext.extra_link_args or []
591
+ libraries = self.get_libraries(ext)[:]
592
+ library_dirs = ext.library_dirs[:]
593
+
594
+ linker = self.compiler.link_shared_object
595
+ # Always use system linker when using MSVC compiler.
596
+ if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
597
+ # expand libraries with fcompiler libraries as we are
598
+ # not using fcompiler linker
599
+ self._libs_with_msvc_and_fortran(
600
+ fcompiler, libraries, library_dirs)
601
+ if ext.runtime_library_dirs:
602
+ # gcc adds RPATH to the link. On windows, copy the dll into
603
+ # self.extra_dll_dir instead.
604
+ for d in ext.runtime_library_dirs:
605
+ for f in glob(d + '/*.dll'):
606
+ copy_file(f, self.extra_dll_dir)
607
+ ext.runtime_library_dirs = []
608
+
609
+ elif ext.language in ['f77', 'f90'] and fcompiler is not None:
610
+ linker = fcompiler.link_shared_object
611
+ if ext.language == 'c++' and cxx_compiler is not None:
612
+ linker = cxx_compiler.link_shared_object
613
+
614
+ if fcompiler is not None:
615
+ objects, libraries = self._process_unlinkable_fobjects(
616
+ objects, libraries,
617
+ fcompiler, library_dirs,
618
+ unlinkable_fobjects)
619
+
620
+ linker(objects, ext_filename,
621
+ libraries=libraries,
622
+ library_dirs=library_dirs,
623
+ runtime_library_dirs=ext.runtime_library_dirs,
624
+ extra_postargs=extra_args,
625
+ export_symbols=self.get_export_symbols(ext),
626
+ debug=self.debug,
627
+ build_temp=self.build_temp,
628
+ target_lang=ext.language)
629
+
630
+ def _add_dummy_mingwex_sym(self, c_sources):
631
+ build_src = self.get_finalized_command("build_src").build_src
632
+ build_clib = self.get_finalized_command("build_clib").build_clib
633
+ objects = self.compiler.compile([os.path.join(build_src,
634
+ "gfortran_vs2003_hack.c")],
635
+ output_dir=self.build_temp)
636
+ self.compiler.create_static_lib(
637
+ objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug)
638
+
639
+ def _process_unlinkable_fobjects(self, objects, libraries,
640
+ fcompiler, library_dirs,
641
+ unlinkable_fobjects):
642
+ libraries = list(libraries)
643
+ objects = list(objects)
644
+ unlinkable_fobjects = list(unlinkable_fobjects)
645
+
646
+ # Expand possible fake static libraries to objects;
647
+ # make sure to iterate over a copy of the list as
648
+ # "fake" libraries will be removed as they are
649
+ # encountered
650
+ for lib in libraries[:]:
651
+ for libdir in library_dirs:
652
+ fake_lib = os.path.join(libdir, lib + '.fobjects')
653
+ if os.path.isfile(fake_lib):
654
+ # Replace fake static library
655
+ libraries.remove(lib)
656
+ with open(fake_lib) as f:
657
+ unlinkable_fobjects.extend(f.read().splitlines())
658
+
659
+ # Expand C objects
660
+ c_lib = os.path.join(libdir, lib + '.cobjects')
661
+ with open(c_lib) as f:
662
+ objects.extend(f.read().splitlines())
663
+
664
+ # Wrap unlinkable objects to a linkable one
665
+ if unlinkable_fobjects:
666
+ fobjects = [os.path.abspath(obj) for obj in unlinkable_fobjects]
667
+ wrapped = fcompiler.wrap_unlinkable_objects(
668
+ fobjects, output_dir=self.build_temp,
669
+ extra_dll_dir=self.extra_dll_dir)
670
+ objects.extend(wrapped)
671
+
672
+ return objects, libraries
673
+
674
+ def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries,
675
+ c_library_dirs):
676
+ if fcompiler is None:
677
+ return
678
+
679
+ for libname in c_libraries:
680
+ if libname.startswith('msvc'):
681
+ continue
682
+ fileexists = False
683
+ for libdir in c_library_dirs or []:
684
+ libfile = os.path.join(libdir, '%s.lib' % (libname))
685
+ if os.path.isfile(libfile):
686
+ fileexists = True
687
+ break
688
+ if fileexists:
689
+ continue
690
+ # make g77-compiled static libs available to MSVC
691
+ fileexists = False
692
+ for libdir in c_library_dirs:
693
+ libfile = os.path.join(libdir, 'lib%s.a' % (libname))
694
+ if os.path.isfile(libfile):
695
+ # copy libname.a file to name.lib so that MSVC linker
696
+ # can find it
697
+ libfile2 = os.path.join(self.build_temp, libname + '.lib')
698
+ copy_file(libfile, libfile2)
699
+ if self.build_temp not in c_library_dirs:
700
+ c_library_dirs.append(self.build_temp)
701
+ fileexists = True
702
+ break
703
+ if fileexists:
704
+ continue
705
+ log.warn('could not find library %r in directories %s'
706
+ % (libname, c_library_dirs))
707
+
708
+ # Always use system linker when using MSVC compiler.
709
+ f_lib_dirs = []
710
+ for dir in fcompiler.library_dirs:
711
+ # correct path when compiling in Cygwin but with normal Win
712
+ # Python
713
+ if dir.startswith('/usr/lib'):
714
+ try:
715
+ dir = subprocess.check_output(['cygpath', '-w', dir])
716
+ except (OSError, subprocess.CalledProcessError):
717
+ pass
718
+ else:
719
+ dir = filepath_from_subprocess_output(dir)
720
+ f_lib_dirs.append(dir)
721
+ c_library_dirs.extend(f_lib_dirs)
722
+
723
+ # make g77-compiled static libs available to MSVC
724
+ for lib in fcompiler.libraries:
725
+ if not lib.startswith('msvc'):
726
+ c_libraries.append(lib)
727
+ p = combine_paths(f_lib_dirs, 'lib' + lib + '.a')
728
+ if p:
729
+ dst_name = os.path.join(self.build_temp, lib + '.lib')
730
+ if not os.path.isfile(dst_name):
731
+ copy_file(p[0], dst_name)
732
+ if self.build_temp not in c_library_dirs:
733
+ c_library_dirs.append(self.build_temp)
734
+
735
+ def get_source_files(self):
736
+ self.check_extensions_list(self.extensions)
737
+ filenames = []
738
+ for ext in self.extensions:
739
+ filenames.extend(get_ext_source_files(ext))
740
+ return filenames
741
+
742
+ def get_outputs(self):
743
+ self.check_extensions_list(self.extensions)
744
+
745
+ outputs = []
746
+ for ext in self.extensions:
747
+ if not ext.sources:
748
+ continue
749
+ fullname = self.get_ext_fullname(ext.name)
750
+ outputs.append(os.path.join(self.build_lib,
751
+ self.get_ext_filename(fullname)))
752
+ return outputs
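For context, the linker-selection comment near the top of this hunk (prefer the Fortran linker when Fortran sources are present, then C++, then C, with an override through the extension's `language` attribute) is normally driven from a numpy.distutils setup script. Below is a minimal, hypothetical sketch of forcing the C++ linker; the package name, extension name and source files are placeholders, not part of this commit.

# Hypothetical setup.py fragment illustrating the language override that
# build_ext handles above; 'mypkg', '_core' and the sources are placeholders.
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    # language='c++' makes build_ext pick the C++ linker even though the
    # sources list also contains a plain C file.
    config.add_extension('_core',
                         sources=['src/_core.cpp', 'src/helpers.c'],
                         language='c++')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)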
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_py.py ADDED
@@ -0,0 +1,31 @@
1
+ from distutils.command.build_py import build_py as old_build_py
2
+ from numpy.distutils.misc_util import is_string
3
+
4
+ class build_py(old_build_py):
5
+
6
+ def run(self):
7
+ build_src = self.get_finalized_command('build_src')
8
+ if build_src.py_modules_dict and self.packages is None:
9
+ self.packages = list(build_src.py_modules_dict.keys ())
10
+ old_build_py.run(self)
11
+
12
+ def find_package_modules(self, package, package_dir):
13
+ modules = old_build_py.find_package_modules(self, package, package_dir)
14
+
15
+ # Find build_src generated *.py files.
16
+ build_src = self.get_finalized_command('build_src')
17
+ modules += build_src.py_modules_dict.get(package, [])
18
+
19
+ return modules
20
+
21
+ def find_modules(self):
22
+ old_py_modules = self.py_modules[:]
23
+ new_py_modules = [_m for _m in self.py_modules if is_string(_m)]
24
+ self.py_modules[:] = new_py_modules
25
+ modules = old_build_py.find_modules(self)
26
+ self.py_modules[:] = old_py_modules
27
+
28
+ return modules
29
+
30
+ # XXX: Fix find_source_files for item in py_modules such that item is a 3-tuple
31
+ # and item[2] is the source file.
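For context, a sketch of the kind of three-element py_modules entry that build_src records in py_modules_dict and that the find_package_modules override above then picks up. The names and the setup() call are hypothetical, assuming a numpy.distutils-based setup script.

# Hypothetical generated-module entry: build_src calls the function with the
# target path, and the build_py subclass above later lists the written file
# under the 'mypkg' package.
def generate_version_py(target):
    with open(target, 'w') as f:
        f.write("version = '0.0.0'\n")   # placeholder content
    return target

# e.g. setup(..., py_modules=[('mypkg', 'version', generate_version_py)])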
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_scripts.py ADDED
@@ -0,0 +1,49 @@
1
+ """ Modified version of build_scripts that handles building scripts from functions.
2
+
3
+ """
4
+ from distutils.command.build_scripts import build_scripts as old_build_scripts
5
+ from numpy.distutils import log
6
+ from numpy.distutils.misc_util import is_string
7
+
8
+ class build_scripts(old_build_scripts):
9
+
10
+ def generate_scripts(self, scripts):
11
+ new_scripts = []
12
+ func_scripts = []
13
+ for script in scripts:
14
+ if is_string(script):
15
+ new_scripts.append(script)
16
+ else:
17
+ func_scripts.append(script)
18
+ if not func_scripts:
19
+ return new_scripts
20
+
21
+ build_dir = self.build_dir
22
+ self.mkpath(build_dir)
23
+ for func in func_scripts:
24
+ script = func(build_dir)
25
+ if not script:
26
+ continue
27
+ if is_string(script):
28
+ log.info(" adding '%s' to scripts" % (script,))
29
+ new_scripts.append(script)
30
+ else:
31
+ [log.info(" adding '%s' to scripts" % (s,)) for s in script]
32
+ new_scripts.extend(list(script))
33
+ return new_scripts
34
+
35
+ def run (self):
36
+ if not self.scripts:
37
+ return
38
+
39
+ self.scripts = self.generate_scripts(self.scripts)
40
+ # Now make sure that the distribution object has this list of scripts.
41
+ # setuptools' develop command requires that this be a list of filenames,
42
+ # not functions.
43
+ self.distribution.scripts = self.scripts
44
+
45
+ return old_build_scripts.run(self)
46
+
47
+ def get_source_files(self):
48
+ from numpy.distutils.misc_util import get_script_files
49
+ return get_script_files(self.scripts)
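A small sketch of the "script from a function" entries that generate_scripts above accepts: the callable receives the build directory and returns the path (or a list of paths) of whatever it wrote, or None to skip. The tool and package names below are illustrative only.

import os

# Hypothetical script generator: writes a launcher into the build directory
# and returns its path so build_scripts installs it like a normal script.
def make_launcher(build_dir):
    path = os.path.join(build_dir, 'mytool')
    with open(path, 'w') as f:
        f.write('#!/usr/bin/env python\n'
                'from mypkg.cli import main\n'
                'main()\n')
    return path

# e.g. setup(..., scripts=[make_launcher, 'scripts/plain_script.py'])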
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/build_src.py ADDED
@@ -0,0 +1,773 @@
1
+ """ Build swig and f2py sources.
2
+ """
3
+ import os
4
+ import re
5
+ import sys
6
+ import shlex
7
+ import copy
8
+
9
+ from distutils.command import build_ext
10
+ from distutils.dep_util import newer_group, newer
11
+ from distutils.util import get_platform
12
+ from distutils.errors import DistutilsError, DistutilsSetupError
13
+
14
+
15
+ # this import can't be done here, as it uses numpy stuff only available
16
+ # after it's installed
17
+ #import numpy.f2py
18
+ from numpy.distutils import log
19
+ from numpy.distutils.misc_util import (
20
+ fortran_ext_match, appendpath, is_string, is_sequence, get_cmd
21
+ )
22
+ from numpy.distutils.from_template import process_file as process_f_file
23
+ from numpy.distutils.conv_template import process_file as process_c_file
24
+
25
+ def subst_vars(target, source, d):
26
+ """Substitute any occurrence of @foo@ by d['foo'] from source file into
27
+ target."""
28
+ var = re.compile('@([a-zA-Z_]+)@')
29
+ with open(source, 'r') as fs:
30
+ with open(target, 'w') as ft:
31
+ for l in fs:
32
+ m = var.search(l)
33
+ if m:
34
+ ft.write(l.replace('@%s@' % m.group(1), d[m.group(1)]))
35
+ else:
36
+ ft.write(l)
37
+
38
+ class build_src(build_ext.build_ext):
39
+
40
+ description = "build sources from SWIG, F2PY files or a function"
41
+
42
+ user_options = [
43
+ ('build-src=', 'd', "directory to \"build\" sources to"),
44
+ ('f2py-opts=', None, "list of f2py command line options"),
45
+ ('swig=', None, "path to the SWIG executable"),
46
+ ('swig-opts=', None, "list of SWIG command line options"),
47
+ ('swig-cpp', None, "make SWIG create C++ files (default is autodetected from sources)"),
48
+ ('f2pyflags=', None, "additional flags to f2py (use --f2py-opts= instead)"), # obsolete
49
+ ('swigflags=', None, "additional flags to swig (use --swig-opts= instead)"), # obsolete
50
+ ('force', 'f', "forcibly build everything (ignore file timestamps)"),
51
+ ('inplace', 'i',
52
+ "ignore build-lib and put compiled extensions into the source " +
53
+ "directory alongside your pure Python modules"),
54
+ ('verbose-cfg', None,
55
+ "change logging level from WARN to INFO which will show all " +
56
+ "compiler output")
57
+ ]
58
+
59
+ boolean_options = ['force', 'inplace', 'verbose-cfg']
60
+
61
+ help_options = []
62
+
63
+ def initialize_options(self):
64
+ self.extensions = None
65
+ self.package = None
66
+ self.py_modules = None
67
+ self.py_modules_dict = None
68
+ self.build_src = None
69
+ self.build_lib = None
70
+ self.build_base = None
71
+ self.force = None
72
+ self.inplace = None
73
+ self.package_dir = None
74
+ self.f2pyflags = None # obsolete
75
+ self.f2py_opts = None
76
+ self.swigflags = None # obsolete
77
+ self.swig_opts = None
78
+ self.swig_cpp = None
79
+ self.swig = None
80
+ self.verbose_cfg = None
81
+
82
+ def finalize_options(self):
83
+ self.set_undefined_options('build',
84
+ ('build_base', 'build_base'),
85
+ ('build_lib', 'build_lib'),
86
+ ('force', 'force'))
87
+ if self.package is None:
88
+ self.package = self.distribution.ext_package
89
+ self.extensions = self.distribution.ext_modules
90
+ self.libraries = self.distribution.libraries or []
91
+ self.py_modules = self.distribution.py_modules or []
92
+ self.data_files = self.distribution.data_files or []
93
+
94
+ if self.build_src is None:
95
+ plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2])
96
+ self.build_src = os.path.join(self.build_base, 'src'+plat_specifier)
97
+
98
+ # py_modules_dict is used in build_py.find_package_modules
99
+ self.py_modules_dict = {}
100
+
101
+ if self.f2pyflags:
102
+ if self.f2py_opts:
103
+ log.warn('ignoring --f2pyflags as --f2py-opts already used')
104
+ else:
105
+ self.f2py_opts = self.f2pyflags
106
+ self.f2pyflags = None
107
+ if self.f2py_opts is None:
108
+ self.f2py_opts = []
109
+ else:
110
+ self.f2py_opts = shlex.split(self.f2py_opts)
111
+
112
+ if self.swigflags:
113
+ if self.swig_opts:
114
+ log.warn('ignoring --swigflags as --swig-opts already used')
115
+ else:
116
+ self.swig_opts = self.swigflags
117
+ self.swigflags = None
118
+
119
+ if self.swig_opts is None:
120
+ self.swig_opts = []
121
+ else:
122
+ self.swig_opts = shlex.split(self.swig_opts)
123
+
124
+ # use options from build_ext command
125
+ build_ext = self.get_finalized_command('build_ext')
126
+ if self.inplace is None:
127
+ self.inplace = build_ext.inplace
128
+ if self.swig_cpp is None:
129
+ self.swig_cpp = build_ext.swig_cpp
130
+ for c in ['swig', 'swig_opt']:
131
+ o = '--'+c.replace('_', '-')
132
+ v = getattr(build_ext, c, None)
133
+ if v:
134
+ if getattr(self, c):
135
+ log.warn('both build_src and build_ext define %s option' % (o))
136
+ else:
137
+ log.info('using "%s=%s" option from build_ext command' % (o, v))
138
+ setattr(self, c, v)
139
+
140
+ def run(self):
141
+ log.info("build_src")
142
+ if not (self.extensions or self.libraries):
143
+ return
144
+ self.build_sources()
145
+
146
+ def build_sources(self):
147
+
148
+ if self.inplace:
149
+ self.get_package_dir = \
150
+ self.get_finalized_command('build_py').get_package_dir
151
+
152
+ self.build_py_modules_sources()
153
+
154
+ for libname_info in self.libraries:
155
+ self.build_library_sources(*libname_info)
156
+
157
+ if self.extensions:
158
+ self.check_extensions_list(self.extensions)
159
+
160
+ for ext in self.extensions:
161
+ self.build_extension_sources(ext)
162
+
163
+ self.build_data_files_sources()
164
+ self.build_npy_pkg_config()
165
+
166
+ def build_data_files_sources(self):
167
+ if not self.data_files:
168
+ return
169
+ log.info('building data_files sources')
170
+ from numpy.distutils.misc_util import get_data_files
171
+ new_data_files = []
172
+ for data in self.data_files:
173
+ if isinstance(data, str):
174
+ new_data_files.append(data)
175
+ elif isinstance(data, tuple):
176
+ d, files = data
177
+ if self.inplace:
178
+ build_dir = self.get_package_dir('.'.join(d.split(os.sep)))
179
+ else:
180
+ build_dir = os.path.join(self.build_src, d)
181
+ funcs = [f for f in files if hasattr(f, '__call__')]
182
+ files = [f for f in files if not hasattr(f, '__call__')]
183
+ for f in funcs:
184
+ if f.__code__.co_argcount==1:
185
+ s = f(build_dir)
186
+ else:
187
+ s = f()
188
+ if s is not None:
189
+ if isinstance(s, list):
190
+ files.extend(s)
191
+ elif isinstance(s, str):
192
+ files.append(s)
193
+ else:
194
+ raise TypeError(repr(s))
195
+ filenames = get_data_files((d, files))
196
+ new_data_files.append((d, filenames))
197
+ else:
198
+ raise TypeError(repr(data))
199
+ self.data_files[:] = new_data_files
200
+
201
+
202
+ def _build_npy_pkg_config(self, info, gd):
203
+ template, install_dir, subst_dict = info
204
+ template_dir = os.path.dirname(template)
205
+ for k, v in gd.items():
206
+ subst_dict[k] = v
207
+
208
+ if self.inplace == 1:
209
+ generated_dir = os.path.join(template_dir, install_dir)
210
+ else:
211
+ generated_dir = os.path.join(self.build_src, template_dir,
212
+ install_dir)
213
+ generated = os.path.basename(os.path.splitext(template)[0])
214
+ generated_path = os.path.join(generated_dir, generated)
215
+ if not os.path.exists(generated_dir):
216
+ os.makedirs(generated_dir)
217
+
218
+ subst_vars(generated_path, template, subst_dict)
219
+
220
+ # Where to install relatively to install prefix
221
+ full_install_dir = os.path.join(template_dir, install_dir)
222
+ return full_install_dir, generated_path
223
+
224
+ def build_npy_pkg_config(self):
225
+ log.info('build_src: building npy-pkg config files')
226
+
227
+ # XXX: another ugly workaround to circumvent distutils brain damage. We
228
+ # need the install prefix here, but finalizing the options of the
229
+ # install command when only building sources causes an error. Instead, we
230
+ # copy the install command instance, and finalize the copy so that it
231
+ # does not disrupt how distutils wants to do things with the
232
+ # original install command instance.
233
+ install_cmd = copy.copy(get_cmd('install'))
234
+ if not install_cmd.finalized == 1:
235
+ install_cmd.finalize_options()
236
+ build_npkg = False
237
+ if self.inplace == 1:
238
+ top_prefix = '.'
239
+ build_npkg = True
240
+ elif hasattr(install_cmd, 'install_libbase'):
241
+ top_prefix = install_cmd.install_libbase
242
+ build_npkg = True
243
+
244
+ if build_npkg:
245
+ for pkg, infos in self.distribution.installed_pkg_config.items():
246
+ pkg_path = self.distribution.package_dir[pkg]
247
+ prefix = os.path.join(os.path.abspath(top_prefix), pkg_path)
248
+ d = {'prefix': prefix}
249
+ for info in infos:
250
+ install_dir, generated = self._build_npy_pkg_config(info, d)
251
+ self.distribution.data_files.append((install_dir,
252
+ [generated]))
253
+
254
+ def build_py_modules_sources(self):
255
+ if not self.py_modules:
256
+ return
257
+ log.info('building py_modules sources')
258
+ new_py_modules = []
259
+ for source in self.py_modules:
260
+ if is_sequence(source) and len(source)==3:
261
+ package, module_base, source = source
262
+ if self.inplace:
263
+ build_dir = self.get_package_dir(package)
264
+ else:
265
+ build_dir = os.path.join(self.build_src,
266
+ os.path.join(*package.split('.')))
267
+ if hasattr(source, '__call__'):
268
+ target = os.path.join(build_dir, module_base + '.py')
269
+ source = source(target)
270
+ if source is None:
271
+ continue
272
+ modules = [(package, module_base, source)]
273
+ if package not in self.py_modules_dict:
274
+ self.py_modules_dict[package] = []
275
+ self.py_modules_dict[package] += modules
276
+ else:
277
+ new_py_modules.append(source)
278
+ self.py_modules[:] = new_py_modules
279
+
280
+ def build_library_sources(self, lib_name, build_info):
281
+ sources = list(build_info.get('sources', []))
282
+
283
+ if not sources:
284
+ return
285
+
286
+ log.info('building library "%s" sources' % (lib_name))
287
+
288
+ sources = self.generate_sources(sources, (lib_name, build_info))
289
+
290
+ sources = self.template_sources(sources, (lib_name, build_info))
291
+
292
+ sources, h_files = self.filter_h_files(sources)
293
+
294
+ if h_files:
295
+ log.info('%s - nothing done with h_files = %s',
296
+ self.package, h_files)
297
+
298
+ #for f in h_files:
299
+ # self.distribution.headers.append((lib_name,f))
300
+
301
+ build_info['sources'] = sources
302
+ return
303
+
304
+ def build_extension_sources(self, ext):
305
+
306
+ sources = list(ext.sources)
307
+
308
+ log.info('building extension "%s" sources' % (ext.name))
309
+
310
+ fullname = self.get_ext_fullname(ext.name)
311
+
312
+ modpath = fullname.split('.')
313
+ package = '.'.join(modpath[0:-1])
314
+
315
+ if self.inplace:
316
+ self.ext_target_dir = self.get_package_dir(package)
317
+
318
+ sources = self.generate_sources(sources, ext)
319
+ sources = self.template_sources(sources, ext)
320
+ sources = self.swig_sources(sources, ext)
321
+ sources = self.f2py_sources(sources, ext)
322
+ sources = self.pyrex_sources(sources, ext)
323
+
324
+ sources, py_files = self.filter_py_files(sources)
325
+
326
+ if package not in self.py_modules_dict:
327
+ self.py_modules_dict[package] = []
328
+ modules = []
329
+ for f in py_files:
330
+ module = os.path.splitext(os.path.basename(f))[0]
331
+ modules.append((package, module, f))
332
+ self.py_modules_dict[package] += modules
333
+
334
+ sources, h_files = self.filter_h_files(sources)
335
+
336
+ if h_files:
337
+ log.info('%s - nothing done with h_files = %s',
338
+ package, h_files)
339
+ #for f in h_files:
340
+ # self.distribution.headers.append((package,f))
341
+
342
+ ext.sources = sources
343
+
344
+ def generate_sources(self, sources, extension):
345
+ new_sources = []
346
+ func_sources = []
347
+ for source in sources:
348
+ if is_string(source):
349
+ new_sources.append(source)
350
+ else:
351
+ func_sources.append(source)
352
+ if not func_sources:
353
+ return new_sources
354
+ if self.inplace and not is_sequence(extension):
355
+ build_dir = self.ext_target_dir
356
+ else:
357
+ if is_sequence(extension):
358
+ name = extension[0]
359
+ # if 'include_dirs' not in extension[1]:
360
+ # extension[1]['include_dirs'] = []
361
+ # incl_dirs = extension[1]['include_dirs']
362
+ else:
363
+ name = extension.name
364
+ # incl_dirs = extension.include_dirs
365
+ #if self.build_src not in incl_dirs:
366
+ # incl_dirs.append(self.build_src)
367
+ build_dir = os.path.join(*([self.build_src]
368
+ +name.split('.')[:-1]))
369
+ self.mkpath(build_dir)
370
+
371
+ if self.verbose_cfg:
372
+ new_level = log.INFO
373
+ else:
374
+ new_level = log.WARN
375
+ old_level = log.set_threshold(new_level)
376
+
377
+ for func in func_sources:
378
+ source = func(extension, build_dir)
379
+ if not source:
380
+ continue
381
+ if is_sequence(source):
382
+ [log.info(" adding '%s' to sources." % (s,)) for s in source]
383
+ new_sources.extend(source)
384
+ else:
385
+ log.info(" adding '%s' to sources." % (source,))
386
+ new_sources.append(source)
387
+ log.set_threshold(old_level)
388
+ return new_sources
389
+
390
+ def filter_py_files(self, sources):
391
+ return self.filter_files(sources, ['.py'])
392
+
393
+ def filter_h_files(self, sources):
394
+ return self.filter_files(sources, ['.h', '.hpp', '.inc'])
395
+
396
+ def filter_files(self, sources, exts = []):
397
+ new_sources = []
398
+ files = []
399
+ for source in sources:
400
+ (base, ext) = os.path.splitext(source)
401
+ if ext in exts:
402
+ files.append(source)
403
+ else:
404
+ new_sources.append(source)
405
+ return new_sources, files
406
+
407
+ def template_sources(self, sources, extension):
408
+ new_sources = []
409
+ if is_sequence(extension):
410
+ depends = extension[1].get('depends')
411
+ include_dirs = extension[1].get('include_dirs')
412
+ else:
413
+ depends = extension.depends
414
+ include_dirs = extension.include_dirs
415
+ for source in sources:
416
+ (base, ext) = os.path.splitext(source)
417
+ if ext == '.src': # Template file
418
+ if self.inplace:
419
+ target_dir = os.path.dirname(base)
420
+ else:
421
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
422
+ self.mkpath(target_dir)
423
+ target_file = os.path.join(target_dir, os.path.basename(base))
424
+ if (self.force or newer_group([source] + depends, target_file)):
425
+ if _f_pyf_ext_match(base):
426
+ log.info("from_template:> %s" % (target_file))
427
+ outstr = process_f_file(source)
428
+ else:
429
+ log.info("conv_template:> %s" % (target_file))
430
+ outstr = process_c_file(source)
431
+ with open(target_file, 'w') as fid:
432
+ fid.write(outstr)
433
+ if _header_ext_match(target_file):
434
+ d = os.path.dirname(target_file)
435
+ if d not in include_dirs:
436
+ log.info(" adding '%s' to include_dirs." % (d))
437
+ include_dirs.append(d)
438
+ new_sources.append(target_file)
439
+ else:
440
+ new_sources.append(source)
441
+ return new_sources
442
+
443
+ def pyrex_sources(self, sources, extension):
444
+ """Pyrex not supported; this remains for Cython support (see below)"""
445
+ new_sources = []
446
+ ext_name = extension.name.split('.')[-1]
447
+ for source in sources:
448
+ (base, ext) = os.path.splitext(source)
449
+ if ext == '.pyx':
450
+ target_file = self.generate_a_pyrex_source(base, ext_name,
451
+ source,
452
+ extension)
453
+ new_sources.append(target_file)
454
+ else:
455
+ new_sources.append(source)
456
+ return new_sources
457
+
458
+ def generate_a_pyrex_source(self, base, ext_name, source, extension):
459
+ """Pyrex is not supported, but some projects monkeypatch this method.
460
+
461
+ That allows compiling Cython code, see gh-6955.
462
+ This method will remain here for compatibility reasons.
463
+ """
464
+ return []
465
+
466
+ def f2py_sources(self, sources, extension):
467
+ new_sources = []
468
+ f2py_sources = []
469
+ f_sources = []
470
+ f2py_targets = {}
471
+ target_dirs = []
472
+ ext_name = extension.name.split('.')[-1]
473
+ skip_f2py = 0
474
+
475
+ for source in sources:
476
+ (base, ext) = os.path.splitext(source)
477
+ if ext == '.pyf': # F2PY interface file
478
+ if self.inplace:
479
+ target_dir = os.path.dirname(base)
480
+ else:
481
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
482
+ if os.path.isfile(source):
483
+ name = get_f2py_modulename(source)
484
+ if name != ext_name:
485
+ raise DistutilsSetupError('mismatch of extension names: %s '
486
+ 'provides %r but expected %r' % (
487
+ source, name, ext_name))
488
+ target_file = os.path.join(target_dir, name+'module.c')
489
+ else:
490
+ log.debug(' source %s does not exist: skipping f2py\'ing.' \
491
+ % (source))
492
+ name = ext_name
493
+ skip_f2py = 1
494
+ target_file = os.path.join(target_dir, name+'module.c')
495
+ if not os.path.isfile(target_file):
496
+ log.warn(' target %s does not exist:\n '\
497
+ 'Assuming %smodule.c was generated with '\
498
+ '"build_src --inplace" command.' \
499
+ % (target_file, name))
500
+ target_dir = os.path.dirname(base)
501
+ target_file = os.path.join(target_dir, name+'module.c')
502
+ if not os.path.isfile(target_file):
503
+ raise DistutilsSetupError("%r missing" % (target_file,))
504
+ log.info(' Yes! Using %r as up-to-date target.' \
505
+ % (target_file))
506
+ target_dirs.append(target_dir)
507
+ f2py_sources.append(source)
508
+ f2py_targets[source] = target_file
509
+ new_sources.append(target_file)
510
+ elif fortran_ext_match(ext):
511
+ f_sources.append(source)
512
+ else:
513
+ new_sources.append(source)
514
+
515
+ if not (f2py_sources or f_sources):
516
+ return new_sources
517
+
518
+ for d in target_dirs:
519
+ self.mkpath(d)
520
+
521
+ f2py_options = extension.f2py_options + self.f2py_opts
522
+
523
+ if self.distribution.libraries:
524
+ for name, build_info in self.distribution.libraries:
525
+ if name in extension.libraries:
526
+ f2py_options.extend(build_info.get('f2py_options', []))
527
+
528
+ log.info("f2py options: %s" % (f2py_options))
529
+
530
+ if f2py_sources:
531
+ if len(f2py_sources) != 1:
532
+ raise DistutilsSetupError(
533
+ 'only one .pyf file is allowed per extension module but got'\
534
+ ' more: %r' % (f2py_sources,))
535
+ source = f2py_sources[0]
536
+ target_file = f2py_targets[source]
537
+ target_dir = os.path.dirname(target_file) or '.'
538
+ depends = [source] + extension.depends
539
+ if (self.force or newer_group(depends, target_file, 'newer')) \
540
+ and not skip_f2py:
541
+ log.info("f2py: %s" % (source))
542
+ from numpy.f2py import f2py2e
543
+ f2py2e.run_main(f2py_options
544
+ + ['--build-dir', target_dir, source])
545
+ else:
546
+ log.debug(" skipping '%s' f2py interface (up-to-date)" % (source))
547
+ else:
548
+ #XXX TODO: --inplace support for sdist command
549
+ if is_sequence(extension):
550
+ name = extension[0]
551
+ else: name = extension.name
552
+ target_dir = os.path.join(*([self.build_src]
553
+ +name.split('.')[:-1]))
554
+ target_file = os.path.join(target_dir, ext_name + 'module.c')
555
+ new_sources.append(target_file)
556
+ depends = f_sources + extension.depends
557
+ if (self.force or newer_group(depends, target_file, 'newer')) \
558
+ and not skip_f2py:
559
+ log.info("f2py:> %s" % (target_file))
560
+ self.mkpath(target_dir)
561
+ from numpy.f2py import f2py2e
562
+ f2py2e.run_main(f2py_options + ['--lower',
563
+ '--build-dir', target_dir]+\
564
+ ['-m', ext_name]+f_sources)
565
+ else:
566
+ log.debug(" skipping f2py fortran files for '%s' (up-to-date)"\
567
+ % (target_file))
568
+
569
+ if not os.path.isfile(target_file):
570
+ raise DistutilsError("f2py target file %r not generated" % (target_file,))
571
+
572
+ build_dir = os.path.join(self.build_src, target_dir)
573
+ target_c = os.path.join(build_dir, 'fortranobject.c')
574
+ target_h = os.path.join(build_dir, 'fortranobject.h')
575
+ log.info(" adding '%s' to sources." % (target_c))
576
+ new_sources.append(target_c)
577
+ if build_dir not in extension.include_dirs:
578
+ log.info(" adding '%s' to include_dirs." % (build_dir))
579
+ extension.include_dirs.append(build_dir)
580
+
581
+ if not skip_f2py:
582
+ import numpy.f2py
583
+ d = os.path.dirname(numpy.f2py.__file__)
584
+ source_c = os.path.join(d, 'src', 'fortranobject.c')
585
+ source_h = os.path.join(d, 'src', 'fortranobject.h')
586
+ if newer(source_c, target_c) or newer(source_h, target_h):
587
+ self.mkpath(os.path.dirname(target_c))
588
+ self.copy_file(source_c, target_c)
589
+ self.copy_file(source_h, target_h)
590
+ else:
591
+ if not os.path.isfile(target_c):
592
+ raise DistutilsSetupError("f2py target_c file %r not found" % (target_c,))
593
+ if not os.path.isfile(target_h):
594
+ raise DistutilsSetupError("f2py target_h file %r not found" % (target_h,))
595
+
596
+ for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
597
+ filename = os.path.join(target_dir, ext_name + name_ext)
598
+ if os.path.isfile(filename):
599
+ log.info(" adding '%s' to sources." % (filename))
600
+ f_sources.append(filename)
601
+
602
+ return new_sources + f_sources
603
+
604
+ def swig_sources(self, sources, extension):
605
+ # Assuming SWIG 1.3.14 or later. See compatibility note in
606
+ # http://www.swig.org/Doc1.3/Python.html#Python_nn6
607
+
608
+ new_sources = []
609
+ swig_sources = []
610
+ swig_targets = {}
611
+ target_dirs = []
612
+ py_files = [] # swig generated .py files
613
+ target_ext = '.c'
614
+ if '-c++' in extension.swig_opts:
615
+ typ = 'c++'
616
+ is_cpp = True
617
+ extension.swig_opts.remove('-c++')
618
+ elif self.swig_cpp:
619
+ typ = 'c++'
620
+ is_cpp = True
621
+ else:
622
+ typ = None
623
+ is_cpp = False
624
+ skip_swig = 0
625
+ ext_name = extension.name.split('.')[-1]
626
+
627
+ for source in sources:
628
+ (base, ext) = os.path.splitext(source)
629
+ if ext == '.i': # SWIG interface file
630
+ # the code below assumes that the sources list
631
+ # contains not more than one .i SWIG interface file
632
+ if self.inplace:
633
+ target_dir = os.path.dirname(base)
634
+ py_target_dir = self.ext_target_dir
635
+ else:
636
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
637
+ py_target_dir = target_dir
638
+ if os.path.isfile(source):
639
+ name = get_swig_modulename(source)
640
+ if name != ext_name[1:]:
641
+ raise DistutilsSetupError(
642
+ 'mismatch of extension names: %s provides %r'
643
+ ' but expected %r' % (source, name, ext_name[1:]))
644
+ if typ is None:
645
+ typ = get_swig_target(source)
646
+ is_cpp = typ=='c++'
647
+ else:
648
+ typ2 = get_swig_target(source)
649
+ if typ2 is None:
650
+ log.warn('source %r does not define swig target, assuming %s swig target' \
651
+ % (source, typ))
652
+ elif typ!=typ2:
653
+ log.warn('expected %r but source %r defines %r swig target' \
654
+ % (typ, source, typ2))
655
+ if typ2=='c++':
656
+ log.warn('resetting swig target to c++ (some targets may have .c extension)')
657
+ is_cpp = True
658
+ else:
659
+ log.warn('assuming that %r has c++ swig target' % (source))
660
+ if is_cpp:
661
+ target_ext = '.cpp'
662
+ target_file = os.path.join(target_dir, '%s_wrap%s' \
663
+ % (name, target_ext))
664
+ else:
665
+ log.warn(' source %s does not exist: skipping swig\'ing.' \
666
+ % (source))
667
+ name = ext_name[1:]
668
+ skip_swig = 1
669
+ target_file = _find_swig_target(target_dir, name)
670
+ if not os.path.isfile(target_file):
671
+ log.warn(' target %s does not exist:\n '\
672
+ 'Assuming %s_wrap.{c,cpp} was generated with '\
673
+ '"build_src --inplace" command.' \
674
+ % (target_file, name))
675
+ target_dir = os.path.dirname(base)
676
+ target_file = _find_swig_target(target_dir, name)
677
+ if not os.path.isfile(target_file):
678
+ raise DistutilsSetupError("%r missing" % (target_file,))
679
+ log.warn(' Yes! Using %r as up-to-date target.' \
680
+ % (target_file))
681
+ target_dirs.append(target_dir)
682
+ new_sources.append(target_file)
683
+ py_files.append(os.path.join(py_target_dir, name+'.py'))
684
+ swig_sources.append(source)
685
+ swig_targets[source] = new_sources[-1]
686
+ else:
687
+ new_sources.append(source)
688
+
689
+ if not swig_sources:
690
+ return new_sources
691
+
692
+ if skip_swig:
693
+ return new_sources + py_files
694
+
695
+ for d in target_dirs:
696
+ self.mkpath(d)
697
+
698
+ swig = self.swig or self.find_swig()
699
+ swig_cmd = [swig, "-python"] + extension.swig_opts
700
+ if is_cpp:
701
+ swig_cmd.append('-c++')
702
+ for d in extension.include_dirs:
703
+ swig_cmd.append('-I'+d)
704
+ for source in swig_sources:
705
+ target = swig_targets[source]
706
+ depends = [source] + extension.depends
707
+ if self.force or newer_group(depends, target, 'newer'):
708
+ log.info("%s: %s" % (os.path.basename(swig) \
709
+ + (is_cpp and '++' or ''), source))
710
+ self.spawn(swig_cmd + self.swig_opts \
711
+ + ["-o", target, '-outdir', py_target_dir, source])
712
+ else:
713
+ log.debug(" skipping '%s' swig interface (up-to-date)" \
714
+ % (source))
715
+
716
+ return new_sources + py_files
717
+
718
+ _f_pyf_ext_match = re.compile(r'.*\.(f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match
719
+ _header_ext_match = re.compile(r'.*\.(inc|h|hpp)\Z', re.I).match
720
+
721
+ #### SWIG related auxiliary functions ####
722
+ _swig_module_name_match = re.compile(r'\s*%module\s*(.*\(\s*package\s*=\s*"(?P<package>[\w_]+)".*\)|)\s*(?P<name>[\w_]+)',
723
+ re.I).match
724
+ _has_c_header = re.compile(r'-\*-\s*c\s*-\*-', re.I).search
725
+ _has_cpp_header = re.compile(r'-\*-\s*c\+\+\s*-\*-', re.I).search
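+ # Illustrative note (not in the original file): these patterns match
+ # Emacs-style mode markers, so a .i file whose first line contains
+ # "-*- c++ -*-" is reported as a C++ target by get_swig_target below.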
726
+
727
+ def get_swig_target(source):
728
+ with open(source) as f:
729
+ result = None
730
+ line = f.readline()
731
+ if _has_cpp_header(line):
732
+ result = 'c++'
733
+ if _has_c_header(line):
734
+ result = 'c'
735
+ return result
736
+
737
+ def get_swig_modulename(source):
738
+ with open(source) as f:
739
+ name = None
740
+ for line in f:
741
+ m = _swig_module_name_match(line)
742
+ if m:
743
+ name = m.group('name')
744
+ break
745
+ return name
746
+
747
+ def _find_swig_target(target_dir, name):
748
+ for ext in ['.cpp', '.c']:
749
+ target = os.path.join(target_dir, '%s_wrap%s' % (name, ext))
750
+ if os.path.isfile(target):
751
+ break
752
+ return target
753
+
754
+ #### F2PY related auxiliary functions ####
755
+
756
+ _f2py_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]+)',
757
+ re.I).match
758
+ _f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]*?'
759
+ r'__user__[\w_]*)', re.I).match
760
+
761
+ def get_f2py_modulename(source):
762
+ name = None
763
+ with open(source) as f:
764
+ for line in f:
765
+ m = _f2py_module_name_match(line)
766
+ if m:
767
+ if _f2py_user_module_name_match(line): # skip *__user__* names
768
+ continue
769
+ name = m.group('name')
770
+ break
771
+ return name
772
+
773
+ ##########################################
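For context, a hedged sketch of the .pyf branch that f2py_sources above implements: an extension whose sources include an f2py interface file gets a generated <name>module.c (plus fortranobject.c) before build_ext compiles it, and the module name declared inside the .pyf must match the extension name. The package and file names here are placeholders, not part of this commit.

# Hypothetical configuration exercising the .pyf branch of f2py_sources.
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    # 'flib.pyf' is assumed to contain "python module flib"; build_src turns
    # it into flibmodule.c and adds fortranobject.c to the sources.
    config.add_extension('flib', sources=['flib.pyf', 'flib.f90'])
    return config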
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/config.py ADDED
@@ -0,0 +1,516 @@
1
+ # Added Fortran compiler support to config. Currently useful only for
2
+ # try_compile call. try_run works but is untested for most Fortran
3
+ # compilers (they must define linker_exe first).
4
+ # Pearu Peterson
5
+ import os
6
+ import signal
7
+ import subprocess
8
+ import sys
9
+ import textwrap
10
+ import warnings
11
+
12
+ from distutils.command.config import config as old_config
13
+ from distutils.command.config import LANG_EXT
14
+ from distutils import log
15
+ from distutils.file_util import copy_file
16
+ from distutils.ccompiler import CompileError, LinkError
17
+ import distutils
18
+ from numpy.distutils.exec_command import filepath_from_subprocess_output
19
+ from numpy.distutils.mingw32ccompiler import generate_manifest
20
+ from numpy.distutils.command.autodist import (check_gcc_function_attribute,
21
+ check_gcc_function_attribute_with_intrinsics,
22
+ check_gcc_variable_attribute,
23
+ check_gcc_version_at_least,
24
+ check_inline,
25
+ check_restrict,
26
+ check_compiler_gcc)
27
+
28
+ LANG_EXT['f77'] = '.f'
29
+ LANG_EXT['f90'] = '.f90'
30
+
31
+ class config(old_config):
32
+ old_config.user_options += [
33
+ ('fcompiler=', None, "specify the Fortran compiler type"),
34
+ ]
35
+
36
+ def initialize_options(self):
37
+ self.fcompiler = None
38
+ old_config.initialize_options(self)
39
+
40
+ def _check_compiler (self):
41
+ old_config._check_compiler(self)
42
+ from numpy.distutils.fcompiler import FCompiler, new_fcompiler
43
+
44
+ if sys.platform == 'win32' and (self.compiler.compiler_type in
45
+ ('msvc', 'intelw', 'intelemw')):
46
+ # XXX: hack to circumvent a python 2.6 bug with msvc9compiler:
47
+ # initialize calls query_vcvarsall, which throws an IOError, and
48
+ # causes an error along the way without much information. We try to
49
+ # catch it here, hoping it is early enough, and print a helpful
50
+ # message instead of Error: None.
51
+ if not self.compiler.initialized:
52
+ try:
53
+ self.compiler.initialize()
54
+ except IOError as e:
55
+ msg = textwrap.dedent("""\
56
+ Could not initialize compiler instance: do you have Visual Studio
57
+ installed? If you are trying to build with MinGW, please use "python setup.py
58
+ build -c mingw32" instead. If you have Visual Studio installed, check it is
59
+ correctly installed, and the right version (VS 2015 as of this writing).
60
+
61
+ Original exception was: %s, and the Compiler class was %s
62
+ ============================================================================""") \
63
+ % (e, self.compiler.__class__.__name__)
64
+ print(textwrap.dedent("""\
65
+ ============================================================================"""))
66
+ raise distutils.errors.DistutilsPlatformError(msg) from e
67
+
68
+ # After MSVC is initialized, add an explicit /MANIFEST to linker
69
+ # flags. See issues gh-4245 and gh-4101 for details. Also
70
+ # relevant are issues 4431 and 16296 on the Python bug tracker.
71
+ from distutils import msvc9compiler
72
+ if msvc9compiler.get_build_version() >= 10:
73
+ for ldflags in [self.compiler.ldflags_shared,
74
+ self.compiler.ldflags_shared_debug]:
75
+ if '/MANIFEST' not in ldflags:
76
+ ldflags.append('/MANIFEST')
77
+
78
+ if not isinstance(self.fcompiler, FCompiler):
79
+ self.fcompiler = new_fcompiler(compiler=self.fcompiler,
80
+ dry_run=self.dry_run, force=1,
81
+ c_compiler=self.compiler)
82
+ if self.fcompiler is not None:
83
+ self.fcompiler.customize(self.distribution)
84
+ if self.fcompiler.get_version():
85
+ self.fcompiler.customize_cmd(self)
86
+ self.fcompiler.show_customization()
87
+
88
+ def _wrap_method(self, mth, lang, args):
89
+ from distutils.ccompiler import CompileError
90
+ from distutils.errors import DistutilsExecError
91
+ save_compiler = self.compiler
92
+ if lang in ['f77', 'f90']:
93
+ self.compiler = self.fcompiler
94
+ if self.compiler is None:
95
+ raise CompileError('%s compiler is not set' % (lang,))
96
+ try:
97
+ ret = mth(*((self,)+args))
98
+ except (DistutilsExecError, CompileError) as e:
99
+ self.compiler = save_compiler
100
+ raise CompileError from e
101
+ self.compiler = save_compiler
102
+ return ret
103
+
104
+ def _compile (self, body, headers, include_dirs, lang):
105
+ src, obj = self._wrap_method(old_config._compile, lang,
106
+ (body, headers, include_dirs, lang))
107
+ # _compile in unixcompiler.py sometimes creates .d dependency files.
108
+ # Clean them up.
109
+ self.temp_files.append(obj + '.d')
110
+ return src, obj
111
+
112
+ def _link (self, body,
113
+ headers, include_dirs,
114
+ libraries, library_dirs, lang):
115
+ if self.compiler.compiler_type=='msvc':
116
+ libraries = (libraries or [])[:]
117
+ library_dirs = (library_dirs or [])[:]
118
+ if lang in ['f77', 'f90']:
119
+ lang = 'c' # always use system linker when using MSVC compiler
120
+ if self.fcompiler:
121
+ for d in self.fcompiler.library_dirs or []:
122
+ # correct path when compiling in Cygwin but with
123
+ # normal Win Python
124
+ if d.startswith('/usr/lib'):
125
+ try:
126
+ d = subprocess.check_output(['cygpath',
127
+ '-w', d])
128
+ except (OSError, subprocess.CalledProcessError):
129
+ pass
130
+ else:
131
+ d = filepath_from_subprocess_output(d)
132
+ library_dirs.append(d)
133
+ for libname in self.fcompiler.libraries or []:
134
+ if libname not in libraries:
135
+ libraries.append(libname)
136
+ for libname in libraries:
137
+ if libname.startswith('msvc'): continue
138
+ fileexists = False
139
+ for libdir in library_dirs or []:
140
+ libfile = os.path.join(libdir, '%s.lib' % (libname))
141
+ if os.path.isfile(libfile):
142
+ fileexists = True
143
+ break
144
+ if fileexists: continue
145
+ # make g77-compiled static libs available to MSVC
146
+ fileexists = False
147
+ for libdir in library_dirs:
148
+ libfile = os.path.join(libdir, 'lib%s.a' % (libname))
149
+ if os.path.isfile(libfile):
+ # copy libname.a file to name.lib so that MSVC linker
+ # can find it
+ libfile2 = os.path.join(libdir, '%s.lib' % (libname))
+ copy_file(libfile, libfile2)
+ self.temp_files.append(libfile2)
+ fileexists = True
+ break
+ if fileexists: continue
+ log.warn('could not find library %r in directories %s' \
+ % (libname, library_dirs))
+ elif self.compiler.compiler_type == 'mingw32':
+ generate_manifest(self)
+ return self._wrap_method(old_config._link, lang,
+ (body, headers, include_dirs,
+ libraries, library_dirs, lang))
+
+ def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
+ self._check_compiler()
+ return self.try_compile(
+ "/* we need a dummy line to make distutils happy */",
+ [header], include_dirs)
+
+ def check_decl(self, symbol,
+ headers=None, include_dirs=None):
+ self._check_compiler()
+ body = textwrap.dedent("""
+ int main(void)
+ {
+ #ifndef %s
+ (void) %s;
+ #endif
+ ;
+ return 0;
+ }""") % (symbol, symbol)
+
+ return self.try_compile(body, headers, include_dirs)
+
+ def check_macro_true(self, symbol,
+ headers=None, include_dirs=None):
+ self._check_compiler()
+ body = textwrap.dedent("""
+ int main(void)
+ {
+ #if %s
+ #else
+ #error false or undefined macro
+ #endif
+ ;
+ return 0;
+ }""") % (symbol,)
+
+ return self.try_compile(body, headers, include_dirs)
+
+ def check_type(self, type_name, headers=None, include_dirs=None,
+ library_dirs=None):
+ """Check type availability. Return True if the type can be compiled,
+ False otherwise"""
+ self._check_compiler()
+
+ # First check the type can be compiled
+ body = textwrap.dedent(r"""
+ int main(void) {
+ if ((%(name)s *) 0)
+ return 0;
+ if (sizeof (%(name)s))
+ return 0;
+ }
+ """) % {'name': type_name}
+
+ st = False
+ try:
+ try:
+ self._compile(body % {'type': type_name},
+ headers, include_dirs, 'c')
+ st = True
+ except distutils.errors.CompileError:
+ st = False
+ finally:
+ self._clean()
+
+ return st
+
+ def check_type_size(self, type_name, headers=None, include_dirs=None, library_dirs=None, expected=None):
+ """Check size of a given type."""
+ self._check_compiler()
+
+ # First check the type can be compiled
+ body = textwrap.dedent(r"""
+ typedef %(type)s npy_check_sizeof_type;
+ int main (void)
+ {
+ static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)];
+ test_array [0] = 0
+
+ ;
+ return 0;
+ }
+ """)
+ self._compile(body % {'type': type_name},
+ headers, include_dirs, 'c')
+ self._clean()
+
+ if expected:
+ body = textwrap.dedent(r"""
+ typedef %(type)s npy_check_sizeof_type;
+ int main (void)
+ {
+ static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)];
+ test_array [0] = 0
+
+ ;
+ return 0;
+ }
+ """)
+ for size in expected:
+ try:
+ self._compile(body % {'type': type_name, 'size': size},
+ headers, include_dirs, 'c')
+ self._clean()
+ return size
+ except CompileError:
+ pass
+
+ # this fails to *compile* if size > sizeof(type)
+ body = textwrap.dedent(r"""
+ typedef %(type)s npy_check_sizeof_type;
+ int main (void)
+ {
+ static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)];
+ test_array [0] = 0
+
+ ;
+ return 0;
+ }
+ """)
+
+ # The principle is simple: we first find low and high bounds of size
+ # for the type, where low/high are looked up on a log scale. Then, we
+ # do a binary search to find the exact size between low and high
+ low = 0
+ mid = 0
+ while True:
+ try:
+ self._compile(body % {'type': type_name, 'size': mid},
+ headers, include_dirs, 'c')
+ self._clean()
+ break
+ except CompileError:
+ #log.info("failure to test for bound %d" % mid)
+ low = mid + 1
+ mid = 2 * mid + 1
+
+ high = mid
+ # Binary search:
+ while low != high:
+ mid = (high - low) // 2 + low
+ try:
+ self._compile(body % {'type': type_name, 'size': mid},
+ headers, include_dirs, 'c')
+ self._clean()
+ high = mid
+ except CompileError:
+ low = mid + 1
+ return low
+
+ def check_func(self, func,
+ headers=None, include_dirs=None,
+ libraries=None, library_dirs=None,
+ decl=False, call=False, call_args=None):
+ # clean up distutils's config a bit: add void to main(), and
+ # return a value.
+ self._check_compiler()
+ body = []
+ if decl:
+ if type(decl) == str:
+ body.append(decl)
+ else:
+ body.append("int %s (void);" % func)
+ # Handle MSVC intrinsics: force MS compiler to make a function call.
+ # Useful to test for some functions when built with optimization on, to
+ # avoid build error because the intrinsic and our 'fake' test
+ # declaration do not match.
+ body.append("#ifdef _MSC_VER")
+ body.append("#pragma function(%s)" % func)
+ body.append("#endif")
+ body.append("int main (void) {")
+ if call:
+ if call_args is None:
+ call_args = ''
+ body.append(" %s(%s);" % (func, call_args))
+ else:
+ body.append(" %s;" % func)
+ body.append(" return 0;")
+ body.append("}")
+ body = '\n'.join(body) + "\n"
+
+ return self.try_link(body, headers, include_dirs,
+ libraries, library_dirs)
+
+ def check_funcs_once(self, funcs,
+ headers=None, include_dirs=None,
+ libraries=None, library_dirs=None,
+ decl=False, call=False, call_args=None):
+ """Check a list of functions at once.
+
+ This is useful to speed up things, since all the functions in the funcs
+ list will be put in one compilation unit.
+
+ Arguments
+ ---------
+ funcs : seq
+ list of functions to test
+ include_dirs : seq
+ list of header paths
+ libraries : seq
+ list of libraries to link the code snippet to
+ library_dirs : seq
+ list of library paths
+ decl : dict
+ for every (key, value), the declaration in the value will be
+ used for function in key. If a function is not in the
+ dictionary, no declaration will be used.
+ call : dict
+ for every item (f, value), if the value is True, a call will be
+ done to the function f.
+ """
+ self._check_compiler()
+ body = []
+ if decl:
+ for f, v in decl.items():
+ if v:
+ body.append("int %s (void);" % f)
+
+ # Handle MS intrinsics. See check_func for more info.
+ body.append("#ifdef _MSC_VER")
+ for func in funcs:
+ body.append("#pragma function(%s)" % func)
+ body.append("#endif")
+
+ body.append("int main (void) {")
+ if call:
+ for f in funcs:
+ if f in call and call[f]:
+ if not (call_args and f in call_args and call_args[f]):
+ args = ''
+ else:
+ args = call_args[f]
+ body.append(" %s(%s);" % (f, args))
+ else:
+ body.append(" %s;" % f)
+ else:
+ for f in funcs:
+ body.append(" %s;" % f)
+ body.append(" return 0;")
+ body.append("}")
+ body = '\n'.join(body) + "\n"
+
+ return self.try_link(body, headers, include_dirs,
+ libraries, library_dirs)
+
+ def check_inline(self):
+ """Return the inline keyword recognized by the compiler, empty string
+ otherwise."""
+ return check_inline(self)
+
+ def check_restrict(self):
+ """Return the restrict keyword recognized by the compiler, empty string
+ otherwise."""
+ return check_restrict(self)
+
+ def check_compiler_gcc(self):
+ """Return True if the C compiler is gcc"""
+ return check_compiler_gcc(self)
+
+ def check_gcc_function_attribute(self, attribute, name):
+ return check_gcc_function_attribute(self, attribute, name)
+
+ def check_gcc_function_attribute_with_intrinsics(self, attribute, name,
+ code, include):
+ return check_gcc_function_attribute_with_intrinsics(self, attribute,
+ name, code, include)
+
+ def check_gcc_variable_attribute(self, attribute):
+ return check_gcc_variable_attribute(self, attribute)
+
+ def check_gcc_version_at_least(self, major, minor=0, patchlevel=0):
+ """Return True if the GCC version is greater than or equal to the
+ specified version."""
+ return check_gcc_version_at_least(self, major, minor, patchlevel)
+
+ def get_output(self, body, headers=None, include_dirs=None,
+ libraries=None, library_dirs=None,
+ lang="c", use_tee=None):
+ """Try to compile, link to an executable, and run a program
+ built from 'body' and 'headers'. Returns the exit status code
+ of the program and its output.
+ """
+ # 2008-11-16, RemoveMe
+ warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n"
+ "Usage of get_output is deprecated: please do not \n"
+ "use it anymore, and avoid configuration checks \n"
+ "involving running executable on the target machine.\n"
+ "+++++++++++++++++++++++++++++++++++++++++++++++++\n",
+ DeprecationWarning, stacklevel=2)
+ self._check_compiler()
+ exitcode, output = 255, ''
+ try:
+ grabber = GrabStdout()
+ try:
+ src, obj, exe = self._link(body, headers, include_dirs,
+ libraries, library_dirs, lang)
+ grabber.restore()
+ except Exception:
+ output = grabber.data
+ grabber.restore()
+ raise
+ exe = os.path.join('.', exe)
+ try:
+ # specify cwd arg for consistency with
+ # historic usage pattern of exec_command()
+ # also, note that exe appears to be a string,
+ # which exec_command() handled, but we now
+ # use a list for check_output() -- this assumes
+ # that exe is always a single command
+ output = subprocess.check_output([exe], cwd='.')
+ except subprocess.CalledProcessError as exc:
+ exitstatus = exc.returncode
+ output = ''
+ except OSError:
+ # preserve the EnvironmentError exit status
+ # used historically in exec_command()
+ exitstatus = 127
+ output = ''
+ else:
+ output = filepath_from_subprocess_output(output)
+ if hasattr(os, 'WEXITSTATUS'):
+ exitcode = os.WEXITSTATUS(exitstatus)
+ if os.WIFSIGNALED(exitstatus):
+ sig = os.WTERMSIG(exitstatus)
+ log.error('subprocess exited with signal %d' % (sig,))
+ if sig == signal.SIGINT:
+ # control-C
+ raise KeyboardInterrupt
+ else:
+ exitcode = exitstatus
+ log.info("success!")
+ except (CompileError, LinkError):
+ log.info("failure.")
+ self._clean()
+ return exitcode, output
+
+ class GrabStdout:
+
+ def __init__(self):
+ self.sys_stdout = sys.stdout
+ self.data = ''
+ sys.stdout = self
+
+ def write (self, data):
+ self.sys_stdout.write(data)
+ self.data += data
+
+ def flush (self):
+ self.sys_stdout.flush()
+
+ def restore(self):
+ sys.stdout = self.sys_stdout
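
The check_* helpers above (check_header, check_funcs_once, check_type_size and friends) are normally reached through a Configuration object in a setup.py. A minimal sketch under that assumption; the package name 'mypkg' and the specific probes are hypothetical, and Configuration.get_config_cmd() is assumed to hand back the config command defined in this file:

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)

    # Assumed: get_config_cmd() returns an instance of the config command above.
    config_cmd = config.get_config_cmd()

    if config_cmd.check_header('stdint.h'):
        print('stdint.h found')

    # Probe several libm functions in a single compile/link pass
    # (one compilation unit, as check_funcs_once documents).
    funcs = ['sin', 'cos']
    if config_cmd.check_funcs_once(funcs,
                                   libraries=['m'],
                                   decl=dict((f, True) for f in funcs),
                                   call=dict((f, True) for f in funcs)):
        print('sin/cos usable from libm')

    # check_type_size binary-searches sizeof(long double) at compile time.
    print('sizeof(long double) =', config_cmd.check_type_size('long double'))

    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)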
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/config_compiler.py ADDED
@@ -0,0 +1,126 @@
+ from distutils.core import Command
+ from numpy.distutils import log
+
+ #XXX: Linker flags
+
+ def show_fortran_compilers(_cache=None):
+ # Using cache to prevent infinite recursion.
+ if _cache:
+ return
+ elif _cache is None:
+ _cache = []
+ _cache.append(1)
+ from numpy.distutils.fcompiler import show_fcompilers
+ import distutils.core
+ dist = distutils.core._setup_distribution
+ show_fcompilers(dist)
+
+ class config_fc(Command):
+ """ Distutils command to hold user specified options
+ to Fortran compilers.
+
+ config_fc command is used by the FCompiler.customize() method.
+ """
+
+ description = "specify Fortran 77/Fortran 90 compiler information"
+
+ user_options = [
+ ('fcompiler=', None, "specify Fortran compiler type"),
+ ('f77exec=', None, "specify F77 compiler command"),
+ ('f90exec=', None, "specify F90 compiler command"),
+ ('f77flags=', None, "specify F77 compiler flags"),
+ ('f90flags=', None, "specify F90 compiler flags"),
+ ('opt=', None, "specify optimization flags"),
+ ('arch=', None, "specify architecture specific optimization flags"),
+ ('debug', 'g', "compile with debugging information"),
+ ('noopt', None, "compile without optimization"),
+ ('noarch', None, "compile without arch-dependent optimization"),
+ ]
+
+ help_options = [
+ ('help-fcompiler', None, "list available Fortran compilers",
+ show_fortran_compilers),
+ ]
+
+ boolean_options = ['debug', 'noopt', 'noarch']
+
+ def initialize_options(self):
+ self.fcompiler = None
+ self.f77exec = None
+ self.f90exec = None
+ self.f77flags = None
+ self.f90flags = None
+ self.opt = None
+ self.arch = None
+ self.debug = None
+ self.noopt = None
+ self.noarch = None
+
+ def finalize_options(self):
+ log.info('unifing config_fc, config, build_clib, build_ext, build commands --fcompiler options')
+ build_clib = self.get_finalized_command('build_clib')
+ build_ext = self.get_finalized_command('build_ext')
+ config = self.get_finalized_command('config')
+ build = self.get_finalized_command('build')
+ cmd_list = [self, config, build_clib, build_ext, build]
+ for a in ['fcompiler']:
+ l = []
+ for c in cmd_list:
+ v = getattr(c, a)
+ if v is not None:
+ if not isinstance(v, str): v = v.compiler_type
+ if v not in l: l.append(v)
+ if not l: v1 = None
+ else: v1 = l[0]
+ if len(l)>1:
+ log.warn(' commands have different --%s options: %s'\
+ ', using first in list as default' % (a, l))
+ if v1:
+ for c in cmd_list:
+ if getattr(c, a) is None: setattr(c, a, v1)
+
+ def run(self):
+ # Do nothing.
+ return
+
+ class config_cc(Command):
+ """ Distutils command to hold user specified options
+ to C/C++ compilers.
+ """
+
+ description = "specify C/C++ compiler information"
+
+ user_options = [
+ ('compiler=', None, "specify C/C++ compiler type"),
+ ]
+
+ def initialize_options(self):
+ self.compiler = None
+
+ def finalize_options(self):
+ log.info('unifing config_cc, config, build_clib, build_ext, build commands --compiler options')
+ build_clib = self.get_finalized_command('build_clib')
+ build_ext = self.get_finalized_command('build_ext')
+ config = self.get_finalized_command('config')
+ build = self.get_finalized_command('build')
+ cmd_list = [self, config, build_clib, build_ext, build]
+ for a in ['compiler']:
+ l = []
+ for c in cmd_list:
+ v = getattr(c, a)
+ if v is not None:
+ if not isinstance(v, str): v = v.compiler_type
+ if v not in l: l.append(v)
+ if not l: v1 = None
+ else: v1 = l[0]
+ if len(l)>1:
+ log.warn(' commands have different --%s options: %s'\
+ ', using first in list as default' % (a, l))
+ if v1:
+ for c in cmd_list:
+ if getattr(c, a) is None: setattr(c, a, v1)
+ return
+
+ def run(self):
+ # Do nothing.
+ return
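
config_fc and config_cc above only collect options and propagate them in finalize_options(); a minimal sketch of how they are typically exercised, assuming numpy.distutils.core.setup registers these commands (the package 'mypkg' and its Fortran sources are hypothetical):

# setup.py for a hypothetical package with a Fortran extension
from numpy.distutils.core import setup, Extension

flib = Extension('mypkg.flib', sources=['mypkg/flib.pyf', 'mypkg/flib.f90'])

setup(name='mypkg', version='0.1', ext_modules=[flib])

# Typical invocations:
#   python setup.py config_fc --fcompiler=gnu95 --noopt build
#   python setup.py config_fc --help-fcompiler   # list available Fortran compilers
# finalize_options() then copies the agreed --fcompiler value onto the
# config, build_clib, build_ext and build commands so they all match.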
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/develop.py ADDED
@@ -0,0 +1,15 @@
+ """ Override the develop command from setuptools so we can ensure that our
+ generated files (from build_src or build_scripts) are properly converted to real
+ files with filenames.
+
+ """
+ from setuptools.command.develop import develop as old_develop
+
+ class develop(old_develop):
+ __doc__ = old_develop.__doc__
+ def install_for_development(self):
+ # Build sources in-place, too.
+ self.reinitialize_command('build_src', inplace=1)
+ # Make sure scripts are built.
+ self.run_command('build_scripts')
+ old_develop.install_for_development(self)
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/egg_info.py ADDED
@@ -0,0 +1,25 @@
+ import sys
+
+ from setuptools.command.egg_info import egg_info as _egg_info
+
+ class egg_info(_egg_info):
+ def run(self):
+ if 'sdist' in sys.argv:
+ import warnings
+ import textwrap
+ msg = textwrap.dedent("""
+ `build_src` is being run, this may lead to missing
+ files in your sdist! You want to use distutils.sdist
+ instead of the setuptools version:
+
+ from distutils.command.sdist import sdist
+ cmdclass={'sdist': sdist}"
+
+ See numpy's setup.py or gh-7131 for details.""")
+ warnings.warn(msg, UserWarning, stacklevel=2)
+
+ # We need to ensure that build_src has been executed in order to give
+ # setuptools' egg_info command real filenames instead of functions which
+ # generate files.
+ self.run_command("build_src")
+ _egg_info.run(self)
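
The warning emitted above already names the workaround; a minimal sketch of that cmdclass override (package metadata hypothetical):

# setup.py: prefer distutils' sdist over the setuptools one, as the
# warning above suggests.
from distutils.command.sdist import sdist
from numpy.distutils.core import setup

setup(name='mypkg',
      version='0.1',
      cmdclass={'sdist': sdist})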
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install.py ADDED
@@ -0,0 +1,79 @@
+ import sys
+ if 'setuptools' in sys.modules:
+ import setuptools.command.install as old_install_mod
+ have_setuptools = True
+ else:
+ import distutils.command.install as old_install_mod
+ have_setuptools = False
+ from distutils.file_util import write_file
+
+ old_install = old_install_mod.install
+
+ class install(old_install):
+
+ # Always run install_clib - the command is cheap, so no need to bypass it;
+ # but it's not run by setuptools -- so it's run again in install_data
+ sub_commands = old_install.sub_commands + [
+ ('install_clib', lambda x: True)
+ ]
+
+ def finalize_options (self):
+ old_install.finalize_options(self)
+ self.install_lib = self.install_libbase
+
+ def setuptools_run(self):
+ """ The setuptools version of the .run() method.
+
+ We must pull in the entire code so we can override the level used in the
+ _getframe() call since we wrap this call by one more level.
+ """
+ from distutils.command.install import install as distutils_install
+
+ # Explicit request for old-style install? Just do it
+ if self.old_and_unmanageable or self.single_version_externally_managed:
+ return distutils_install.run(self)
+
+ # Attempt to detect whether we were called from setup() or by another
+ # command. If we were called by setup(), our caller will be the
+ # 'run_command' method in 'distutils.dist', and *its* caller will be
+ # the 'run_commands' method. If we were called any other way, our
+ # immediate caller *might* be 'run_command', but it won't have been
+ # called by 'run_commands'. This is slightly kludgy, but seems to
+ # work.
+ #
+ caller = sys._getframe(3)
+ caller_module = caller.f_globals.get('__name__', '')
+ caller_name = caller.f_code.co_name
+
+ if caller_module != 'distutils.dist' or caller_name!='run_commands':
+ # We weren't called from the command line or setup(), so we
+ # should run in backward-compatibility mode to support bdist_*
+ # commands.
+ distutils_install.run(self)
+ else:
+ self.do_egg_install()
+
+ def run(self):
+ if not have_setuptools:
+ r = old_install.run(self)
+ else:
+ r = self.setuptools_run()
+ if self.record:
+ # bdist_rpm fails when INSTALLED_FILES contains
+ # paths with spaces. Such paths must be enclosed
+ # with double-quotes.
+ with open(self.record) as f:
+ lines = []
+ need_rewrite = False
+ for l in f:
+ l = l.rstrip()
+ if ' ' in l:
+ need_rewrite = True
+ l = '"%s"' % (l)
+ lines.append(l)
+ if need_rewrite:
+ self.execute(write_file,
+ (self.record, lines),
+ "re-writing list of installed files to '%s'" %
+ self.record)
+ return r
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_clib.py ADDED
@@ -0,0 +1,40 @@
+ import os
+ from distutils.core import Command
+ from distutils.ccompiler import new_compiler
+ from numpy.distutils.misc_util import get_cmd
+
+ class install_clib(Command):
+ description = "Command to install installable C libraries"
+
+ user_options = []
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.outfiles = []
+
+ def finalize_options(self):
+ self.set_undefined_options('install', ('install_lib', 'install_dir'))
+
+ def run (self):
+ build_clib_cmd = get_cmd("build_clib")
+ if not build_clib_cmd.build_clib:
+ # can happen if the user specified `--skip-build`
+ build_clib_cmd.finalize_options()
+ build_dir = build_clib_cmd.build_clib
+
+ # We need the compiler to get the library name -> filename association
+ if not build_clib_cmd.compiler:
+ compiler = new_compiler(compiler=None)
+ compiler.customize(self.distribution)
+ else:
+ compiler = build_clib_cmd.compiler
+
+ for l in self.distribution.installed_libraries:
+ target_dir = os.path.join(self.install_dir, l.target_dir)
+ name = compiler.library_filename(l.name)
+ source = os.path.join(build_dir, name)
+ self.mkpath(target_dir)
+ self.outfiles.append(self.copy_file(source, target_dir)[0])
+
+ def get_outputs(self):
+ return self.outfiles
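
install_clib copies whatever ends up in distribution.installed_libraries; a minimal sketch of how such a library might be registered, assuming Configuration.add_installed_library is used (all names and paths hypothetical):

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    # Builds a static library and records it in installed_libraries,
    # which the install_clib command above iterates over.
    config.add_installed_library('myclib',
                                 sources=['mypkg/src/myclib.c'],
                                 install_dir='mypkg/lib')
    return config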
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_data.py ADDED
@@ -0,0 +1,24 @@
+ import sys
+ have_setuptools = ('setuptools' in sys.modules)
+
+ from distutils.command.install_data import install_data as old_install_data
+
+ #data installer with improved intelligence over distutils
+ #data files are copied into the project directory instead
+ #of willy-nilly
+ class install_data (old_install_data):
+
+ def run(self):
+ old_install_data.run(self)
+
+ if have_setuptools:
+ # Run install_clib again, since setuptools does not run sub-commands
+ # of install automatically
+ self.run_command('install_clib')
+
+ def finalize_options (self):
+ self.set_undefined_options('install',
+ ('install_lib', 'install_dir'),
+ ('root', 'root'),
+ ('force', 'force'),
+ )
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/install_headers.py ADDED
@@ -0,0 +1,25 @@
+ import os
+ from distutils.command.install_headers import install_headers as old_install_headers
+
+ class install_headers (old_install_headers):
+
+ def run (self):
+ headers = self.distribution.headers
+ if not headers:
+ return
+
+ prefix = os.path.dirname(self.install_dir)
+ for header in headers:
+ if isinstance(header, tuple):
+ # Kind of a hack, but I don't know where else to change this...
+ if header[0] == 'numpy.core':
+ header = ('numpy', header[1])
+ if os.path.splitext(header[1])[1] == '.inc':
+ continue
+ d = os.path.join(*([prefix]+header[0].split('.')))
+ header = header[1]
+ else:
+ d = self.install_dir
+ self.mkpath(d)
+ (out, _) = self.copy_file(header, d)
+ self.outfiles.append(out)
env-llmeval/lib/python3.10/site-packages/numpy/distutils/command/sdist.py ADDED
@@ -0,0 +1,27 @@
+ import sys
+ if 'setuptools' in sys.modules:
+ from setuptools.command.sdist import sdist as old_sdist
+ else:
+ from distutils.command.sdist import sdist as old_sdist
+
+ from numpy.distutils.misc_util import get_data_files
+
+ class sdist(old_sdist):
+
+ def add_defaults (self):
+ old_sdist.add_defaults(self)
+
+ dist = self.distribution
+
+ if dist.has_data_files():
+ for data in dist.data_files:
+ self.filelist.extend(get_data_files(data))
+
+ if dist.has_headers():
+ headers = []
+ for h in dist.headers:
+ if isinstance(h, str): headers.append(h)
+ else: headers.append(h[1])
+ self.filelist.extend(headers)
+
+ return
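
The add_defaults() override above folds dist.data_files through get_data_files(); a minimal sketch of data files declared in the numpy.distutils style so they land in the sdist (package name and paths hypothetical):

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    # (target directory, glob pattern) tuple; get_data_files() expands the
    # pattern when the sdist command above builds its file list.
    config.add_data_files(('mypkg/data', 'mypkg/data/*.csv'))
    return config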
env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (28.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/absoft.cpython-310.pyc ADDED
Binary file (4.42 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/arm.cpython-310.pyc ADDED
Binary file (2.74 kB). View file
 
env-llmeval/lib/python3.10/site-packages/numpy/distutils/fcompiler/__pycache__/compaq.cpython-310.pyc ADDED
Binary file (4.16 kB). View file