diff --git a/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/exp_avg_sq.pt b/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/exp_avg_sq.pt
new file mode 100644
index 0000000000000000000000000000000000000000..48d28a69360430b51c6bb97a6bc7cdd134f4af47
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/exp_avg_sq.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99a9b8f17bddbd04cf1feea10887beab07ac7c898b801e1356a578de57655bc3
+size 33555627
diff --git a/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/fp32.pt b/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0a440e7d5230c938cbafbacc1674d4c778b4d5e6
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/11.mlp.dense_4h_to_h.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22f6a1b974ff327748f2181a5ec65649f99fd639edc3738b72dd82b2853cac8c
+size 33555533
diff --git a/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt b/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt
new file mode 100644
index 0000000000000000000000000000000000000000..9b77d778779e61abd30563190718fe279985b50e
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86a7ad8b22be2381c48aab135903df8a101b90a2114d890dc4fbdbf2e47d3b5c
+size 33555627
diff --git a/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/fp32.pt b/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a233d2a5a8c088af2979008735c52ceaddd9d902
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/14.mlp.dense_h_to_4h_swiglu.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a47c6a2ae99c85980c8cec1084022489fe521047b8979c41065b89c620637cec
+size 33555533
diff --git a/ckpts/universal/global_step60/zero/17.post_attention_layernorm.weight/fp32.pt b/ckpts/universal/global_step60/zero/17.post_attention_layernorm.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a9e58fbb70f444772102bdfda8e427475c0e3555
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/17.post_attention_layernorm.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6971754b8ccc71acdf6b144b95e3b20e678c24fdbfcf76bcff9318b733212361
+size 9293
diff --git a/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg.pt b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg.pt
new file mode 100644
index 0000000000000000000000000000000000000000..6246c87ad7e94bba62cd190f7b8b60e0cb4c2e5e
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d06bbbca46d1d15bfe3723a1d4945804c7c4787dcb76bf75dbbb345c2d9e3fed
+size 9372
diff --git a/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg_sq.pt b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg_sq.pt
new file mode 100644
index 0000000000000000000000000000000000000000..39698b50da5e304bb7fe30f1ba9e2fb46ef0b910
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/exp_avg_sq.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39a3adef985cc4688beaefc56a905ee69d196a752d806127ccaa6a045e27e274
+size 9387
diff --git a/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/fp32.pt b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..26679f63523f2ea19975173857729aba28448619
--- /dev/null
+++ b/ckpts/universal/global_step60/zero/4.post_attention_layernorm.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0c1f9143d8bf3298026c06a9dfcda5d507858d6ccbbfcb15380120406e83be3
+size 9293
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/__init__.py b/venv/lib/python3.10/site-packages/numpy/f2py/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e583250f7060aaa909d43b28dcb7c0021d0175d4
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/__init__.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python3
+"""Fortran to Python Interface Generator.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the terms
+of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+__all__ = ['run_main', 'compile', 'get_include']
+
+import sys
+import subprocess
+import os
+import warnings
+
+from numpy.exceptions import VisibleDeprecationWarning
+from . import f2py2e
+from . import diagnose
+
+run_main = f2py2e.run_main
+main = f2py2e.main
+
+
+def compile(source,
+ modulename='untitled',
+ extra_args='',
+ verbose=True,
+ source_fn=None,
+ extension='.f',
+ full_output=False
+ ):
+ """
+ Build extension module from a Fortran 77 source string with f2py.
+
+ Parameters
+ ----------
+ source : str or bytes
+ Fortran source of module / subroutine to compile
+
+ .. versionchanged:: 1.16.0
+ Accept str as well as bytes
+
+ modulename : str, optional
+ The name of the compiled python module
+ extra_args : str or list, optional
+ Additional parameters passed to f2py
+
+ .. versionchanged:: 1.16.0
+ A list of args may also be provided.
+
+ verbose : bool, optional
+ Print f2py output to screen
+ source_fn : str, optional
+ Name of the file where the fortran source is written.
+ The default is to use a temporary file with the extension
+ provided by the ``extension`` parameter
+ extension : ``{'.f', '.f90'}``, optional
+ Filename extension if `source_fn` is not provided.
+ The extension tells which fortran standard is used.
+ The default is ``.f``, which implies F77 standard.
+
+ .. versionadded:: 1.11.0
+
+ full_output : bool, optional
+ If True, return a `subprocess.CompletedProcess` containing
+ the stdout and stderr of the compile process, instead of just
+ the status code.
+
+ .. versionadded:: 1.20.0
+
+
+ Returns
+ -------
+ result : int or `subprocess.CompletedProcess`
+ 0 on success, or a `subprocess.CompletedProcess` if
+ ``full_output=True``
+
+ Examples
+ --------
+ .. literalinclude:: ../../source/f2py/code/results/compile_session.dat
+ :language: python
+
+ """
+ import tempfile
+ import shlex
+
+ if source_fn is None:
+ f, fname = tempfile.mkstemp(suffix=extension)
+ # f is a file descriptor so need to close it
+ # carefully -- not with .close() directly
+ os.close(f)
+ else:
+ fname = source_fn
+
+ if not isinstance(source, str):
+ source = str(source, 'utf-8')
+ try:
+ with open(fname, 'w') as f:
+ f.write(source)
+
+ args = ['-c', '-m', modulename, f.name]
+
+ if isinstance(extra_args, str):
+ is_posix = (os.name == 'posix')
+ extra_args = shlex.split(extra_args, posix=is_posix)
+
+ args.extend(extra_args)
+
+ c = [sys.executable,
+ '-c',
+ 'import numpy.f2py as f2py2e;f2py2e.main()'] + args
+ try:
+ cp = subprocess.run(c, capture_output=True)
+ except OSError:
+ # preserve historic status code used by exec_command()
+ cp = subprocess.CompletedProcess(c, 127, stdout=b'', stderr=b'')
+ else:
+ if verbose:
+ print(cp.stdout.decode())
+ finally:
+ if source_fn is None:
+ os.remove(fname)
+
+ if full_output:
+ return cp
+ else:
+ return cp.returncode
+
+
+def get_include():
+ """
+ Return the directory that contains the ``fortranobject.c`` and ``.h`` files.
+
+ .. note::
+
+ This function is not needed when building an extension with
+ `numpy.distutils` directly from ``.f`` and/or ``.pyf`` files
+ in one go.
+
+ Python extension modules built with f2py-generated code need to use
+ ``fortranobject.c`` as a source file, and include the ``fortranobject.h``
+ header. This function can be used to obtain the directory containing
+ both of these files.
+
+ Returns
+ -------
+ include_path : str
+ Absolute path to the directory containing ``fortranobject.c`` and
+ ``fortranobject.h``.
+
+ Notes
+ -----
+ .. versionadded:: 1.21.1
+
+ Unless the build system you are using has specific support for f2py,
+ building a Python extension using a ``.pyf`` signature file is a two-step
+ process. For a module ``mymod``:
+
+ * Step 1: run ``python -m numpy.f2py mymod.pyf --quiet``. This
+ generates ``_mymodmodule.c`` and (if needed)
+      ``_mymod-f2pywrappers.f`` files next to ``mymod.pyf``.
+ * Step 2: build your Python extension module. This requires the
+ following source files:
+
+ * ``_mymodmodule.c``
+ * ``_mymod-f2pywrappers.f`` (if it was generated in Step 1)
+ * ``fortranobject.c``
+
+ See Also
+ --------
+ numpy.get_include : function that returns the numpy include directory
+
+ """
+ return os.path.join(os.path.dirname(__file__), 'src')
+
+
+def __getattr__(attr):
+
+ # Avoid importing things that aren't needed for building
+ # which might import the main numpy module
+ if attr == "test":
+ from numpy._pytesttester import PytestTester
+ test = PytestTester(__name__)
+ return test
+
+ else:
+ raise AttributeError("module {!r} has no attribute "
+ "{!r}".format(__name__, attr))
+
+
+def __dir__():
+ return list(globals().keys() | {"test"})
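
Not part of the vendored file above: a minimal usage sketch of the `compile()` and `get_include()` helpers it defines. The module name `add_one` and the Fortran snippet are made up for illustration, and the call only succeeds if a Fortran compiler is available on PATH.

    # Usage sketch for numpy.f2py.compile()/get_include(); 'add_one' is a
    # hypothetical module name and a Fortran compiler is assumed to be installed.
    import numpy.f2py

    fsource = '''
          subroutine addone(x, y)
          double precision x, y
    cf2py intent(in) x
    cf2py intent(out) y
          y = x + 1.0d0
          end
    '''
    status = numpy.f2py.compile(fsource, modulename='add_one', verbose=False)
    print(status)                      # 0 on success
    print(numpy.f2py.get_include())    # directory holding fortranobject.c/.h
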
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/__init__.pyi b/venv/lib/python3.10/site-packages/numpy/f2py/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..81b6a24f39ec37ba45055d8fefa819e816a61b8d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/__init__.pyi
@@ -0,0 +1,42 @@
+import os
+import subprocess
+from collections.abc import Iterable
+from typing import Literal as L, Any, overload, TypedDict
+
+from numpy._pytesttester import PytestTester
+
+class _F2PyDictBase(TypedDict):
+ csrc: list[str]
+ h: list[str]
+
+class _F2PyDict(_F2PyDictBase, total=False):
+ fsrc: list[str]
+ ltx: list[str]
+
+__all__: list[str]
+test: PytestTester
+
+def run_main(comline_list: Iterable[str]) -> dict[str, _F2PyDict]: ...
+
+@overload
+def compile( # type: ignore[misc]
+ source: str | bytes,
+ modulename: str = ...,
+ extra_args: str | list[str] = ...,
+ verbose: bool = ...,
+ source_fn: None | str | bytes | os.PathLike[Any] = ...,
+ extension: L[".f", ".f90"] = ...,
+ full_output: L[False] = ...,
+) -> int: ...
+@overload
+def compile(
+ source: str | bytes,
+ modulename: str = ...,
+ extra_args: str | list[str] = ...,
+ verbose: bool = ...,
+ source_fn: None | str | bytes | os.PathLike[Any] = ...,
+ extension: L[".f", ".f90"] = ...,
+ full_output: L[True] = ...,
+) -> subprocess.CompletedProcess[bytes]: ...
+
+def get_include() -> str: ...
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/__main__.py b/venv/lib/python3.10/site-packages/numpy/f2py/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..936a753a2796896667aa782277be41b40af061d3
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/__main__.py
@@ -0,0 +1,5 @@
+# See:
+# https://web.archive.org/web/20140822061353/http://cens.ioc.ee/projects/f2py2e
+from numpy.f2py.f2py2e import main
+
+main()
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/__version__.py b/venv/lib/python3.10/site-packages/numpy/f2py/__version__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e20d7c1dbb38807d248ff886e30425e7ff597299
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/__version__.py
@@ -0,0 +1 @@
+from numpy.version import version
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__init__.py b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e91393c14be39b20d5e94e262e91a05052681318
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__init__.py
@@ -0,0 +1,9 @@
+def f2py_build_generator(name):
+ if name == "meson":
+ from ._meson import MesonBackend
+ return MesonBackend
+ elif name == "distutils":
+ from ._distutils import DistutilsBackend
+ return DistutilsBackend
+ else:
+ raise ValueError(f"Unknown backend: {name}")
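
A short sketch (not part of the diff) of how this selector is used: f2py resolves its backend choice (the ``--backend`` command-line option) to one of the Backend classes through `f2py_build_generator`.

    # Sketch of backend selection; only the meson path is exercised here because
    # importing the distutils backend would pull in numpy.distutils.
    from numpy.f2py._backends import f2py_build_generator

    backend_cls = f2py_build_generator("meson")
    print(backend_cls.__name__)   # MesonBackend
    # Any other name raises ValueError("Unknown backend: ...").
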
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eedfe7bdf9ae07adcf51af5e0769f7439a95b10d
Binary files /dev/null and b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_backend.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f033074d7503fc576e70762eeab460cc93e4e444
Binary files /dev/null and b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_backend.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_distutils.cpython-310.pyc b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_distutils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d7f60d9c4c203be395da60ff607b3edb9e4a286
Binary files /dev/null and b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_distutils.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_meson.cpython-310.pyc b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_meson.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3eb46aceb1ef78aaee743e853b1d24ea6b8e4f40
Binary files /dev/null and b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/__pycache__/_meson.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_backend.py b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7d43d2587b2f4886372f44c9bac7f5b840d7612
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_backend.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+
+class Backend(ABC):
+ def __init__(
+ self,
+ modulename,
+ sources,
+ extra_objects,
+ build_dir,
+ include_dirs,
+ library_dirs,
+ libraries,
+ define_macros,
+ undef_macros,
+ f2py_flags,
+ sysinfo_flags,
+ fc_flags,
+ flib_flags,
+ setup_flags,
+ remove_build_dir,
+ extra_dat,
+ ):
+ self.modulename = modulename
+ self.sources = sources
+ self.extra_objects = extra_objects
+ self.build_dir = build_dir
+ self.include_dirs = include_dirs
+ self.library_dirs = library_dirs
+ self.libraries = libraries
+ self.define_macros = define_macros
+ self.undef_macros = undef_macros
+ self.f2py_flags = f2py_flags
+ self.sysinfo_flags = sysinfo_flags
+ self.fc_flags = fc_flags
+ self.flib_flags = flib_flags
+ self.setup_flags = setup_flags
+ self.remove_build_dir = remove_build_dir
+ self.extra_dat = extra_dat
+
+ @abstractmethod
+ def compile(self) -> None:
+ """Compile the wrapper."""
+ pass
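
A hypothetical sketch (not part of the diff) of the smallest concrete Backend: subclasses receive all of the constructor arguments above from f2py and only have to implement `compile()`.

    # Minimal concrete Backend for illustration; a real backend would invoke a
    # build system on self.sources instead of just reporting them.
    from numpy.f2py._backends._backend import Backend


    class EchoBackend(Backend):
        def compile(self) -> None:
            print(f"would build '{self.modulename}' from {self.sources}")
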
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_distutils.py b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_distutils.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9b22a3921a578758c92de19e3b77cf874d4e4ca
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_distutils.py
@@ -0,0 +1,75 @@
+from ._backend import Backend
+
+from numpy.distutils.core import setup, Extension
+from numpy.distutils.system_info import get_info
+from numpy.distutils.misc_util import dict_append
+from numpy.exceptions import VisibleDeprecationWarning
+import os
+import sys
+import shutil
+import warnings
+
+
+class DistutilsBackend(Backend):
+    def __init__(self, *args, **kwargs):
+ warnings.warn(
+            "distutils has been deprecated since NumPy 1.26.x. "
+            "Use the Meson backend instead, or generate wrappers "
+            "without -c and use a custom build script",
+ VisibleDeprecationWarning,
+ stacklevel=2,
+ )
+ super().__init__(*args, **kwargs)
+
+ def compile(self):
+ num_info = {}
+ if num_info:
+ self.include_dirs.extend(num_info.get("include_dirs", []))
+ ext_args = {
+ "name": self.modulename,
+ "sources": self.sources,
+ "include_dirs": self.include_dirs,
+ "library_dirs": self.library_dirs,
+ "libraries": self.libraries,
+ "define_macros": self.define_macros,
+ "undef_macros": self.undef_macros,
+ "extra_objects": self.extra_objects,
+ "f2py_options": self.f2py_flags,
+ }
+
+ if self.sysinfo_flags:
+ for n in self.sysinfo_flags:
+ i = get_info(n)
+ if not i:
+ print(
+                        f"No {repr(n)} resources found "
+                        "in system (try `f2py --help-link`)"
+ )
+ dict_append(ext_args, **i)
+
+ ext = Extension(**ext_args)
+
+ sys.argv = [sys.argv[0]] + self.setup_flags
+ sys.argv.extend(
+ [
+ "build",
+ "--build-temp",
+ self.build_dir,
+ "--build-base",
+ self.build_dir,
+ "--build-platlib",
+ ".",
+ "--disable-optimization",
+ ]
+ )
+
+ if self.fc_flags:
+ sys.argv.extend(["config_fc"] + self.fc_flags)
+ if self.flib_flags:
+ sys.argv.extend(["build_ext"] + self.flib_flags)
+
+ setup(ext_modules=[ext])
+
+ if self.remove_build_dir and os.path.exists(self.build_dir):
+ print(f"Removing build directory {self.build_dir}")
+ shutil.rmtree(self.build_dir)
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_meson.py b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_meson.py
new file mode 100644
index 0000000000000000000000000000000000000000..f324e0f595fbc6b5e2caa0959027f09495e4fecd
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/_meson.py
@@ -0,0 +1,205 @@
+from __future__ import annotations
+
+import os
+import errno
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+from ._backend import Backend
+from string import Template
+from itertools import chain
+
+import warnings
+
+
+class MesonTemplate:
+ """Template meson build file generation class."""
+
+ def __init__(
+ self,
+ modulename: str,
+ sources: list[Path],
+ deps: list[str],
+ libraries: list[str],
+ library_dirs: list[Path],
+ include_dirs: list[Path],
+ object_files: list[Path],
+ linker_args: list[str],
+ c_args: list[str],
+ build_type: str,
+ python_exe: str,
+ ):
+ self.modulename = modulename
+ self.build_template_path = (
+ Path(__file__).parent.absolute() / "meson.build.template"
+ )
+ self.sources = sources
+ self.deps = deps
+ self.libraries = libraries
+ self.library_dirs = library_dirs
+ if include_dirs is not None:
+ self.include_dirs = include_dirs
+ else:
+ self.include_dirs = []
+ self.substitutions = {}
+ self.objects = object_files
+ self.pipeline = [
+ self.initialize_template,
+ self.sources_substitution,
+ self.deps_substitution,
+ self.include_substitution,
+ self.libraries_substitution,
+ ]
+ self.build_type = build_type
+ self.python_exe = python_exe
+
+ def meson_build_template(self) -> str:
+ if not self.build_template_path.is_file():
+ raise FileNotFoundError(
+ errno.ENOENT,
+ "Meson build template"
+ f" {self.build_template_path.absolute()}"
+ " does not exist.",
+ )
+ return self.build_template_path.read_text()
+
+ def initialize_template(self) -> None:
+ self.substitutions["modulename"] = self.modulename
+ self.substitutions["buildtype"] = self.build_type
+ self.substitutions["python"] = self.python_exe
+
+ def sources_substitution(self) -> None:
+ indent = " " * 21
+ self.substitutions["source_list"] = f",\n{indent}".join(
+ [f"{indent}'{source}'" for source in self.sources]
+ )
+
+ def deps_substitution(self) -> None:
+ indent = " " * 21
+ self.substitutions["dep_list"] = f",\n{indent}".join(
+ [f"{indent}dependency('{dep}')" for dep in self.deps]
+ )
+
+ def libraries_substitution(self) -> None:
+ self.substitutions["lib_dir_declarations"] = "\n".join(
+ [
+ f"lib_dir_{i} = declare_dependency(link_args : ['-L{lib_dir}'])"
+ for i, lib_dir in enumerate(self.library_dirs)
+ ]
+ )
+
+ self.substitutions["lib_declarations"] = "\n".join(
+ [
+ f"{lib} = declare_dependency(link_args : ['-l{lib}'])"
+ for lib in self.libraries
+ ]
+ )
+
+ indent = " " * 21
+ self.substitutions["lib_list"] = f"\n{indent}".join(
+ [f"{indent}{lib}," for lib in self.libraries]
+ )
+ self.substitutions["lib_dir_list"] = f"\n{indent}".join(
+ [f"{indent}lib_dir_{i}," for i in range(len(self.library_dirs))]
+ )
+
+ def include_substitution(self) -> None:
+ indent = " " * 21
+ self.substitutions["inc_list"] = f",\n{indent}".join(
+ [f"{indent}'{inc}'" for inc in self.include_dirs]
+ )
+
+ def generate_meson_build(self):
+ for node in self.pipeline:
+ node()
+ template = Template(self.meson_build_template())
+ return template.substitute(self.substitutions)
+
+
+class MesonBackend(Backend):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.dependencies = self.extra_dat.get("dependencies", [])
+ self.meson_build_dir = "bbdir"
+ self.build_type = (
+ "debug" if any("debug" in flag for flag in self.fc_flags) else "release"
+ )
+
+ def _move_exec_to_root(self, build_dir: Path):
+ walk_dir = Path(build_dir) / self.meson_build_dir
+ path_objects = chain(
+ walk_dir.glob(f"{self.modulename}*.so"),
+ walk_dir.glob(f"{self.modulename}*.pyd"),
+ )
+ # Same behavior as distutils
+ # https://github.com/numpy/numpy/issues/24874#issuecomment-1835632293
+ for path_object in path_objects:
+ dest_path = Path.cwd() / path_object.name
+ if dest_path.exists():
+ dest_path.unlink()
+ shutil.copy2(path_object, dest_path)
+ os.remove(path_object)
+
+ def write_meson_build(self, build_dir: Path) -> None:
+ """Writes the meson build file at specified location"""
+ meson_template = MesonTemplate(
+ self.modulename,
+ self.sources,
+ self.dependencies,
+ self.libraries,
+ self.library_dirs,
+ self.include_dirs,
+ self.extra_objects,
+ self.flib_flags,
+ self.fc_flags,
+ self.build_type,
+ sys.executable,
+ )
+ src = meson_template.generate_meson_build()
+ Path(build_dir).mkdir(parents=True, exist_ok=True)
+ meson_build_file = Path(build_dir) / "meson.build"
+ meson_build_file.write_text(src)
+ return meson_build_file
+
+ def _run_subprocess_command(self, command, cwd):
+ subprocess.run(command, cwd=cwd, check=True)
+
+ def run_meson(self, build_dir: Path):
+ setup_command = ["meson", "setup", self.meson_build_dir]
+ self._run_subprocess_command(setup_command, build_dir)
+ compile_command = ["meson", "compile", "-C", self.meson_build_dir]
+ self._run_subprocess_command(compile_command, build_dir)
+
+ def compile(self) -> None:
+ self.sources = _prepare_sources(self.modulename, self.sources, self.build_dir)
+ self.write_meson_build(self.build_dir)
+ self.run_meson(self.build_dir)
+ self._move_exec_to_root(self.build_dir)
+
+
+def _prepare_sources(mname, sources, bdir):
+ extended_sources = sources.copy()
+ Path(bdir).mkdir(parents=True, exist_ok=True)
+ # Copy sources
+ for source in sources:
+ if Path(source).exists() and Path(source).is_file():
+ shutil.copy(source, bdir)
+ generated_sources = [
+ Path(f"{mname}module.c"),
+ Path(f"{mname}-f2pywrappers2.f90"),
+ Path(f"{mname}-f2pywrappers.f"),
+ ]
+ bdir = Path(bdir)
+ for generated_source in generated_sources:
+ if generated_source.exists():
+ shutil.copy(generated_source, bdir / generated_source.name)
+ extended_sources.append(generated_source.name)
+ generated_source.unlink()
+ extended_sources = [
+ Path(source).name
+ for source in extended_sources
+ if not Path(source).suffix == ".pyf"
+ ]
+ return extended_sources
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_backends/meson.build.template b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/meson.build.template
new file mode 100644
index 0000000000000000000000000000000000000000..8e34fdc8d4d6a29d62022e82ae92e787b73f941b
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_backends/meson.build.template
@@ -0,0 +1,54 @@
+project('${modulename}',
+ ['c', 'fortran'],
+ version : '0.1',
+ meson_version: '>= 1.1.0',
+ default_options : [
+ 'warning_level=1',
+ 'buildtype=${buildtype}'
+ ])
+fc = meson.get_compiler('fortran')
+
+py = import('python').find_installation('${python}', pure: false)
+py_dep = py.dependency()
+
+incdir_numpy = run_command(py,
+ ['-c', 'import os; os.chdir(".."); import numpy; print(numpy.get_include())'],
+ check : true
+).stdout().strip()
+
+incdir_f2py = run_command(py,
+ ['-c', 'import os; os.chdir(".."); import numpy.f2py; print(numpy.f2py.get_include())'],
+ check : true
+).stdout().strip()
+
+inc_np = include_directories(incdir_numpy)
+np_dep = declare_dependency(include_directories: inc_np)
+
+incdir_f2py = incdir_numpy / '..' / '..' / 'f2py' / 'src'
+inc_f2py = include_directories(incdir_f2py)
+fortranobject_c = incdir_f2py / 'fortranobject.c'
+
+inc_np = include_directories(incdir_numpy, incdir_f2py)
+# gh-25000
+quadmath_dep = fc.find_library('quadmath', required: false)
+
+${lib_declarations}
+${lib_dir_declarations}
+
+py.extension_module('${modulename}',
+ [
+${source_list},
+ fortranobject_c
+ ],
+ include_directories: [
+ inc_np,
+${inc_list}
+ ],
+ dependencies : [
+ py_dep,
+ quadmath_dep,
+${dep_list}
+${lib_list}
+${lib_dir_list}
+ ],
+ install : true)
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_isocbind.py b/venv/lib/python3.10/site-packages/numpy/f2py/_isocbind.py
new file mode 100644
index 0000000000000000000000000000000000000000..3043c5d9163f7101d165ca08e33adf0970547612
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_isocbind.py
@@ -0,0 +1,62 @@
+"""
+ISO_C_BINDING maps for f2py2e.
+Only required declarations/macros/functions will be used.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+# These map to keys in c2py_map, via forced casting for now, see gh-25229
+iso_c_binding_map = {
+ 'integer': {
+ 'c_int': 'int',
+ 'c_short': 'short', # 'short' <=> 'int' for now
+ 'c_long': 'long', # 'long' <=> 'int' for now
+ 'c_long_long': 'long_long',
+ 'c_signed_char': 'signed_char',
+ 'c_size_t': 'unsigned', # size_t <=> 'unsigned' for now
+ 'c_int8_t': 'signed_char', # int8_t <=> 'signed_char' for now
+ 'c_int16_t': 'short', # int16_t <=> 'short' for now
+ 'c_int32_t': 'int', # int32_t <=> 'int' for now
+ 'c_int64_t': 'long_long',
+ 'c_int_least8_t': 'signed_char', # int_least8_t <=> 'signed_char' for now
+ 'c_int_least16_t': 'short', # int_least16_t <=> 'short' for now
+ 'c_int_least32_t': 'int', # int_least32_t <=> 'int' for now
+ 'c_int_least64_t': 'long_long',
+ 'c_int_fast8_t': 'signed_char', # int_fast8_t <=> 'signed_char' for now
+ 'c_int_fast16_t': 'short', # int_fast16_t <=> 'short' for now
+ 'c_int_fast32_t': 'int', # int_fast32_t <=> 'int' for now
+ 'c_int_fast64_t': 'long_long',
+ 'c_intmax_t': 'long_long', # intmax_t <=> 'long_long' for now
+ 'c_intptr_t': 'long', # intptr_t <=> 'long' for now
+ 'c_ptrdiff_t': 'long', # ptrdiff_t <=> 'long' for now
+ },
+ 'real': {
+ 'c_float': 'float',
+ 'c_double': 'double',
+ 'c_long_double': 'long_double'
+ },
+ 'complex': {
+ 'c_float_complex': 'complex_float',
+ 'c_double_complex': 'complex_double',
+ 'c_long_double_complex': 'complex_long_double'
+ },
+ 'logical': {
+ 'c_bool': 'unsigned_char' # _Bool <=> 'unsigned_char' for now
+ },
+ 'character': {
+ 'c_char': 'char'
+ }
+}
+
+# TODO: See gh-25229
+isoc_c2pycode_map = {}
+iso_c2py_map = {}
+
+isoc_kindmap = {}
+for fortran_type, c_type_dict in iso_c_binding_map.items():
+ for c_type in c_type_dict.keys():
+ isoc_kindmap[c_type] = fortran_type
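
A quick check (not part of the diff) of what the loop above produces: `isoc_kindmap` inverts the nested mapping, so each ISO_C_BINDING kind name resolves to its Fortran type category.

    from numpy.f2py._isocbind import isoc_kindmap

    assert isoc_kindmap['c_int'] == 'integer'
    assert isoc_kindmap['c_double'] == 'real'
    assert isoc_kindmap['c_bool'] == 'logical'
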
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/_src_pyf.py b/venv/lib/python3.10/site-packages/numpy/f2py/_src_pyf.py
new file mode 100644
index 0000000000000000000000000000000000000000..6247b95bfe4603e9b136ca0b8e0c2842d1c1d1cc
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/_src_pyf.py
@@ -0,0 +1,239 @@
+import os
+import re
+
+# START OF CODE VENDORED FROM `numpy.distutils.from_template`
+#############################################################
+"""
+process_file(filename)
+
+ takes templated file .xxx.src and produces .xxx file where .xxx
+ is .pyf .f90 or .f using the following template rules:
+
+ '<..>' denotes a template.
+
+ All function and subroutine blocks in a source file with names that
+ contain '<..>' will be replicated according to the rules in '<..>'.
+
+ The number of comma-separated words in '<..>' will determine the number of
+ replicates.
+
+ '<..>' may have two different forms, named and short. For example,
+
+ named:
+  named:
+   <p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
+   'd', 's', 'z', and 'c' for each replicate of the block.
+
+ <_c> is already defined: <_c=s,d,c,z>
+ <_t> is already defined: <_t=real,double precision,complex,double complex>
+
+  short:
+   <s,d,c,z>, a short form of the named, useful when no <p> appears inside
+   a block.
+
+ In general, '<..>' contains a comma separated list of arbitrary
+  expressions. If these expressions must contain a comma|leftarrow|rightarrow,
+ then prepend the comma|leftarrow|rightarrow with a backslash.
+
+  If an expression matches '\\<index>' then it will be replaced
+  by <index>-th expression.
+
+ Note that all '<..>' forms in a block must have the same number of
+ comma-separated entries.
+
+  Predefined named template rules:
+   <prefix=s,d,c,z>
+   <ftype=real,double precision,complex,double complex>
+   <ftypereal=real,double precision>
+   <ctype=float,double,complex_float,complex_double>
+   <ctypereal=float,double>
+"""
+
+routine_start_re = re.compile(r'(\n|\A)((     (\$|\*))|)\s*(subroutine|function)\b', re.I)
+routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)', re.I)
+function_start_re = re.compile(r'\n     (\$|\*)\s*function\b', re.I)
+
+def parse_structure(astr):
+ """ Return a list of tuples for each function or subroutine each
+ tuple is the start and end of a subroutine or function to be
+ expanded.
+ """
+
+ spanlist = []
+ ind = 0
+ while True:
+ m = routine_start_re.search(astr, ind)
+ if m is None:
+ break
+ start = m.start()
+ if function_start_re.match(astr, start, m.end()):
+ while True:
+ i = astr.rfind('\n', ind, start)
+ if i==-1:
+ break
+ start = i
+                if astr[i:i+7]!='\n     $':
+ break
+ start += 1
+ m = routine_end_re.search(astr, m.end())
+ ind = end = m and m.end()-1 or len(astr)
+ spanlist.append((start, end))
+ return spanlist
+
+template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
+named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
+list_re = re.compile(r"<\s*((.*?))\s*>")
+
+def find_repl_patterns(astr):
+ reps = named_re.findall(astr)
+ names = {}
+ for rep in reps:
+ name = rep[0].strip() or unique_key(names)
+ repl = rep[1].replace(r'\,', '@comma@')
+ thelist = conv(repl)
+ names[name] = thelist
+ return names
+
+def find_and_remove_repl_patterns(astr):
+ names = find_repl_patterns(astr)
+ astr = re.subn(named_re, '', astr)[0]
+ return astr, names
+
+item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
+def conv(astr):
+ b = astr.split(',')
+ l = [x.strip() for x in b]
+ for i in range(len(l)):
+ m = item_re.match(l[i])
+ if m:
+ j = int(m.group('index'))
+ l[i] = l[j]
+ return ','.join(l)
+
+def unique_key(adict):
+ """ Obtain a unique key given a dictionary."""
+ allkeys = list(adict.keys())
+ done = False
+ n = 1
+ while not done:
+ newkey = '__l%s' % (n)
+ if newkey in allkeys:
+ n += 1
+ else:
+ done = True
+ return newkey
+
+
+template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
+def expand_sub(substr, names):
+ substr = substr.replace(r'\>', '@rightarrow@')
+ substr = substr.replace(r'\<', '@leftarrow@')
+ lnames = find_repl_patterns(substr)
+ substr = named_re.sub(r"<\1>", substr) # get rid of definition templates
+
+ def listrepl(mobj):
+ thelist = conv(mobj.group(1).replace(r'\,', '@comma@'))
+ if template_name_re.match(thelist):
+ return "<%s>" % (thelist)
+ name = None
+ for key in lnames.keys(): # see if list is already in dictionary
+ if lnames[key] == thelist:
+ name = key
+ if name is None: # this list is not in the dictionary yet
+ name = unique_key(lnames)
+ lnames[name] = thelist
+ return "<%s>" % name
+
+ substr = list_re.sub(listrepl, substr) # convert all lists to named templates
+ # newnames are constructed as needed
+
+ numsubs = None
+ base_rule = None
+ rules = {}
+ for r in template_re.findall(substr):
+ if r not in rules:
+ thelist = lnames.get(r, names.get(r, None))
+ if thelist is None:
+ raise ValueError('No replicates found for <%s>' % (r))
+ if r not in names and not thelist.startswith('_'):
+ names[r] = thelist
+ rule = [i.replace('@comma@', ',') for i in thelist.split(',')]
+ num = len(rule)
+
+ if numsubs is None:
+ numsubs = num
+ rules[r] = rule
+ base_rule = r
+ elif num == numsubs:
+ rules[r] = rule
+ else:
+ print("Mismatch in number of replacements (base <{}={}>) "
+ "for <{}={}>. Ignoring.".format(base_rule, ','.join(rules[base_rule]), r, thelist))
+ if not rules:
+ return substr
+
+ def namerepl(mobj):
+ name = mobj.group(1)
+ return rules.get(name, (k+1)*[name])[k]
+
+ newstr = ''
+ for k in range(numsubs):
+ newstr += template_re.sub(namerepl, substr) + '\n\n'
+
+ newstr = newstr.replace('@rightarrow@', '>')
+ newstr = newstr.replace('@leftarrow@', '<')
+ return newstr
+
+def process_str(allstr):
+ newstr = allstr
+ writestr = ''
+
+ struct = parse_structure(newstr)
+
+ oldend = 0
+ names = {}
+ names.update(_special_names)
+ for sub in struct:
+ cleanedstr, defs = find_and_remove_repl_patterns(newstr[oldend:sub[0]])
+ writestr += cleanedstr
+ names.update(defs)
+ writestr += expand_sub(newstr[sub[0]:sub[1]], names)
+ oldend = sub[1]
+ writestr += newstr[oldend:]
+
+ return writestr
+
+include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+\.src)['\"]", re.I)
+
+def resolve_includes(source):
+ d = os.path.dirname(source)
+ with open(source) as fid:
+ lines = []
+ for line in fid:
+ m = include_src_re.match(line)
+ if m:
+ fn = m.group('name')
+ if not os.path.isabs(fn):
+ fn = os.path.join(d, fn)
+ if os.path.isfile(fn):
+ lines.extend(resolve_includes(fn))
+ else:
+ lines.append(line)
+ else:
+ lines.append(line)
+ return lines
+
+def process_file(source):
+ lines = resolve_includes(source)
+ return process_str(''.join(lines))
+
+_special_names = find_repl_patterns('''
+<_c=s,d,c,z>
+<_t=real,double precision,complex,double complex>
+<prefix=s,d,c,z>
+<ftype=real,double precision,complex,double complex>
+<ctype=float,double,complex_float,complex_double>
+<ftypereal=real,double precision>
+<ctypereal=float,double>
+''')
+
+# END OF CODE VENDORED FROM `numpy.distutils.from_template`
+###########################################################
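
A small sketch (not part of the diff) of the template rules documented in the module docstring above: `<tchar=s,d>` defines two replicates, so `process_str` expands the block into an `ssum` and a `dsum` subroutine. The routine and rule names are made up for illustration.

    from numpy.f2py._src_pyf import process_str

    templated = """
          subroutine <tchar=s,d>sum(a, b, c)
          <real, double precision> a, b, c
          c = a + b
          end subroutine <tchar>sum
    """
    # Expands to an 'ssum' (real) and a 'dsum' (double precision) subroutine.
    print(process_str(templated))
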
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/auxfuncs.py b/venv/lib/python3.10/site-packages/numpy/f2py/auxfuncs.py
new file mode 100644
index 0000000000000000000000000000000000000000..13a1074b447e2834c045df8757fc264cad077e03
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/auxfuncs.py
@@ -0,0 +1,988 @@
+"""
+Auxiliary functions for f2py2e.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy (BSD style) LICENSE.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+import pprint
+import sys
+import re
+import types
+from functools import reduce
+from copy import deepcopy
+
+from . import __version__
+from . import cfuncs
+
+__all__ = [
+ 'applyrules', 'debugcapi', 'dictappend', 'errmess', 'gentitle',
+ 'getargs2', 'getcallprotoargument', 'getcallstatement',
+ 'getfortranname', 'getpymethoddef', 'getrestdoc', 'getusercode',
+ 'getusercode1', 'getdimension', 'hasbody', 'hascallstatement', 'hascommon',
+ 'hasexternals', 'hasinitvalue', 'hasnote', 'hasresultnote',
+ 'isallocatable', 'isarray', 'isarrayofstrings',
+ 'ischaracter', 'ischaracterarray', 'ischaracter_or_characterarray',
+ 'iscomplex',
+ 'iscomplexarray', 'iscomplexfunction', 'iscomplexfunction_warn',
+ 'isdouble', 'isdummyroutine', 'isexternal', 'isfunction',
+ 'isfunction_wrap', 'isint1', 'isint1array', 'isinteger', 'isintent_aux',
+ 'isintent_c', 'isintent_callback', 'isintent_copy', 'isintent_dict',
+ 'isintent_hide', 'isintent_in', 'isintent_inout', 'isintent_inplace',
+ 'isintent_nothide', 'isintent_out', 'isintent_overwrite', 'islogical',
+ 'islogicalfunction', 'islong_complex', 'islong_double',
+ 'islong_doublefunction', 'islong_long', 'islong_longfunction',
+ 'ismodule', 'ismoduleroutine', 'isoptional', 'isprivate', 'isrequired',
+ 'isroutine', 'isscalar', 'issigned_long_longarray', 'isstring',
+ 'isstringarray', 'isstring_or_stringarray', 'isstringfunction',
+ 'issubroutine', 'get_f2py_modulename',
+ 'issubroutine_wrap', 'isthreadsafe', 'isunsigned', 'isunsigned_char',
+ 'isunsigned_chararray', 'isunsigned_long_long',
+ 'isunsigned_long_longarray', 'isunsigned_short',
+ 'isunsigned_shortarray', 'l_and', 'l_not', 'l_or', 'outmess',
+ 'replace', 'show', 'stripcomma', 'throw_error', 'isattr_value',
+ 'getuseblocks', 'process_f2cmap_dict'
+]
+
+
+f2py_version = __version__.version
+
+
+errmess = sys.stderr.write
+show = pprint.pprint
+
+options = {}
+debugoptions = []
+wrapfuncs = 1
+
+
+def outmess(t):
+ if options.get('verbose', 1):
+ sys.stdout.write(t)
+
+
+def debugcapi(var):
+ return 'capi' in debugoptions
+
+
+def _ischaracter(var):
+ return 'typespec' in var and var['typespec'] == 'character' and \
+ not isexternal(var)
+
+
+def _isstring(var):
+ return 'typespec' in var and var['typespec'] == 'character' and \
+ not isexternal(var)
+
+
+def ischaracter_or_characterarray(var):
+ return _ischaracter(var) and 'charselector' not in var
+
+
+def ischaracter(var):
+ return ischaracter_or_characterarray(var) and not isarray(var)
+
+
+def ischaracterarray(var):
+ return ischaracter_or_characterarray(var) and isarray(var)
+
+
+def isstring_or_stringarray(var):
+ return _ischaracter(var) and 'charselector' in var
+
+
+def isstring(var):
+ return isstring_or_stringarray(var) and not isarray(var)
+
+
+def isstringarray(var):
+ return isstring_or_stringarray(var) and isarray(var)
+
+
+def isarrayofstrings(var): # obsolete?
+ # leaving out '*' for now so that `character*(*) a(m)` and `character
+ # a(m,*)` are treated differently. Luckily `character**` is illegal.
+ return isstringarray(var) and var['dimension'][-1] == '(*)'
+
+
+def isarray(var):
+ return 'dimension' in var and not isexternal(var)
+
+
+def isscalar(var):
+ return not (isarray(var) or isstring(var) or isexternal(var))
+
+
+def iscomplex(var):
+ return isscalar(var) and \
+ var.get('typespec') in ['complex', 'double complex']
+
+
+def islogical(var):
+ return isscalar(var) and var.get('typespec') == 'logical'
+
+
+def isinteger(var):
+ return isscalar(var) and var.get('typespec') == 'integer'
+
+
+def isreal(var):
+ return isscalar(var) and var.get('typespec') == 'real'
+
+
+def get_kind(var):
+ try:
+ return var['kindselector']['*']
+ except KeyError:
+ try:
+ return var['kindselector']['kind']
+ except KeyError:
+ pass
+
+
+def isint1(var):
+ return var.get('typespec') == 'integer' \
+ and get_kind(var) == '1' and not isarray(var)
+
+
+def islong_long(var):
+ if not isscalar(var):
+ return 0
+ if var.get('typespec') not in ['integer', 'logical']:
+ return 0
+ return get_kind(var) == '8'
+
+
+def isunsigned_char(var):
+ if not isscalar(var):
+ return 0
+ if var.get('typespec') != 'integer':
+ return 0
+ return get_kind(var) == '-1'
+
+
+def isunsigned_short(var):
+ if not isscalar(var):
+ return 0
+ if var.get('typespec') != 'integer':
+ return 0
+ return get_kind(var) == '-2'
+
+
+def isunsigned(var):
+ if not isscalar(var):
+ return 0
+ if var.get('typespec') != 'integer':
+ return 0
+ return get_kind(var) == '-4'
+
+
+def isunsigned_long_long(var):
+ if not isscalar(var):
+ return 0
+ if var.get('typespec') != 'integer':
+ return 0
+ return get_kind(var) == '-8'
+
+
+def isdouble(var):
+ if not isscalar(var):
+ return 0
+ if not var.get('typespec') == 'real':
+ return 0
+ return get_kind(var) == '8'
+
+
+def islong_double(var):
+ if not isscalar(var):
+ return 0
+ if not var.get('typespec') == 'real':
+ return 0
+ return get_kind(var) == '16'
+
+
+def islong_complex(var):
+ if not iscomplex(var):
+ return 0
+ return get_kind(var) == '32'
+
+
+def iscomplexarray(var):
+ return isarray(var) and \
+ var.get('typespec') in ['complex', 'double complex']
+
+
+def isint1array(var):
+ return isarray(var) and var.get('typespec') == 'integer' \
+ and get_kind(var) == '1'
+
+
+def isunsigned_chararray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '-1'
+
+
+def isunsigned_shortarray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '-2'
+
+
+def isunsignedarray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '-4'
+
+
+def isunsigned_long_longarray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '-8'
+
+
+def issigned_chararray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '1'
+
+
+def issigned_shortarray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '2'
+
+
+def issigned_array(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '4'
+
+
+def issigned_long_longarray(var):
+ return isarray(var) and var.get('typespec') in ['integer', 'logical']\
+ and get_kind(var) == '8'
+
+
+def isallocatable(var):
+ return 'attrspec' in var and 'allocatable' in var['attrspec']
+
+
+def ismutable(var):
+ return not ('dimension' not in var or isstring(var))
+
+
+def ismoduleroutine(rout):
+ return 'modulename' in rout
+
+
+def ismodule(rout):
+ return 'block' in rout and 'module' == rout['block']
+
+
+def isfunction(rout):
+ return 'block' in rout and 'function' == rout['block']
+
+
+def isfunction_wrap(rout):
+ if isintent_c(rout):
+ return 0
+ return wrapfuncs and isfunction(rout) and (not isexternal(rout))
+
+
+def issubroutine(rout):
+ return 'block' in rout and 'subroutine' == rout['block']
+
+
+def issubroutine_wrap(rout):
+ if isintent_c(rout):
+ return 0
+ return issubroutine(rout) and hasassumedshape(rout)
+
+def isattr_value(var):
+ return 'value' in var.get('attrspec', [])
+
+
+def hasassumedshape(rout):
+ if rout.get('hasassumedshape'):
+ return True
+ for a in rout['args']:
+ for d in rout['vars'].get(a, {}).get('dimension', []):
+ if d == ':':
+ rout['hasassumedshape'] = True
+ return True
+ return False
+
+
+def requiresf90wrapper(rout):
+ return ismoduleroutine(rout) or hasassumedshape(rout)
+
+
+def isroutine(rout):
+ return isfunction(rout) or issubroutine(rout)
+
+
+def islogicalfunction(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return islogical(rout['vars'][a])
+ return 0
+
+
+def islong_longfunction(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return islong_long(rout['vars'][a])
+ return 0
+
+
+def islong_doublefunction(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return islong_double(rout['vars'][a])
+ return 0
+
+
+def iscomplexfunction(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return iscomplex(rout['vars'][a])
+ return 0
+
+
+def iscomplexfunction_warn(rout):
+ if iscomplexfunction(rout):
+ outmess("""\
+ **************************************************************
+ Warning: code with a function returning complex value
+ may not work correctly with your Fortran compiler.
+ When using GNU gcc/g77 compilers, codes should work
+ correctly for callbacks with:
+ f2py -c -DF2PY_CB_RETURNCOMPLEX
+ **************************************************************\n""")
+ return 1
+ return 0
+
+
+def isstringfunction(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return isstring(rout['vars'][a])
+ return 0
+
+
+def hasexternals(rout):
+ return 'externals' in rout and rout['externals']
+
+
+def isthreadsafe(rout):
+ return 'f2pyenhancements' in rout and \
+ 'threadsafe' in rout['f2pyenhancements']
+
+
+def hasvariables(rout):
+ return 'vars' in rout and rout['vars']
+
+
+def isoptional(var):
+ return ('attrspec' in var and 'optional' in var['attrspec'] and
+ 'required' not in var['attrspec']) and isintent_nothide(var)
+
+
+def isexternal(var):
+ return 'attrspec' in var and 'external' in var['attrspec']
+
+
+def getdimension(var):
+ dimpattern = r"\((.*?)\)"
+ if 'attrspec' in var.keys():
+ if any('dimension' in s for s in var['attrspec']):
+ return [re.findall(dimpattern, v) for v in var['attrspec']][0]
+
+
+def isrequired(var):
+ return not isoptional(var) and isintent_nothide(var)
+
+
+def isintent_in(var):
+ if 'intent' not in var:
+ return 1
+ if 'hide' in var['intent']:
+ return 0
+ if 'inplace' in var['intent']:
+ return 0
+ if 'in' in var['intent']:
+ return 1
+ if 'out' in var['intent']:
+ return 0
+ if 'inout' in var['intent']:
+ return 0
+ if 'outin' in var['intent']:
+ return 0
+ return 1
+
+
+def isintent_inout(var):
+ return ('intent' in var and ('inout' in var['intent'] or
+ 'outin' in var['intent']) and 'in' not in var['intent'] and
+ 'hide' not in var['intent'] and 'inplace' not in var['intent'])
+
+
+def isintent_out(var):
+ return 'out' in var.get('intent', [])
+
+
+def isintent_hide(var):
+ return ('intent' in var and ('hide' in var['intent'] or
+ ('out' in var['intent'] and 'in' not in var['intent'] and
+ (not l_or(isintent_inout, isintent_inplace)(var)))))
+
+
+def isintent_nothide(var):
+ return not isintent_hide(var)
+
+
+def isintent_c(var):
+ return 'c' in var.get('intent', [])
+
+
+def isintent_cache(var):
+ return 'cache' in var.get('intent', [])
+
+
+def isintent_copy(var):
+ return 'copy' in var.get('intent', [])
+
+
+def isintent_overwrite(var):
+ return 'overwrite' in var.get('intent', [])
+
+
+def isintent_callback(var):
+ return 'callback' in var.get('intent', [])
+
+
+def isintent_inplace(var):
+ return 'inplace' in var.get('intent', [])
+
+
+def isintent_aux(var):
+ return 'aux' in var.get('intent', [])
+
+
+def isintent_aligned4(var):
+ return 'aligned4' in var.get('intent', [])
+
+
+def isintent_aligned8(var):
+ return 'aligned8' in var.get('intent', [])
+
+
+def isintent_aligned16(var):
+ return 'aligned16' in var.get('intent', [])
+
+
+isintent_dict = {isintent_in: 'INTENT_IN', isintent_inout: 'INTENT_INOUT',
+ isintent_out: 'INTENT_OUT', isintent_hide: 'INTENT_HIDE',
+ isintent_cache: 'INTENT_CACHE',
+ isintent_c: 'INTENT_C', isoptional: 'OPTIONAL',
+ isintent_inplace: 'INTENT_INPLACE',
+ isintent_aligned4: 'INTENT_ALIGNED4',
+ isintent_aligned8: 'INTENT_ALIGNED8',
+ isintent_aligned16: 'INTENT_ALIGNED16',
+ }
+
+
+def isprivate(var):
+ return 'attrspec' in var and 'private' in var['attrspec']
+
+
+def hasinitvalue(var):
+ return '=' in var
+
+
+def hasinitvalueasstring(var):
+ if not hasinitvalue(var):
+ return 0
+ return var['='][0] in ['"', "'"]
+
+
+def hasnote(var):
+ return 'note' in var
+
+
+def hasresultnote(rout):
+ if not isfunction(rout):
+ return 0
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if a in rout['vars']:
+ return hasnote(rout['vars'][a])
+ return 0
+
+
+def hascommon(rout):
+ return 'common' in rout
+
+
+def containscommon(rout):
+ if hascommon(rout):
+ return 1
+ if hasbody(rout):
+ for b in rout['body']:
+ if containscommon(b):
+ return 1
+ return 0
+
+
+def containsmodule(block):
+ if ismodule(block):
+ return 1
+ if not hasbody(block):
+ return 0
+ for b in block['body']:
+ if containsmodule(b):
+ return 1
+ return 0
+
+
+def hasbody(rout):
+ return 'body' in rout
+
+
+def hascallstatement(rout):
+ return getcallstatement(rout) is not None
+
+
+def istrue(var):
+ return 1
+
+
+def isfalse(var):
+ return 0
+
+
+class F2PYError(Exception):
+ pass
+
+
+class throw_error:
+
+ def __init__(self, mess):
+ self.mess = mess
+
+ def __call__(self, var):
+ mess = '\n\n var = %s\n Message: %s\n' % (var, self.mess)
+ raise F2PYError(mess)
+
+
+def l_and(*f):
+ l1, l2 = 'lambda v', []
+ for i in range(len(f)):
+ l1 = '%s,f%d=f[%d]' % (l1, i, i)
+ l2.append('f%d(v)' % (i))
+ return eval('%s:%s' % (l1, ' and '.join(l2)))
+
+
+def l_or(*f):
+ l1, l2 = 'lambda v', []
+ for i in range(len(f)):
+ l1 = '%s,f%d=f[%d]' % (l1, i, i)
+ l2.append('f%d(v)' % (i))
+ return eval('%s:%s' % (l1, ' or '.join(l2)))
+
+
+def l_not(f):
+ return eval('lambda v,f=f:not f(v)')
+
+
+def isdummyroutine(rout):
+ try:
+ return rout['f2pyenhancements']['fortranname'] == ''
+ except KeyError:
+ return 0
+
+
+def getfortranname(rout):
+ try:
+ name = rout['f2pyenhancements']['fortranname']
+ if name == '':
+ raise KeyError
+ if not name:
+ errmess('Failed to use fortranname from %s\n' %
+ (rout['f2pyenhancements']))
+ raise KeyError
+ except KeyError:
+ name = rout['name']
+ return name
+
+
+def getmultilineblock(rout, blockname, comment=1, counter=0):
+ try:
+ r = rout['f2pyenhancements'].get(blockname)
+ except KeyError:
+ return
+ if not r:
+ return
+ if counter > 0 and isinstance(r, str):
+ return
+ if isinstance(r, list):
+ if counter >= len(r):
+ return
+ r = r[counter]
+ if r[:3] == "'''":
+ if comment:
+ r = '\t/* start ' + blockname + \
+ ' multiline (' + repr(counter) + ') */\n' + r[3:]
+ else:
+ r = r[3:]
+ if r[-3:] == "'''":
+ if comment:
+ r = r[:-3] + '\n\t/* end multiline (' + repr(counter) + ')*/'
+ else:
+ r = r[:-3]
+ else:
+ errmess("%s multiline block should end with `'''`: %s\n"
+ % (blockname, repr(r)))
+ return r
+
+
+def getcallstatement(rout):
+ return getmultilineblock(rout, 'callstatement')
+
+
+def getcallprotoargument(rout, cb_map={}):
+ r = getmultilineblock(rout, 'callprotoargument', comment=0)
+ if r:
+ return r
+ if hascallstatement(rout):
+ outmess(
+ 'warning: callstatement is defined without callprotoargument\n')
+ return
+ from .capi_maps import getctype
+ arg_types, arg_types2 = [], []
+ if l_and(isstringfunction, l_not(isfunction_wrap))(rout):
+ arg_types.extend(['char*', 'size_t'])
+ for n in rout['args']:
+ var = rout['vars'][n]
+ if isintent_callback(var):
+ continue
+ if n in cb_map:
+ ctype = cb_map[n] + '_typedef'
+ else:
+ ctype = getctype(var)
+ if l_and(isintent_c, l_or(isscalar, iscomplex))(var):
+ pass
+ elif isstring(var):
+ pass
+ else:
+ if not isattr_value(var):
+ ctype = ctype + '*'
+ if ((isstring(var)
+ or isarrayofstrings(var) # obsolete?
+ or isstringarray(var))):
+ arg_types2.append('size_t')
+ arg_types.append(ctype)
+
+ proto_args = ','.join(arg_types + arg_types2)
+ if not proto_args:
+ proto_args = 'void'
+ return proto_args
+
+
+def getusercode(rout):
+ return getmultilineblock(rout, 'usercode')
+
+
+def getusercode1(rout):
+ return getmultilineblock(rout, 'usercode', counter=1)
+
+
+def getpymethoddef(rout):
+ return getmultilineblock(rout, 'pymethoddef')
+
+
+def getargs(rout):
+ sortargs, args = [], []
+ if 'args' in rout:
+ args = rout['args']
+ if 'sortvars' in rout:
+ for a in rout['sortvars']:
+ if a in args:
+ sortargs.append(a)
+ for a in args:
+ if a not in sortargs:
+ sortargs.append(a)
+ else:
+ sortargs = rout['args']
+ return args, sortargs
+
+
+def getargs2(rout):
+ sortargs, args = [], rout.get('args', [])
+ auxvars = [a for a in rout['vars'].keys() if isintent_aux(rout['vars'][a])
+ and a not in args]
+ args = auxvars + args
+ if 'sortvars' in rout:
+ for a in rout['sortvars']:
+ if a in args:
+ sortargs.append(a)
+ for a in args:
+ if a not in sortargs:
+ sortargs.append(a)
+ else:
+ sortargs = auxvars + rout['args']
+ return args, sortargs
+
+
+def getrestdoc(rout):
+ if 'f2pymultilines' not in rout:
+ return None
+ k = None
+ if rout['block'] == 'python module':
+ k = rout['block'], rout['name']
+ return rout['f2pymultilines'].get(k, None)
+
+
+def gentitle(name):
+ ln = (80 - len(name) - 6) // 2
+ return '/*%s %s %s*/' % (ln * '*', name, ln * '*')
+
+
+def flatlist(lst):
+ if isinstance(lst, list):
+ return reduce(lambda x, y, f=flatlist: x + f(y), lst, [])
+ return [lst]
+
+
+def stripcomma(s):
+ if s and s[-1] == ',':
+ return s[:-1]
+ return s
+
+
+def replace(str, d, defaultsep=''):
+ if isinstance(d, list):
+ return [replace(str, _m, defaultsep) for _m in d]
+ if isinstance(str, list):
+ return [replace(_m, d, defaultsep) for _m in str]
+ for k in 2 * list(d.keys()):
+ if k == 'separatorsfor':
+ continue
+ if 'separatorsfor' in d and k in d['separatorsfor']:
+ sep = d['separatorsfor'][k]
+ else:
+ sep = defaultsep
+ if isinstance(d[k], list):
+ str = str.replace('#%s#' % (k), sep.join(flatlist(d[k])))
+ else:
+ str = str.replace('#%s#' % (k), d[k])
+ return str
+
+
+def dictappend(rd, ar):
+ if isinstance(ar, list):
+ for a in ar:
+ rd = dictappend(rd, a)
+ return rd
+ for k in ar.keys():
+ if k[0] == '_':
+ continue
+ if k in rd:
+ if isinstance(rd[k], str):
+ rd[k] = [rd[k]]
+ if isinstance(rd[k], list):
+ if isinstance(ar[k], list):
+ rd[k] = rd[k] + ar[k]
+ else:
+ rd[k].append(ar[k])
+ elif isinstance(rd[k], dict):
+ if isinstance(ar[k], dict):
+ if k == 'separatorsfor':
+ for k1 in ar[k].keys():
+ if k1 not in rd[k]:
+ rd[k][k1] = ar[k][k1]
+ else:
+ rd[k] = dictappend(rd[k], ar[k])
+ else:
+ rd[k] = ar[k]
+ return rd
+
+
+def applyrules(rules, d, var={}):
+ ret = {}
+ if isinstance(rules, list):
+ for r in rules:
+ rr = applyrules(r, d, var)
+ ret = dictappend(ret, rr)
+ if '_break' in rr:
+ break
+ return ret
+ if '_check' in rules and (not rules['_check'](var)):
+ return ret
+ if 'need' in rules:
+ res = applyrules({'needs': rules['need']}, d, var)
+ if 'needs' in res:
+ cfuncs.append_needs(res['needs'])
+
+ for k in rules.keys():
+ if k == 'separatorsfor':
+ ret[k] = rules[k]
+ continue
+ if isinstance(rules[k], str):
+ ret[k] = replace(rules[k], d)
+ elif isinstance(rules[k], list):
+ ret[k] = []
+ for i in rules[k]:
+ ar = applyrules({k: i}, d, var)
+ if k in ar:
+ ret[k].append(ar[k])
+ elif k[0] == '_':
+ continue
+ elif isinstance(rules[k], dict):
+ ret[k] = []
+ for k1 in rules[k].keys():
+ if isinstance(k1, types.FunctionType) and k1(var):
+ if isinstance(rules[k][k1], list):
+ for i in rules[k][k1]:
+ if isinstance(i, dict):
+ res = applyrules({'supertext': i}, d, var)
+ if 'supertext' in res:
+ i = res['supertext']
+ else:
+ i = ''
+ ret[k].append(replace(i, d))
+ else:
+ i = rules[k][k1]
+ if isinstance(i, dict):
+ res = applyrules({'supertext': i}, d)
+ if 'supertext' in res:
+ i = res['supertext']
+ else:
+ i = ''
+ ret[k].append(replace(i, d))
+ else:
+ errmess('applyrules: ignoring rule %s.\n' % repr(rules[k]))
+ if isinstance(ret[k], list):
+ if len(ret[k]) == 1:
+ ret[k] = ret[k][0]
+ if ret[k] == []:
+ del ret[k]
+ return ret
+
+_f2py_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]+)',
+                                     re.I).match
+_f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]*?'
+                                          r'__user__[\w_]*)', re.I).match
+
+def get_f2py_modulename(source):
+ name = None
+ with open(source) as f:
+ for line in f:
+ m = _f2py_module_name_match(line)
+ if m:
+ if _f2py_user_module_name_match(line): # skip *__user__* names
+ continue
+ name = m.group('name')
+ break
+ return name
+
+def getuseblocks(pymod):
+ all_uses = []
+ for inner in pymod['body']:
+ for modblock in inner['body']:
+ if modblock.get('use'):
+ all_uses.extend([x for x in modblock.get("use").keys() if "__" not in x])
+ return all_uses
+
+def process_f2cmap_dict(f2cmap_all, new_map, c2py_map, verbose = False):
+ """
+ Update the Fortran-to-C type mapping dictionary with new mappings and
+ return a list of successfully mapped C types.
+
+ This function integrates a new mapping dictionary into an existing
+ Fortran-to-C type mapping dictionary. It ensures that all keys are in
+ lowercase and validates new entries against a given C-to-Python mapping
+ dictionary. Redefinitions and invalid entries are reported with a warning.
+
+ Parameters
+ ----------
+ f2cmap_all : dict
+ The existing Fortran-to-C type mapping dictionary that will be updated.
+ It should be a dictionary of dictionaries where the main keys represent
+ Fortran types and the nested dictionaries map Fortran type specifiers
+ to corresponding C types.
+
+ new_map : dict
+ A dictionary containing new type mappings to be added to `f2cmap_all`.
+ The structure should be similar to `f2cmap_all`, with keys representing
+ Fortran types and values being dictionaries of type specifiers and their
+ C type equivalents.
+
+ c2py_map : dict
+ A dictionary used for validating the C types in `new_map`. It maps C
+ types to corresponding Python types and is used to ensure that the C
+ types specified in `new_map` are valid.
+
+    verbose : bool, optional
+        If True, report each type mapping that is applied or ignored.
+
+ Returns
+ -------
+ tuple of (dict, list)
+ The updated Fortran-to-C type mapping dictionary and a list of
+ successfully mapped C types.
+ """
+ f2cmap_mapped = []
+
+ new_map_lower = {}
+ for k, d1 in new_map.items():
+ d1_lower = {k1.lower(): v1 for k1, v1 in d1.items()}
+ new_map_lower[k.lower()] = d1_lower
+
+ for k, d1 in new_map_lower.items():
+ if k not in f2cmap_all:
+ f2cmap_all[k] = {}
+
+ for k1, v1 in d1.items():
+ if v1 in c2py_map:
+ if k1 in f2cmap_all[k]:
+ outmess(
+ "\tWarning: redefinition of {'%s':{'%s':'%s'->'%s'}}\n"
+ % (k, k1, f2cmap_all[k][k1], v1)
+ )
+ f2cmap_all[k][k1] = v1
+ if verbose:
+ outmess('\tMapping "%s(kind=%s)" to "%s"\n' % (k, k1, v1))
+ f2cmap_mapped.append(v1)
+ else:
+ if verbose:
+ errmess(
+ "\tIgnoring map {'%s':{'%s':'%s'}}: '%s' must be in %s\n"
+ % (k, k1, v1, v1, list(c2py_map.keys()))
+ )
+
+ return f2cmap_all, f2cmap_mapped
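+
+# Illustrative sketch of how process_f2cmap_dict merges a user map. The
+# values below are hypothetical and not part of f2py itself:
+#
+#     existing = {'real': {'': 'float'}}
+#     updated, mapped = process_f2cmap_dict(
+#         existing, {'REAL': {'DP': 'double'}}, {'double': 'float'})
+#     # updated == {'real': {'': 'float', 'dp': 'double'}}; mapped == ['double']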
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/capi_maps.py b/venv/lib/python3.10/site-packages/numpy/f2py/capi_maps.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa477a5b9aca4873c269b7e628dc50f4d58251b0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/capi_maps.py
@@ -0,0 +1,819 @@
+"""
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+from . import __version__
+f2py_version = __version__.version
+
+import copy
+import re
+import os
+from .crackfortran import markoutercomma
+from . import cb_rules
+from ._isocbind import iso_c_binding_map, isoc_c2pycode_map, iso_c2py_map
+
+# The environment provided by auxfuncs.py is needed for some calls to eval.
+# As the needed functions cannot be determined by static inspection of the
+# code, it is safest to use import * pending a major refactoring of f2py.
+from .auxfuncs import *
+
+__all__ = [
+ 'getctype', 'getstrlength', 'getarrdims', 'getpydocsign',
+ 'getarrdocsign', 'getinit', 'sign2map', 'routsign2map', 'modsign2map',
+ 'cb_sign2map', 'cb_routsign2map', 'common_sign2map', 'process_f2cmap_dict'
+]
+
+
+depargs = []
+lcb_map = {}
+lcb2_map = {}
+# forced casting: mainly caused by the fact that Python or Numeric
+# C/APIs do not support the corresponding C types.
+c2py_map = {'double': 'float',
+ 'float': 'float', # forced casting
+ 'long_double': 'float', # forced casting
+ 'char': 'int', # forced casting
+ 'signed_char': 'int', # forced casting
+ 'unsigned_char': 'int', # forced casting
+ 'short': 'int', # forced casting
+ 'unsigned_short': 'int', # forced casting
+ 'int': 'int', # forced casting
+ 'long': 'int',
+ 'long_long': 'long',
+ 'unsigned': 'int', # forced casting
+ 'complex_float': 'complex', # forced casting
+ 'complex_double': 'complex',
+ 'complex_long_double': 'complex', # forced casting
+ 'string': 'string',
+ 'character': 'bytes',
+ }
+
+c2capi_map = {'double': 'NPY_DOUBLE',
+ 'float': 'NPY_FLOAT',
+ 'long_double': 'NPY_LONGDOUBLE',
+ 'char': 'NPY_BYTE',
+ 'unsigned_char': 'NPY_UBYTE',
+ 'signed_char': 'NPY_BYTE',
+ 'short': 'NPY_SHORT',
+ 'unsigned_short': 'NPY_USHORT',
+ 'int': 'NPY_INT',
+ 'unsigned': 'NPY_UINT',
+ 'long': 'NPY_LONG',
+ 'unsigned_long': 'NPY_ULONG',
+ 'long_long': 'NPY_LONGLONG',
+ 'unsigned_long_long': 'NPY_ULONGLONG',
+ 'complex_float': 'NPY_CFLOAT',
+ 'complex_double': 'NPY_CDOUBLE',
+ 'complex_long_double': 'NPY_CDOUBLE',
+ 'string': 'NPY_STRING',
+ 'character': 'NPY_STRING'}
+
+c2pycode_map = {'double': 'd',
+ 'float': 'f',
+ 'long_double': 'g',
+ 'char': 'b',
+ 'unsigned_char': 'B',
+ 'signed_char': 'b',
+ 'short': 'h',
+ 'unsigned_short': 'H',
+ 'int': 'i',
+ 'unsigned': 'I',
+ 'long': 'l',
+ 'unsigned_long': 'L',
+ 'long_long': 'q',
+ 'unsigned_long_long': 'Q',
+ 'complex_float': 'F',
+ 'complex_double': 'D',
+ 'complex_long_double': 'G',
+ 'string': 'S',
+ 'character': 'c'}
+
+# https://docs.python.org/3/c-api/arg.html#building-values
+c2buildvalue_map = {'double': 'd',
+ 'float': 'f',
+ 'char': 'b',
+ 'signed_char': 'b',
+ 'short': 'h',
+ 'int': 'i',
+ 'long': 'l',
+ 'long_long': 'L',
+ 'complex_float': 'N',
+ 'complex_double': 'N',
+ 'complex_long_double': 'N',
+ 'string': 'y',
+ 'character': 'c'}
+
+f2cmap_all = {'real': {'': 'float', '4': 'float', '8': 'double',
+ '12': 'long_double', '16': 'long_double'},
+ 'integer': {'': 'int', '1': 'signed_char', '2': 'short',
+ '4': 'int', '8': 'long_long',
+ '-1': 'unsigned_char', '-2': 'unsigned_short',
+ '-4': 'unsigned', '-8': 'unsigned_long_long'},
+ 'complex': {'': 'complex_float', '8': 'complex_float',
+ '16': 'complex_double', '24': 'complex_long_double',
+ '32': 'complex_long_double'},
+ 'complexkind': {'': 'complex_float', '4': 'complex_float',
+ '8': 'complex_double', '12': 'complex_long_double',
+ '16': 'complex_long_double'},
+ 'logical': {'': 'int', '1': 'char', '2': 'short', '4': 'int',
+ '8': 'long_long'},
+ 'double complex': {'': 'complex_double'},
+ 'double precision': {'': 'double'},
+ 'byte': {'': 'char'},
+ }
+
+# Add ISO_C handling
+c2pycode_map.update(isoc_c2pycode_map)
+c2py_map.update(iso_c2py_map)
+f2cmap_all, _ = process_f2cmap_dict(f2cmap_all, iso_c_binding_map, c2py_map)
+# End ISO_C handling
+f2cmap_default = copy.deepcopy(f2cmap_all)
+
+f2cmap_mapped = []
+
+def load_f2cmap_file(f2cmap_file):
+ global f2cmap_all, f2cmap_mapped
+
+ f2cmap_all = copy.deepcopy(f2cmap_default)
+
+ if f2cmap_file is None:
+ # Default value
+ f2cmap_file = '.f2py_f2cmap'
+ if not os.path.isfile(f2cmap_file):
+ return
+
+ # User defined additions to f2cmap_all.
+ # f2cmap_file must contain a dictionary of dictionaries, only. For
+ # example, {'real':{'low':'float'}} means that Fortran 'real(low)' is
+ # interpreted as C 'float'. This feature is useful for F90/95 users if
+ # they use PARAMETERS in type specifications.
+ try:
+ outmess('Reading f2cmap from {!r} ...\n'.format(f2cmap_file))
+ with open(f2cmap_file) as f:
+ d = eval(f.read().lower(), {}, {})
+ f2cmap_all, f2cmap_mapped = process_f2cmap_dict(f2cmap_all, d, c2py_map, True)
+ outmess('Successfully applied user defined f2cmap changes\n')
+ except Exception as msg:
+ errmess('Failed to apply user defined f2cmap changes: %s. Skipping.\n' % (msg))
+
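+# A minimal example of a user-supplied .f2py_f2cmap file (hypothetical
+# contents) that load_f2cmap_file would accept:
+#
+#     dict(real=dict(sp='float', dp='double'))
+#
+# With this file present, Fortran 'real(sp)' and 'real(dp)' declarations are
+# mapped to C 'float' and 'double', respectively.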
+
+cformat_map = {'double': '%g',
+ 'float': '%g',
+ 'long_double': '%Lg',
+ 'char': '%d',
+ 'signed_char': '%d',
+ 'unsigned_char': '%hhu',
+ 'short': '%hd',
+ 'unsigned_short': '%hu',
+ 'int': '%d',
+ 'unsigned': '%u',
+ 'long': '%ld',
+ 'unsigned_long': '%lu',
+ 'long_long': '%ld',
+ 'complex_float': '(%g,%g)',
+ 'complex_double': '(%g,%g)',
+ 'complex_long_double': '(%Lg,%Lg)',
+ 'string': '\\"%s\\"',
+ 'character': "'%c'",
+ }
+
+# Auxiliary functions
+
+
+def getctype(var):
+ """
+ Determines C type
+ """
+ ctype = 'void'
+ if isfunction(var):
+ if 'result' in var:
+ a = var['result']
+ else:
+ a = var['name']
+ if a in var['vars']:
+ return getctype(var['vars'][a])
+ else:
+ errmess('getctype: function %s has no return value?!\n' % a)
+ elif issubroutine(var):
+ return ctype
+ elif ischaracter_or_characterarray(var):
+ return 'character'
+ elif isstring_or_stringarray(var):
+ return 'string'
+ elif 'typespec' in var and var['typespec'].lower() in f2cmap_all:
+ typespec = var['typespec'].lower()
+ f2cmap = f2cmap_all[typespec]
+ ctype = f2cmap[''] # default type
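+        # Refine the default when an explicit size selector ('real*8') or a
+        # kind selector ('real(kind=8)') is present.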
+ if 'kindselector' in var:
+ if '*' in var['kindselector']:
+ try:
+ ctype = f2cmap[var['kindselector']['*']]
+ except KeyError:
+ errmess('getctype: "%s %s %s" not supported.\n' %
+ (var['typespec'], '*', var['kindselector']['*']))
+ elif 'kind' in var['kindselector']:
+ if typespec + 'kind' in f2cmap_all:
+ f2cmap = f2cmap_all[typespec + 'kind']
+ try:
+ ctype = f2cmap[var['kindselector']['kind']]
+ except KeyError:
+ if typespec in f2cmap_all:
+ f2cmap = f2cmap_all[typespec]
+ try:
+ ctype = f2cmap[str(var['kindselector']['kind'])]
+ except KeyError:
+ errmess('getctype: "%s(kind=%s)" is mapped to C "%s" (to override define dict(%s = dict(%s="")) in %s/.f2py_f2cmap file).\n'
+ % (typespec, var['kindselector']['kind'], ctype,
+ typespec, var['kindselector']['kind'], os.getcwd()))
+ else:
+ if not isexternal(var):
+ errmess('getctype: No C-type found in "%s", assuming void.\n' % var)
+ return ctype
+
+
+def f2cexpr(expr):
+ """Rewrite Fortran expression as f2py supported C expression.
+
+ Due to the lack of a proper expression parser in f2py, this
+ function uses a heuristic approach that assumes that Fortran
+ arithmetic expressions are valid C arithmetic expressions when
+ mapping Fortran function calls to the corresponding C function/CPP
+ macros calls.
+
+ """
+ # TODO: support Fortran `len` function with optional kind parameter
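+    # e.g. f2cexpr('3 * len(a)') -> '3 * f2py_slen(a)'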
+ expr = re.sub(r'\blen\b', 'f2py_slen', expr)
+ return expr
+
+
+def getstrlength(var):
+ if isstringfunction(var):
+ if 'result' in var:
+ a = var['result']
+ else:
+ a = var['name']
+ if a in var['vars']:
+ return getstrlength(var['vars'][a])
+ else:
+ errmess('getstrlength: function %s has no return value?!\n' % a)
+ if not isstring(var):
+ errmess(
+ 'getstrlength: expected a signature of a string but got: %s\n' % (repr(var)))
+ len = '1'
+ if 'charselector' in var:
+ a = var['charselector']
+ if '*' in a:
+ len = a['*']
+ elif 'len' in a:
+ len = f2cexpr(a['len'])
+ if re.match(r'\(\s*(\*|:)\s*\)', len) or re.match(r'(\*|:)', len):
+ if isintent_hide(var):
+ errmess('getstrlength:intent(hide): expected a string with defined length but got: %s\n' % (
+ repr(var)))
+ len = '-1'
+ return len
+
+
+def getarrdims(a, var, verbose=0):
+ ret = {}
+ if isstring(var) and not isarray(var):
+ ret['size'] = getstrlength(var)
+ ret['rank'] = '0'
+ ret['dims'] = ''
+ elif isscalar(var):
+ ret['size'] = '1'
+ ret['rank'] = '0'
+ ret['dims'] = ''
+ elif isarray(var):
+ dim = copy.copy(var['dimension'])
+ ret['size'] = '*'.join(dim)
+ try:
+ ret['size'] = repr(eval(ret['size']))
+ except Exception:
+ pass
+ ret['dims'] = ','.join(dim)
+ ret['rank'] = repr(len(dim))
+ ret['rank*[-1]'] = repr(len(dim) * [-1])[1:-1]
+ for i in range(len(dim)): # solve dim for dependencies
+ v = []
+ if dim[i] in depargs:
+ v = [dim[i]]
+ else:
+ for va in depargs:
+ if re.match(r'.*?\b%s\b.*' % va, dim[i]):
+ v.append(va)
+ for va in v:
+ if depargs.index(va) > depargs.index(a):
+ dim[i] = '*'
+ break
+ ret['setdims'], i = '', -1
+ for d in dim:
+ i = i + 1
+ if d not in ['*', ':', '(*)', '(:)']:
+ ret['setdims'] = '%s#varname#_Dims[%d]=%s,' % (
+ ret['setdims'], i, d)
+ if ret['setdims']:
+ ret['setdims'] = ret['setdims'][:-1]
+ ret['cbsetdims'], i = '', -1
+ for d in var['dimension']:
+ i = i + 1
+ if d not in ['*', ':', '(*)', '(:)']:
+ ret['cbsetdims'] = '%s#varname#_Dims[%d]=%s,' % (
+ ret['cbsetdims'], i, d)
+ elif isintent_in(var):
+ outmess('getarrdims:warning: assumed shape array, using 0 instead of %r\n'
+ % (d))
+ ret['cbsetdims'] = '%s#varname#_Dims[%d]=%s,' % (
+ ret['cbsetdims'], i, 0)
+ elif verbose:
+ errmess(
+ 'getarrdims: If in call-back function: array argument %s must have bounded dimensions: got %s\n' % (repr(a), repr(d)))
+ if ret['cbsetdims']:
+ ret['cbsetdims'] = ret['cbsetdims'][:-1]
+# if not isintent_c(var):
+# var['dimension'].reverse()
+ return ret
+
+
+def getpydocsign(a, var):
+ global lcb_map
+ if isfunction(var):
+ if 'result' in var:
+ af = var['result']
+ else:
+ af = var['name']
+ if af in var['vars']:
+ return getpydocsign(af, var['vars'][af])
+ else:
+            errmess('getpydocsign: function %s has no return value?!\n' % af)
+ return '', ''
+ sig, sigout = a, a
+ opt = ''
+ if isintent_in(var):
+ opt = 'input'
+ elif isintent_inout(var):
+ opt = 'in/output'
+ out_a = a
+ if isintent_out(var):
+ for k in var['intent']:
+ if k[:4] == 'out=':
+ out_a = k[4:]
+ break
+ init = ''
+ ctype = getctype(var)
+
+ if hasinitvalue(var):
+ init, showinit = getinit(a, var)
+ init = ', optional\\n Default: %s' % showinit
+ if isscalar(var):
+ if isintent_inout(var):
+ sig = '%s : %s rank-0 array(%s,\'%s\')%s' % (a, opt, c2py_map[ctype],
+ c2pycode_map[ctype], init)
+ else:
+ sig = '%s : %s %s%s' % (a, opt, c2py_map[ctype], init)
+ sigout = '%s : %s' % (out_a, c2py_map[ctype])
+ elif isstring(var):
+ if isintent_inout(var):
+ sig = '%s : %s rank-0 array(string(len=%s),\'c\')%s' % (
+ a, opt, getstrlength(var), init)
+ else:
+ sig = '%s : %s string(len=%s)%s' % (
+ a, opt, getstrlength(var), init)
+ sigout = '%s : string(len=%s)' % (out_a, getstrlength(var))
+ elif isarray(var):
+ dim = var['dimension']
+ rank = repr(len(dim))
+ sig = '%s : %s rank-%s array(\'%s\') with bounds (%s)%s' % (a, opt, rank,
+ c2pycode_map[
+ ctype],
+ ','.join(dim), init)
+ if a == out_a:
+ sigout = '%s : rank-%s array(\'%s\') with bounds (%s)'\
+ % (a, rank, c2pycode_map[ctype], ','.join(dim))
+ else:
+ sigout = '%s : rank-%s array(\'%s\') with bounds (%s) and %s storage'\
+ % (out_a, rank, c2pycode_map[ctype], ','.join(dim), a)
+ elif isexternal(var):
+ ua = ''
+ if a in lcb_map and lcb_map[a] in lcb2_map and 'argname' in lcb2_map[lcb_map[a]]:
+ ua = lcb2_map[lcb_map[a]]['argname']
+ if not ua == a:
+ ua = ' => %s' % ua
+ else:
+ ua = ''
+ sig = '%s : call-back function%s' % (a, ua)
+ sigout = sig
+ else:
+ errmess(
+ 'getpydocsign: Could not resolve docsignature for "%s".\n' % a)
+ return sig, sigout
+
+
+def getarrdocsign(a, var):
+ ctype = getctype(var)
+ if isstring(var) and (not isarray(var)):
+ sig = '%s : rank-0 array(string(len=%s),\'c\')' % (a,
+ getstrlength(var))
+ elif isscalar(var):
+ sig = '%s : rank-0 array(%s,\'%s\')' % (a, c2py_map[ctype],
+ c2pycode_map[ctype],)
+ elif isarray(var):
+ dim = var['dimension']
+ rank = repr(len(dim))
+ sig = '%s : rank-%s array(\'%s\') with bounds (%s)' % (a, rank,
+ c2pycode_map[
+ ctype],
+ ','.join(dim))
+ return sig
+
+
+def getinit(a, var):
+ if isstring(var):
+ init, showinit = '""', "''"
+ else:
+ init, showinit = '', ''
+ if hasinitvalue(var):
+ init = var['=']
+ showinit = init
+ if iscomplex(var) or iscomplexarray(var):
+ ret = {}
+
+ try:
+ v = var["="]
+ if ',' in v:
+ ret['init.r'], ret['init.i'] = markoutercomma(
+ v[1:-1]).split('@,@')
+ else:
+ v = eval(v, {}, {})
+ ret['init.r'], ret['init.i'] = str(v.real), str(v.imag)
+ except Exception:
+ raise ValueError(
+ 'getinit: expected complex number `(r,i)\' but got `%s\' as initial value of %r.' % (init, a))
+ if isarray(var):
+ init = '(capi_c.r=%s,capi_c.i=%s,capi_c)' % (
+ ret['init.r'], ret['init.i'])
+ elif isstring(var):
+ if not init:
+ init, showinit = '""', "''"
+ if init[0] == "'":
+ init = '"%s"' % (init[1:-1].replace('"', '\\"'))
+ if init[0] == '"':
+ showinit = "'%s'" % (init[1:-1])
+ return init, showinit
+
+
+def get_elsize(var):
+ if isstring(var) or isstringarray(var):
+ elsize = getstrlength(var)
+ # override with user-specified length when available:
+ elsize = var['charselector'].get('f2py_len', elsize)
+ return elsize
+ if ischaracter(var) or ischaracterarray(var):
+ return '1'
+ # for numerical types, PyArray_New* functions ignore specified
+ # elsize, so we just return 1 and let elsize be determined at
+ # runtime, see fortranobject.c
+ return '1'
+
+
+def sign2map(a, var):
+ """
+ varname,ctype,atype
+ init,init.r,init.i,pytype
+ vardebuginfo,vardebugshowvalue,varshowvalue
+ varrformat
+
+ intent
+ """
+ out_a = a
+ if isintent_out(var):
+ for k in var['intent']:
+ if k[:4] == 'out=':
+ out_a = k[4:]
+ break
+ ret = {'varname': a, 'outvarname': out_a, 'ctype': getctype(var)}
+ intent_flags = []
+ for f, s in isintent_dict.items():
+ if f(var):
+ intent_flags.append('F2PY_%s' % s)
+ if intent_flags:
+ # TODO: Evaluate intent_flags here.
+ ret['intent'] = '|'.join(intent_flags)
+ else:
+ ret['intent'] = 'F2PY_INTENT_IN'
+ if isarray(var):
+ ret['varrformat'] = 'N'
+ elif ret['ctype'] in c2buildvalue_map:
+ ret['varrformat'] = c2buildvalue_map[ret['ctype']]
+ else:
+ ret['varrformat'] = 'O'
+ ret['init'], ret['showinit'] = getinit(a, var)
+ if hasinitvalue(var) and iscomplex(var) and not isarray(var):
+ ret['init.r'], ret['init.i'] = markoutercomma(
+ ret['init'][1:-1]).split('@,@')
+ if isexternal(var):
+ ret['cbnamekey'] = a
+ if a in lcb_map:
+ ret['cbname'] = lcb_map[a]
+ ret['maxnofargs'] = lcb2_map[lcb_map[a]]['maxnofargs']
+ ret['nofoptargs'] = lcb2_map[lcb_map[a]]['nofoptargs']
+ ret['cbdocstr'] = lcb2_map[lcb_map[a]]['docstr']
+ ret['cblatexdocstr'] = lcb2_map[lcb_map[a]]['latexdocstr']
+ else:
+ ret['cbname'] = a
+ errmess('sign2map: Confused: external %s is not in lcb_map%s.\n' % (
+ a, list(lcb_map.keys())))
+ if isstring(var):
+ ret['length'] = getstrlength(var)
+ if isarray(var):
+ ret = dictappend(ret, getarrdims(a, var))
+ dim = copy.copy(var['dimension'])
+ if ret['ctype'] in c2capi_map:
+ ret['atype'] = c2capi_map[ret['ctype']]
+ ret['elsize'] = get_elsize(var)
+ # Debug info
+ if debugcapi(var):
+ il = [isintent_in, 'input', isintent_out, 'output',
+ isintent_inout, 'inoutput', isrequired, 'required',
+ isoptional, 'optional', isintent_hide, 'hidden',
+ iscomplex, 'complex scalar',
+ l_and(isscalar, l_not(iscomplex)), 'scalar',
+ isstring, 'string', isarray, 'array',
+ iscomplexarray, 'complex array', isstringarray, 'string array',
+ iscomplexfunction, 'complex function',
+ l_and(isfunction, l_not(iscomplexfunction)), 'function',
+ isexternal, 'callback',
+ isintent_callback, 'callback',
+ isintent_aux, 'auxiliary',
+ ]
+ rl = []
+ for i in range(0, len(il), 2):
+ if il[i](var):
+ rl.append(il[i + 1])
+ if isstring(var):
+ rl.append('slen(%s)=%s' % (a, ret['length']))
+ if isarray(var):
+ ddim = ','.join(
+ map(lambda x, y: '%s|%s' % (x, y), var['dimension'], dim))
+ rl.append('dims(%s)' % ddim)
+ if isexternal(var):
+ ret['vardebuginfo'] = 'debug-capi:%s=>%s:%s' % (
+ a, ret['cbname'], ','.join(rl))
+ else:
+ ret['vardebuginfo'] = 'debug-capi:%s %s=%s:%s' % (
+ ret['ctype'], a, ret['showinit'], ','.join(rl))
+ if isscalar(var):
+ if ret['ctype'] in cformat_map:
+ ret['vardebugshowvalue'] = 'debug-capi:%s=%s' % (
+ a, cformat_map[ret['ctype']])
+ if isstring(var):
+ ret['vardebugshowvalue'] = 'debug-capi:slen(%s)=%%d %s=\\"%%s\\"' % (
+ a, a)
+ if isexternal(var):
+ ret['vardebugshowvalue'] = 'debug-capi:%s=%%p' % (a)
+ if ret['ctype'] in cformat_map:
+ ret['varshowvalue'] = '#name#:%s=%s' % (a, cformat_map[ret['ctype']])
+ ret['showvalueformat'] = '%s' % (cformat_map[ret['ctype']])
+ if isstring(var):
+ ret['varshowvalue'] = '#name#:slen(%s)=%%d %s=\\"%%s\\"' % (a, a)
+ ret['pydocsign'], ret['pydocsignout'] = getpydocsign(a, var)
+ if hasnote(var):
+ ret['note'] = var['note']
+ return ret
+
+
+def routsign2map(rout):
+ """
+ name,NAME,begintitle,endtitle
+ rname,ctype,rformat
+ routdebugshowvalue
+ """
+ global lcb_map
+ name = rout['name']
+ fname = getfortranname(rout)
+ ret = {'name': name,
+ 'texname': name.replace('_', '\\_'),
+ 'name_lower': name.lower(),
+ 'NAME': name.upper(),
+ 'begintitle': gentitle(name),
+ 'endtitle': gentitle('end of %s' % name),
+ 'fortranname': fname,
+ 'FORTRANNAME': fname.upper(),
+ 'callstatement': getcallstatement(rout) or '',
+ 'usercode': getusercode(rout) or '',
+ 'usercode1': getusercode1(rout) or '',
+ }
+ if '_' in fname:
+ ret['F_FUNC'] = 'F_FUNC_US'
+ else:
+ ret['F_FUNC'] = 'F_FUNC'
+ if '_' in name:
+ ret['F_WRAPPEDFUNC'] = 'F_WRAPPEDFUNC_US'
+ else:
+ ret['F_WRAPPEDFUNC'] = 'F_WRAPPEDFUNC'
+ lcb_map = {}
+ if 'use' in rout:
+ for u in rout['use'].keys():
+ if u in cb_rules.cb_map:
+ for un in cb_rules.cb_map[u]:
+ ln = un[0]
+ if 'map' in rout['use'][u]:
+ for k in rout['use'][u]['map'].keys():
+ if rout['use'][u]['map'][k] == un[0]:
+ ln = k
+ break
+ lcb_map[ln] = un[1]
+ elif 'externals' in rout and rout['externals']:
+ errmess('routsign2map: Confused: function %s has externals %s but no "use" statement.\n' % (
+ ret['name'], repr(rout['externals'])))
+ ret['callprotoargument'] = getcallprotoargument(rout, lcb_map) or ''
+ if isfunction(rout):
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ ret['rname'] = a
+ ret['pydocsign'], ret['pydocsignout'] = getpydocsign(a, rout)
+ ret['ctype'] = getctype(rout['vars'][a])
+ if hasresultnote(rout):
+ ret['resultnote'] = rout['vars'][a]['note']
+ rout['vars'][a]['note'] = ['See elsewhere.']
+ if ret['ctype'] in c2buildvalue_map:
+ ret['rformat'] = c2buildvalue_map[ret['ctype']]
+ else:
+ ret['rformat'] = 'O'
+ errmess('routsign2map: no c2buildvalue key for type %s\n' %
+ (repr(ret['ctype'])))
+ if debugcapi(rout):
+ if ret['ctype'] in cformat_map:
+ ret['routdebugshowvalue'] = 'debug-capi:%s=%s' % (
+ a, cformat_map[ret['ctype']])
+ if isstringfunction(rout):
+ ret['routdebugshowvalue'] = 'debug-capi:slen(%s)=%%d %s=\\"%%s\\"' % (
+ a, a)
+ if isstringfunction(rout):
+ ret['rlength'] = getstrlength(rout['vars'][a])
+ if ret['rlength'] == '-1':
+ errmess('routsign2map: expected explicit specification of the length of the string returned by the fortran function %s; taking 10.\n' % (
+ repr(rout['name'])))
+ ret['rlength'] = '10'
+ if hasnote(rout):
+ ret['note'] = rout['note']
+ rout['note'] = ['See elsewhere.']
+ return ret
+
+
+def modsign2map(m):
+ """
+ modulename
+ """
+ if ismodule(m):
+ ret = {'f90modulename': m['name'],
+ 'F90MODULENAME': m['name'].upper(),
+ 'texf90modulename': m['name'].replace('_', '\\_')}
+ else:
+ ret = {'modulename': m['name'],
+ 'MODULENAME': m['name'].upper(),
+ 'texmodulename': m['name'].replace('_', '\\_')}
+ ret['restdoc'] = getrestdoc(m) or []
+ if hasnote(m):
+ ret['note'] = m['note']
+ ret['usercode'] = getusercode(m) or ''
+ ret['usercode1'] = getusercode1(m) or ''
+ if m['body']:
+ ret['interface_usercode'] = getusercode(m['body'][0]) or ''
+ else:
+ ret['interface_usercode'] = ''
+ ret['pymethoddef'] = getpymethoddef(m) or ''
+ if 'coutput' in m:
+ ret['coutput'] = m['coutput']
+ if 'f2py_wrapper_output' in m:
+ ret['f2py_wrapper_output'] = m['f2py_wrapper_output']
+ return ret
+
+
+def cb_sign2map(a, var, index=None):
+ ret = {'varname': a}
+ ret['varname_i'] = ret['varname']
+ ret['ctype'] = getctype(var)
+ if ret['ctype'] in c2capi_map:
+ ret['atype'] = c2capi_map[ret['ctype']]
+ ret['elsize'] = get_elsize(var)
+ if ret['ctype'] in cformat_map:
+ ret['showvalueformat'] = '%s' % (cformat_map[ret['ctype']])
+ if isarray(var):
+ ret = dictappend(ret, getarrdims(a, var))
+ ret['pydocsign'], ret['pydocsignout'] = getpydocsign(a, var)
+ if hasnote(var):
+ ret['note'] = var['note']
+ var['note'] = ['See elsewhere.']
+ return ret
+
+
+def cb_routsign2map(rout, um):
+ """
+ name,begintitle,endtitle,argname
+ ctype,rctype,maxnofargs,nofoptargs,returncptr
+ """
+ ret = {'name': 'cb_%s_in_%s' % (rout['name'], um),
+ 'returncptr': ''}
+ if isintent_callback(rout):
+ if '_' in rout['name']:
+ F_FUNC = 'F_FUNC_US'
+ else:
+ F_FUNC = 'F_FUNC'
+ ret['callbackname'] = '%s(%s,%s)' \
+ % (F_FUNC,
+ rout['name'].lower(),
+ rout['name'].upper(),
+ )
+ ret['static'] = 'extern'
+ else:
+ ret['callbackname'] = ret['name']
+ ret['static'] = 'static'
+ ret['argname'] = rout['name']
+ ret['begintitle'] = gentitle(ret['name'])
+ ret['endtitle'] = gentitle('end of %s' % ret['name'])
+ ret['ctype'] = getctype(rout)
+ ret['rctype'] = 'void'
+ if ret['ctype'] == 'string':
+ ret['rctype'] = 'void'
+ else:
+ ret['rctype'] = ret['ctype']
+ if ret['rctype'] != 'void':
+ if iscomplexfunction(rout):
+ ret['returncptr'] = """
+#ifdef F2PY_CB_RETURNCOMPLEX
+return_value=
+#endif
+"""
+ else:
+ ret['returncptr'] = 'return_value='
+ if ret['ctype'] in cformat_map:
+ ret['showvalueformat'] = '%s' % (cformat_map[ret['ctype']])
+ if isstringfunction(rout):
+ ret['strlength'] = getstrlength(rout)
+ if isfunction(rout):
+ if 'result' in rout:
+ a = rout['result']
+ else:
+ a = rout['name']
+ if hasnote(rout['vars'][a]):
+ ret['note'] = rout['vars'][a]['note']
+ rout['vars'][a]['note'] = ['See elsewhere.']
+ ret['rname'] = a
+ ret['pydocsign'], ret['pydocsignout'] = getpydocsign(a, rout)
+ if iscomplexfunction(rout):
+ ret['rctype'] = """
+#ifdef F2PY_CB_RETURNCOMPLEX
+#ctype#
+#else
+void
+#endif
+"""
+ else:
+ if hasnote(rout):
+ ret['note'] = rout['note']
+ rout['note'] = ['See elsewhere.']
+ nofargs = 0
+ nofoptargs = 0
+ if 'args' in rout and 'vars' in rout:
+ for a in rout['args']:
+ var = rout['vars'][a]
+ if l_or(isintent_in, isintent_inout)(var):
+ nofargs = nofargs + 1
+ if isoptional(var):
+ nofoptargs = nofoptargs + 1
+ ret['maxnofargs'] = repr(nofargs)
+ ret['nofoptargs'] = repr(nofoptargs)
+ if hasnote(rout) and isfunction(rout) and 'result' in rout:
+ ret['routnote'] = rout['note']
+ rout['note'] = ['See elsewhere.']
+ return ret
+
+
+def common_sign2map(a, var):  # obsolete
+ ret = {'varname': a, 'ctype': getctype(var)}
+ if isstringarray(var):
+ ret['ctype'] = 'char'
+ if ret['ctype'] in c2capi_map:
+ ret['atype'] = c2capi_map[ret['ctype']]
+ ret['elsize'] = get_elsize(var)
+ if ret['ctype'] in cformat_map:
+ ret['showvalueformat'] = '%s' % (cformat_map[ret['ctype']])
+ if isarray(var):
+ ret = dictappend(ret, getarrdims(a, var))
+ elif isstring(var):
+ ret['size'] = getstrlength(var)
+ ret['rank'] = '1'
+ ret['pydocsign'], ret['pydocsignout'] = getpydocsign(a, var)
+ if hasnote(var):
+ ret['note'] = var['note']
+ var['note'] = ['See elsewhere.']
+ # for strings this returns 0-rank but actually is 1-rank
+ ret['arrdocstr'] = getarrdocsign(a, var)
+ return ret
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/cb_rules.py b/venv/lib/python3.10/site-packages/numpy/f2py/cb_rules.py
new file mode 100644
index 0000000000000000000000000000000000000000..721e075b6c73fd54c0f9f5b4802a5c94eb8d6a3f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/cb_rules.py
@@ -0,0 +1,644 @@
+"""
+Build call-back mechanism for f2py2e.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+from . import __version__
+from .auxfuncs import (
+ applyrules, debugcapi, dictappend, errmess, getargs, hasnote, isarray,
+ iscomplex, iscomplexarray, iscomplexfunction, isfunction, isintent_c,
+ isintent_hide, isintent_in, isintent_inout, isintent_nothide,
+ isintent_out, isoptional, isrequired, isscalar, isstring,
+ isstringfunction, issubroutine, l_and, l_not, l_or, outmess, replace,
+ stripcomma, throw_error
+)
+from . import cfuncs
+
+f2py_version = __version__.version
+
+
+################## Rules for callback function ##############
+
+cb_routine_rules = {
+ 'cbtypedefs': 'typedef #rctype#(*#name#_typedef)(#optargs_td##args_td##strarglens_td##noargs#);',
+ 'body': """
+#begintitle#
+typedef struct {
+ PyObject *capi;
+ PyTupleObject *args_capi;
+ int nofargs;
+ jmp_buf jmpbuf;
+} #name#_t;
+
+#if defined(F2PY_THREAD_LOCAL_DECL) && !defined(F2PY_USE_PYTHON_TLS)
+
+static F2PY_THREAD_LOCAL_DECL #name#_t *_active_#name# = NULL;
+
+static #name#_t *swap_active_#name#(#name#_t *ptr) {
+ #name#_t *prev = _active_#name#;
+ _active_#name# = ptr;
+ return prev;
+}
+
+static #name#_t *get_active_#name#(void) {
+ return _active_#name#;
+}
+
+#else
+
+static #name#_t *swap_active_#name#(#name#_t *ptr) {
+ char *key = "__f2py_cb_#name#";
+ return (#name#_t *)F2PySwapThreadLocalCallbackPtr(key, ptr);
+}
+
+static #name#_t *get_active_#name#(void) {
+ char *key = "__f2py_cb_#name#";
+ return (#name#_t *)F2PyGetThreadLocalCallbackPtr(key);
+}
+
+#endif
+
+/*typedef #rctype#(*#name#_typedef)(#optargs_td##args_td##strarglens_td##noargs#);*/
+#static# #rctype# #callbackname# (#optargs##args##strarglens##noargs#) {
+ #name#_t cb_local = { NULL, NULL, 0 };
+ #name#_t *cb = NULL;
+ PyTupleObject *capi_arglist = NULL;
+ PyObject *capi_return = NULL;
+ PyObject *capi_tmp = NULL;
+ PyObject *capi_arglist_list = NULL;
+ int capi_j,capi_i = 0;
+ int capi_longjmp_ok = 1;
+#decl#
+#ifdef F2PY_REPORT_ATEXIT
+f2py_cb_start_clock();
+#endif
+ cb = get_active_#name#();
+ if (cb == NULL) {
+ capi_longjmp_ok = 0;
+ cb = &cb_local;
+ }
+ capi_arglist = cb->args_capi;
+ CFUNCSMESS(\"cb:Call-back function #name# (maxnofargs=#maxnofargs#(-#nofoptargs#))\\n\");
+ CFUNCSMESSPY(\"cb:#name#_capi=\",cb->capi);
+ if (cb->capi==NULL) {
+ capi_longjmp_ok = 0;
+ cb->capi = PyObject_GetAttrString(#modulename#_module,\"#argname#\");
+ CFUNCSMESSPY(\"cb:#name#_capi=\",cb->capi);
+ }
+ if (cb->capi==NULL) {
+ PyErr_SetString(#modulename#_error,\"cb: Callback #argname# not defined (as an argument or module #modulename# attribute).\\n\");
+ goto capi_fail;
+ }
+ if (F2PyCapsule_Check(cb->capi)) {
+ #name#_typedef #name#_cptr;
+ #name#_cptr = F2PyCapsule_AsVoidPtr(cb->capi);
+ #returncptr#(*#name#_cptr)(#optargs_nm##args_nm##strarglens_nm#);
+ #return#
+ }
+ if (capi_arglist==NULL) {
+ capi_longjmp_ok = 0;
+ capi_tmp = PyObject_GetAttrString(#modulename#_module,\"#argname#_extra_args\");
+ if (capi_tmp) {
+ capi_arglist = (PyTupleObject *)PySequence_Tuple(capi_tmp);
+ Py_DECREF(capi_tmp);
+ if (capi_arglist==NULL) {
+ PyErr_SetString(#modulename#_error,\"Failed to convert #modulename#.#argname#_extra_args to tuple.\\n\");
+ goto capi_fail;
+ }
+ } else {
+ PyErr_Clear();
+ capi_arglist = (PyTupleObject *)Py_BuildValue(\"()\");
+ }
+ }
+ if (capi_arglist == NULL) {
+ PyErr_SetString(#modulename#_error,\"Callback #argname# argument list is not set.\\n\");
+ goto capi_fail;
+ }
+#setdims#
+#ifdef PYPY_VERSION
+#define CAPI_ARGLIST_SETITEM(idx, value) PyList_SetItem((PyObject *)capi_arglist_list, idx, value)
+ capi_arglist_list = PySequence_List(capi_arglist);
+ if (capi_arglist_list == NULL) goto capi_fail;
+#else
+#define CAPI_ARGLIST_SETITEM(idx, value) PyTuple_SetItem((PyObject *)capi_arglist, idx, value)
+#endif
+#pyobjfrom#
+#undef CAPI_ARGLIST_SETITEM
+#ifdef PYPY_VERSION
+ CFUNCSMESSPY(\"cb:capi_arglist=\",capi_arglist_list);
+#else
+ CFUNCSMESSPY(\"cb:capi_arglist=\",capi_arglist);
+#endif
+ CFUNCSMESS(\"cb:Call-back calling Python function #argname#.\\n\");
+#ifdef F2PY_REPORT_ATEXIT
+f2py_cb_start_call_clock();
+#endif
+#ifdef PYPY_VERSION
+ capi_return = PyObject_CallObject(cb->capi,(PyObject *)capi_arglist_list);
+ Py_DECREF(capi_arglist_list);
+ capi_arglist_list = NULL;
+#else
+ capi_return = PyObject_CallObject(cb->capi,(PyObject *)capi_arglist);
+#endif
+#ifdef F2PY_REPORT_ATEXIT
+f2py_cb_stop_call_clock();
+#endif
+ CFUNCSMESSPY(\"cb:capi_return=\",capi_return);
+ if (capi_return == NULL) {
+ fprintf(stderr,\"capi_return is NULL\\n\");
+ goto capi_fail;
+ }
+ if (capi_return == Py_None) {
+ Py_DECREF(capi_return);
+ capi_return = Py_BuildValue(\"()\");
+ }
+ else if (!PyTuple_Check(capi_return)) {
+ capi_return = Py_BuildValue(\"(N)\",capi_return);
+ }
+ capi_j = PyTuple_Size(capi_return);
+ capi_i = 0;
+#frompyobj#
+ CFUNCSMESS(\"cb:#name#:successful\\n\");
+ Py_DECREF(capi_return);
+#ifdef F2PY_REPORT_ATEXIT
+f2py_cb_stop_clock();
+#endif
+ goto capi_return_pt;
+capi_fail:
+ fprintf(stderr,\"Call-back #name# failed.\\n\");
+ Py_XDECREF(capi_return);
+ Py_XDECREF(capi_arglist_list);
+ if (capi_longjmp_ok) {
+ longjmp(cb->jmpbuf,-1);
+ }
+capi_return_pt:
+ ;
+#return#
+}
+#endtitle#
+""",
+ 'need': ['setjmp.h', 'CFUNCSMESS', 'F2PY_THREAD_LOCAL_DECL'],
+ 'maxnofargs': '#maxnofargs#',
+ 'nofoptargs': '#nofoptargs#',
+ 'docstr': """\
+ def #argname#(#docsignature#): return #docreturn#\\n\\
+#docstrsigns#""",
+ 'latexdocstr': """
+{{}\\verb@def #argname#(#latexdocsignature#): return #docreturn#@{}}
+#routnote#
+
+#latexdocstrsigns#""",
+ 'docstrshort': 'def #argname#(#docsignature#): return #docreturn#'
+}
+cb_rout_rules = [
+ { # Init
+ 'separatorsfor': {'decl': '\n',
+ 'args': ',', 'optargs': '', 'pyobjfrom': '\n', 'freemem': '\n',
+ 'args_td': ',', 'optargs_td': '',
+ 'args_nm': ',', 'optargs_nm': '',
+ 'frompyobj': '\n', 'setdims': '\n',
+ 'docstrsigns': '\\n"\n"',
+ 'latexdocstrsigns': '\n',
+ 'latexdocstrreq': '\n', 'latexdocstropt': '\n',
+ 'latexdocstrout': '\n', 'latexdocstrcbs': '\n',
+ },
+ 'decl': '/*decl*/', 'pyobjfrom': '/*pyobjfrom*/', 'frompyobj': '/*frompyobj*/',
+ 'args': [], 'optargs': '', 'return': '', 'strarglens': '', 'freemem': '/*freemem*/',
+ 'args_td': [], 'optargs_td': '', 'strarglens_td': '',
+ 'args_nm': [], 'optargs_nm': '', 'strarglens_nm': '',
+ 'noargs': '',
+ 'setdims': '/*setdims*/',
+ 'docstrsigns': '', 'latexdocstrsigns': '',
+ 'docstrreq': ' Required arguments:',
+ 'docstropt': ' Optional arguments:',
+ 'docstrout': ' Return objects:',
+ 'docstrcbs': ' Call-back functions:',
+ 'docreturn': '', 'docsign': '', 'docsignopt': '',
+ 'latexdocstrreq': '\\noindent Required arguments:',
+ 'latexdocstropt': '\\noindent Optional arguments:',
+ 'latexdocstrout': '\\noindent Return objects:',
+ 'latexdocstrcbs': '\\noindent Call-back functions:',
+ 'routnote': {hasnote: '--- #note#', l_not(hasnote): ''},
+ }, { # Function
+ 'decl': ' #ctype# return_value = 0;',
+ 'frompyobj': [
+ {debugcapi: ' CFUNCSMESS("cb:Getting return_value->");'},
+ '''\
+ if (capi_j>capi_i) {
+ GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,
+ "#ctype#_from_pyobj failed in converting return_value of"
+ " call-back function #name# to C #ctype#\\n");
+ } else {
+ fprintf(stderr,"Warning: call-back function #name# did not provide"
+ " return value (index=%d, type=#ctype#)\\n",capi_i);
+ }''',
+ {debugcapi:
+ ' fprintf(stderr,"#showvalueformat#.\\n",return_value);'}
+ ],
+ 'need': ['#ctype#_from_pyobj', {debugcapi: 'CFUNCSMESS'}, 'GETSCALARFROMPYTUPLE'],
+ 'return': ' return return_value;',
+ '_check': l_and(isfunction, l_not(isstringfunction), l_not(iscomplexfunction))
+ },
+ { # String function
+ 'pyobjfrom': {debugcapi: ' fprintf(stderr,"debug-capi:cb:#name#:%d:\\n",return_value_len);'},
+ 'args': '#ctype# return_value,int return_value_len',
+ 'args_nm': 'return_value,&return_value_len',
+ 'args_td': '#ctype# ,int',
+ 'frompyobj': [
+ {debugcapi: ' CFUNCSMESS("cb:Getting return_value->\\"");'},
+ """\
+ if (capi_j>capi_i) {
+ GETSTRFROMPYTUPLE(capi_return,capi_i++,return_value,return_value_len);
+ } else {
+ fprintf(stderr,"Warning: call-back function #name# did not provide"
+ " return value (index=%d, type=#ctype#)\\n",capi_i);
+ }""",
+ {debugcapi:
+ ' fprintf(stderr,"#showvalueformat#\\".\\n",return_value);'}
+ ],
+ 'need': ['#ctype#_from_pyobj', {debugcapi: 'CFUNCSMESS'},
+ 'string.h', 'GETSTRFROMPYTUPLE'],
+ 'return': 'return;',
+ '_check': isstringfunction
+ },
+ { # Complex function
+ 'optargs': """
+#ifndef F2PY_CB_RETURNCOMPLEX
+#ctype# *return_value
+#endif
+""",
+ 'optargs_nm': """
+#ifndef F2PY_CB_RETURNCOMPLEX
+return_value
+#endif
+""",
+ 'optargs_td': """
+#ifndef F2PY_CB_RETURNCOMPLEX
+#ctype# *
+#endif
+""",
+ 'decl': """
+#ifdef F2PY_CB_RETURNCOMPLEX
+ #ctype# return_value = {0, 0};
+#endif
+""",
+ 'frompyobj': [
+ {debugcapi: ' CFUNCSMESS("cb:Getting return_value->");'},
+ """\
+ if (capi_j>capi_i) {
+#ifdef F2PY_CB_RETURNCOMPLEX
+ GETSCALARFROMPYTUPLE(capi_return,capi_i++,&return_value,#ctype#,
+ \"#ctype#_from_pyobj failed in converting return_value of call-back\"
+ \" function #name# to C #ctype#\\n\");
+#else
+ GETSCALARFROMPYTUPLE(capi_return,capi_i++,return_value,#ctype#,
+ \"#ctype#_from_pyobj failed in converting return_value of call-back\"
+ \" function #name# to C #ctype#\\n\");
+#endif
+ } else {
+ fprintf(stderr,
+ \"Warning: call-back function #name# did not provide\"
+ \" return value (index=%d, type=#ctype#)\\n\",capi_i);
+ }""",
+ {debugcapi: """\
+#ifdef F2PY_CB_RETURNCOMPLEX
+ fprintf(stderr,\"#showvalueformat#.\\n\",(return_value).r,(return_value).i);
+#else
+ fprintf(stderr,\"#showvalueformat#.\\n\",(*return_value).r,(*return_value).i);
+#endif
+"""}
+ ],
+ 'return': """
+#ifdef F2PY_CB_RETURNCOMPLEX
+ return return_value;
+#else
+ return;
+#endif
+""",
+ 'need': ['#ctype#_from_pyobj', {debugcapi: 'CFUNCSMESS'},
+ 'string.h', 'GETSCALARFROMPYTUPLE', '#ctype#'],
+ '_check': iscomplexfunction
+ },
+ {'docstrout': ' #pydocsignout#',
+ 'latexdocstrout': ['\\item[]{{}\\verb@#pydocsignout#@{}}',
+ {hasnote: '--- #note#'}],
+ 'docreturn': '#rname#,',
+ '_check': isfunction},
+ {'_check': issubroutine, 'return': 'return;'}
+]
+
+cb_arg_rules = [
+ { # Doc
+ 'docstropt': {l_and(isoptional, isintent_nothide): ' #pydocsign#'},
+ 'docstrreq': {l_and(isrequired, isintent_nothide): ' #pydocsign#'},
+ 'docstrout': {isintent_out: ' #pydocsignout#'},
+ 'latexdocstropt': {l_and(isoptional, isintent_nothide): ['\\item[]{{}\\verb@#pydocsign#@{}}',
+ {hasnote: '--- #note#'}]},
+ 'latexdocstrreq': {l_and(isrequired, isintent_nothide): ['\\item[]{{}\\verb@#pydocsign#@{}}',
+ {hasnote: '--- #note#'}]},
+ 'latexdocstrout': {isintent_out: ['\\item[]{{}\\verb@#pydocsignout#@{}}',
+ {l_and(hasnote, isintent_hide): '--- #note#',
+ l_and(hasnote, isintent_nothide): '--- See above.'}]},
+ 'docsign': {l_and(isrequired, isintent_nothide): '#varname#,'},
+ 'docsignopt': {l_and(isoptional, isintent_nothide): '#varname#,'},
+ 'depend': ''
+ },
+ {
+ 'args': {
+ l_and(isscalar, isintent_c): '#ctype# #varname_i#',
+ l_and(isscalar, l_not(isintent_c)): '#ctype# *#varname_i#_cb_capi',
+ isarray: '#ctype# *#varname_i#',
+ isstring: '#ctype# #varname_i#'
+ },
+ 'args_nm': {
+ l_and(isscalar, isintent_c): '#varname_i#',
+ l_and(isscalar, l_not(isintent_c)): '#varname_i#_cb_capi',
+ isarray: '#varname_i#',
+ isstring: '#varname_i#'
+ },
+ 'args_td': {
+ l_and(isscalar, isintent_c): '#ctype#',
+ l_and(isscalar, l_not(isintent_c)): '#ctype# *',
+ isarray: '#ctype# *',
+ isstring: '#ctype#'
+ },
+ 'need': {l_or(isscalar, isarray, isstring): '#ctype#'},
+ # untested with multiple args
+ 'strarglens': {isstring: ',int #varname_i#_cb_len'},
+ 'strarglens_td': {isstring: ',int'}, # untested with multiple args
+ # untested with multiple args
+ 'strarglens_nm': {isstring: ',#varname_i#_cb_len'},
+ },
+ { # Scalars
+ 'decl': {l_not(isintent_c): ' #ctype# #varname_i#=(*#varname_i#_cb_capi);'},
+ 'error': {l_and(isintent_c, isintent_out,
+ throw_error('intent(c,out) is forbidden for callback scalar arguments')):
+ ''},
+ 'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->");'},
+ {isintent_out:
+ ' if (capi_j>capi_i)\n GETSCALARFROMPYTUPLE(capi_return,capi_i++,#varname_i#_cb_capi,#ctype#,"#ctype#_from_pyobj failed in converting argument #varname# of call-back function #name# to C #ctype#\\n");'},
+ {l_and(debugcapi, l_and(l_not(iscomplex), isintent_c)):
+ ' fprintf(stderr,"#showvalueformat#.\\n",#varname_i#);'},
+ {l_and(debugcapi, l_and(l_not(iscomplex), l_not( isintent_c))):
+ ' fprintf(stderr,"#showvalueformat#.\\n",*#varname_i#_cb_capi);'},
+ {l_and(debugcapi, l_and(iscomplex, isintent_c)):
+ ' fprintf(stderr,"#showvalueformat#.\\n",(#varname_i#).r,(#varname_i#).i);'},
+ {l_and(debugcapi, l_and(iscomplex, l_not( isintent_c))):
+ ' fprintf(stderr,"#showvalueformat#.\\n",(*#varname_i#_cb_capi).r,(*#varname_i#_cb_capi).i);'},
+ ],
+ 'need': [{isintent_out: ['#ctype#_from_pyobj', 'GETSCALARFROMPYTUPLE']},
+ {debugcapi: 'CFUNCSMESS'}],
+ '_check': isscalar
+ }, {
+ 'pyobjfrom': [{isintent_in: """\
+ if (cb->nofargs>capi_i)
+ if (CAPI_ARGLIST_SETITEM(capi_i++,pyobj_from_#ctype#1(#varname_i#)))
+ goto capi_fail;"""},
+ {isintent_inout: """\
+ if (cb->nofargs>capi_i)
+ if (CAPI_ARGLIST_SETITEM(capi_i++,pyarr_from_p_#ctype#1(#varname_i#_cb_capi)))
+ goto capi_fail;"""}],
+ 'need': [{isintent_in: 'pyobj_from_#ctype#1'},
+ {isintent_inout: 'pyarr_from_p_#ctype#1'},
+ {iscomplex: '#ctype#'}],
+ '_check': l_and(isscalar, isintent_nothide),
+ '_optional': ''
+ }, { # String
+ 'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->\\"");'},
+ """ if (capi_j>capi_i)
+ GETSTRFROMPYTUPLE(capi_return,capi_i++,#varname_i#,#varname_i#_cb_len);""",
+ {debugcapi:
+ ' fprintf(stderr,"#showvalueformat#\\":%d:.\\n",#varname_i#,#varname_i#_cb_len);'},
+ ],
+ 'need': ['#ctype#', 'GETSTRFROMPYTUPLE',
+ {debugcapi: 'CFUNCSMESS'}, 'string.h'],
+ '_check': l_and(isstring, isintent_out)
+ }, {
+ 'pyobjfrom': [
+ {debugcapi:
+ (' fprintf(stderr,"debug-capi:cb:#varname#=#showvalueformat#:'
+ '%d:\\n",#varname_i#,#varname_i#_cb_len);')},
+ {isintent_in: """\
+ if (cb->nofargs>capi_i)
+ if (CAPI_ARGLIST_SETITEM(capi_i++,pyobj_from_#ctype#1size(#varname_i#,#varname_i#_cb_len)))
+ goto capi_fail;"""},
+ {isintent_inout: """\
+ if (cb->nofargs>capi_i) {
+ int #varname_i#_cb_dims[] = {#varname_i#_cb_len};
+ if (CAPI_ARGLIST_SETITEM(capi_i++,pyarr_from_p_#ctype#1(#varname_i#,#varname_i#_cb_dims)))
+ goto capi_fail;
+ }"""}],
+ 'need': [{isintent_in: 'pyobj_from_#ctype#1size'},
+ {isintent_inout: 'pyarr_from_p_#ctype#1'}],
+ '_check': l_and(isstring, isintent_nothide),
+ '_optional': ''
+ },
+ # Array ...
+ {
+ 'decl': ' npy_intp #varname_i#_Dims[#rank#] = {#rank*[-1]#};',
+ 'setdims': ' #cbsetdims#;',
+ '_check': isarray,
+ '_depend': ''
+ },
+ {
+ 'pyobjfrom': [{debugcapi: ' fprintf(stderr,"debug-capi:cb:#varname#\\n");'},
+ {isintent_c: """\
+ if (cb->nofargs>capi_i) {
+        /* tmp_arr will be inserted into capi_arglist_list, which is destroyed
+           (together with tmp_arr) when leaving the callback function
+           wrapper. */
+ PyArrayObject *tmp_arr = (PyArrayObject *)PyArray_New(&PyArray_Type,
+ #rank#,#varname_i#_Dims,#atype#,NULL,(char*)#varname_i#,#elsize#,
+ NPY_ARRAY_CARRAY,NULL);
+""",
+ l_not(isintent_c): """\
+ if (cb->nofargs>capi_i) {
+        /* tmp_arr will be inserted into capi_arglist_list, which is destroyed
+           (together with tmp_arr) when leaving the callback function
+           wrapper. */
+ PyArrayObject *tmp_arr = (PyArrayObject *)PyArray_New(&PyArray_Type,
+ #rank#,#varname_i#_Dims,#atype#,NULL,(char*)#varname_i#,#elsize#,
+ NPY_ARRAY_FARRAY,NULL);
+""",
+ },
+ """
+ if (tmp_arr==NULL)
+ goto capi_fail;
+ if (CAPI_ARGLIST_SETITEM(capi_i++,(PyObject *)tmp_arr))
+ goto capi_fail;
+}"""],
+ '_check': l_and(isarray, isintent_nothide, l_or(isintent_in, isintent_inout)),
+ '_optional': '',
+ }, {
+ 'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->");'},
+ """ if (capi_j>capi_i) {
+ PyArrayObject *rv_cb_arr = NULL;
+ if ((capi_tmp = PyTuple_GetItem(capi_return,capi_i++))==NULL) goto capi_fail;
+ rv_cb_arr = array_from_pyobj(#atype#,#varname_i#_Dims,#rank#,F2PY_INTENT_IN""",
+ {isintent_c: '|F2PY_INTENT_C'},
+ """,capi_tmp);
+ if (rv_cb_arr == NULL) {
+ fprintf(stderr,\"rv_cb_arr is NULL\\n\");
+ goto capi_fail;
+ }
+ MEMCOPY(#varname_i#,PyArray_DATA(rv_cb_arr),PyArray_NBYTES(rv_cb_arr));
+ if (capi_tmp != (PyObject *)rv_cb_arr) {
+ Py_DECREF(rv_cb_arr);
+ }
+ }""",
+ {debugcapi: ' fprintf(stderr,"<-.\\n");'},
+ ],
+ 'need': ['MEMCOPY', {iscomplexarray: '#ctype#'}],
+ '_check': l_and(isarray, isintent_out)
+ }, {
+ 'docreturn': '#varname#,',
+ '_check': isintent_out
+ }
+]
+
+################## Build call-back module #############
+cb_map = {}
+
+
+def buildcallbacks(m):
+ cb_map[m['name']] = []
+ for bi in m['body']:
+ if bi['block'] == 'interface':
+ for b in bi['body']:
+ if b:
+ buildcallback(b, m['name'])
+ else:
+ errmess('warning: empty body for %s\n' % (m['name']))
+
+
+def buildcallback(rout, um):
+ from . import capi_maps
+
+ outmess(' Constructing call-back function "cb_%s_in_%s"\n' %
+ (rout['name'], um))
+ args, depargs = getargs(rout)
+ capi_maps.depargs = depargs
+ var = rout['vars']
+ vrd = capi_maps.cb_routsign2map(rout, um)
+ rd = dictappend({}, vrd)
+ cb_map[um].append([rout['name'], rd['name']])
+ for r in cb_rout_rules:
+ if ('_check' in r and r['_check'](rout)) or ('_check' not in r):
+ ar = applyrules(r, vrd, rout)
+ rd = dictappend(rd, ar)
+ savevrd = {}
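+    # First pass over the arguments: apply the general cb_arg_rules, skipping
+    # '_depend' rules and deferring '_optional' rules for optional arguments.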
+ for i, a in enumerate(args):
+ vrd = capi_maps.cb_sign2map(a, var[a], index=i)
+ savevrd[a] = vrd
+ for r in cb_arg_rules:
+ if '_depend' in r:
+ continue
+ if '_optional' in r and isoptional(var[a]):
+ continue
+ if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
+ ar = applyrules(r, vrd, var[a])
+ rd = dictappend(rd, ar)
+ if '_break' in r:
+ break
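+    # Second pass: apply the deferred '_optional' rules to the optional
+    # (non-required) arguments.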
+ for a in args:
+ vrd = savevrd[a]
+ for r in cb_arg_rules:
+ if '_depend' in r:
+ continue
+ if ('_optional' not in r) or ('_optional' in r and isrequired(var[a])):
+ continue
+ if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
+ ar = applyrules(r, vrd, var[a])
+ rd = dictappend(rd, ar)
+ if '_break' in r:
+ break
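+    # Third pass over dependent arguments: apply only the '_depend' rules.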
+ for a in depargs:
+ vrd = savevrd[a]
+ for r in cb_arg_rules:
+ if '_depend' not in r:
+ continue
+ if '_optional' in r:
+ continue
+ if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
+ ar = applyrules(r, vrd, var[a])
+ rd = dictappend(rd, ar)
+ if '_break' in r:
+ break
+ if 'args' in rd and 'optargs' in rd:
+ if isinstance(rd['optargs'], list):
+ rd['optargs'] = rd['optargs'] + ["""
+#ifndef F2PY_CB_RETURNCOMPLEX
+,
+#endif
+"""]
+ rd['optargs_nm'] = rd['optargs_nm'] + ["""
+#ifndef F2PY_CB_RETURNCOMPLEX
+,
+#endif
+"""]
+ rd['optargs_td'] = rd['optargs_td'] + ["""
+#ifndef F2PY_CB_RETURNCOMPLEX
+,
+#endif
+"""]
+ if isinstance(rd['docreturn'], list):
+ rd['docreturn'] = stripcomma(
+ replace('#docreturn#', {'docreturn': rd['docreturn']}))
+ optargs = stripcomma(replace('#docsignopt#',
+ {'docsignopt': rd['docsignopt']}
+ ))
+ if optargs == '':
+ rd['docsignature'] = stripcomma(
+ replace('#docsign#', {'docsign': rd['docsign']}))
+ else:
+ rd['docsignature'] = replace('#docsign#[#docsignopt#]',
+ {'docsign': rd['docsign'],
+ 'docsignopt': optargs,
+ })
+ rd['latexdocsignature'] = rd['docsignature'].replace('_', '\\_')
+ rd['latexdocsignature'] = rd['latexdocsignature'].replace(',', ', ')
+ rd['docstrsigns'] = []
+ rd['latexdocstrsigns'] = []
+ for k in ['docstrreq', 'docstropt', 'docstrout', 'docstrcbs']:
+ if k in rd and isinstance(rd[k], list):
+ rd['docstrsigns'] = rd['docstrsigns'] + rd[k]
+ k = 'latex' + k
+ if k in rd and isinstance(rd[k], list):
+ rd['latexdocstrsigns'] = rd['latexdocstrsigns'] + rd[k][0:1] +\
+ ['\\begin{description}'] + rd[k][1:] +\
+ ['\\end{description}']
+ if 'args' not in rd:
+ rd['args'] = ''
+ rd['args_td'] = ''
+ rd['args_nm'] = ''
+ if not (rd.get('args') or rd.get('optargs') or rd.get('strarglens')):
+ rd['noargs'] = 'void'
+
+ ar = applyrules(cb_routine_rules, rd)
+ cfuncs.callbacks[rd['name']] = ar['body']
+ if isinstance(ar['need'], str):
+ ar['need'] = [ar['need']]
+
+ if 'need' in rd:
+ for t in cfuncs.typedefs.keys():
+ if t in rd['need']:
+ ar['need'].append(t)
+
+ cfuncs.typedefs_generated[rd['name'] + '_typedef'] = ar['cbtypedefs']
+ ar['need'].append(rd['name'] + '_typedef')
+ cfuncs.needs[rd['name']] = ar['need']
+
+ capi_maps.lcb2_map[rd['name']] = {'maxnofargs': ar['maxnofargs'],
+ 'nofoptargs': ar['nofoptargs'],
+ 'docstr': ar['docstr'],
+ 'latexdocstr': ar['latexdocstr'],
+ 'argname': rd['argname']
+ }
+ outmess(' %s\n' % (ar['docstrshort']))
+ return
+################## Build call-back function #############
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/cfuncs.py b/venv/lib/python3.10/site-packages/numpy/f2py/cfuncs.py
new file mode 100644
index 0000000000000000000000000000000000000000..4328a6e5004c2b73e693b72a1ab9db8d924567ff
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/cfuncs.py
@@ -0,0 +1,1536 @@
+#!/usr/bin/env python3
+"""
+C declarations, CPP macros, and C functions for f2py2e.
+Only required declarations/macros/functions will be used.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+import sys
+import copy
+
+from . import __version__
+
+f2py_version = __version__.version
+errmess = sys.stderr.write
+
+##################### Definitions ##################
+
+outneeds = {'includes0': [], 'includes': [], 'typedefs': [], 'typedefs_generated': [],
+ 'userincludes': [],
+ 'cppmacros': [], 'cfuncs': [], 'callbacks': [], 'f90modhooks': [],
+ 'commonhooks': []}
+needs = {}
+includes0 = {'includes0': '/*need_includes0*/'}
+includes = {'includes': '/*need_includes*/'}
+userincludes = {'userincludes': '/*need_userincludes*/'}
+typedefs = {'typedefs': '/*need_typedefs*/'}
+typedefs_generated = {'typedefs_generated': '/*need_typedefs_generated*/'}
+cppmacros = {'cppmacros': '/*need_cppmacros*/'}
+cfuncs = {'cfuncs': '/*need_cfuncs*/'}
+callbacks = {'callbacks': '/*need_callbacks*/'}
+f90modhooks = {'f90modhooks': '/*need_f90modhooks*/',
+ 'initf90modhooksstatic': '/*initf90modhooksstatic*/',
+ 'initf90modhooksdynamic': '/*initf90modhooksdynamic*/',
+ }
+commonhooks = {'commonhooks': '/*need_commonhooks*/',
+ 'initcommonhooks': '/*need_initcommonhooks*/',
+ }
+
+############ Includes ###################
+
+includes0['math.h'] = '#include <math.h>'
+includes0['string.h'] = '#include <string.h>'
+includes0['setjmp.h'] = '#include <setjmp.h>'
+
+includes['arrayobject.h'] = '''#define PY_ARRAY_UNIQUE_SYMBOL PyArray_API
+#include "arrayobject.h"'''
+includes['npy_math.h'] = '#include "numpy/npy_math.h"'
+
+includes['arrayobject.h'] = '#include "fortranobject.h"'
+includes['stdarg.h'] = '#include <stdarg.h>'
+
+############# Type definitions ###############
+
+typedefs['unsigned_char'] = 'typedef unsigned char unsigned_char;'
+typedefs['unsigned_short'] = 'typedef unsigned short unsigned_short;'
+typedefs['unsigned_long'] = 'typedef unsigned long unsigned_long;'
+typedefs['signed_char'] = 'typedef signed char signed_char;'
+typedefs['long_long'] = """
+#if defined(NPY_OS_WIN32)
+typedef __int64 long_long;
+#else
+typedef long long long_long;
+typedef unsigned long long unsigned_long_long;
+#endif
+"""
+typedefs['unsigned_long_long'] = """
+#if defined(NPY_OS_WIN32)
+typedef __uint64 long_long;
+#else
+typedef unsigned long long unsigned_long_long;
+#endif
+"""
+typedefs['long_double'] = """
+#ifndef _LONG_DOUBLE
+typedef long double long_double;
+#endif
+"""
+typedefs[
+ 'complex_long_double'] = 'typedef struct {long double r,i;} complex_long_double;'
+typedefs['complex_float'] = 'typedef struct {float r,i;} complex_float;'
+typedefs['complex_double'] = 'typedef struct {double r,i;} complex_double;'
+typedefs['string'] = """typedef char * string;"""
+typedefs['character'] = """typedef char character;"""
+
+
+############### CPP macros ####################
+cppmacros['CFUNCSMESS'] = """
+#ifdef DEBUGCFUNCS
+#define CFUNCSMESS(mess) fprintf(stderr,\"debug-capi:\"mess);
+#define CFUNCSMESSPY(mess,obj) CFUNCSMESS(mess) \\
+ PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
+ fprintf(stderr,\"\\n\");
+#else
+#define CFUNCSMESS(mess)
+#define CFUNCSMESSPY(mess,obj)
+#endif
+"""
+cppmacros['F_FUNC'] = """
+#if defined(PREPEND_FORTRAN)
+#if defined(NO_APPEND_FORTRAN)
+#if defined(UPPERCASE_FORTRAN)
+#define F_FUNC(f,F) _##F
+#else
+#define F_FUNC(f,F) _##f
+#endif
+#else
+#if defined(UPPERCASE_FORTRAN)
+#define F_FUNC(f,F) _##F##_
+#else
+#define F_FUNC(f,F) _##f##_
+#endif
+#endif
+#else
+#if defined(NO_APPEND_FORTRAN)
+#if defined(UPPERCASE_FORTRAN)
+#define F_FUNC(f,F) F
+#else
+#define F_FUNC(f,F) f
+#endif
+#else
+#if defined(UPPERCASE_FORTRAN)
+#define F_FUNC(f,F) F##_
+#else
+#define F_FUNC(f,F) f##_
+#endif
+#endif
+#endif
+#if defined(UNDERSCORE_G77)
+#define F_FUNC_US(f,F) F_FUNC(f##_,F##_)
+#else
+#define F_FUNC_US(f,F) F_FUNC(f,F)
+#endif
+"""
+cppmacros['F_WRAPPEDFUNC'] = """
+#if defined(PREPEND_FORTRAN)
+#if defined(NO_APPEND_FORTRAN)
+#if defined(UPPERCASE_FORTRAN)
+#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F
+#else
+#define F_WRAPPEDFUNC(f,F) _f2pywrap##f
+#endif
+#else
+#if defined(UPPERCASE_FORTRAN)
+#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F##_
+#else
+#define F_WRAPPEDFUNC(f,F) _f2pywrap##f##_
+#endif
+#endif
+#else
+#if defined(NO_APPEND_FORTRAN)
+#if defined(UPPERCASE_FORTRAN)
+#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F
+#else
+#define F_WRAPPEDFUNC(f,F) f2pywrap##f
+#endif
+#else
+#if defined(UPPERCASE_FORTRAN)
+#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F##_
+#else
+#define F_WRAPPEDFUNC(f,F) f2pywrap##f##_
+#endif
+#endif
+#endif
+#if defined(UNDERSCORE_G77)
+#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f##_,F##_)
+#else
+#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f,F)
+#endif
+"""
+cppmacros['F_MODFUNC'] = """
+#if defined(F90MOD2CCONV1) /*E.g. Compaq Fortran */
+#if defined(NO_APPEND_FORTRAN)
+#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f
+#else
+#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f ## _
+#endif
+#endif
+
+#if defined(F90MOD2CCONV2) /*E.g. IBM XL Fortran, not tested though */
+#if defined(NO_APPEND_FORTRAN)
+#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f
+#else
+#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f ## _
+#endif
+#endif
+
+#if defined(F90MOD2CCONV3) /*E.g. MIPSPro Compilers */
+#if defined(NO_APPEND_FORTRAN)
+#define F_MODFUNCNAME(m,f) f ## .in. ## m
+#else
+#define F_MODFUNCNAME(m,f) f ## .in. ## m ## _
+#endif
+#endif
+/*
+#if defined(UPPERCASE_FORTRAN)
+#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(M,F)
+#else
+#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(m,f)
+#endif
+*/
+
+#define F_MODFUNC(m,f) (*(f2pymodstruct##m##.##f))
+"""
+cppmacros['SWAPUNSAFE'] = """
+#define SWAP(a,b) (size_t)(a) = ((size_t)(a) ^ (size_t)(b));\\
+ (size_t)(b) = ((size_t)(a) ^ (size_t)(b));\\
+ (size_t)(a) = ((size_t)(a) ^ (size_t)(b))
+"""
+cppmacros['SWAP'] = """
+#define SWAP(a,b,t) {\\
+ t *c;\\
+ c = a;\\
+ a = b;\\
+ b = c;}
+"""
+# cppmacros['ISCONTIGUOUS']='#define ISCONTIGUOUS(m) (PyArray_FLAGS(m) &
+# NPY_ARRAY_C_CONTIGUOUS)'
+cppmacros['PRINTPYOBJERR'] = """
+#define PRINTPYOBJERR(obj)\\
+ fprintf(stderr,\"#modulename#.error is related to \");\\
+ PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
+ fprintf(stderr,\"\\n\");
+"""
+cppmacros['MINMAX'] = """
+#ifndef max
+#define max(a,b) ((a > b) ? (a) : (b))
+#endif
+#ifndef min
+#define min(a,b) ((a < b) ? (a) : (b))
+#endif
+#ifndef MAX
+#define MAX(a,b) ((a > b) ? (a) : (b))
+#endif
+#ifndef MIN
+#define MIN(a,b) ((a < b) ? (a) : (b))
+#endif
+"""
+cppmacros['len..'] = """
+/* See fortranobject.h for definitions. The macros here are provided for BC. */
+#define rank f2py_rank
+#define shape f2py_shape
+#define fshape f2py_shape
+#define len f2py_len
+#define flen f2py_flen
+#define slen f2py_slen
+#define size f2py_size
+"""
+cppmacros['pyobj_from_char1'] = r"""
+#define pyobj_from_char1(v) (PyLong_FromLong(v))
+"""
+cppmacros['pyobj_from_short1'] = r"""
+#define pyobj_from_short1(v) (PyLong_FromLong(v))
+"""
+needs['pyobj_from_int1'] = ['signed_char']
+cppmacros['pyobj_from_int1'] = r"""
+#define pyobj_from_int1(v) (PyLong_FromLong(v))
+"""
+cppmacros['pyobj_from_long1'] = r"""
+#define pyobj_from_long1(v) (PyLong_FromLong(v))
+"""
+needs['pyobj_from_long_long1'] = ['long_long']
+cppmacros['pyobj_from_long_long1'] = """
+#ifdef HAVE_LONG_LONG
+#define pyobj_from_long_long1(v) (PyLong_FromLongLong(v))
+#else
+#warning HAVE_LONG_LONG is not available. Redefining pyobj_from_long_long.
+#define pyobj_from_long_long1(v) (PyLong_FromLong(v))
+#endif
+"""
+needs['pyobj_from_long_double1'] = ['long_double']
+cppmacros['pyobj_from_long_double1'] = """
+#define pyobj_from_long_double1(v) (PyFloat_FromDouble(v))"""
+cppmacros['pyobj_from_double1'] = """
+#define pyobj_from_double1(v) (PyFloat_FromDouble(v))"""
+cppmacros['pyobj_from_float1'] = """
+#define pyobj_from_float1(v) (PyFloat_FromDouble(v))"""
+needs['pyobj_from_complex_long_double1'] = ['complex_long_double']
+cppmacros['pyobj_from_complex_long_double1'] = """
+#define pyobj_from_complex_long_double1(v) (PyComplex_FromDoubles(v.r,v.i))"""
+needs['pyobj_from_complex_double1'] = ['complex_double']
+cppmacros['pyobj_from_complex_double1'] = """
+#define pyobj_from_complex_double1(v) (PyComplex_FromDoubles(v.r,v.i))"""
+needs['pyobj_from_complex_float1'] = ['complex_float']
+cppmacros['pyobj_from_complex_float1'] = """
+#define pyobj_from_complex_float1(v) (PyComplex_FromDoubles(v.r,v.i))"""
+needs['pyobj_from_string1'] = ['string']
+cppmacros['pyobj_from_string1'] = """
+#define pyobj_from_string1(v) (PyUnicode_FromString((char *)v))"""
+needs['pyobj_from_string1size'] = ['string']
+cppmacros['pyobj_from_string1size'] = """
+#define pyobj_from_string1size(v,len) (PyUnicode_FromStringAndSize((char *)v, len))"""
+needs['TRYPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
+cppmacros['TRYPYARRAYTEMPLATE'] = """
+/* New SciPy */
+#define TRYPYARRAYTEMPLATECHAR case NPY_STRING: *(char *)(PyArray_DATA(arr))=*v; break;
+#define TRYPYARRAYTEMPLATELONG case NPY_LONG: *(long *)(PyArray_DATA(arr))=*v; break;
+#define TRYPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr,PyArray_DATA(arr),pyobj_from_ ## ctype ## 1(*v)); break;
+
+#define TRYPYARRAYTEMPLATE(ctype,typecode) \\
+ PyArrayObject *arr = NULL;\\
+ if (!obj) return -2;\\
+ if (!PyArray_Check(obj)) return -1;\\
+ if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
+ if (PyArray_DESCR(arr)->type==typecode) {*(ctype *)(PyArray_DATA(arr))=*v; return 1;}\\
+ switch (PyArray_TYPE(arr)) {\\
+ case NPY_DOUBLE: *(npy_double *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_INT: *(npy_int *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_LONG: *(npy_long *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_FLOAT: *(npy_float *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_CDOUBLE: *(npy_double *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_CFLOAT: *(npy_float *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=(*v!=0); break;\\
+ case NPY_UBYTE: *(npy_ubyte *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_BYTE: *(npy_byte *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_SHORT: *(npy_short *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
+ case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_ ## ctype ## 1(*v)); break;\\
+ default: return -2;\\
+ };\\
+ return 1
+"""
+
+needs['TRYCOMPLEXPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
+cppmacros['TRYCOMPLEXPYARRAYTEMPLATE'] = """
+#define TRYCOMPLEXPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;
+#define TRYCOMPLEXPYARRAYTEMPLATE(ctype,typecode)\\
+ PyArrayObject *arr = NULL;\\
+ if (!obj) return -2;\\
+ if (!PyArray_Check(obj)) return -1;\\
+ if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYCOMPLEXPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
+ if (PyArray_DESCR(arr)->type==typecode) {\\
+ *(ctype *)(PyArray_DATA(arr))=(*v).r;\\
+ *(ctype *)(PyArray_DATA(arr)+sizeof(ctype))=(*v).i;\\
+ return 1;\\
+ }\\
+ switch (PyArray_TYPE(arr)) {\\
+ case NPY_CDOUBLE: *(npy_double *)(PyArray_DATA(arr))=(*v).r;\\
+ *(npy_double *)(PyArray_DATA(arr)+sizeof(npy_double))=(*v).i;\\
+ break;\\
+ case NPY_CFLOAT: *(npy_float *)(PyArray_DATA(arr))=(*v).r;\\
+ *(npy_float *)(PyArray_DATA(arr)+sizeof(npy_float))=(*v).i;\\
+ break;\\
+ case NPY_DOUBLE: *(npy_double *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_LONG: *(npy_long *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_FLOAT: *(npy_float *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_INT: *(npy_int *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_SHORT: *(npy_short *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_UBYTE: *(npy_ubyte *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_BYTE: *(npy_byte *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=((*v).r!=0 && (*v).i!=0); break;\\
+ case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r; break;\\
+ case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r;\\
+ *(npy_longdouble *)(PyArray_DATA(arr)+sizeof(npy_longdouble))=(*v).i;\\
+ break;\\
+ case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;\\
+ default: return -2;\\
+ };\\
+ return -1;
+"""
+# cppmacros['NUMFROMARROBJ']="""
+# define NUMFROMARROBJ(typenum,ctype) \\
+# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
+# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
+# if (arr) {\\
+# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
+# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
+# goto capi_fail;\\
+# } else {\\
+# (PyArray_DESCR(arr)->cast[typenum])(PyArray_DATA(arr),1,(char*)v,1,1);\\
+# }\\
+# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
+# return 1;\\
+# }
+# """
+# XXX: Note that CNUMFROMARROBJ is identical with NUMFROMARROBJ
+# cppmacros['CNUMFROMARROBJ']="""
+# define CNUMFROMARROBJ(typenum,ctype) \\
+# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
+# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
+# if (arr) {\\
+# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
+# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
+# goto capi_fail;\\
+# } else {\\
+# (PyArray_DESCR(arr)->cast[typenum])((void *)(PyArray_DATA(arr)),1,(void *)(v),1,1);\\
+# }\\
+# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
+# return 1;\\
+# }
+# """
+
+
+needs['GETSTRFROMPYTUPLE'] = ['STRINGCOPYN', 'PRINTPYOBJERR']
+cppmacros['GETSTRFROMPYTUPLE'] = """
+#define GETSTRFROMPYTUPLE(tuple,index,str,len) {\\
+ PyObject *rv_cb_str = PyTuple_GetItem((tuple),(index));\\
+ if (rv_cb_str == NULL)\\
+ goto capi_fail;\\
+ if (PyBytes_Check(rv_cb_str)) {\\
+ str[len-1]='\\0';\\
+ STRINGCOPYN((str),PyBytes_AS_STRING((PyBytesObject*)rv_cb_str),(len));\\
+ } else {\\
+ PRINTPYOBJERR(rv_cb_str);\\
+ PyErr_SetString(#modulename#_error,\"string object expected\");\\
+ goto capi_fail;\\
+ }\\
+ }
+"""
+cppmacros['GETSCALARFROMPYTUPLE'] = """
+#define GETSCALARFROMPYTUPLE(tuple,index,var,ctype,mess) {\\
+ if ((capi_tmp = PyTuple_GetItem((tuple),(index)))==NULL) goto capi_fail;\\
+ if (!(ctype ## _from_pyobj((var),capi_tmp,mess)))\\
+ goto capi_fail;\\
+ }
+"""
+
+cppmacros['FAILNULL'] = """\
+#define FAILNULL(p) do { \\
+ if ((p) == NULL) { \\
+ PyErr_SetString(PyExc_MemoryError, "NULL pointer found"); \\
+ goto capi_fail; \\
+ } \\
+} while (0)
+"""
+needs['MEMCOPY'] = ['string.h', 'FAILNULL']
+cppmacros['MEMCOPY'] = """
+#define MEMCOPY(to,from,n)\\
+ do { FAILNULL(to); FAILNULL(from); (void)memcpy(to,from,n); } while (0)
+"""
+cppmacros['STRINGMALLOC'] = """
+#define STRINGMALLOC(str,len)\\
+ if ((str = (string)malloc(len+1)) == NULL) {\\
+ PyErr_SetString(PyExc_MemoryError, \"out of memory\");\\
+ goto capi_fail;\\
+ } else {\\
+ (str)[len] = '\\0';\\
+ }
+"""
+cppmacros['STRINGFREE'] = """
+#define STRINGFREE(str) do {if (!(str == NULL)) free(str);} while (0)
+"""
+needs['STRINGPADN'] = ['string.h']
+cppmacros['STRINGPADN'] = """
+/*
+STRINGPADN replaces null values with padding values from the right.
+
+`to` must have size of at least N bytes.
+
+If the `to[N-1]` has null value, then replace it and all the
+preceding nulls with the given padding.
+
+STRINGPADN(to, N, PADDING, NULLVALUE) is an inverse operation.
+*/
+#define STRINGPADN(to, N, NULLVALUE, PADDING) \\
+ do { \\
+ int _m = (N); \\
+ char *_to = (to); \\
+ for (_m -= 1; _m >= 0 && _to[_m] == NULLVALUE; _m--) { \\
+ _to[_m] = PADDING; \\
+ } \\
+ } while (0)
+"""
+needs['STRINGCOPYN'] = ['string.h', 'FAILNULL']
+cppmacros['STRINGCOPYN'] = """
+/*
+STRINGCOPYN copies N bytes.
+
+`to` and `from` buffers must have sizes of at least N bytes.
+*/
+#define STRINGCOPYN(to,from,N) \\
+ do { \\
+ int _m = (N); \\
+ char *_to = (to); \\
+ char *_from = (from); \\
+ FAILNULL(_to); FAILNULL(_from); \\
+ (void)strncpy(_to, _from, _m); \\
+ } while (0)
+"""
+needs['STRINGCOPY'] = ['string.h', 'FAILNULL']
+cppmacros['STRINGCOPY'] = """
+#define STRINGCOPY(to,from)\\
+ do { FAILNULL(to); FAILNULL(from); (void)strcpy(to,from); } while (0)
+"""
+cppmacros['CHECKGENERIC'] = """
+#define CHECKGENERIC(check,tcheck,name) \\
+ if (!(check)) {\\
+ PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
+ /*goto capi_fail;*/\\
+ } else """
+cppmacros['CHECKARRAY'] = """
+#define CHECKARRAY(check,tcheck,name) \\
+ if (!(check)) {\\
+ PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
+ /*goto capi_fail;*/\\
+ } else """
+cppmacros['CHECKSTRING'] = """
+#define CHECKSTRING(check,tcheck,name,show,var)\\
+ if (!(check)) {\\
+ char errstring[256];\\
+ sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, slen(var), var);\\
+ PyErr_SetString(#modulename#_error, errstring);\\
+ /*goto capi_fail;*/\\
+ } else """
+cppmacros['CHECKSCALAR'] = """
+#define CHECKSCALAR(check,tcheck,name,show,var)\\
+ if (!(check)) {\\
+ char errstring[256];\\
+ sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, var);\\
+ PyErr_SetString(#modulename#_error,errstring);\\
+ /*goto capi_fail;*/\\
+ } else """
+# cppmacros['CHECKDIMS']="""
+# define CHECKDIMS(dims,rank) \\
+# for (int i=0;i<(rank);i++)\\
+# if (dims[i]<0) {\\
+# fprintf(stderr,\"Unspecified array argument requires a complete dimension specification.\\n\");\\
+# goto capi_fail;\\
+# }
+# """
+cppmacros[
+ 'ARRSIZE'] = '#define ARRSIZE(dims,rank) (_PyArray_multiply_list(dims,rank))'
+cppmacros['OLDPYNUM'] = """
+#ifdef OLDPYNUM
+#error You need to install NumPy version 0.13 or higher. See https://scipy.org/install.html
+#endif
+"""
+cppmacros["F2PY_THREAD_LOCAL_DECL"] = """
+#ifndef F2PY_THREAD_LOCAL_DECL
+#if defined(_MSC_VER)
+#define F2PY_THREAD_LOCAL_DECL __declspec(thread)
+#elif defined(NPY_OS_MINGW)
+#define F2PY_THREAD_LOCAL_DECL __thread
+#elif defined(__STDC_VERSION__) \\
+ && (__STDC_VERSION__ >= 201112L) \\
+ && !defined(__STDC_NO_THREADS__) \\
+ && (!defined(__GLIBC__) || __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 12)) \\
+ && !defined(NPY_OS_OPENBSD) && !defined(NPY_OS_HAIKU)
+/* __STDC_NO_THREADS__ was first defined in a maintenance release of glibc 2.12,
+ see https://lists.gnu.org/archive/html/commit-hurd/2012-07/msg00180.html,
+ so `!defined(__STDC_NO_THREADS__)` may give false positive for the existence
+ of `threads.h` when using an older release of glibc 2.12
+ See gh-19437 for details on OpenBSD */
+#include <threads.h>
+#define F2PY_THREAD_LOCAL_DECL thread_local
+#elif defined(__GNUC__) \\
+ && (__GNUC__ > 4 || (__GNUC__ == 4 && (__GNUC_MINOR__ >= 4)))
+#define F2PY_THREAD_LOCAL_DECL __thread
+#endif
+#endif
+"""
+################# C functions ###############
+
+cfuncs['calcarrindex'] = """
+static int calcarrindex(int *i,PyArrayObject *arr) {
+ int k,ii = i[0];
+ for (k=1; k < PyArray_NDIM(arr); k++)
+ ii += (ii*(PyArray_DIM(arr,k) - 1)+i[k]); /* assuming contiguous arr */
+ return ii;
+}"""
+cfuncs['calcarrindextr'] = """
+static int calcarrindextr(int *i,PyArrayObject *arr) {
+ int k,ii = i[PyArray_NDIM(arr)-1];
+ for (k=1; k < PyArray_NDIM(arr); k++)
+ ii += (ii*(PyArray_DIM(arr,PyArray_NDIM(arr)-k-1) - 1)+i[PyArray_NDIM(arr)-k-1]); /* assuming contiguous arr */
+ return ii;
+}"""
+cfuncs['forcomb'] = """
+static struct { int nd;npy_intp *d;int *i,*i_tr,tr; } forcombcache;
+static int initforcomb(npy_intp *dims,int nd,int tr) {
+ int k;
+ if (dims==NULL) return 0;
+ if (nd<0) return 0;
+ forcombcache.nd = nd;
+ forcombcache.d = dims;
+ forcombcache.tr = tr;
+ if ((forcombcache.i = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
+ if ((forcombcache.i_tr = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
+ for (k=1;k PyArray_NBYTES(arr)) {
+ n = PyArray_NBYTES(arr);
+ }
+ STRINGCOPYN(buf, str, n);
+ return 1;
+ }
+capi_fail:
+ PRINTPYOBJERR(obj);
+ PyErr_SetString(#modulename#_error, \"try_pyarr_from_string failed\");
+ return 0;
+}
+"""
+needs['string_from_pyobj'] = ['string', 'STRINGMALLOC', 'STRINGCOPYN']
+cfuncs['string_from_pyobj'] = """
+/*
+ Create a new string buffer `str` of at most length `len` from a
+ Python string-like object `obj`.
+
+ The string buffer has given size (len) or the size of inistr when len==-1.
+
+ The string buffer is padded with blanks: in Fortran, trailing blanks
+ are insignificant contrary to C nulls.
+ */
+static int
+string_from_pyobj(string *str, int *len, const string inistr, PyObject *obj,
+ const char *errmess)
+{
+ PyObject *tmp = NULL;
+ string buf = NULL;
+ npy_intp n = -1;
+#ifdef DEBUGCFUNCS
+fprintf(stderr,\"string_from_pyobj(str='%s',len=%d,inistr='%s',obj=%p)\\n\",
+ (char*)str, *len, (char *)inistr, obj);
+#endif
+ if (obj == Py_None) {
+ n = strlen(inistr);
+ buf = inistr;
+ }
+ else if (PyArray_Check(obj)) {
+ PyArrayObject *arr = (PyArrayObject *)obj;
+ if (!ISCONTIGUOUS(arr)) {
+ PyErr_SetString(PyExc_ValueError,
+ \"array object is non-contiguous.\");
+ goto capi_fail;
+ }
+ n = PyArray_NBYTES(arr);
+ buf = PyArray_DATA(arr);
+ n = strnlen(buf, n);
+ }
+ else {
+ if (PyBytes_Check(obj)) {
+ tmp = obj;
+ Py_INCREF(tmp);
+ }
+ else if (PyUnicode_Check(obj)) {
+ tmp = PyUnicode_AsASCIIString(obj);
+ }
+ else {
+ PyObject *tmp2;
+ tmp2 = PyObject_Str(obj);
+ if (tmp2) {
+ tmp = PyUnicode_AsASCIIString(tmp2);
+ Py_DECREF(tmp2);
+ }
+ else {
+ tmp = NULL;
+ }
+ }
+ if (tmp == NULL) goto capi_fail;
+ n = PyBytes_GET_SIZE(tmp);
+ buf = PyBytes_AS_STRING(tmp);
+ }
+ if (*len == -1) {
+ /* TODO: change the type of `len` so that we can remove this */
+ if (n > NPY_MAX_INT) {
+ PyErr_SetString(PyExc_OverflowError,
+ "object too large for a 32-bit int");
+ goto capi_fail;
+ }
+ *len = n;
+ }
+ else if (*len < n) {
+ /* discard the last (len-n) bytes of input buf */
+ n = *len;
+ }
+ if (n < 0 || *len < 0 || buf == NULL) {
+ goto capi_fail;
+ }
+ STRINGMALLOC(*str, *len); // *str is allocated with size (*len + 1)
+ if (n < *len) {
+ /*
+ Pad fixed-width string with nulls. The caller will replace
+ nulls with blanks when the corresponding argument is not
+ intent(c).
+ */
+ memset(*str + n, '\\0', *len - n);
+ }
+ STRINGCOPYN(*str, buf, n);
+ Py_XDECREF(tmp);
+ return 1;
+capi_fail:
+ Py_XDECREF(tmp);
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = #modulename#_error;
+ }
+ PyErr_SetString(err, errmess);
+ }
+ return 0;
+}
+"""
+
+cfuncs['character_from_pyobj'] = """
+static int
+character_from_pyobj(character* v, PyObject *obj, const char *errmess) {
+ if (PyBytes_Check(obj)) {
+ /* empty bytes has trailing null, so dereferencing is always safe */
+ *v = PyBytes_AS_STRING(obj)[0];
+ return 1;
+ } else if (PyUnicode_Check(obj)) {
+ PyObject* tmp = PyUnicode_AsASCIIString(obj);
+ if (tmp != NULL) {
+ *v = PyBytes_AS_STRING(tmp)[0];
+ Py_DECREF(tmp);
+ return 1;
+ }
+ } else if (PyArray_Check(obj)) {
+ PyArrayObject* arr = (PyArrayObject*)obj;
+ if (F2PY_ARRAY_IS_CHARACTER_COMPATIBLE(arr)) {
+ *v = PyArray_BYTES(arr)[0];
+ return 1;
+ } else if (F2PY_IS_UNICODE_ARRAY(arr)) {
+ // TODO: update when numpy will support 1-byte and
+ // 2-byte unicode dtypes
+ PyObject* tmp = PyUnicode_FromKindAndData(
+ PyUnicode_4BYTE_KIND,
+ PyArray_BYTES(arr),
+ (PyArray_NBYTES(arr)>0?1:0));
+ if (tmp != NULL) {
+ if (character_from_pyobj(v, tmp, errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+ }
+ } else if (PySequence_Check(obj)) {
+ PyObject* tmp = PySequence_GetItem(obj,0);
+ if (tmp != NULL) {
+ if (character_from_pyobj(v, tmp, errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+ }
+ {
+ /* TODO: This error (and most other) error handling needs cleaning. */
+ char mess[F2PY_MESSAGE_BUFFER_SIZE];
+ strcpy(mess, errmess);
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = PyExc_TypeError;
+ Py_INCREF(err);
+ }
+ else {
+ Py_INCREF(err);
+ PyErr_Clear();
+ }
+ sprintf(mess + strlen(mess),
+ " -- expected str|bytes|sequence-of-str-or-bytes, got ");
+ f2py_describe(obj, mess + strlen(mess));
+ PyErr_SetString(err, mess);
+ Py_DECREF(err);
+ }
+ return 0;
+}
+"""
+
+# TODO: These should be dynamically generated, too many mapped to int things,
+# see note in _isocbind.py
+needs['char_from_pyobj'] = ['int_from_pyobj']
+cfuncs['char_from_pyobj'] = """
+static int
+char_from_pyobj(char* v, PyObject *obj, const char *errmess) {
+ int i = 0;
+ if (int_from_pyobj(&i, obj, errmess)) {
+ *v = (char)i;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+needs['signed_char_from_pyobj'] = ['int_from_pyobj', 'signed_char']
+cfuncs['signed_char_from_pyobj'] = """
+static int
+signed_char_from_pyobj(signed_char* v, PyObject *obj, const char *errmess) {
+ int i = 0;
+ if (int_from_pyobj(&i, obj, errmess)) {
+ *v = (signed_char)i;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+needs['short_from_pyobj'] = ['int_from_pyobj']
+cfuncs['short_from_pyobj'] = """
+static int
+short_from_pyobj(short* v, PyObject *obj, const char *errmess) {
+ int i = 0;
+ if (int_from_pyobj(&i, obj, errmess)) {
+ *v = (short)i;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+cfuncs['int_from_pyobj'] = """
+static int
+int_from_pyobj(int* v, PyObject *obj, const char *errmess)
+{
+ PyObject* tmp = NULL;
+
+ if (PyLong_Check(obj)) {
+ *v = Npy__PyLong_AsInt(obj);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ tmp = PyNumber_Long(obj);
+ if (tmp) {
+ *v = Npy__PyLong_AsInt(tmp);
+ Py_DECREF(tmp);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ if (PyComplex_Check(obj)) {
+ PyErr_Clear();
+ tmp = PyObject_GetAttrString(obj,\"real\");
+ }
+ else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
+ /*pass*/;
+ }
+ else if (PySequence_Check(obj)) {
+ PyErr_Clear();
+ tmp = PySequence_GetItem(obj, 0);
+ }
+
+ if (tmp) {
+ if (int_from_pyobj(v, tmp, errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = #modulename#_error;
+ }
+ PyErr_SetString(err, errmess);
+ }
+ return 0;
+}
+"""
+
+
+cfuncs['long_from_pyobj'] = """
+static int
+long_from_pyobj(long* v, PyObject *obj, const char *errmess) {
+ PyObject* tmp = NULL;
+
+ if (PyLong_Check(obj)) {
+ *v = PyLong_AsLong(obj);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ tmp = PyNumber_Long(obj);
+ if (tmp) {
+ *v = PyLong_AsLong(tmp);
+ Py_DECREF(tmp);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ if (PyComplex_Check(obj)) {
+ PyErr_Clear();
+ tmp = PyObject_GetAttrString(obj,\"real\");
+ }
+ else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
+ /*pass*/;
+ }
+ else if (PySequence_Check(obj)) {
+ PyErr_Clear();
+ tmp = PySequence_GetItem(obj, 0);
+ }
+
+ if (tmp) {
+ if (long_from_pyobj(v, tmp, errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = #modulename#_error;
+ }
+ PyErr_SetString(err, errmess);
+ }
+ return 0;
+}
+"""
+
+
+needs['long_long_from_pyobj'] = ['long_long']
+cfuncs['long_long_from_pyobj'] = """
+static int
+long_long_from_pyobj(long_long* v, PyObject *obj, const char *errmess)
+{
+ PyObject* tmp = NULL;
+
+ if (PyLong_Check(obj)) {
+ *v = PyLong_AsLongLong(obj);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ tmp = PyNumber_Long(obj);
+ if (tmp) {
+ *v = PyLong_AsLongLong(tmp);
+ Py_DECREF(tmp);
+ return !(*v == -1 && PyErr_Occurred());
+ }
+
+ if (PyComplex_Check(obj)) {
+ PyErr_Clear();
+ tmp = PyObject_GetAttrString(obj,\"real\");
+ }
+ else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
+ /*pass*/;
+ }
+ else if (PySequence_Check(obj)) {
+ PyErr_Clear();
+ tmp = PySequence_GetItem(obj, 0);
+ }
+
+ if (tmp) {
+ if (long_long_from_pyobj(v, tmp, errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = #modulename#_error;
+ }
+ PyErr_SetString(err,errmess);
+ }
+ return 0;
+}
+"""
+
+
+needs['long_double_from_pyobj'] = ['double_from_pyobj', 'long_double']
+cfuncs['long_double_from_pyobj'] = """
+static int
+long_double_from_pyobj(long_double* v, PyObject *obj, const char *errmess)
+{
+ double d=0;
+ if (PyArray_CheckScalar(obj)){
+ if PyArray_IsScalar(obj, LongDouble) {
+ PyArray_ScalarAsCtype(obj, v);
+ return 1;
+ }
+ else if (PyArray_Check(obj) && PyArray_TYPE(obj) == NPY_LONGDOUBLE) {
+ (*v) = *((npy_longdouble *)PyArray_DATA(obj));
+ return 1;
+ }
+ }
+ if (double_from_pyobj(&d, obj, errmess)) {
+ *v = (long_double)d;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+cfuncs['double_from_pyobj'] = """
+static int
+double_from_pyobj(double* v, PyObject *obj, const char *errmess)
+{
+ PyObject* tmp = NULL;
+ if (PyFloat_Check(obj)) {
+ *v = PyFloat_AsDouble(obj);
+ return !(*v == -1.0 && PyErr_Occurred());
+ }
+
+ tmp = PyNumber_Float(obj);
+ if (tmp) {
+ *v = PyFloat_AsDouble(tmp);
+ Py_DECREF(tmp);
+ return !(*v == -1.0 && PyErr_Occurred());
+ }
+
+ if (PyComplex_Check(obj)) {
+ PyErr_Clear();
+ tmp = PyObject_GetAttrString(obj,\"real\");
+ }
+ else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
+ /*pass*/;
+ }
+ else if (PySequence_Check(obj)) {
+ PyErr_Clear();
+ tmp = PySequence_GetItem(obj, 0);
+ }
+
+ if (tmp) {
+ if (double_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
+ Py_DECREF(tmp);
+ }
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err==NULL) err = #modulename#_error;
+ PyErr_SetString(err,errmess);
+ }
+ return 0;
+}
+"""
+
+
+needs['float_from_pyobj'] = ['double_from_pyobj']
+cfuncs['float_from_pyobj'] = """
+static int
+float_from_pyobj(float* v, PyObject *obj, const char *errmess)
+{
+ double d=0.0;
+ if (double_from_pyobj(&d,obj,errmess)) {
+ *v = (float)d;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+needs['complex_long_double_from_pyobj'] = ['complex_long_double', 'long_double',
+ 'complex_double_from_pyobj', 'npy_math.h']
+cfuncs['complex_long_double_from_pyobj'] = """
+static int
+complex_long_double_from_pyobj(complex_long_double* v, PyObject *obj, const char *errmess)
+{
+ complex_double cd = {0.0,0.0};
+ if (PyArray_CheckScalar(obj)){
+ if PyArray_IsScalar(obj, CLongDouble) {
+ PyArray_ScalarAsCtype(obj, v);
+ return 1;
+ }
+ else if (PyArray_Check(obj) && PyArray_TYPE(obj)==NPY_CLONGDOUBLE) {
+ (*v).r = npy_creall(*(((npy_clongdouble *)PyArray_DATA(obj))));
+ (*v).i = npy_cimagl(*(((npy_clongdouble *)PyArray_DATA(obj))));
+ return 1;
+ }
+ }
+ if (complex_double_from_pyobj(&cd,obj,errmess)) {
+ (*v).r = (long_double)cd.r;
+ (*v).i = (long_double)cd.i;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+needs['complex_double_from_pyobj'] = ['complex_double', 'npy_math.h']
+cfuncs['complex_double_from_pyobj'] = """
+static int
+complex_double_from_pyobj(complex_double* v, PyObject *obj, const char *errmess) {
+ Py_complex c;
+ if (PyComplex_Check(obj)) {
+ c = PyComplex_AsCComplex(obj);
+ (*v).r = c.real;
+ (*v).i = c.imag;
+ return 1;
+ }
+ if (PyArray_IsScalar(obj, ComplexFloating)) {
+ if (PyArray_IsScalar(obj, CFloat)) {
+ npy_cfloat new;
+ PyArray_ScalarAsCtype(obj, &new);
+ (*v).r = (double)npy_crealf(new);
+ (*v).i = (double)npy_cimagf(new);
+ }
+ else if (PyArray_IsScalar(obj, CLongDouble)) {
+ npy_clongdouble new;
+ PyArray_ScalarAsCtype(obj, &new);
+ (*v).r = (double)npy_creall(new);
+ (*v).i = (double)npy_cimagl(new);
+ }
+ else { /* if (PyArray_IsScalar(obj, CDouble)) */
+ PyArray_ScalarAsCtype(obj, v);
+ }
+ return 1;
+ }
+ if (PyArray_CheckScalar(obj)) { /* 0-dim array or still array scalar */
+ PyArrayObject *arr;
+ if (PyArray_Check(obj)) {
+ arr = (PyArrayObject *)PyArray_Cast((PyArrayObject *)obj, NPY_CDOUBLE);
+ }
+ else {
+ arr = (PyArrayObject *)PyArray_FromScalar(obj, PyArray_DescrFromType(NPY_CDOUBLE));
+ }
+ if (arr == NULL) {
+ return 0;
+ }
+ (*v).r = npy_creal(*(((npy_cdouble *)PyArray_DATA(arr))));
+ (*v).i = npy_cimag(*(((npy_cdouble *)PyArray_DATA(arr))));
+ Py_DECREF(arr);
+ return 1;
+ }
+ /* Python does not provide PyNumber_Complex function :-( */
+ (*v).i = 0.0;
+ if (PyFloat_Check(obj)) {
+ (*v).r = PyFloat_AsDouble(obj);
+ return !((*v).r == -1.0 && PyErr_Occurred());
+ }
+ if (PyLong_Check(obj)) {
+ (*v).r = PyLong_AsDouble(obj);
+ return !((*v).r == -1.0 && PyErr_Occurred());
+ }
+ if (PySequence_Check(obj) && !(PyBytes_Check(obj) || PyUnicode_Check(obj))) {
+ PyObject *tmp = PySequence_GetItem(obj,0);
+ if (tmp) {
+ if (complex_double_from_pyobj(v,tmp,errmess)) {
+ Py_DECREF(tmp);
+ return 1;
+ }
+ Py_DECREF(tmp);
+ }
+ }
+ {
+ PyObject* err = PyErr_Occurred();
+ if (err==NULL)
+ err = PyExc_TypeError;
+ PyErr_SetString(err,errmess);
+ }
+ return 0;
+}
+"""
+
+
+needs['complex_float_from_pyobj'] = [
+ 'complex_float', 'complex_double_from_pyobj']
+cfuncs['complex_float_from_pyobj'] = """
+static int
+complex_float_from_pyobj(complex_float* v,PyObject *obj,const char *errmess)
+{
+ complex_double cd={0.0,0.0};
+ if (complex_double_from_pyobj(&cd,obj,errmess)) {
+ (*v).r = (float)cd.r;
+ (*v).i = (float)cd.i;
+ return 1;
+ }
+ return 0;
+}
+"""
+
+
+cfuncs['try_pyarr_from_character'] = """
+static int try_pyarr_from_character(PyObject* obj, character* v) {
+ PyArrayObject *arr = (PyArrayObject*)obj;
+ if (!obj) return -2;
+ if (PyArray_Check(obj)) {
+ if (F2PY_ARRAY_IS_CHARACTER_COMPATIBLE(arr)) {
+ *(character *)(PyArray_DATA(arr)) = *v;
+ return 1;
+ }
+ }
+ {
+ char mess[F2PY_MESSAGE_BUFFER_SIZE];
+ PyObject* err = PyErr_Occurred();
+ if (err == NULL) {
+ err = PyExc_ValueError;
+ strcpy(mess, "try_pyarr_from_character failed"
+ " -- expected bytes array-scalar|array, got ");
+ f2py_describe(obj, mess + strlen(mess));
+ PyErr_SetString(err, mess);
+ }
+ }
+ return 0;
+}
+"""
+
+needs['try_pyarr_from_char'] = ['pyobj_from_char1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_char'] = 'static int try_pyarr_from_char(PyObject* obj,char* v) {\n TRYPYARRAYTEMPLATE(char,\'c\');\n}\n'
+needs['try_pyarr_from_unsigned_char'] = ['TRYPYARRAYTEMPLATE', 'unsigned_char']
+cfuncs[
+ 'try_pyarr_from_unsigned_char'] = 'static int try_pyarr_from_unsigned_char(PyObject* obj,unsigned_char* v) {\n TRYPYARRAYTEMPLATE(unsigned_char,\'b\');\n}\n'
+needs['try_pyarr_from_signed_char'] = ['TRYPYARRAYTEMPLATE', 'signed_char']
+cfuncs[
+ 'try_pyarr_from_signed_char'] = 'static int try_pyarr_from_signed_char(PyObject* obj,signed_char* v) {\n TRYPYARRAYTEMPLATE(signed_char,\'1\');\n}\n'
+needs['try_pyarr_from_short'] = ['pyobj_from_short1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_short'] = 'static int try_pyarr_from_short(PyObject* obj,short* v) {\n TRYPYARRAYTEMPLATE(short,\'s\');\n}\n'
+needs['try_pyarr_from_int'] = ['pyobj_from_int1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_int'] = 'static int try_pyarr_from_int(PyObject* obj,int* v) {\n TRYPYARRAYTEMPLATE(int,\'i\');\n}\n'
+needs['try_pyarr_from_long'] = ['pyobj_from_long1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_long'] = 'static int try_pyarr_from_long(PyObject* obj,long* v) {\n TRYPYARRAYTEMPLATE(long,\'l\');\n}\n'
+needs['try_pyarr_from_long_long'] = [
+ 'pyobj_from_long_long1', 'TRYPYARRAYTEMPLATE', 'long_long']
+cfuncs[
+ 'try_pyarr_from_long_long'] = 'static int try_pyarr_from_long_long(PyObject* obj,long_long* v) {\n TRYPYARRAYTEMPLATE(long_long,\'L\');\n}\n'
+needs['try_pyarr_from_float'] = ['pyobj_from_float1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_float'] = 'static int try_pyarr_from_float(PyObject* obj,float* v) {\n TRYPYARRAYTEMPLATE(float,\'f\');\n}\n'
+needs['try_pyarr_from_double'] = ['pyobj_from_double1', 'TRYPYARRAYTEMPLATE']
+cfuncs[
+ 'try_pyarr_from_double'] = 'static int try_pyarr_from_double(PyObject* obj,double* v) {\n TRYPYARRAYTEMPLATE(double,\'d\');\n}\n'
+needs['try_pyarr_from_complex_float'] = [
+ 'pyobj_from_complex_float1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_float']
+cfuncs[
+ 'try_pyarr_from_complex_float'] = 'static int try_pyarr_from_complex_float(PyObject* obj,complex_float* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(float,\'F\');\n}\n'
+needs['try_pyarr_from_complex_double'] = [
+ 'pyobj_from_complex_double1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_double']
+cfuncs[
+ 'try_pyarr_from_complex_double'] = 'static int try_pyarr_from_complex_double(PyObject* obj,complex_double* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(double,\'D\');\n}\n'
+
+
+needs['create_cb_arglist'] = ['CFUNCSMESS', 'PRINTPYOBJERR', 'MINMAX']
+# create the list of arguments to be used when calling back to python
+cfuncs['create_cb_arglist'] = """
+static int
+create_cb_arglist(PyObject* fun, PyTupleObject* xa , const int maxnofargs,
+ const int nofoptargs, int *nofargs, PyTupleObject **args,
+ const char *errmess)
+{
+ PyObject *tmp = NULL;
+ PyObject *tmp_fun = NULL;
+ Py_ssize_t tot, opt, ext, siz, i, di = 0;
+ CFUNCSMESS(\"create_cb_arglist\\n\");
+ tot=opt=ext=siz=0;
+ /* Get the total number of arguments */
+ if (PyFunction_Check(fun)) {
+ tmp_fun = fun;
+ Py_INCREF(tmp_fun);
+ }
+ else {
+ di = 1;
+ if (PyObject_HasAttrString(fun,\"im_func\")) {
+ tmp_fun = PyObject_GetAttrString(fun,\"im_func\");
+ }
+ else if (PyObject_HasAttrString(fun,\"__call__\")) {
+ tmp = PyObject_GetAttrString(fun,\"__call__\");
+ if (PyObject_HasAttrString(tmp,\"im_func\"))
+ tmp_fun = PyObject_GetAttrString(tmp,\"im_func\");
+ else {
+ tmp_fun = fun; /* built-in function */
+ Py_INCREF(tmp_fun);
+ tot = maxnofargs;
+ if (PyCFunction_Check(fun)) {
+ /* In case the function has a co_argcount (like on PyPy) */
+ di = 0;
+ }
+ if (xa != NULL)
+ tot += PyTuple_Size((PyObject *)xa);
+ }
+ Py_XDECREF(tmp);
+ }
+ else if (PyFortran_Check(fun) || PyFortran_Check1(fun)) {
+ tot = maxnofargs;
+ if (xa != NULL)
+ tot += PyTuple_Size((PyObject *)xa);
+ tmp_fun = fun;
+ Py_INCREF(tmp_fun);
+ }
+ else if (F2PyCapsule_Check(fun)) {
+ tot = maxnofargs;
+ if (xa != NULL)
+ ext = PyTuple_Size((PyObject *)xa);
+ if(ext>0) {
+ fprintf(stderr,\"extra arguments tuple cannot be used with PyCapsule call-back\\n\");
+ goto capi_fail;
+ }
+ tmp_fun = fun;
+ Py_INCREF(tmp_fun);
+ }
+ }
+
+ if (tmp_fun == NULL) {
+ fprintf(stderr,
+ \"Call-back argument must be function|instance|instance.__call__|f2py-function \"
+ \"but got %s.\\n\",
+ ((fun == NULL) ? \"NULL\" : Py_TYPE(fun)->tp_name));
+ goto capi_fail;
+ }
+
+ if (PyObject_HasAttrString(tmp_fun,\"__code__\")) {
+ if (PyObject_HasAttrString(tmp = PyObject_GetAttrString(tmp_fun,\"__code__\"),\"co_argcount\")) {
+ PyObject *tmp_argcount = PyObject_GetAttrString(tmp,\"co_argcount\");
+ Py_DECREF(tmp);
+ if (tmp_argcount == NULL) {
+ goto capi_fail;
+ }
+ tot = PyLong_AsSsize_t(tmp_argcount) - di;
+ Py_DECREF(tmp_argcount);
+ }
+ }
+ /* Get the number of optional arguments */
+ if (PyObject_HasAttrString(tmp_fun,\"__defaults__\")) {
+ if (PyTuple_Check(tmp = PyObject_GetAttrString(tmp_fun,\"__defaults__\")))
+ opt = PyTuple_Size(tmp);
+ Py_XDECREF(tmp);
+ }
+ /* Get the number of extra arguments */
+ if (xa != NULL)
+ ext = PyTuple_Size((PyObject *)xa);
+ /* Calculate the size of call-backs argument list */
+ siz = MIN(maxnofargs+ext,tot);
+ *nofargs = MAX(0,siz-ext);
+
+#ifdef DEBUGCFUNCS
+ fprintf(stderr,
+ \"debug-capi:create_cb_arglist:maxnofargs(-nofoptargs),\"
+ \"tot,opt,ext,siz,nofargs = %d(-%d), %zd, %zd, %zd, %zd, %d\\n\",
+ maxnofargs, nofoptargs, tot, opt, ext, siz, *nofargs);
+#endif
+
+ if (siz < tot-opt) {
+ fprintf(stderr,
+ \"create_cb_arglist: Failed to build argument list \"
+ \"(siz) with enough arguments (tot-opt) required by \"
+ \"user-supplied function (siz,tot,opt=%zd, %zd, %zd).\\n\",
+ siz, tot, opt);
+ goto capi_fail;
+ }
+
+ /* Initialize argument list */
+ *args = (PyTupleObject *)PyTuple_New(siz);
+ for (i=0;i<*nofargs;i++) {
+ Py_INCREF(Py_None);
+ PyTuple_SET_ITEM((PyObject *)(*args),i,Py_None);
+ }
+ if (xa != NULL)
+ for (i=(*nofargs);i 0:
+ if outneeds[n][0] not in needs:
+ out.append(outneeds[n][0])
+ del outneeds[n][0]
+ else:
+ flag = 0
+ for k in outneeds[n][1:]:
+ if k in needs[outneeds[n][0]]:
+ flag = 1
+ break
+ if flag:
+ outneeds[n] = outneeds[n][1:] + [outneeds[n][0]]
+ else:
+ out.append(outneeds[n][0])
+ del outneeds[n][0]
+ if saveout and (0 not in map(lambda x, y: x == y, saveout, outneeds[n])) \
+ and outneeds[n] != []:
+ print(n, saveout)
+ errmess(
+ 'get_needs: no progress in sorting needs, probably circular dependence, skipping.\n')
+ out = out + saveout
+ break
+ saveout = copy.copy(outneeds[n])
+ if out == []:
+ out = [n]
+ res[n] = out
+ return res
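+
+# The loop above repeatedly rotates each `outneeds[n]` list until its head no
+# longer has any of the remaining entries among its dependencies, so every
+# need ends up after the needs it depends on.  A compact sketch of the same
+# idea (hypothetical helper; it simply raises on cycles instead of falling
+# back the way the code above does):
+def _order_needs_sketch(pending, deps):
+    out = []
+    while pending:
+        for i, item in enumerate(pending):
+            others = [p for p in pending if p is not item]
+            if not any(o in deps.get(item, ()) for o in others):
+                out.append(pending.pop(i))       # nothing left that it needs
+                break
+        else:
+            raise ValueError("circular dependence: %r" % (pending,))
+    return out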
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/common_rules.py b/venv/lib/python3.10/site-packages/numpy/f2py/common_rules.py
new file mode 100644
index 0000000000000000000000000000000000000000..64347b737454fe1bae544b6630de2729157d7f71
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/common_rules.py
@@ -0,0 +1,146 @@
+"""
+Build common block mechanism for f2py2e.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+from . import __version__
+f2py_version = __version__.version
+
+from .auxfuncs import (
+ hasbody, hascommon, hasnote, isintent_hide, outmess, getuseblocks
+)
+from . import capi_maps
+from . import func2subr
+from .crackfortran import rmbadname
+
+
+def findcommonblocks(block, top=1):
+ ret = []
+ if hascommon(block):
+ for key, value in block['common'].items():
+ vars_ = {v: block['vars'][v] for v in value}
+ ret.append((key, value, vars_))
+ elif hasbody(block):
+ for b in block['body']:
+ ret = ret + findcommonblocks(b, 0)
+ if top:
+ tret = []
+ names = []
+ for t in ret:
+ if t[0] not in names:
+ names.append(t[0])
+ tret.append(t)
+ return tret
+ return ret
+
+
+def buildhooks(m):
+ ret = {'commonhooks': [], 'initcommonhooks': [],
+ 'docs': ['"COMMON blocks:\\n"']}
+ fwrap = ['']
+
+ def fadd(line, s=fwrap):
+ s[0] = '%s\n %s' % (s[0], line)
+ chooks = ['']
+
+ def cadd(line, s=chooks):
+ s[0] = '%s\n%s' % (s[0], line)
+ ihooks = ['']
+
+ def iadd(line, s=ihooks):
+ s[0] = '%s\n%s' % (s[0], line)
+ doc = ['']
+
+ def dadd(line, s=doc):
+ s[0] = '%s\n%s' % (s[0], line)
+ for (name, vnames, vars) in findcommonblocks(m):
+ lower_name = name.lower()
+ hnames, inames = [], []
+ for n in vnames:
+ if isintent_hide(vars[n]):
+ hnames.append(n)
+ else:
+ inames.append(n)
+ if hnames:
+ outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n\t\t Hidden: %s\n' % (
+ name, ','.join(inames), ','.join(hnames)))
+ else:
+ outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n' % (
+ name, ','.join(inames)))
+ fadd('subroutine f2pyinit%s(setupfunc)' % name)
+ for usename in getuseblocks(m):
+ fadd(f'use {usename}')
+ fadd('external setupfunc')
+ for n in vnames:
+ fadd(func2subr.var2fixfortran(vars, n))
+ if name == '_BLNK_':
+ fadd('common %s' % (','.join(vnames)))
+ else:
+ fadd('common /%s/ %s' % (name, ','.join(vnames)))
+ fadd('call setupfunc(%s)' % (','.join(inames)))
+ fadd('end\n')
+ cadd('static FortranDataDef f2py_%s_def[] = {' % (name))
+ idims = []
+ for n in inames:
+ ct = capi_maps.getctype(vars[n])
+ elsize = capi_maps.get_elsize(vars[n])
+ at = capi_maps.c2capi_map[ct]
+ dm = capi_maps.getarrdims(n, vars[n])
+ if dm['dims']:
+ idims.append('(%s)' % (dm['dims']))
+ else:
+ idims.append('')
+ dms = dm['dims'].strip()
+ if not dms:
+ dms = '-1'
+ cadd('\t{\"%s\",%s,{{%s}},%s, %s},'
+ % (n, dm['rank'], dms, at, elsize))
+ cadd('\t{NULL}\n};')
+ inames1 = rmbadname(inames)
+ inames1_tps = ','.join(['char *' + s for s in inames1])
+ cadd('static void f2py_setup_%s(%s) {' % (name, inames1_tps))
+ cadd('\tint i_f2py=0;')
+ for n in inames1:
+ cadd('\tf2py_%s_def[i_f2py++].data = %s;' % (name, n))
+ cadd('}')
+ if '_' in lower_name:
+ F_FUNC = 'F_FUNC_US'
+ else:
+ F_FUNC = 'F_FUNC'
+ cadd('extern void %s(f2pyinit%s,F2PYINIT%s)(void(*)(%s));'
+ % (F_FUNC, lower_name, name.upper(),
+ ','.join(['char*'] * len(inames1))))
+ cadd('static void f2py_init_%s(void) {' % name)
+ cadd('\t%s(f2pyinit%s,F2PYINIT%s)(f2py_setup_%s);'
+ % (F_FUNC, lower_name, name.upper(), name))
+ cadd('}\n')
+ iadd('\ttmp = PyFortranObject_New(f2py_%s_def,f2py_init_%s);' % (name, name))
+ iadd('\tif (tmp == NULL) return NULL;')
+ iadd('\tif (F2PyDict_SetItemString(d, \"%s\", tmp) == -1) return NULL;'
+ % name)
+ iadd('\tPy_DECREF(tmp);')
+ tname = name.replace('_', '\\_')
+ dadd('\\subsection{Common block \\texttt{%s}}\n' % (tname))
+ dadd('\\begin{description}')
+ for n in inames:
+ dadd('\\item[]{{}\\verb@%s@{}}' %
+ (capi_maps.getarrdocsign(n, vars[n])))
+ if hasnote(vars[n]):
+ note = vars[n]['note']
+ if isinstance(note, list):
+ note = '\n'.join(note)
+ dadd('--- %s' % (note))
+ dadd('\\end{description}')
+ ret['docs'].append(
+ '"\t/%s/ %s\\n"' % (name, ','.join(map(lambda v, d: v + d, inames, idims))))
+ ret['commonhooks'] = chooks
+ ret['initcommonhooks'] = ihooks
+ ret['latexdoc'] = doc[0]
+ if len(ret['docs']) <= 1:
+ ret['docs'] = ''
+ return ret, fwrap[0]
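+
+# The fadd/cadd/iadd/dadd helpers above all follow the same pattern: the
+# generated text is kept in a one-element list so the nested function can
+# keep appending to it.  A minimal standalone sketch of that pattern
+# (hypothetical name, illustration only):
+def _make_line_collector():
+    buf = ['']
+    def add(line, s=buf):
+        s[0] = '%s\n%s' % (s[0], line)   # append one more generated line
+    return add, buf
+# Usage: add, buf = _make_line_collector(); add('x'); add('y')
+# leaves buf[0] == '\nx\ny', matching how chooks/ihooks/doc are built above.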
diff --git a/venv/lib/python3.10/site-packages/numpy/f2py/crackfortran.py b/venv/lib/python3.10/site-packages/numpy/f2py/crackfortran.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d3fc27608bd85f67867b66d39640a5167d7e5ee
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/numpy/f2py/crackfortran.py
@@ -0,0 +1,3767 @@
+#!/usr/bin/env python3
+"""
+crackfortran --- read fortran (77,90) code and extract declaration information.
+
+Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
+Copyright 2011 -- present NumPy Developers.
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy License.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+
+
+Usage of crackfortran:
+======================
+Command line keys: -quiet,-verbose,-fix,-f77,-f90,-show,-h
+ -m <modulename>,--ignore-contains
+Functions: crackfortran, crack2fortran
+The following Fortran statements/constructions are supported
+(or will be if needed):
+ block data,byte,call,character,common,complex,contains,data,
+ dimension,double complex,double precision,end,external,function,
+ implicit,integer,intent,interface,intrinsic,
+ logical,module,optional,parameter,private,public,
+ program,real,(sequence?),subroutine,type,use,virtual,
+ include,pythonmodule
+Note: 'virtual' is mapped to 'dimension'.
+Note: 'implicit integer (z) static (z)' is 'implicit static (z)' (this is a minor bug).
+Note: code after 'contains' will be ignored until its scope ends.
+Note: 'common' statement is extended: dimensions are moved to variable definitions
+Note: an 'f2py' comment directive ('Cf2py <statement>' or '!f2py <statement>') is read as if '<statement>' were ordinary Fortran code.
+Note: pythonmodule is introduced to represent Python module
+
+Usage:
+ `postlist=crackfortran(files)`
+ `postlist` contains declaration information read from the list of files `files`.
+ `crack2fortran(postlist)` returns Fortran code to be saved to a pyf-file
+
+ `postlist` has the following structure:
+ *** it is a list of dictionaries containing `blocks':
+ B = {'block','body','vars','parent_block'[,'name','prefix','args','result',
+ 'implicit','externals','interfaced','common','sortvars',
+ 'commonvars','note']}
+ B['block'] = 'interface' | 'function' | 'subroutine' | 'module' |
+ 'program' | 'block data' | 'type' | 'pythonmodule' |
+ 'abstract interface'
+ B['body'] --- list containing `subblocks' with the same structure as `blocks'
+ B['parent_block'] --- dictionary of a parent block:
+ C['body'][<index>]['parent_block'] is C
+ B['vars'] --- dictionary of variable definitions
+ B['sortvars'] --- dictionary of variable definitions sorted by dependence (independent first)
+ B['name'] --- name of the block (not if B['block']=='interface')
+ B['prefix'] --- prefix string (only if B['block']=='function')
+ B['args'] --- list of argument names if B['block']== 'function' | 'subroutine'
+ B['result'] --- name of the return value (only if B['block']=='function')
+ B['implicit'] --- dictionary {'a':<variable definition>,'b':...} | None
+ B['externals'] --- list of variables being external
+ B['interfaced'] --- list of variables being external and defined
+ B['common'] --- dictionary of common blocks (list of objects)
+ B['commonvars'] --- list of variables used in common blocks (dimensions are moved to variable definitions)
+ B['from'] --- string showing the 'parents' of the current block
+ B['use'] --- dictionary of modules used in current block:
+ {<modulename>:{['only':<0|1>],['map':{<oldname>:<newname>,...}]}}
+ B['note'] --- list of LaTeX comments on the block
+ B['f2pyenhancements'] --- optional dictionary
+ {'threadsafe':'','fortranname':<name>,
+ 'callstatement':<C-expr>|<multi-line block>,
+ 'callprotoargument':<C-expr-list>,
+ 'usercode':<multi-line block>|<list of multi-line blocks>,
+ 'pymethoddef':<multi-line block>
+ }
+ B['entry'] --- dictionary {entryname:argslist,..}
+ B['varnames'] --- list of variable names given in the order of reading the
+ Fortran code, useful for derived types.
+ B['saved_interface'] --- a string of scanned routine signature, defines explicit interface
+ *** Variable definition is a dictionary
+ D = B['vars'][<name>] =
+ {'typespec'[,'attrspec','kindselector','charselector','=','typename']}
+ D['typespec'] = 'byte' | 'character' | 'complex' | 'double complex' |
+ 'double precision' | 'integer' | 'logical' | 'real' | 'type'
+ D['attrspec'] --- list of attributes (e.g. 'dimension()',
+ 'external','intent(in|out|inout|hide|c|callback|cache|aligned4|aligned8|aligned16)',
+ 'optional','required', etc)
+ K = D['kindselector'] = {['*','kind']} (only if D['typespec'] =
+ 'complex' | 'integer' | 'logical' | 'real' )
+ C = D['charselector'] = {['*','len','kind','f2py_len']}
+ (only if D['typespec']=='character')
+ D['='] --- initialization expression string
+ D['typename'] --- name of the type if D['typespec']=='type'
+ D['dimension'] --- list of dimension bounds
+ D['intent'] --- list of intent specifications
+ D['depend'] --- list of variable names on which the current variable depends
+ D['check'] --- list of C-expressions; if C-expr returns zero, exception is raised
+ D['note'] --- list of LaTeX comments on the variable
+ *** Meaning of kind/char selectors (few examples):
+ D['typespec']*K['*']
+ D['typespec'](kind=K['kind'])
+ character*C['*']
+ character(len=C['len'],kind=C['kind'], f2py_len=C['f2py_len'])
+ (see also fortran type declaration statement formats below)
+
+Fortran 90 type declaration statement format (F77 is subset of F90)
+====================================================================
+(Main source: IBM XL Fortran 5.1 Language Reference Manual)
+type declaration = <typespec> [[<attrspec>]::] <entitydecl>
+<typespec> = byte |
+ character[<charselector>] |
+ complex[<kindselector>] |
+ double complex |
+ double precision |
+ integer[<kindselector>] |
+ logical[<kindselector>] |
+ real[<kindselector>] |
+ type(<typename>)
+<charselector> = * <charlen> |
+ ([len=]<len>[,[kind=]<kind>]) |
+ (kind=<kind>[,len=<len>])
+<kindselector> = * <intlen> |
+ ([kind=]<kind>)
+<attrspec> = comma separated list of attributes.
+ Only the following attributes are used in
+ building up the interface:
+ external
+ (parameter --- affects '=' key)
+ optional
+ intent
+ Other attributes are ignored.
+<intentspec> = in | out | inout
+<dimension> = comma separated list of dimension bounds.
+<entitydecl> = <name> [[*<charlen>][(<dimension>)] | [(<dimension>)]*<charlen>]
+ [/<init_expr>/ | =<init_expr>] [,<entitydecl>]
+
+In addition, the following attributes are used: check,depend,note
+
+TODO:
+ * Apply 'parameter' attribute (e.g. 'integer parameter :: i=2' 'real x(i)'
+ -> 'real x(2)')
+ The above may be solved by creating appropriate preprocessor program, for example.
+
+"""
+import sys
+import string
+import fileinput
+import re
+import os
+import copy
+import platform
+import codecs
+from pathlib import Path
+try:
+ import charset_normalizer
+except ImportError:
+ charset_normalizer = None
+
+from . import __version__
+
+# The environment provided by auxfuncs.py is needed for some calls to eval.
+# As the needed functions cannot be determined by static inspection of the
+# code, it is safest to use import * pending a major refactoring of f2py.
+from .auxfuncs import *
+from . import symbolic
+
+f2py_version = __version__.version
+
+# Global flags:
+strictf77 = 1 # Ignore `!' comments unless line[0]=='!'
+sourcecodeform = 'fix' # 'fix','free'
+quiet = 0 # Be verbose if 0 (Obsolete: not used any more)
+verbose = 1 # Be quiet if 0, extra verbose if > 1.
+tabchar = 4 * ' '
+pyffilename = ''
+f77modulename = ''
+skipemptyends = 0 # for old F77 programs without 'program' statement
+ignorecontains = 1
+dolowercase = 1
+debug = []
+
+# Global variables
+beginpattern = ''
+currentfilename = ''
+expectbegin = 1
+f90modulevars = {}
+filepositiontext = ''
+gotnextfile = 1
+groupcache = None
+groupcounter = 0
+grouplist = {groupcounter: []}
+groupname = ''
+include_paths = []
+neededmodule = -1
+onlyfuncs = []
+previous_context = None
+skipblocksuntil = -1
+skipfuncs = []
+skipfunctions = []
+usermodules = []
+
+
+def reset_global_f2py_vars():
+ global groupcounter, grouplist, neededmodule, expectbegin
+ global skipblocksuntil, usermodules, f90modulevars, gotnextfile
+ global filepositiontext, currentfilename, skipfunctions, skipfuncs
+ global onlyfuncs, include_paths, previous_context
+ global strictf77, sourcecodeform, quiet, verbose, tabchar, pyffilename
+ global f77modulename, skipemptyends, ignorecontains, dolowercase, debug
+
+ # flags
+ strictf77 = 1
+ sourcecodeform = 'fix'
+ quiet = 0
+ verbose = 1
+ tabchar = 4 * ' '
+ pyffilename = ''
+ f77modulename = ''
+ skipemptyends = 0
+ ignorecontains = 1
+ dolowercase = 1
+ debug = []
+ # variables
+ groupcounter = 0
+ grouplist = {groupcounter: []}
+ neededmodule = -1
+ expectbegin = 1
+ skipblocksuntil = -1
+ usermodules = []
+ f90modulevars = {}
+ gotnextfile = 1
+ filepositiontext = ''
+ currentfilename = ''
+ skipfunctions = []
+ skipfuncs = []
+ onlyfuncs = []
+ include_paths = []
+ previous_context = None
+
+
+def outmess(line, flag=1):
+ global filepositiontext
+
+ if not verbose:
+ return
+ if not quiet:
+ if flag:
+ sys.stdout.write(filepositiontext)
+ sys.stdout.write(line)
+
+re._MAXCACHE = 50
+defaultimplicitrules = {}
+for c in "abcdefghopqrstuvwxyz$_":
+ defaultimplicitrules[c] = {'typespec': 'real'}
+for c in "ijklmn":
+ defaultimplicitrules[c] = {'typespec': 'integer'}
+badnames = {}
+invbadnames = {}
+for n in ['int', 'double', 'float', 'char', 'short', 'long', 'void', 'case', 'while',
+ 'return', 'signed', 'unsigned', 'if', 'for', 'typedef', 'sizeof', 'union',
+ 'struct', 'static', 'register', 'new', 'break', 'do', 'goto', 'switch',
+ 'continue', 'else', 'inline', 'extern', 'delete', 'const', 'auto',
+ 'len', 'rank', 'shape', 'index', 'slen', 'size', '_i',
+ 'max', 'min',
+ 'flen', 'fshape',
+ 'string', 'complex_double', 'float_double', 'stdin', 'stderr', 'stdout',
+ 'type', 'default']:
+ badnames[n] = n + '_bn'
+ invbadnames[n + '_bn'] = n
+
+
+def rmbadname1(name):
+ if name in badnames:
+ errmess('rmbadname1: Replacing "%s" with "%s".\n' %
+ (name, badnames[name]))
+ return badnames[name]
+ return name
+
+
+def rmbadname(names):
+ return [rmbadname1(_m) for _m in names]
+
+
+def undo_rmbadname1(name):
+ if name in invbadnames:
+ errmess('undo_rmbadname1: Replacing "%s" with "%s".\n'
+ % (name, invbadnames[name]))
+ return invbadnames[name]
+ return name
+
+
+def undo_rmbadname(names):
+ return [undo_rmbadname1(_m) for _m in names]
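+
+# For example, a Fortran variable called `len` or `type` would collide with
+# identifiers used by the generated wrappers, so it gets a '_bn' suffix on
+# the way in and the suffix is stripped again on the way out.  Hypothetical
+# illustration (not called anywhere):
+def _rmbadname_example():
+    assert rmbadname(['len', 'n']) == ['len_bn', 'n']
+    assert undo_rmbadname(['len_bn', 'n']) == ['len', 'n']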
+
+
+_has_f_header = re.compile(r'-\*-\s*fortran\s*-\*-', re.I).search
+_has_f90_header = re.compile(r'-\*-\s*f90\s*-\*-', re.I).search
+_has_fix_header = re.compile(r'-\*-\s*fix\s*-\*-', re.I).search
+_free_f90_start = re.compile(r'[^c*]\s*[^\s\d\t]', re.I).match
+
+# Extensions
+COMMON_FREE_EXTENSIONS = ['.f90', '.f95', '.f03', '.f08']
+COMMON_FIXED_EXTENSIONS = ['.for', '.ftn', '.f77', '.f']
+
+
+def openhook(filename, mode):
+ """Ensures that filename is opened with correct encoding parameter.
+
+ This function uses the charset_normalizer package, when available, to
+ determine the encoding of the file to be opened. When charset_normalizer
+ is not available, only UTF encodings marked with a byte-order mark are
+ detected, and ASCII encoding is used as the fallback.
+ """
+ # Reads in the entire file. Robust detection of encoding.
+ # Correctly handles comments or late stage unicode characters
+ # gh-22871
+ if charset_normalizer is not None:
+ encoding = charset_normalizer.from_path(filename).best().encoding
+ else:
+ # hint: install charset_normalizer for correct encoding handling
+ # No need to read the whole file for trying with startswith
+ nbytes = min(32, os.path.getsize(filename))
+ with open(filename, 'rb') as fhandle:
+ raw = fhandle.read(nbytes)
+ if raw.startswith(codecs.BOM_UTF8):
+ encoding = 'UTF-8-SIG'
+ elif raw.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)):
+ encoding = 'UTF-32'
+ elif raw.startswith((codecs.BOM_LE, codecs.BOM_BE)):
+ encoding = 'UTF-16'
+ else:
+ # Fallback, without charset_normalizer
+ encoding = 'ascii'
+ return open(filename, mode, encoding=encoding)
+
+
+def is_free_format(fname):
+ """Check if file is in free format Fortran."""
+ # f90 allows both fixed and free format, assuming fixed unless
+ # signs of free format are detected.
+ result = False
+ if Path(fname).suffix.lower() in COMMON_FREE_EXTENSIONS:
+ result = True
+ with openhook(fname, 'r') as fhandle:
+ line = fhandle.readline()
+ n = 15 # the number of non-comment lines to scan for hints
+ if _has_f_header(line):
+ n = 0
+ elif _has_f90_header(line):
+ n = 0
+ result = True
+ while n > 0 and line:
+ if line[0] != '!' and line.strip():
+ n -= 1
+ if (line[0] != '\t' and _free_f90_start(line[:5])) or line[-2:-1] == '&':
+ result = True
+ break
+ line = fhandle.readline()
+ return result
+
+
+# Read fortran (77,90) code
+def readfortrancode(ffile, dowithline=show, istop=1):
+ """
+ Read fortran codes from files and
+ 1) Get rid of comments, line continuations, and empty lines; lower cases.
+ 2) Call dowithline(line) on every line.
+ 3) Recursively call itself when statement \"include '<filename>'\" is met.
+ """
+ global gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77
+ global beginpattern, quiet, verbose, dolowercase, include_paths
+
+ if not istop:
+ saveglobals = gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
+ beginpattern, quiet, verbose, dolowercase
+ if ffile == []:
+ return
+ localdolowercase = dolowercase
+ # cont: set to True when the content of the last line read
+ # indicates statement continuation
+ cont = False
+ finalline = ''
+ ll = ''
+ includeline = re.compile(
+ r'\s*include\s*(\'|")(?P<name>[^\'"]*)(\'|")', re.I)
+ cont1 = re.compile(r'(?P<line>.*)&\s*\Z')
+ cont2 = re.compile(r'(\s*&|)(?P<line>.*)')
+ mline_mark = re.compile(r".*?'''")
+ if istop:
+ dowithline('', -1)
+ ll, l1 = '', ''
+ spacedigits = [' '] + [str(_m) for _m in range(10)]
+ filepositiontext = ''
+ fin = fileinput.FileInput(ffile, openhook=openhook)
+ while True:
+ try:
+ l = fin.readline()
+ except UnicodeDecodeError as msg:
+ raise Exception(
+ f'readfortrancode: reading {fin.filename()}#{fin.lineno()}'
+ f' failed with\n{msg}.\nIt is likely that installing charset_normalizer'
+ ' package will help f2py determine the input file encoding'
+ ' correctly.')
+ if not l:
+ break
+ if fin.isfirstline():
+ filepositiontext = ''
+ currentfilename = fin.filename()
+ gotnextfile = 1
+ l1 = l
+ strictf77 = 0
+ sourcecodeform = 'fix'
+ ext = os.path.splitext(currentfilename)[1]
+ if Path(currentfilename).suffix.lower() in COMMON_FIXED_EXTENSIONS and \
+ not (_has_f90_header(l) or _has_fix_header(l)):
+ strictf77 = 1
+ elif is_free_format(currentfilename) and not _has_fix_header(l):
+ sourcecodeform = 'free'
+ if strictf77:
+ beginpattern = beginpattern77
+ else:
+ beginpattern = beginpattern90
+ outmess('\tReading file %s (format:%s%s)\n'
+ % (repr(currentfilename), sourcecodeform,
+ strictf77 and ',strict' or ''))
+
+ l = l.expandtabs().replace('\xa0', ' ')
+ # Get rid of newline characters
+ while not l == '':
+ if l[-1] not in "\n\r\f":
+ break
+ l = l[:-1]
+ if not strictf77:
+ (l, rl) = split_by_unquoted(l, '!')
+ l += ' '
+ if rl[:5].lower() == '!f2py': # f2py directive
+ l, _ = split_by_unquoted(l + 4 * ' ' + rl[5:], '!')
+ if l.strip() == '': # Skip empty line
+ if sourcecodeform == 'free':
+ # In free form, a statement continues in the next line
+ # that is not a comment line [3.3.2.4^1], lines with
+ # blanks are comment lines [3.3.2.3^1]. Hence, the
+ # line continuation flag must retain its state.
+ pass
+ else:
+ # In fixed form, statement continuation is determined
+ # by a non-blank character at the 6-th position. Empty
+ # line indicates a start of a new statement
+ # [3.3.3.3^1]. Hence, the line continuation flag must
+ # be reset.
+ cont = False
+ continue
+ if sourcecodeform == 'fix':
+ if l[0] in ['*', 'c', '!', 'C', '#']:
+ if l[1:5].lower() == 'f2py': # f2py directive
+ l = ' ' + l[5:]
+ else: # Skip comment line
+ cont = False
+ continue
+ elif strictf77:
+ if len(l) > 72:
+ l = l[:72]
+ if not (l[0] in spacedigits):
+ raise Exception('readfortrancode: Found non-(space,digit) char '
+ 'in the first column.\n\tAre you sure that '
+ 'this code is in fix form?\n\tline=%s' % repr(l))
+
+ if (not cont or strictf77) and (len(l) > 5 and not l[5] == ' '):
+ # Continuation of a previous line
+ ll = ll + l[6:]
+ finalline = ''
+ origfinalline = ''
+ else:
+ if not strictf77:
+ # F90 continuation
+ r = cont1.match(l)
+ if r:
+ l = r.group('line') # Continuation follows ..
+ if cont:
+ ll = ll + cont2.match(l).group('line')
+ finalline = ''
+ origfinalline = ''
+ else:
+ # clean up line beginning from possible digits.
+ l = ' ' + l[5:]
+ if localdolowercase:
+ finalline = ll.lower()
+ else:
+ finalline = ll
+ origfinalline = ll
+ ll = l
+ cont = (r is not None)
+ else:
+ # clean up line beginning from possible digits.
+ l = ' ' + l[5:]
+ if localdolowercase:
+ finalline = ll.lower()
+ else:
+ finalline = ll
+ origfinalline = ll
+ ll = l
+
+ elif sourcecodeform == 'free':
+ if not cont and ext == '.pyf' and mline_mark.match(l):
+ l = l + '\n'
+ while True:
+ lc = fin.readline()
+ if not lc:
+ errmess(
+ 'Unexpected end of file when reading multiline\n')
+ break
+ l = l + lc
+ if mline_mark.match(lc):
+ break
+ l = l.rstrip()
+ r = cont1.match(l)
+ if r:
+ l = r.group('line') # Continuation follows ..
+ if cont:
+ ll = ll + cont2.match(l).group('line')
+ finalline = ''
+ origfinalline = ''
+ else:
+ if localdolowercase:
+ finalline = ll.lower()
+ else:
+ finalline = ll
+ origfinalline = ll
+ ll = l
+ cont = (r is not None)
+ else:
+ raise ValueError(
+ "Flag sourcecodeform must be either 'fix' or 'free': %s" % repr(sourcecodeform))
+ filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
+ fin.filelineno() - 1, currentfilename, l1)
+ m = includeline.match(origfinalline)
+ if m:
+ fn = m.group('name')
+ if os.path.isfile(fn):
+ readfortrancode(fn, dowithline=dowithline, istop=0)
+ else:
+ include_dirs = [
+ os.path.dirname(currentfilename)] + include_paths
+ foundfile = 0
+ for inc_dir in include_dirs:
+ fn1 = os.path.join(inc_dir, fn)
+ if os.path.isfile(fn1):
+ foundfile = 1
+ readfortrancode(fn1, dowithline=dowithline, istop=0)
+ break
+ if not foundfile:
+ outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
+ repr(fn), os.pathsep.join(include_dirs)))
+ else:
+ dowithline(finalline)
+ l1 = ll
+ if localdolowercase:
+ finalline = ll.lower()
+ else:
+ finalline = ll
+ origfinalline = ll
+ filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
+ fin.filelineno() - 1, currentfilename, l1)
+ m = includeline.match(origfinalline)
+ if m:
+ fn = m.group('name')
+ if os.path.isfile(fn):
+ readfortrancode(fn, dowithline=dowithline, istop=0)
+ else:
+ include_dirs = [os.path.dirname(currentfilename)] + include_paths
+ foundfile = 0
+ for inc_dir in include_dirs:
+ fn1 = os.path.join(inc_dir, fn)
+ if os.path.isfile(fn1):
+ foundfile = 1
+ readfortrancode(fn1, dowithline=dowithline, istop=0)
+ break
+ if not foundfile:
+ outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
+ repr(fn), os.pathsep.join(include_dirs)))
+ else:
+ dowithline(finalline)
+ filepositiontext = ''
+ fin.close()
+ if istop:
+ dowithline('', 1)
+ else:
+ gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
+ beginpattern, quiet, verbose, dolowercase = saveglobals
+
+# Crack line
+beforethisafter = r'\s*(?P<before>%s(?=\s*(\b(%s)\b)))' + \
+ r'\s*(?P<this>(\b(%s)\b))' + \
+ r'\s*(?P<after>%s)\s*\Z'
+##
+fortrantypes = r'character|logical|integer|real|complex|double\s*(precision\s*(complex|)|complex)|type(?=\s*\([\w\s,=(*)]*\))|byte'
+typespattern = re.compile(
+ beforethisafter % ('', fortrantypes, fortrantypes, '.*'), re.I), 'type'
+typespattern4implicit = re.compile(beforethisafter % (
+ '', fortrantypes + '|static|automatic|undefined', fortrantypes + '|static|automatic|undefined', '.*'), re.I)
+#
+functionpattern = re.compile(beforethisafter % (
+ r'([a-z]+[\w\s(=*+-/)]*?|)', 'function', 'function', '.*'), re.I), 'begin'
+subroutinepattern = re.compile(beforethisafter % (
+ r'[a-z\s]*?', 'subroutine', 'subroutine', '.*'), re.I), 'begin'
+# modulepattern=re.compile(beforethisafter%('[a-z\s]*?','module','module','.*'),re.I),'begin'
+#
+groupbegins77 = r'program|block\s*data'
+beginpattern77 = re.compile(
+ beforethisafter % ('', groupbegins77, groupbegins77, '.*'), re.I), 'begin'
+groupbegins90 = groupbegins77 + \
+ r'|module(?!\s*procedure)|python\s*module|(abstract|)\s*interface|' + \
+ r'type(?!\s*\()'
+beginpattern90 = re.compile(
+ beforethisafter % ('', groupbegins90, groupbegins90, '.*'), re.I), 'begin'
+groupends = (r'end|endprogram|endblockdata|endmodule|endpythonmodule|'
+ r'endinterface|endsubroutine|endfunction')
+endpattern = re.compile(
+ beforethisafter % ('', groupends, groupends, '.*'), re.I), 'end'
+# block, the Fortran 2008 construct needs special handling in the rest of the file
+endifs = r'end\s*(if|do|where|select|while|forall|associate|' + \
+ r'critical|enum|team)'
+endifpattern = re.compile(
+ beforethisafter % (r'[\w]*?', endifs, endifs, '.*'), re.I), 'endif'
+#
+moduleprocedures = r'module\s*procedure'
+moduleprocedurepattern = re.compile(
+ beforethisafter % ('', moduleprocedures, moduleprocedures, '.*'), re.I), \
+ 'moduleprocedure'
+implicitpattern = re.compile(
+ beforethisafter % ('', 'implicit', 'implicit', '.*'), re.I), 'implicit'
+dimensionpattern = re.compile(beforethisafter % (
+ '', 'dimension|virtual', 'dimension|virtual', '.*'), re.I), 'dimension'
+externalpattern = re.compile(
+ beforethisafter % ('', 'external', 'external', '.*'), re.I), 'external'
+optionalpattern = re.compile(
+ beforethisafter % ('', 'optional', 'optional', '.*'), re.I), 'optional'
+requiredpattern = re.compile(
+ beforethisafter % ('', 'required', 'required', '.*'), re.I), 'required'
+publicpattern = re.compile(
+ beforethisafter % ('', 'public', 'public', '.*'), re.I), 'public'
+privatepattern = re.compile(
+ beforethisafter % ('', 'private', 'private', '.*'), re.I), 'private'
+intrinsicpattern = re.compile(
+ beforethisafter % ('', 'intrinsic', 'intrinsic', '.*'), re.I), 'intrinsic'
+intentpattern = re.compile(beforethisafter % (
+ '', 'intent|depend|note|check', 'intent|depend|note|check', r'\s*\(.*?\).*'), re.I), 'intent'
+parameterpattern = re.compile(
+ beforethisafter % ('', 'parameter', 'parameter', r'\s*\(.*'), re.I), 'parameter'
+datapattern = re.compile(
+ beforethisafter % ('', 'data', 'data', '.*'), re.I), 'data'
+callpattern = re.compile(
+ beforethisafter % ('', 'call', 'call', '.*'), re.I), 'call'
+entrypattern = re.compile(
+ beforethisafter % ('', 'entry', 'entry', '.*'), re.I), 'entry'
+callfunpattern = re.compile(
+ beforethisafter % ('', 'callfun', 'callfun', '.*'), re.I), 'callfun'
+commonpattern = re.compile(
+ beforethisafter % ('', 'common', 'common', '.*'), re.I), 'common'
+usepattern = re.compile(
+ beforethisafter % ('', 'use', 'use', '.*'), re.I), 'use'
+containspattern = re.compile(
+ beforethisafter % ('', 'contains', 'contains', ''), re.I), 'contains'
+formatpattern = re.compile(
+ beforethisafter % ('', 'format', 'format', '.*'), re.I), 'format'
+# Non-fortran and f2py-specific statements
+f2pyenhancementspattern = re.compile(beforethisafter % ('', 'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef',
+ 'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef', '.*'), re.I | re.S), 'f2pyenhancements'
+multilinepattern = re.compile(
+ r"\s*(?P''')(?P.*?)(?P''')\s*\Z", re.S), 'multiline'
+##
+
+def split_by_unquoted(line, characters):
+ """
+ Splits the line into (line[:i], line[i:]),
+ where i is the index of the first occurrence of one of the characters
+ not within quotes, or len(line) if no such index exists.
+ """
+ assert not (set('"\'') & set(characters)), "cannot split by unquoted quotes"
+ r = re.compile(
+ r"\A(?P({single_quoted}|{double_quoted}|{not_quoted})*)"
+ r"(?P{char}.*)\Z".format(
+ not_quoted="[^\"'{}]".format(re.escape(characters)),
+ char="[{}]".format(re.escape(characters)),
+ single_quoted=r"('([^'\\]|(\\.))*')",
+ double_quoted=r'("([^"\\]|(\\.))*")'))
+ m = r.match(line)
+ if m:
+ d = m.groupdict()
+ return (d["before"], d["after"])
+ return (line, "")
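+# Illustrative examples (added note, not part of the original source); they
+# assume the regex above behaves as documented, splitting at the first
+# occurrence of a target character that is outside quotes:
+#   split_by_unquoted("x = 1; y = 2", ";")        -> ("x = 1", "; y = 2")
+#   split_by_unquoted("print *, 'a;b'; end", ";") -> ("print *, 'a;b'", "; end")
+#   split_by_unquoted("no split here", ";")       -> ("no split here", "")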
+
+def _simplifyargs(argsline):
+ a = []
+ for n in markoutercomma(argsline).split('@,@'):
+ for r in '(),':
+ n = n.replace(r, '_')
+ a.append(n)
+ return ','.join(a)
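+# Added note (not from the original source): commas nested inside
+# parentheses are protected by markoutercomma() defined below, and the
+# remaining '(', ')' and ',' characters are replaced by '_', so, assuming
+# that helper, e.g. _simplifyargs("x, y(n)") -> "x, y_n_".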
+
+crackline_re_1 = re.compile(r'\s*(?P<result>\b[a-z]+\w*\b)\s*=.*', re.I)
+crackline_bind_1 = re.compile(r'\s*(?P<bind>\b[a-z]+\w*\b)\s*=.*', re.I)
+crackline_bindlang = re.compile(r'\s*bind\(\s*(?P<lang>[^,]+)\s*,\s*name\s*=\s*"(?P<lang_name>[^"]+)"\s*\)', re.I)
+
+def crackline(line, reset=0):
+ """
+ reset=-1 --- initialize
+ reset=0 --- crack the line
+ reset=1 --- final check if mismatch of blocks occurred
+
+ Cracked data is saved in grouplist[0].
+ """
+ global beginpattern, groupcounter, groupname, groupcache, grouplist
+ global filepositiontext, currentfilename, neededmodule, expectbegin
+ global skipblocksuntil, skipemptyends, previous_context, gotnextfile
+
+ _, has_semicolon = split_by_unquoted(line, ";")
+ if has_semicolon and not (f2pyenhancementspattern[0].match(line) or
+ multilinepattern[0].match(line)):
+ # XXX: non-zero reset values need testing
+ assert reset == 0, repr(reset)
+ # split line on unquoted semicolons
+ line, semicolon_line = split_by_unquoted(line, ";")
+ while semicolon_line:
+ crackline(line, reset)
+ line, semicolon_line = split_by_unquoted(semicolon_line[1:], ";")
+ crackline(line, reset)
+ return
+ if reset < 0:
+ groupcounter = 0
+ groupname = {groupcounter: ''}
+ groupcache = {groupcounter: {}}
+ grouplist = {groupcounter: []}
+ groupcache[groupcounter]['body'] = []
+ groupcache[groupcounter]['vars'] = {}
+ groupcache[groupcounter]['block'] = ''
+ groupcache[groupcounter]['name'] = ''
+ neededmodule = -1
+ skipblocksuntil = -1
+ return
+ if reset > 0:
+ fl = 0
+ if f77modulename and neededmodule == groupcounter:
+ fl = 2
+ while groupcounter > fl:
+ outmess('crackline: groupcounter=%s groupname=%s\n' %
+ (repr(groupcounter), repr(groupname)))
+ outmess(
+ 'crackline: Mismatch of blocks encountered. Trying to fix it by assuming "end" statement.\n')
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1
+ if f77modulename and neededmodule == groupcounter:
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1 # end interface
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1 # end module
+ neededmodule = -1
+ return
+ if line == '':
+ return
+ flag = 0
+ for pat in [dimensionpattern, externalpattern, intentpattern, optionalpattern,
+ requiredpattern,
+ parameterpattern, datapattern, publicpattern, privatepattern,
+ intrinsicpattern,
+ endifpattern, endpattern,
+ formatpattern,
+ beginpattern, functionpattern, subroutinepattern,
+ implicitpattern, typespattern, commonpattern,
+ callpattern, usepattern, containspattern,
+ entrypattern,
+ f2pyenhancementspattern,
+ multilinepattern,
+ moduleprocedurepattern
+ ]:
+ m = pat[0].match(line)
+ if m:
+ break
+ flag = flag + 1
+ if not m:
+ re_1 = crackline_re_1
+ if 0 <= skipblocksuntil <= groupcounter:
+ return
+ if 'externals' in groupcache[groupcounter]:
+ for name in groupcache[groupcounter]['externals']:
+ if name in invbadnames:
+ name = invbadnames[name]
+ if 'interfaced' in groupcache[groupcounter] and name in groupcache[groupcounter]['interfaced']:
+ continue
+ m1 = re.match(
+ r'(?P[^"]*)\b%s\b\s*@\(@(?P[^@]*)@\)@.*\Z' % name, markouterparen(line), re.I)
+ if m1:
+ m2 = re_1.match(m1.group('before'))
+ a = _simplifyargs(m1.group('args'))
+ if m2:
+ line = 'callfun %s(%s) result (%s)' % (
+ name, a, m2.group('result'))
+ else:
+ line = 'callfun %s(%s)' % (name, a)
+ m = callfunpattern[0].match(line)
+ if not m:
+ outmess(
+ 'crackline: could not resolve function call for line=%s.\n' % repr(line))
+ return
+ analyzeline(m, 'callfun', line)
+ return
+ if verbose > 1 or (verbose == 1 and currentfilename.lower().endswith('.pyf')):
+ previous_context = None
+ outmess('crackline:%d: No pattern for line\n' % (groupcounter))
+ return
+ elif pat[1] == 'end':
+ if 0 <= skipblocksuntil < groupcounter:
+ groupcounter = groupcounter - 1
+ if skipblocksuntil <= groupcounter:
+ return
+ if groupcounter <= 0:
+ raise Exception('crackline: groupcounter(=%s) is nonpositive. '
+ 'Check the blocks.'
+ % (groupcounter))
+ m1 = beginpattern[0].match(line)
+ if (m1) and (not m1.group('this') == groupname[groupcounter]):
+ raise Exception('crackline: End group %s does not match with '
+ 'previous Begin group %s\n\t%s' %
+ (repr(m1.group('this')), repr(groupname[groupcounter]),
+ filepositiontext)
+ )
+ if skipblocksuntil == groupcounter:
+ skipblocksuntil = -1
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1
+ if not skipemptyends:
+ expectbegin = 1
+ elif pat[1] == 'begin':
+ if 0 <= skipblocksuntil <= groupcounter:
+ groupcounter = groupcounter + 1
+ return
+ gotnextfile = 0
+ analyzeline(m, pat[1], line)
+ expectbegin = 0
+ elif pat[1] == 'endif':
+ pass
+ elif pat[1] == 'moduleprocedure':
+ analyzeline(m, pat[1], line)
+ elif pat[1] == 'contains':
+ if ignorecontains:
+ return
+ if 0 <= skipblocksuntil <= groupcounter:
+ return
+ skipblocksuntil = groupcounter
+ else:
+ if 0 <= skipblocksuntil <= groupcounter:
+ return
+ analyzeline(m, pat[1], line)
+
+
+def markouterparen(line):
+ l = ''
+ f = 0
+ for c in line:
+ if c == '(':
+ f = f + 1
+ if f == 1:
+ l = l + '@(@'
+ continue
+ elif c == ')':
+ f = f - 1
+ if f == 0:
+ l = l + '@)@'
+ continue
+ l = l + c
+ return l
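+# Added example (not from the original source): only the outermost
+# parentheses are turned into '@(@'/'@)@' markers, e.g.
+#   markouterparen("a(b(c),d),e") -> "a@(@b(c),d@)@,e"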
+
+
+def markoutercomma(line, comma=','):
+ l = ''
+ f = 0
+ before, after = split_by_unquoted(line, comma + '()')
+ l += before
+ while after:
+ if (after[0] == comma) and (f == 0):
+ l += '@' + comma + '@'
+ else:
+ l += after[0]
+ if after[0] == '(':
+ f += 1
+ elif after[0] == ')':
+ f -= 1
+ before, after = split_by_unquoted(after[1:], comma + '()')
+ l += before
+ assert not f, repr((f, line, l))
+ return l
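+# Added example (not from the original source): commas at parenthesis depth
+# zero are wrapped as '@,@' so later splits ignore nested argument lists,
+# e.g. markoutercomma("a,b(c,d),e") -> "a@,@b(c,d)@,@e"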
+
+def unmarkouterparen(line):
+ r = line.replace('@(@', '(').replace('@)@', ')')
+ return r
+
+
+def appenddecl(decl, decl2, force=1):
+ if not decl:
+ decl = {}
+ if not decl2:
+ return decl
+ if decl is decl2:
+ return decl
+ for k in list(decl2.keys()):
+ if k == 'typespec':
+ if force or k not in decl:
+ decl[k] = decl2[k]
+ elif k == 'attrspec':
+ for l in decl2[k]:
+ decl = setattrspec(decl, l, force)
+ elif k == 'kindselector':
+ decl = setkindselector(decl, decl2[k], force)
+ elif k == 'charselector':
+ decl = setcharselector(decl, decl2[k], force)
+ elif k in ['=', 'typename']:
+ if force or k not in decl:
+ decl[k] = decl2[k]
+ elif k == 'note':
+ pass
+ elif k in ['intent', 'check', 'dimension', 'optional',
+ 'required', 'depend']:
+ errmess('appenddecl: "%s" not implemented.\n' % k)
+ else:
+ raise Exception('appenddecl: Unknown variable definition key: ' +
+ str(k))
+ return decl
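+# Added note (hypothetical shape, inferred from the keys handled above): a
+# declaration dict passed to appenddecl typically looks like
+#   {'typespec': 'real', 'kindselector': {...}, 'attrspec': ['dimension(n)'], '=': '1.0'}
+# and decl2 is merged into decl, with force deciding whether scalar keys
+# such as 'typespec' may be overwritten.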
+
+selectpattern = re.compile(
+ r'\s*(?P<this>(@\(@.*?@\)@|\*[\d*]+|\*\s*@\(@.*?@\)@|))(?P<after>.*)\Z', re.I)
+typedefpattern = re.compile(
+ r'(?:,(?P<attributes>[\w(),]+))?(::)?(?P<name>\b[a-z$_][\w$]*\b)'
+ r'(?:\((?P<params>[\w,]*)\))?\Z', re.I)
+nameargspattern = re.compile(
+ r'\s*(?P<name>\b[\w$]+\b)\s*(@\(@\s*(?P<args>[\w\s,]*)\s*@\)@|)\s*((result(\s*@\(@\s*(?P<result>\b[\w$]+\b)\s*@\)@|))|(bind\s*@\(@\s*(?P<bind>(?:(?!@\)@).)*)\s*@\)@))*\s*\Z', re.I)
+operatorpattern = re.compile(
+ r'\s*(?P<scheme>(operator|assignment))'
+ r'@\(@\s*(?P<name>[^)]+)\s*@\)@\s*\Z', re.I)
+callnameargspattern = re.compile(
+ r'\s*(?P<name>\b[\w$]+\b)\s*@\(@\s*(?P<args>.*)\s*@\)@\s*\Z', re.I)
+real16pattern = re.compile(
+ r'([-+]?(?:\d+(?:\.\d*)?|\d*\.\d+))[dD]((?:[-+]?\d+)?)')
+real8pattern = re.compile(
+ r'([-+]?((?:\d+(?:\.\d*)?|\d*\.\d+))[eE]((?:[-+]?\d+)?)|(\d+\.\d*))')
+
+_intentcallbackpattern = re.compile(r'intent\s*\(.*?\bcallback\b', re.I)
+
+
+def _is_intent_callback(vdecl):
+ for a in vdecl.get('attrspec', []):
+ if _intentcallbackpattern.match(a):
+ return 1
+ return 0
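+# Added examples (hypothetical inputs, not from the original source):
+#   _is_intent_callback({'attrspec': ['intent(callback)']}) -> 1
+#   _is_intent_callback({'attrspec': ['intent(in)']})       -> 0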
+
+
+def _resolvetypedefpattern(line):
+ line = ''.join(line.split()) # removes whitespace
+ m1 = typedefpattern.match(line)
+ if m1:
+ attrs = m1.group('attributes')
+ attrs = [a.lower() for a in attrs.split(',')] if attrs else []
+ return m1.group('name'), attrs, m1.group('params')
+ return None, [], None
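+# Added example (assumption about the expected input, not from the original
+# source): the argument is the text following the 'type' keyword, e.g.
+#   _resolvetypedefpattern(", public :: point") -> ('point', ['public'], None)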
+
+def parse_name_for_bind(line):
+ pattern = re.compile(r'bind\(\s*(?P<lang>[^,]+)(?:\s*,\s*name\s*=\s*["\'](?P<name>[^"\']+)["\']\s*)?\)', re.I)
+ match = pattern.search(line)
+ bind_statement = None
+ if match:
+ bind_statement = match.group(0)
+ # Remove the 'bind' construct from the line.
+ line = line[:match.start()] + line[match.end():]
+ return line, bind_statement
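+# Added example (hypothetical line, not from the original source):
+#   parse_name_for_bind('foo(a) bind(c, name="c_foo")')
+#     -> ('foo(a) ', 'bind(c, name="c_foo")')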
+
+def _resolvenameargspattern(line):
+ line, bind_cname = parse_name_for_bind(line)
+ line = markouterparen(line)
+ m1 = nameargspattern.match(line)
+ if m1:
+ return m1.group('name'), m1.group('args'), m1.group('result'), bind_cname
+ m1 = operatorpattern.match(line)
+ if m1:
+ name = m1.group('scheme') + '(' + m1.group('name') + ')'
+ return name, [], None, None
+ m1 = callnameargspattern.match(line)
+ if m1:
+ return m1.group('name'), m1.group('args'), None, None
+ return None, [], None, None
+
+
+def analyzeline(m, case, line):
+ """
+ Reads each line in the input file in sequence and updates global vars.
+
+ Effectively reads and collects information from the input file into the
+ global variable groupcache, a dictionary containing info about each part
+ of the Fortran module.
+
+ At the end of analyzeline, information is filtered into the correct dict
+ keys, but parameter values and dimensions are not yet interpreted.
+ """
+ global groupcounter, groupname, groupcache, grouplist, filepositiontext
+ global currentfilename, f77modulename, neededinterface, neededmodule
+ global expectbegin, gotnextfile, previous_context
+
+ block = m.group('this')
+ if case != 'multiline':
+ previous_context = None
+ if expectbegin and case not in ['begin', 'call', 'callfun', 'type'] \
+ and not skipemptyends and groupcounter < 1:
+ newname = os.path.basename(currentfilename).split('.')[0]
+ outmess(
+ 'analyzeline: no group yet. Creating program group with name "%s".\n' % newname)
+ gotnextfile = 0
+ groupcounter = groupcounter + 1
+ groupname[groupcounter] = 'program'
+ groupcache[groupcounter] = {}
+ grouplist[groupcounter] = []
+ groupcache[groupcounter]['body'] = []
+ groupcache[groupcounter]['vars'] = {}
+ groupcache[groupcounter]['block'] = 'program'
+ groupcache[groupcounter]['name'] = newname
+ groupcache[groupcounter]['from'] = 'fromsky'
+ expectbegin = 0
+ if case in ['begin', 'call', 'callfun']:
+ # Crack line => block,name,args,result
+ block = block.lower()
+ if re.match(r'block\s*data', block, re.I):
+ block = 'block data'
+ elif re.match(r'python\s*module', block, re.I):
+ block = 'python module'
+ elif re.match(r'abstract\s*interface', block, re.I):
+ block = 'abstract interface'
+ if block == 'type':
+ name, attrs, _ = _resolvetypedefpattern(m.group('after'))
+ groupcache[groupcounter]['vars'][name] = dict(attrspec=attrs)
+ args = []
+ result = None
+ else:
+ name, args, result, bindcline = _resolvenameargspattern(m.group('after'))
+ if name is None:
+ if block == 'block data':
+ name = '_BLOCK_DATA_'
+ else:
+ name = ''
+ if block not in ['interface', 'block data', 'abstract interface']:
+ outmess('analyzeline: No name/args pattern found for line.\n')
+
+ previous_context = (block, name, groupcounter)
+ if args:
+ args = rmbadname([x.strip()
+ for x in markoutercomma(args).split('@,@')])
+ else:
+ args = []
+ if '' in args:
+ while '' in args:
+ args.remove('')
+ outmess(
+ 'analyzeline: argument list is malformed (missing argument).\n')
+
+ # end of crack line => block,name,args,result
+ needmodule = 0
+ needinterface = 0
+
+ if case in ['call', 'callfun']:
+ needinterface = 1
+ if 'args' not in groupcache[groupcounter]:
+ return
+ if name not in groupcache[groupcounter]['args']:
+ return
+ for it in grouplist[groupcounter]:
+ if it['name'] == name:
+ return
+ if name in groupcache[groupcounter]['interfaced']:
+ return
+ block = {'call': 'subroutine', 'callfun': 'function'}[case]
+ if f77modulename and neededmodule == -1 and groupcounter <= 1:
+ neededmodule = groupcounter + 2
+ needmodule = 1
+ if block not in ['interface', 'abstract interface']:
+ needinterface = 1
+ # Create new block(s)
+ groupcounter = groupcounter + 1
+ groupcache[groupcounter] = {}
+ grouplist[groupcounter] = []
+ if needmodule:
+ if verbose > 1:
+ outmess('analyzeline: Creating module block %s\n' %
+ repr(f77modulename), 0)
+ groupname[groupcounter] = 'module'
+ groupcache[groupcounter]['block'] = 'python module'
+ groupcache[groupcounter]['name'] = f77modulename
+ groupcache[groupcounter]['from'] = ''
+ groupcache[groupcounter]['body'] = []
+ groupcache[groupcounter]['externals'] = []
+ groupcache[groupcounter]['interfaced'] = []
+ groupcache[groupcounter]['vars'] = {}
+ groupcounter = groupcounter + 1
+ groupcache[groupcounter] = {}
+ grouplist[groupcounter] = []
+ if needinterface:
+ if verbose > 1:
+ outmess('analyzeline: Creating additional interface block (groupcounter=%s).\n' % (
+ groupcounter), 0)
+ groupname[groupcounter] = 'interface'
+ groupcache[groupcounter]['block'] = 'interface'
+ groupcache[groupcounter]['name'] = 'unknown_interface'
+ groupcache[groupcounter]['from'] = '%s:%s' % (
+ groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
+ groupcache[groupcounter]['body'] = []
+ groupcache[groupcounter]['externals'] = []
+ groupcache[groupcounter]['interfaced'] = []
+ groupcache[groupcounter]['vars'] = {}
+ groupcounter = groupcounter + 1
+ groupcache[groupcounter] = {}
+ grouplist[groupcounter] = []
+ groupname[groupcounter] = block
+ groupcache[groupcounter]['block'] = block
+ if not name:
+ name = 'unknown_' + block.replace(' ', '_')
+ groupcache[groupcounter]['prefix'] = m.group('before')
+ groupcache[groupcounter]['name'] = rmbadname1(name)
+ groupcache[groupcounter]['result'] = result
+ if groupcounter == 1:
+ groupcache[groupcounter]['from'] = currentfilename
+ else:
+ if f77modulename and groupcounter == 3:
+ groupcache[groupcounter]['from'] = '%s:%s' % (
+ groupcache[groupcounter - 1]['from'], currentfilename)
+ else:
+ groupcache[groupcounter]['from'] = '%s:%s' % (
+ groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
+ for k in list(groupcache[groupcounter].keys()):
+ if not groupcache[groupcounter][k]:
+ del groupcache[groupcounter][k]
+
+ groupcache[groupcounter]['args'] = args
+ groupcache[groupcounter]['body'] = []
+ groupcache[groupcounter]['externals'] = []
+ groupcache[groupcounter]['interfaced'] = []
+ groupcache[groupcounter]['vars'] = {}
+ groupcache[groupcounter]['entry'] = {}
+ # end of creation
+ if block == 'type':
+ groupcache[groupcounter]['varnames'] = []
+
+ if case in ['call', 'callfun']: # set parents variables
+ if name not in groupcache[groupcounter - 2]['externals']:
+ groupcache[groupcounter - 2]['externals'].append(name)
+ groupcache[groupcounter]['vars'] = copy.deepcopy(
+ groupcache[groupcounter - 2]['vars'])
+ try:
+ del groupcache[groupcounter]['vars'][name][
+ groupcache[groupcounter]['vars'][name]['attrspec'].index('external')]
+ except Exception:
+ pass
+ if block in ['function', 'subroutine']: # set global attributes
+ # name is fortran name
+ if bindcline:
+ bindcdat = re.search(crackline_bindlang, bindcline)
+ if bindcdat:
+ groupcache[groupcounter]['bindlang'] = {name : {}}
+ groupcache[groupcounter]['bindlang'][name]["lang"] = bindcdat.group('lang')
+ if bindcdat.group('lang_name'):
+ groupcache[groupcounter]['bindlang'][name]["name"] = bindcdat.group('lang_name')
+ try:
+ groupcache[groupcounter]['vars'][name] = appenddecl(
+ groupcache[groupcounter]['vars'][name], groupcache[groupcounter - 2]['vars'][''])
+ except Exception:
+ pass
+ if case == 'callfun': # return type
+ if result and result in groupcache[groupcounter]['vars']:
+ if not name == result:
+ groupcache[groupcounter]['vars'][name] = appenddecl(
+ groupcache[groupcounter]['vars'][name], groupcache[groupcounter]['vars'][result])
+ # if groupcounter>1: # name is interfaced
+ try:
+ groupcache[groupcounter - 2]['interfaced'].append(name)
+ except Exception:
+ pass
+ if block == 'function':
+ t = typespattern[0].match(m.group('before') + ' ' + name)
+ if t:
+ typespec, selector, attr, edecl = cracktypespec0(
+ t.group('this'), t.group('after'))
+ updatevars(typespec, selector, attr, edecl)
+
+ if case in ['call', 'callfun']:
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1 # end routine
+ grouplist[groupcounter - 1].append(groupcache[groupcounter])
+ grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
+ del grouplist[groupcounter]
+ groupcounter = groupcounter - 1 # end interface
+
+ elif case == 'entry':
+ name, args, result, _ = _resolvenameargspattern(m.group('after'))
+ if name is not None:
+ if args:
+ args = rmbadname([x.strip()
+ for x in markoutercomma(args).split('@,@')])
+ else:
+ args = []
+ assert result is None, repr(result)
+ groupcache[groupcounter]['entry'][name] = args
+ previous_context = ('entry', name, groupcounter)
+ elif case == 'type':
+ typespec, selector, attr, edecl = cracktypespec0(
+ block, m.group('after'))
+ last_name = updatevars(typespec, selector, attr, edecl)
+ if last_name is not None:
+ previous_context = ('variable', last_name, groupcounter)
+ elif case in ['dimension', 'intent', 'optional', 'required', 'external', 'public', 'private', 'intrinsic']:
+ edecl = groupcache[groupcounter]['vars']
+ ll = m.group('after').strip()
+ i = ll.find('::')
+ if i < 0 and case == 'intent':
+ i = markouterparen(ll).find('@)@') - 2
+ ll = ll[:i + 1] + '::' + ll[i + 1:]
+ i = ll.find('::')
+ if ll[i:] == '::' and 'args' in groupcache[groupcounter]:
+ outmess('All arguments will have attribute %s%s\n' %
+ (m.group('this'), ll[:i]))
+ ll = ll + ','.join(groupcache[groupcounter]['args'])
+ if i < 0:
+ i = 0
+ pl = ''
+ else:
+ pl = ll[:i].strip()
+ ll = ll[i + 2:]
+ ch = markoutercomma(pl).split('@,@')
+ if len(ch) > 1:
+ pl = ch[0]
+ outmess('analyzeline: cannot handle multiple attributes without type specification. Ignoring %r.\n' % (
+ ','.join(ch[1:])))
+ last_name = None
+
+ for e in [x.strip() for x in markoutercomma(ll).split('@,@')]:
+ m1 = namepattern.match(e)
+ if not m1:
+ if case in ['public', 'private']:
+ k = ''
+ else:
+ print(m.groupdict())
+ outmess('analyzeline: no name pattern found in %s statement for %s. Skipping.\n' % (
+ case, repr(e)))
+ continue
+ else:
+ k = rmbadname1(m1.group('name'))
+ if case in ['public', 'private'] and \
+ (k == 'operator' or k == 'assignment'):
+ k += m1.group('after')
+ if k not in edecl:
+ edecl[k] = {}
+ if case == 'dimension':
+ ap = case + m1.group('after')
+ if case == 'intent':
+ ap = m.group('this') + pl
+ if _intentcallbackpattern.match(ap):
+ if k not in groupcache[groupcounter]['args']:
+ if groupcounter > 1:
+ if '__user__' not in groupcache[groupcounter - 2]['name']:
+ outmess(
+ 'analyzeline: missing __user__ module (could be nothing)\n')
+ # fixes ticket 1693
+ if k != groupcache[groupcounter]['name']:
+ outmess('analyzeline: appending intent(callback) %s'
+ ' to %s arguments\n' % (k, groupcache[groupcounter]['name']))
+ groupcache[groupcounter]['args'].append(k)
+ else:
+ errmess(
+ 'analyzeline: intent(callback) %s is ignored\n' % (k))
+ else:
+ errmess('analyzeline: intent(callback) %s is already'
+ ' in argument list\n' % (k))
+ if case in ['optional', 'required', 'public', 'external', 'private', 'intrinsic']:
+ ap = case
+ if 'attrspec' in edecl[k]:
+ edecl[k]['attrspec'].append(ap)
+ else:
+ edecl[k]['attrspec'] = [ap]
+ if case == 'external':
+ if groupcache[groupcounter]['block'] == 'program':
+ outmess('analyzeline: ignoring program arguments\n')
+ continue
+ if k not in groupcache[groupcounter]['args']:
+ continue
+ if 'externals' not in groupcache[groupcounter]:
+ groupcache[groupcounter]['externals'] = []
+ groupcache[groupcounter]['externals'].append(k)
+ last_name = k
+ groupcache[groupcounter]['vars'] = edecl
+ if last_name is not None:
+ previous_context = ('variable', last_name, groupcounter)
+ elif case == 'moduleprocedure':
+ groupcache[groupcounter]['implementedby'] = \
+ [x.strip() for x in m.group('after').split(',')]
+ elif case == 'parameter':
+ edecl = groupcache[groupcounter]['vars']
+ ll = m.group('after').strip()[1:-1]
+ last_name = None
+ for e in markoutercomma(ll).split('@,@'):
+ try:
+ k, initexpr = [x.strip() for x in e.split('=')]
+ except Exception:
+ outmess(
+ 'analyzeline: could not extract name,expr in parameter statement "%s" of "%s"\n' % (e, ll))
+ continue
+ params = get_parameters(edecl)
+ k = rmbadname1(k)
+ if k not in edecl:
+ edecl[k] = {}
+ if '=' in edecl[k] and (not edecl[k]['='] == initexpr):
+ outmess('analyzeline: Overwriting the value of parameter "%s" ("%s") with "%s".\n' % (
+ k, edecl[k]['='], initexpr))
+ t = determineexprtype(initexpr, params)
+ if t:
+ if t.get('typespec') == 'real':
+ tt = list(initexpr)
+ for m in real16pattern.finditer(initexpr):
+ tt[m.start():m.end()] = list(
+ initexpr[m.start():m.end()].lower().replace('d', 'e'))
+ initexpr = ''.join(tt)
+ elif t.get('typespec') == 'complex':
+ initexpr = initexpr[1:].lower().replace('d', 'e').\
+ replace(',', '+1j*(')
+ try:
+ v = eval(initexpr, {}, params)
+ except (SyntaxError, NameError, TypeError) as msg:
+ errmess('analyzeline: Failed to evaluate %r. Ignoring: %s\n'
+ % (initexpr, msg))
+ continue
+ edecl[k]['='] = repr(v)
+ if 'attrspec' in edecl[k]:
+ edecl[k]['attrspec'].append('parameter')
+ else:
+ edecl[k]['attrspec'] = ['parameter']
+ last_name = k
+ groupcache[groupcounter]['vars'] = edecl
+ if last_name is not None:
+ previous_context = ('variable', last_name, groupcounter)
+ elif case == 'implicit':
+ if m.group('after').strip().lower() == 'none':
+ groupcache[groupcounter]['implicit'] = None
+ elif m.group('after'):
+ if 'implicit' in groupcache[groupcounter]:
+ impl = groupcache[groupcounter]['implicit']
+ else:
+ impl = {}
+ if impl is None:
+ outmess(
+ 'analyzeline: Overwriting earlier "implicit none" statement.\n')
+ impl = {}
+ for e in markoutercomma(m.group('after')).split('@,@'):
+ decl = {}
+ m1 = re.match(
+ r'\s*(?P<this>.*?)\s*(\(\s*(?P<after>[a-z-, ]+)\s*\)\s*|)\Z', e, re.I)
+ if not m1:
+ outmess(
+ 'analyzeline: could not extract info of implicit statement part "%s"\n' % (e))
+ continue
+ m2 = typespattern4implicit.match(m1.group('this'))
+ if not m2:
+ outmess(
+ 'analyzeline: could not extract types pattern of implicit statement part "%s"\n' % (e))
+ continue
+ typespec, selector, attr, edecl = cracktypespec0(
+ m2.group('this'), m2.group('after'))
+ kindselect, charselect, typename = cracktypespec(
+ typespec, selector)
+ decl['typespec'] = typespec
+ decl['kindselector'] = kindselect
+ decl['charselector'] = charselect
+ decl['typename'] = typename
+ for k in list(decl.keys()):
+ if not decl[k]:
+ del decl[k]
+ for r in markoutercomma(m1.group('after')).split('@,@'):
+ if '-' in r:
+ try:
+ begc, endc = [x.strip() for x in r.split('-')]
+ except Exception:
+ outmess(
+ 'analyzeline: expected "-" instead of "%s" in range list of implicit statement\n' % r)
+ continue
+ else:
+ begc = endc = r.strip()
+ if not len(begc) == len(endc) == 1:
+ outmess(
+ 'analyzeline: expected "-" instead of "%s" in range list of implicit statement (2)\n' % r)
+ continue
+ for o in range(ord(begc), ord(endc) + 1):
+ impl[chr(o)] = decl
+ groupcache[groupcounter]['implicit'] = impl
+ elif case == 'data':
+ ll = []
+ dl = ''
+ il = ''
+ f = 0
+ fc = 1
+ inp = 0
+ for c in m.group('after'):
+ if not inp:
+ if c == "'":
+ fc = not fc
+ if c == '/' and fc:
+ f = f + 1
+ continue
+ if c == '(':
+ inp = inp + 1
+ elif c == ')':
+ inp = inp - 1
+ if f == 0:
+ dl = dl + c
+ elif f == 1:
+ il = il + c
+ elif f == 2:
+ dl = dl.strip()
+ if dl.startswith(','):
+ dl = dl[1:].strip()
+ ll.append([dl, il])
+ dl = c
+ il = ''
+ f = 0
+ if f == 2:
+ dl = dl.strip()
+ if dl.startswith(','):
+ dl = dl[1:].strip()
+ ll.append([dl, il])
+ vars = groupcache[groupcounter].get('vars', {})
+ last_name = None
+ for l in ll:
+ l[0], l[1] = l[0].strip(), l[1].strip()
+ if l[0].startswith(','):
+ l[0] = l[0][1:]
+ if l[0].startswith('('):
+ outmess('analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % l[0])
+ continue
+ for idx, v in enumerate(rmbadname([x.strip() for x in markoutercomma(l[0]).split('@,@')])):
+ if v.startswith('('):
+ outmess('analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % v)
+ # XXX: subsequent init expressions may get wrong values.
+ # Ignoring since data statements are irrelevant for
+ # wrapping.
+ continue
+ if '!' in l[1]:
+ # Fixes gh-24746 pyf generation
+ # XXX: This essentially ignores the value for generating the pyf which is fine:
+ # integer dimension(3) :: mytab
+ # common /mycom/ mytab
+ # Since in any case it is initialized in the Fortran code
+ outmess('Comment line in declaration "%s" is not supported. Skipping.\n' % l[1])
+ continue
+ vars.setdefault(v, {})
+ vtype = vars[v].get('typespec')
+ vdim = getdimension(vars[v])
+ matches = re.findall(r"\(.*?\)", l[1]) if vtype == 'complex' else l[1].split(',')
+ try:
+ new_val = "(/{}/)".format(", ".join(matches)) if vdim else matches[idx]
+ except IndexError:
+ # gh-24746
+ # Runs only if above code fails. Fixes the line
+ # DATA IVAR1, IVAR2, IVAR3, IVAR4, EVAR5 /4*0,0.0D0/
+ # by expanding to ['0', '0', '0', '0', '0.0d0']
+ if any("*" in m for m in matches):
+ expanded_list = []
+ for match in matches:
+ if "*" in match:
+ try:
+ multiplier, value = match.split("*")
+ expanded_list.extend([value.strip()] * int(multiplier))
+ except ValueError: # if int(multiplier) fails
+ expanded_list.append(match.strip())
+ else:
+ expanded_list.append(match.strip())
+ matches = expanded_list
+ new_val = "(/{}/)".format(", ".join(matches)) if vdim else matches[idx]
+ current_val = vars[v].get('=')
+ if current_val and (current_val != new_val):
+ outmess('analyzeline: changing init expression of "%s" ("%s") to "%s"\n' % (v, current_val, new_val))
+ vars[v]['='] = new_val
+ last_name = v
+ groupcache[groupcounter]['vars'] = vars
+ if last_name:
+ previous_context = ('variable', last_name, groupcounter)
+ elif case == 'common':
+ line = m.group('after').strip()
+ if not line[0] == '/':
+ line = '//' + line
+ cl = []
+ f = 0
+ bn = ''
+ ol = ''
+ for c in line:
+ if c == '/':
+ f = f + 1
+ continue
+ if f >= 3:
+ bn = bn.strip()
+ if not bn:
+ bn = '_BLNK_'
+ cl.append([bn, ol])
+ f = f - 2
+ bn = ''
+ ol = ''
+ if f % 2:
+ bn = bn + c
+ else:
+ ol = ol + c
+ bn = bn.strip()
+ if not bn:
+ bn = '_BLNK_'
+ cl.append([bn, ol])
+ commonkey = {}
+ if 'common' in groupcache[groupcounter]:
+ commonkey = groupcache[groupcounter]['common']
+ for c in cl:
+ if c[0] not in commonkey:
+ commonkey[c[0]] = []
+ for i in [x.strip() for x in markoutercomma(c[1]).split('@,@')]:
+ if i:
+ commonkey[c[0]].append(i)
+ groupcache[groupcounter]['common'] = commonkey
+ previous_context = ('common', bn, groupcounter)
+ elif case == 'use':
+ m1 = re.match(
+ r'\A\s*(?P<name>\b\w+\b)\s*((,(\s*\bonly\b\s*:|(?P<notonly>))\s*(?P<list>.*))|)\s*\Z', m.group('after'), re.I)
+ if m1:
+ mm = m1.groupdict()
+ if 'use' not in groupcache[groupcounter]:
+ groupcache[groupcounter]['use'] = {}
+ name = m1.group('name')
+ groupcache[groupcounter]['use'][name] = {}
+ isonly = 0
+ if 'list' in mm and mm['list'] is not None:
+ if 'notonly' in mm and mm['notonly'] is None:
+ isonly = 1
+ groupcache[groupcounter]['use'][name]['only'] = isonly
+ ll = [x.strip() for x in mm['list'].split(',')]
+ rl = {}
+ for l in ll:
+ if '=' in l:
+ m2 = re.match(
+ r'\A\s*(?P<local>\b\w+\b)\s*=\s*>\s*(?P