diff --git a/env-llmeval/lib/python3.10/site-packages/async_timeout/__init__.py b/env-llmeval/lib/python3.10/site-packages/async_timeout/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ffb069fce9b2b9a03515404155a7e5cc439484a
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/async_timeout/__init__.py
@@ -0,0 +1,239 @@
+import asyncio
+import enum
+import sys
+import warnings
+from types import TracebackType
+from typing import Optional, Type
+
+
+if sys.version_info >= (3, 8):
+    from typing import final
+else:
+    from typing_extensions import final
+
+
+if sys.version_info >= (3, 11):
+
+    def _uncancel_task(task: "asyncio.Task[object]") -> None:
+        task.uncancel()
+
+else:
+
+    def _uncancel_task(task: "asyncio.Task[object]") -> None:
+        pass
+
+
+__version__ = "4.0.3"
+
+
+__all__ = ("timeout", "timeout_at", "Timeout")
+
+
+def timeout(delay: Optional[float]) -> "Timeout":
+    """timeout context manager.
+
+    Useful in cases when you want to apply timeout logic around a block
+    of code or in cases when asyncio.wait_for is not suitable. For example:
+
+    >>> async with timeout(0.001):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    delay - value in seconds or None to disable timeout logic
+    """
+    loop = asyncio.get_running_loop()
+    if delay is not None:
+        deadline = loop.time() + delay  # type: Optional[float]
+    else:
+        deadline = None
+    return Timeout(deadline, loop)
+
+
+def timeout_at(deadline: Optional[float]) -> "Timeout":
+    """Schedule the timeout at an absolute time.
+
+    The deadline argument points to a time in the same clock system
+    as loop.time().
+
+    Please note: it is not POSIX time but a time with
+    an undefined starting base, e.g. the time of the system power on.
+
+    >>> async with timeout_at(loop.time() + 10):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    """
+    loop = asyncio.get_running_loop()
+    return Timeout(deadline, loop)
+
+
+class _State(enum.Enum):
+    INIT = "INIT"
+    ENTER = "ENTER"
+    TIMEOUT = "TIMEOUT"
+    EXIT = "EXIT"
+
+
+@final
+class Timeout:
+    # Internal class, please don't instantiate it directly
+    # Use timeout() and timeout_at() public factories instead.
+    #
+    # Implementation note: `async with timeout()` is preferred
+    # over `with timeout()`.
+    # While technically the Timeout class implementation
+    # doesn't need to be async at all,
+    # the `async with` statement explicitly signals that
+    # the context manager should be used from an async function context.
+    #
+    # This design helps avoid many silly misuses.
+    #
+    # TimeoutError is raised immediately when scheduled
+    # if the deadline has already passed.
+    # The purpose is to time out as soon as possible
+    # without waiting for the next await expression.
+
+    __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
+
+    def __init__(
+        self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+    ) -> None:
+        self._loop = loop
+        self._state = _State.INIT
+
+        self._task: Optional["asyncio.Task[object]"] = None
+        self._timeout_handler = None  # type: Optional[asyncio.Handle]
+        if deadline is None:
+            self._deadline = None  # type: Optional[float]
+        else:
+            self.update(deadline)
+
+    def __enter__(self) -> "Timeout":
+        warnings.warn(
+            "with timeout() is deprecated, use async with timeout() instead",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._do_enter()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    async def __aenter__(self) -> "Timeout":
+        self._do_enter()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    @property
+    def expired(self) -> bool:
+        """Has the timeout expired during execution?"""
+        return self._state == _State.TIMEOUT
+
+    @property
+    def deadline(self) -> Optional[float]:
+        return self._deadline
+
+    def reject(self) -> None:
+        """Reject a scheduled timeout, if any."""
+        # cancel is maybe a better name, but
+        # task.cancel() raises CancelledError in the asyncio world.
+        if self._state not in (_State.INIT, _State.ENTER):
+            raise RuntimeError(f"invalid state {self._state.value}")
+        self._reject()
+
+    def _reject(self) -> None:
+        self._task = None
+        if self._timeout_handler is not None:
+            self._timeout_handler.cancel()
+            self._timeout_handler = None
+
+    def shift(self, delay: float) -> None:
+        """Advance the timeout by delay seconds.
+
+        The delay can be negative.
+
+        Raise RuntimeError if shift is called when the deadline is not scheduled.
+        """
+        deadline = self._deadline
+        if deadline is None:
+            raise RuntimeError("cannot shift timeout if deadline is not scheduled")
+        self.update(deadline + delay)
+
+    def update(self, deadline: float) -> None:
+        """Set the deadline to an absolute value.
+
+        The deadline argument points to a time in the same clock system
+        as loop.time().
+
+        If the new deadline is in the past, the timeout is raised immediately.
+
+        Please note: it is not POSIX time but a time with
+        an undefined starting base, e.g. the time of the system power on.
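+
+        An illustrative sketch (editorial example): extend the deadline to
+        ten seconds past "now" from inside the context:
+
+        >>> async with timeout(5) as cm:
+        ...     cm.update(asyncio.get_running_loop().time() + 10)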
+ """ + if self._state == _State.EXIT: + raise RuntimeError("cannot reschedule after exit from context manager") + if self._state == _State.TIMEOUT: + raise RuntimeError("cannot reschedule expired timeout") + if self._timeout_handler is not None: + self._timeout_handler.cancel() + self._deadline = deadline + if self._state != _State.INIT: + self._reschedule() + + def _reschedule(self) -> None: + assert self._state == _State.ENTER + deadline = self._deadline + if deadline is None: + return + + now = self._loop.time() + if self._timeout_handler is not None: + self._timeout_handler.cancel() + + self._task = asyncio.current_task() + if deadline <= now: + self._timeout_handler = self._loop.call_soon(self._on_timeout) + else: + self._timeout_handler = self._loop.call_at(deadline, self._on_timeout) + + def _do_enter(self) -> None: + if self._state != _State.INIT: + raise RuntimeError(f"invalid state {self._state.value}") + self._state = _State.ENTER + self._reschedule() + + def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None: + if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT: + assert self._task is not None + _uncancel_task(self._task) + self._timeout_handler = None + self._task = None + raise asyncio.TimeoutError + # timeout has not expired + self._state = _State.EXIT + self._reject() + return None + + def _on_timeout(self) -> None: + assert self._task is not None + self._task.cancel() + self._state = _State.TIMEOUT + # drop the reference early + self._timeout_handler = None diff --git a/env-llmeval/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5aa9a073b55dd99354590920941ea466eabfade2 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/async_timeout/py.typed b/env-llmeval/lib/python3.10/site-packages/async_timeout/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..3b94f915737aba1f12a0f067fdba3726bfe02df5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +Placeholder diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/LICENSE.txt b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..910537bd33412dd9b70c4d07cedd41b519be7fb5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2008-2021, The joblib developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..0ca5d797274c60a620b93d84010e122232011ba5
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/METADATA
@@ -0,0 +1,165 @@
+Metadata-Version: 2.1
+Name: joblib
+Version: 1.4.0
+Summary: Lightweight pipelining with Python functions
+Author-email: Gael Varoquaux
+License: BSD 3-Clause
+Project-URL: Homepage, https://joblib.readthedocs.io
+Project-URL: Source, https://github.com/joblib/joblib
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: Intended Audience :: Education
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Utilities
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+
+|PyPi| |Azure| |ReadTheDocs| |Codecov|
+
+.. |PyPi| image:: https://badge.fury.io/py/joblib.svg
+   :target: https://badge.fury.io/py/joblib
+   :alt: Joblib version
+
+.. |Azure| image:: https://dev.azure.com/joblib/joblib/_apis/build/status/joblib.joblib?branchName=master
+   :target: https://dev.azure.com/joblib/joblib/_build?definitionId=3&_a=summary&branchFilter=40
+   :alt: Azure CI status
+
+.. |ReadTheDocs| image:: https://readthedocs.org/projects/joblib/badge/?version=latest
+   :target: https://joblib.readthedocs.io/en/latest/?badge=latest
+   :alt: Documentation Status
+
+.. |Codecov| image:: https://codecov.io/gh/joblib/joblib/branch/master/graph/badge.svg
+   :target: https://codecov.io/gh/joblib/joblib
+   :alt: Codecov coverage
+
+
+The homepage of joblib, with user documentation, is located at:
+
+https://joblib.readthedocs.io
+
+Getting the latest code
+=======================
+
+To get the latest code using git, simply type::
+
+    git clone https://github.com/joblib/joblib.git
+
+If you don't have git installed, you can download a zip
+of the latest code: https://github.com/joblib/joblib/archive/refs/heads/master.zip
+
+Installing
+==========
+
+You can use `pip` to install joblib::
+
+    pip install joblib
+
+from any directory, or::
+
+    python setup.py install
+
+from the source directory.
+
+Dependencies
+============
+
+- Joblib has no mandatory dependencies besides Python (supported versions are
+  3.8+).
+- Joblib has an optional dependency on Numpy (at least version 1.6.1) for array
+  manipulation.
+- Joblib includes its own vendored copy of
+  `loky `_ for process management.
+- Joblib can efficiently dump and load numpy arrays but does not require numpy
+  to be installed.
+- Joblib has an optional dependency on
+  `python-lz4 `_ as a faster alternative to
+  zlib and gzip for compressed serialization.
+- Joblib has an optional dependency on psutil to mitigate memory leaks in
+  parallel worker processes.
+- Some examples require external dependencies such as pandas. See the
+  instructions in the `Building the docs`_ section for details.
+
+Workflow to contribute
+======================
+
+To contribute to joblib, first create an account on `github
+`_. Once this is done, fork the `joblib repository
+`_ to have your own repository,
+and clone it using 'git clone' on the computers where you want to work. Make
+your changes in your clone, push them to your github account, test them
+on several computers, and when you are happy with them, send a pull
+request to the main repository.
+
+Running the test suite
+======================
+
+To run the test suite, you need the pytest (version >= 3) and coverage modules.
+Run the test suite using::
+
+    pytest joblib
+
+from the root of the project.
+
+Building the docs
+=================
+
+To build the docs, you need to have sphinx (>=1.4) and some dependencies
+installed::
+
+    pip install -U -r .readthedocs-requirements.txt
+
+The docs can then be built with the following command::
+
+    make doc
+
+The html docs are located in the ``doc/_build/html`` directory.
+
+
+Making a source tarball
+=======================
+
+To create a source tarball, e.g. for packaging or distributing, run the
+following command::
+
+    python setup.py sdist
+
+The tarball will be created in the `dist` directory. This command will
+compile the docs, and the resulting tarball can be installed with
+no dependencies other than the Python standard library. You will need
+setuptools and sphinx.
+
+Making a release and uploading it to PyPI
+=========================================
+
+This command is only run by the project manager, to make a release and
+upload it to PyPI::
+
+    python setup.py sdist bdist_wheel
+    twine upload dist/*
+
+
+Note that the documentation should automatically get updated at each git
+push. If that is not the case, try building the docs locally and resolve
+any doc build errors (in particular when running the examples).
+
+Updating the changelog
+======================
+
+Changes are listed in the CHANGES.rst file.
They must be manually updated +but, the following git command may be used to generate the lines:: + + git log --abbrev-commit --date=short --no-merges --sparse + diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d114b5590982003aa7fe08b03cccf6111f14a5eb --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/RECORD @@ -0,0 +1,217 @@ +joblib-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +joblib-1.4.0.dist-info/LICENSE.txt,sha256=QmEpEcGHLF5LQ_auDo7llGfNNQMyJBz3LOkGQCZPrmo,1527 +joblib-1.4.0.dist-info/METADATA,sha256=RvoJhBm8jAYOnpWDNhwlybOVwN5nFst_OYytoS6_xRQ,5370 +joblib-1.4.0.dist-info/RECORD,, +joblib-1.4.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +joblib-1.4.0.dist-info/top_level.txt,sha256=P0LsoZ45gBL7ckL4lqQt7tdbrHD4xlVYhffmhHeeT_U,7 +joblib/__init__.py,sha256=ytsTJjbnDLtKYZ1il4QVYU8ReyTdKgKqd3zO_2FTv7Y,5132 +joblib/__pycache__/__init__.cpython-310.pyc,, +joblib/__pycache__/_cloudpickle_wrapper.cpython-310.pyc,, +joblib/__pycache__/_dask.cpython-310.pyc,, +joblib/__pycache__/_memmapping_reducer.cpython-310.pyc,, +joblib/__pycache__/_multiprocessing_helpers.cpython-310.pyc,, +joblib/__pycache__/_parallel_backends.cpython-310.pyc,, +joblib/__pycache__/_store_backends.cpython-310.pyc,, +joblib/__pycache__/_utils.cpython-310.pyc,, +joblib/__pycache__/backports.cpython-310.pyc,, +joblib/__pycache__/compressor.cpython-310.pyc,, +joblib/__pycache__/disk.cpython-310.pyc,, +joblib/__pycache__/executor.cpython-310.pyc,, +joblib/__pycache__/func_inspect.cpython-310.pyc,, +joblib/__pycache__/hashing.cpython-310.pyc,, +joblib/__pycache__/logger.cpython-310.pyc,, +joblib/__pycache__/memory.cpython-310.pyc,, +joblib/__pycache__/numpy_pickle.cpython-310.pyc,, +joblib/__pycache__/numpy_pickle_compat.cpython-310.pyc,, +joblib/__pycache__/numpy_pickle_utils.cpython-310.pyc,, +joblib/__pycache__/parallel.cpython-310.pyc,, +joblib/__pycache__/pool.cpython-310.pyc,, +joblib/__pycache__/testing.cpython-310.pyc,, +joblib/_cloudpickle_wrapper.py,sha256=-PBMUqgZCfd5EMaKn5FCQhWKiAJfbq4i2o6Z6KZ1ynE,417 +joblib/_dask.py,sha256=IhFjN6oMQVIcLC7bLHnA7KgO7nnEp7p6P4JH12934J4,13313 +joblib/_memmapping_reducer.py,sha256=4xclMWTgI5l-C6OFyt6FvamH-ToJu3DmopAXwt7a9F4,28092 +joblib/_multiprocessing_helpers.py,sha256=t7wIXfrLfzqFXjOeOYs4JP45tptxmYm5_yE8ylIRbR8,1925 +joblib/_parallel_backends.py,sha256=82qX4mJ5eHz4i0e5gbdHgvzST3P6LJ59hpfee0IyZCQ,25489 +joblib/_store_backends.py,sha256=PaoeHo4Mr7idPI3MvX766OqBQ_y_WkG_oZbE4MjbdKw,16683 +joblib/_utils.py,sha256=BBUs4ZHpDRxpgTsdrZSyBb39MqacM6JauI3dTRmnHm4,2076 +joblib/backports.py,sha256=aGh0cjs_xMhO2ym7RprpDjCBudiPtDhgD1vf5YMbMaI,5361 +joblib/compressor.py,sha256=WwRzsQhdQp0lbQvvEbAcaevCOCM2_Qyv_HJ4PVHtobQ,19768 +joblib/disk.py,sha256=PxUC63dBG2O1GriL1SLskHUjz7XzR-y6rqKEJHEY0jA,4389 +joblib/executor.py,sha256=PiaOwENhecRVnOdvHZLwGsr-RMKfk_F_Noy8y7nntJY,5136 +joblib/externals/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +joblib/externals/__pycache__/__init__.cpython-310.pyc,, +joblib/externals/cloudpickle/__init__.py,sha256=vb2JCOn1EpLUdVyPe1ESyhLymcvh-Rk3ISHJ-52aDLw,308 +joblib/externals/cloudpickle/__pycache__/__init__.cpython-310.pyc,, +joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-310.pyc,, +joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-310.pyc,, 
+joblib/externals/cloudpickle/cloudpickle.py,sha256=APCGMuIfVpWcelGsLlo2zRmwKRloaoiznQEOAoEWH9Y,55283 +joblib/externals/cloudpickle/cloudpickle_fast.py,sha256=1GqUD4nLKsv0vv9ty2La3eVLyeWNrPFlhUCN-aNI-30,322 +joblib/externals/loky/__init__.py,sha256=T20hgxG9YPps-AhsElBMzIJD73q0h3x0Hys49alTvq8,1104 +joblib/externals/loky/__pycache__/__init__.cpython-310.pyc,, +joblib/externals/loky/__pycache__/_base.cpython-310.pyc,, +joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-310.pyc,, +joblib/externals/loky/__pycache__/initializers.cpython-310.pyc,, +joblib/externals/loky/__pycache__/process_executor.cpython-310.pyc,, +joblib/externals/loky/__pycache__/reusable_executor.cpython-310.pyc,, +joblib/externals/loky/_base.py,sha256=LsQnEoKWKGhdeqGhMc68Aqwz4MrTnEs20KAYbFiUHzo,1057 +joblib/externals/loky/backend/__init__.py,sha256=Ix9KThV1CYk7-M5OQnJ_A_JrrrWJ-Jowa-HMMeGbp18,312 +joblib/externals/loky/backend/__pycache__/__init__.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/context.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/fork_exec.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/process.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/queues.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/reduction.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/spawn.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/synchronize.cpython-310.pyc,, +joblib/externals/loky/backend/__pycache__/utils.cpython-310.pyc,, +joblib/externals/loky/backend/_posix_reduction.py,sha256=xgCSrIaLI0k_MI0XNOBSp5e1ox1WN9idgrWbkWpMUr4,1776 +joblib/externals/loky/backend/_win_reduction.py,sha256=WmNB0NXtyJ_o_WzfPUEGh5dPhXIeI6FkEnFNXUxO2ws,683 +joblib/externals/loky/backend/context.py,sha256=GGBUGp4QDx1qvBuWDvyOSjNWYA79shxgAagsrcxA50E,13654 +joblib/externals/loky/backend/fork_exec.py,sha256=c3I22U_ewtT1T5Xn65SUXHrftspvllrGezGiv5KSRQY,1186 +joblib/externals/loky/backend/popen_loky_posix.py,sha256=ZFFs6H7Xp3CCQDE4oqmd_flWday6EdtlQ34Hkpa2PFQ,5580 +joblib/externals/loky/backend/popen_loky_win32.py,sha256=bYkhRA0w8qUcYFwoezeGwcnlCocEdheWXc6SZ-_rVxo,5325 +joblib/externals/loky/backend/process.py,sha256=4-Y94EoIrg4btsjTNxUBHAHhR96Nrugn_7_PGL6aU50,2018 +joblib/externals/loky/backend/queues.py,sha256=eETFvbPHwKfdoYyOgNQCyKq_Zlm-lzH3fwwpUIh-_4U,7322 +joblib/externals/loky/backend/reduction.py,sha256=CRu922R8xOxog2Snhop7y6fN1fPX9_h110brrICwZUE,7063 +joblib/externals/loky/backend/resource_tracker.py,sha256=421689XAmmxmNXktfkhNHNERIY3LbAcmWAsmRHPNUjg,14498 +joblib/externals/loky/backend/spawn.py,sha256=PVOHs8ou7IPcISb7t_Pp86FnUtPUw_KUAdxmDHOrAaU,8962 +joblib/externals/loky/backend/synchronize.py,sha256=nlDwBoLZB93m_l55qfZM_Ql-4L84PSYimoQqt5TzpDk,11768 +joblib/externals/loky/backend/utils.py,sha256=RVsxqyET4TJdbjc9uUHJmfhlQ2v4Uq-fiT_5b5rfC0s,5757 +joblib/externals/loky/cloudpickle_wrapper.py,sha256=9VfrXIfHiJcoXDqwgaHWbRsWipVA23oRJaxnXzSu7GM,3608 +joblib/externals/loky/initializers.py,sha256=dtKtRsJUmVwiJu0yZ-Ih0m8PvW_MxmouG7mShEcsStc,2567 +joblib/externals/loky/process_executor.py,sha256=eP5eVZFUsYPDhN91cxRHl23uEfqNWDeDQjrdNDB4c3o,51050 
+joblib/externals/loky/reusable_executor.py,sha256=XVxLGMhCDij8CU6BDHbYKPxn8lG-rnqXaIfxO2nXGXg,10305 +joblib/func_inspect.py,sha256=Cwei03uwnZKJ9mbGmdCSjVGgq2_1lJNJSkeUn1GTvSo,14204 +joblib/hashing.py,sha256=4X9OOofkfDQr3N8NZKTsMOKWr8IiIV0kjBwqCUJCej4,10535 +joblib/logger.py,sha256=meT-hFPTZukfBRkeyCVCxhssPSr668_R5Nn25S-iXtc,5463 +joblib/memory.py,sha256=SDxiok3TiQOqy7Cd6A6yxrtWCZADWhMIiiWsBe97QRA,45990 +joblib/numpy_pickle.py,sha256=5_L7EbPg94T3iSODEdkikJNj_z2qvpZYaGei9HlMBWU,26886 +joblib/numpy_pickle_compat.py,sha256=U7zVNNF03an_7AgHOxhTxcOSbLJTWcGsnV_xbT3vRdY,8547 +joblib/numpy_pickle_utils.py,sha256=KccSp_MhMFsV41Mw9pSijchtGOCympDaHWH0VpIUeSs,8723 +joblib/parallel.py,sha256=foh2ez7ep8oIS5QInXU6qRSZ4xWWY_AeZjzDVDIOppc,84579 +joblib/pool.py,sha256=7jhFg7_qIuBbJ2URbKvyZtET9AxRKaYrYiBSes1VFxo,14411 +joblib/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +joblib/test/__pycache__/__init__.cpython-310.pyc,, +joblib/test/__pycache__/common.cpython-310.pyc,, +joblib/test/__pycache__/test_backports.cpython-310.pyc,, +joblib/test/__pycache__/test_cloudpickle_wrapper.cpython-310.pyc,, +joblib/test/__pycache__/test_config.cpython-310.pyc,, +joblib/test/__pycache__/test_dask.cpython-310.pyc,, +joblib/test/__pycache__/test_disk.cpython-310.pyc,, +joblib/test/__pycache__/test_func_inspect.cpython-310.pyc,, +joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-310.pyc,, +joblib/test/__pycache__/test_hashing.cpython-310.pyc,, +joblib/test/__pycache__/test_init.cpython-310.pyc,, +joblib/test/__pycache__/test_logger.cpython-310.pyc,, +joblib/test/__pycache__/test_memmapping.cpython-310.pyc,, +joblib/test/__pycache__/test_memory.cpython-310.pyc,, +joblib/test/__pycache__/test_memory_async.cpython-310.pyc,, +joblib/test/__pycache__/test_missing_multiprocessing.cpython-310.pyc,, +joblib/test/__pycache__/test_module.cpython-310.pyc,, +joblib/test/__pycache__/test_numpy_pickle.cpython-310.pyc,, +joblib/test/__pycache__/test_numpy_pickle_compat.cpython-310.pyc,, +joblib/test/__pycache__/test_numpy_pickle_utils.cpython-310.pyc,, +joblib/test/__pycache__/test_parallel.cpython-310.pyc,, +joblib/test/__pycache__/test_store_backends.cpython-310.pyc,, +joblib/test/__pycache__/test_testing.cpython-310.pyc,, +joblib/test/__pycache__/test_utils.cpython-310.pyc,, +joblib/test/__pycache__/testutils.cpython-310.pyc,, +joblib/test/common.py,sha256=seNB39WwHwu0qfMo6qeV7y0m6wxCcJ1B-Q1MWLvn4Vk,2336 +joblib/test/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +joblib/test/data/__pycache__/__init__.cpython-310.pyc,, +joblib/test/data/__pycache__/create_numpy_pickle.cpython-310.pyc,, +joblib/test/data/create_numpy_pickle.py,sha256=BEaSv8dm3a48mvxl_Kf5ASwkkbGhwRplzNotTP4RCC0,3460 +joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np16.gz,sha256=QYRH6Q2DSGVorjCSqWCxjTWCMOJKyew4Nl2qmfQVvQ8,769 +joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np17.gz,sha256=ofTozM_KlPJa50TR8FCwc09mMmO6OO0GQhgUBLNIsXs,757 +joblib/test/data/joblib_0.10.0_compressed_pickle_py33_np18.gz,sha256=2eIVeA-XjOaT5IEQ6tI2UuHG3hwhiRciMmkBmPcIh4g,792 +joblib/test/data/joblib_0.10.0_compressed_pickle_py34_np19.gz,sha256=Gr2z_1tVWDH1H3_wCVHmakknf8KqeHKT8Yz4d1vmUCM,794 +joblib/test/data/joblib_0.10.0_compressed_pickle_py35_np19.gz,sha256=pWw_xuDbOkECqu1KGf1OFU7s2VbzC2v5F5iXhE7TwB4,790 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl,sha256=icRQjj374B-AHk5znxre0T9oWUHokoHIBQ8MqKo8l-U,986 
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.bz2,sha256=oYQVIyMiUxyRgWSuBBSOvCWKzToA-kUpcoQWdV4UoV4,997 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.gzip,sha256=Jpv3iGcDgKTv-O4nZsUreIbUK7qnt2cugZ-VMgNeEDQ,798 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.lzma,sha256=c0wu0x8pPv4BcStj7pE61rZpf68FLG_pNzQZ4e82zH8,660 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.xz,sha256=77FG1FDG0GHQav-1bxc4Tn9ky6ubUW_MbE0_iGmz5wc,712 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl,sha256=4GTC7s_cWNVShERn2nvVbspZYJgyK_0man4TEqvdVzU,1068 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.bz2,sha256=6G1vbs_iYmz2kYJ6w4qB1k7D67UnxUMus0S4SWeBtFo,1000 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.gzip,sha256=tlRUWeJS1BXmcwtLNSNK9L0hDHekFl07CqWxTShinmY,831 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.lzma,sha256=CorPwnfv3rR5hjNtJI01-sEBMOnkSxNlRVaWTszMopA,694 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.xz,sha256=Dppj3MffOKsKETeptEtDaxPOv6MA6xnbpK5LzlDQ-oE,752 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl,sha256=HL5Fb1uR9aPLjjhoOPJ2wwM1Qyo1FCZoYYd2HVw0Fos,1068 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2,sha256=Pyr2fqZnwfUxXdyrBr-kRwBYY8HA_Yi7fgSguKy5pUs,1021 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.gzip,sha256=os8NJjQI9FhnlZM-Ay9dX_Uo35gZnoJCgQSIVvcBPfE,831 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.lzma,sha256=Q_0y43qU7_GqAabJ8y3PWVhOisurnCAq3GzuCu04V58,697 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz,sha256=BNfmiQfpeLVpdfkwlJK4hJ5Cpgl0vreVyekyc5d_PNM,752 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl,sha256=l7nvLolhBDIdPFznOz3lBHiMOPBPCMi1bXop1tFSCpY,1068 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.bz2,sha256=pqGpuIS-ZU4uP8mkglHs8MaSDiVcPy7l3XHYJSppRgY,1005 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.gzip,sha256=YRFXE6LEb6qK72yPqnXdqQVY8Ts8xKUS9PWQKhLxWvk,833 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.lzma,sha256=Bf7gCUeTuTjCkbcIdyZYz69irblX4SAVQEzxCnMQhNU,701 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz,sha256=As8w2LGWwwNmKy3QNdKljK63Yq46gjRf_RJ0lh5_WqA,752 +joblib/test/data/joblib_0.11.0_compressed_pickle_py36_np111.gz,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl,sha256=XmsOFxeC1f1aYdGETclG6yfF9rLoB11DayOAhDMULrw,1068 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.bz2,sha256=vI2yWb50LKL_NgZyd_XkoD5teIg93uI42mWnx9ee-AQ,991 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.gzip,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.lzma,sha256=IWA0JlZG2ur53HgTUDl1m7q79dcVq6b0VOq33gKoJU0,715 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.xz,sha256=3Xh_NbMZdBjYx7ynfJ3Fyke28izSRSSzzNB0z5D4k9Y,752 +joblib/test/data/joblib_0.8.4_compressed_pickle_py27_np17.gz,sha256=Sp-ZT7i6pj5on2gbptszu7RarzJpOmHJ67UKOmCPQMg,659 +joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np16.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658 +joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np17.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658 +joblib/test/data/joblib_0.9.2_compressed_pickle_py34_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673 +joblib/test/data/joblib_0.9.2_compressed_pickle_py35_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl,sha256=naijdk2xIeKdIa3mfJw0JlmOdtiN6uRM1yOJg6-M73M,670 
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl,sha256=LynX8dLOygfxDfFywOgm7wgWOhSxLG7z-oDsU6X83Dw,670 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl,sha256=w9TLxpDTzp5TI6cU6lRvMsAasXEChcQgGE9s30sm_CU,691 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl,sha256=ilOBAOaulLFvKrD32S1NfnpiK-LfzA9rC3O2I7xROuI,691 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl,sha256=WfDVIqKcMzzh1gSAshIfzBoIpdLdZQuG79yYf5kfpOo,691 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz,sha256=8jYfWJsx0oY2J-3LlmEigK5cClnJSW2J2rfeSTZw-Ts,802 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_01.npy.z,sha256=YT9VvT3sEl2uWlOyvH2CkyE9Sok4od9O3kWtgeuUUqE,43 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_02.npy.z,sha256=txA5RDI0PRuiU_UNKY8pGp-zQgQQ9vaVvMi60hOPaVs,43 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_03.npy.z,sha256=d3AwICvU2MpSNjh2aPIsdJeGZLlDjANAF1Soa6uM0Po,37 +joblib/test/test_backports.py,sha256=Y9bhGa6H-K_FgLkDyXaSHzpaWk148Rjn8R9IKCKdy-k,1175 +joblib/test/test_cloudpickle_wrapper.py,sha256=gc05MGe1LJfhCNTv02VdDXg8Z6FeJJ3rFTff1_WmMwc,751 +joblib/test/test_config.py,sha256=EvQKH7n4qXAcPd-BTnfu4duR6b5ag65xjaEjPMDM50w,5284 
+joblib/test/test_dask.py,sha256=IIHdXXpH5NenurB6jem9rYisa_rP-usPyoPVaWzBEN0,18418 +joblib/test/test_disk.py,sha256=wJd1o9nLzqEjLqxxkgB9S7-UcKjHPQ8qK5l0czcNp0o,2205 +joblib/test/test_func_inspect.py,sha256=fyR9q1nGHwch5hD1hedut2YeN19hHDyUuDc8mXQuGeY,9488 +joblib/test/test_func_inspect_special_encoding.py,sha256=5xILDjSO-xtjQAMLvMeVD-L7IG4ZURb2gvBiShaDE78,145 +joblib/test/test_hashing.py,sha256=w8_WVgXL1TFueCSriX97GrCAS3mcmu5Ly062pOAOoRI,16054 +joblib/test/test_init.py,sha256=bgNF-9CIJl1MFNA75LBWOaiNtvduVfuvglz_u9Tt8Uc,422 +joblib/test/test_logger.py,sha256=CyA3E8Y74AHZfqJxetNrYfwXSOlLc1Pq1hqt7aJ6PwA,984 +joblib/test/test_memmapping.py,sha256=6kXT4ZMCtPykWhDf66QCcQvsdC9PaJumiCY0jHPwfio,43298 +joblib/test/test_memory.py,sha256=1dWfSPgMgdp2WdnY6lMILRdCbQ4v915DDOC_Q8d3WDE,49419 +joblib/test/test_memory_async.py,sha256=g-3Y09LqMccWR8Kw_yA0-yaduL-5zDBgaGUlgleOGDo,4807 +joblib/test/test_missing_multiprocessing.py,sha256=oeneMgi6iUVIbkfazGvXmOp6fqa7ok9uhj902Qjs_nk,1123 +joblib/test/test_module.py,sha256=HTHQSgnwa-1blkvhL_oVBO5yYdw8IrQTIJCK_QCsMtM,1936 +joblib/test/test_numpy_pickle.py,sha256=6rRMvj-9dgBVucPLY7J0qBIIcY2XOSQGh1owqOiGIrg,42485 +joblib/test/test_numpy_pickle_compat.py,sha256=o4mB8M6mVRX8Hxw-fxOmWPcbSAyswTdEHk64hVv_ZCk,609 +joblib/test/test_numpy_pickle_utils.py,sha256=-lQnhOAIGyd5oyWT2LGQPc3QO_Oxb3VhM_H3QjKm5e0,383 +joblib/test/test_parallel.py,sha256=fyvzDhsGADyjLoTy9Jycdketu1YZ1WyYtQCBIQ_YEso,73523 +joblib/test/test_store_backends.py,sha256=LCCUS9DN7feJniwnOD7x0pMKAQ-5rgCBBctwi5FhjWM,3121 +joblib/test/test_testing.py,sha256=1O5ZTJkdBjVkM-Ss4mComsEbJgrwROloVu8Mqv7Jv-w,2570 +joblib/test/test_utils.py,sha256=L6aBHiZrF6TUFbPGmQXGyNMnoAy5pEhOJoH5CAG_6CU,584 +joblib/test/testutils.py,sha256=6a7zVJm1kg6M-t4CH9tz8A6rMdC6ZY9sU6wBB8C7Zzo,251 +joblib/testing.py,sha256=6i2ctsokVXohLBhc1dXIZc2TiQDRiB-glGKFbfZDIqE,3093 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..57e3d840d59a650ac5bccbad5baeec47d155f0ad --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/top_level.txt b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca4af27e2b6e9917d9600060588a18cc9e3cc78c --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +joblib diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/__init__.py b/env-llmeval/lib/python3.10/site-packages/networkx/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..54fdbd54b55ffd1314ea0a791ade64a8b01a97c4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/__init__.py @@ -0,0 +1,49 @@ +""" +NetworkX +======== + +NetworkX is a Python package for the creation, manipulation, and study of the +structure, dynamics, and functions of complex networks. + +See https://networkx.org for complete documentation. 
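+
+A minimal illustrative sketch (editorial example):
+
+>>> import networkx as nx
+>>> G = nx.Graph([(0, 1), (1, 2)])
+>>> G.number_of_nodes()
+3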
+""" + +__version__ = "3.3" + + +# These are imported in order as listed +from networkx.lazy_imports import _lazy_import + +from networkx.exception import * + +from networkx import utils +from networkx.utils import _clear_cache, _dispatchable, config + +from networkx import classes +from networkx.classes import filters +from networkx.classes import * + +from networkx import convert +from networkx.convert import * + +from networkx import convert_matrix +from networkx.convert_matrix import * + +from networkx import relabel +from networkx.relabel import * + +from networkx import generators +from networkx.generators import * + +from networkx import readwrite +from networkx.readwrite import * + +# Need to test with SciPy, when available +from networkx import algorithms +from networkx.algorithms import * + +from networkx import linalg +from networkx.linalg import * + +from networkx import drawing +from networkx.drawing import * diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/conftest.py b/env-llmeval/lib/python3.10/site-packages/networkx/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..a8d6e158124b2fb56f8510a029b935b3397369c5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/conftest.py @@ -0,0 +1,289 @@ +""" +Testing +======= + +General guidelines for writing good tests: + +- doctests always assume ``import networkx as nx`` so don't add that +- prefer pytest fixtures over classes with setup methods. +- use the ``@pytest.mark.parametrize`` decorator +- use ``pytest.importorskip`` for numpy, scipy, pandas, and matplotlib b/c of PyPy. + and add the module to the relevant entries below. + +""" +import os +import sys +import warnings +from importlib.metadata import entry_points + +import pytest + +import networkx + + +def pytest_addoption(parser): + parser.addoption( + "--runslow", action="store_true", default=False, help="run slow tests" + ) + parser.addoption( + "--backend", + action="store", + default=None, + help="Run tests with a backend by auto-converting nx graphs to backend graphs", + ) + parser.addoption( + "--fallback-to-nx", + action="store_true", + default=False, + help="Run nx function if a backend doesn't implement a dispatchable function" + " (use with --backend)", + ) + + +def pytest_configure(config): + config.addinivalue_line("markers", "slow: mark test as slow to run") + backend = config.getoption("--backend") + if backend is None: + backend = os.environ.get("NETWORKX_TEST_BACKEND") + # nx-loopback backend is only available when testing + backends = entry_points(name="nx-loopback", group="networkx.backends") + if backends: + networkx.utils.backends.backends["nx-loopback"] = next(iter(backends)) + else: + warnings.warn( + "\n\n WARNING: Mixed NetworkX configuration! \n\n" + " This environment has mixed configuration for networkx.\n" + " The test object nx-loopback is not configured correctly.\n" + " You should not be seeing this message.\n" + " Try `pip install -e .`, or change your PYTHONPATH\n" + " Make sure python finds the networkx repo you are testing\n\n" + ) + if backend: + networkx.config["backend_priority"] = [backend] + fallback_to_nx = config.getoption("--fallback-to-nx") + if not fallback_to_nx: + fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") + networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx) + + +def pytest_collection_modifyitems(config, items): + # Setting this to True here allows tests to be set up before dispatching + # any function call to a backend. 
+ networkx.utils.backends._dispatchable._is_testing = True + if backend_priority := networkx.config["backend_priority"]: + # Allow pluggable backends to add markers to tests (such as skip or xfail) + # when running in auto-conversion test mode + backend = networkx.utils.backends.backends[backend_priority[0]].load() + if hasattr(backend, "on_start_tests"): + getattr(backend, "on_start_tests")(items) + + if config.getoption("--runslow"): + # --runslow given in cli: do not skip slow tests + return + skip_slow = pytest.mark.skip(reason="need --runslow option to run") + for item in items: + if "slow" in item.keywords: + item.add_marker(skip_slow) + + +# TODO: The warnings below need to be dealt with, but for now we silence them. +@pytest.fixture(autouse=True) +def set_warnings(): + warnings.filterwarnings( + "ignore", + category=FutureWarning, + message="\n\nsingle_target_shortest_path_length", + ) + warnings.filterwarnings( + "ignore", + category=FutureWarning, + message="\n\nshortest_path", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\nforest_str is deprecated" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nrandom_tree" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="Edmonds has been deprecated" + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="MultiDiGraph_EdgeKey has been deprecated", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nThe `normalized`" + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="The function `join` is deprecated", + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="\n\nstrongly_connected_components_recursive", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nall_triplets" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nrandom_triad" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="minimal_d_separator" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="d_separated" + ) + warnings.filterwarnings("ignore", category=DeprecationWarning, message="\n\nk_core") + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nk_shell" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nk_crust" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nk_corona" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\ntotal_spanning_tree_weight" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'" + ) + + +@pytest.fixture(autouse=True) +def add_nx(doctest_namespace): + doctest_namespace["nx"] = networkx + + +# What dependencies are installed? 
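+#
+# Each optional dependency below is probed with a bare import and recorded in
+# a has_* flag; the flags drive the collect_ignore lists at the bottom of this
+# file, so pytest skips collecting modules whose optional dependencies are
+# missing.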
+ +try: + import numpy + + has_numpy = True +except ImportError: + has_numpy = False + +try: + import scipy + + has_scipy = True +except ImportError: + has_scipy = False + +try: + import matplotlib + + has_matplotlib = True +except ImportError: + has_matplotlib = False + +try: + import pandas + + has_pandas = True +except ImportError: + has_pandas = False + +try: + import pygraphviz + + has_pygraphviz = True +except ImportError: + has_pygraphviz = False + +try: + import pydot + + has_pydot = True +except ImportError: + has_pydot = False + +try: + import sympy + + has_sympy = True +except ImportError: + has_sympy = False + + +# List of files that pytest should ignore + +collect_ignore = [] + +needs_numpy = [ + "algorithms/approximation/traveling_salesman.py", + "algorithms/centrality/current_flow_closeness.py", + "algorithms/node_classification.py", + "algorithms/non_randomness.py", + "algorithms/shortest_paths/dense.py", + "algorithms/tree/mst.py", + "generators/expanders.py", + "linalg/bethehessianmatrix.py", + "linalg/laplacianmatrix.py", + "utils/misc.py", + "algorithms/centrality/laplacian.py", +] +needs_scipy = [ + "algorithms/approximation/traveling_salesman.py", + "algorithms/assortativity/correlation.py", + "algorithms/assortativity/mixing.py", + "algorithms/assortativity/pairs.py", + "algorithms/bipartite/matrix.py", + "algorithms/bipartite/spectral.py", + "algorithms/centrality/current_flow_betweenness.py", + "algorithms/centrality/current_flow_betweenness_subset.py", + "algorithms/centrality/eigenvector.py", + "algorithms/centrality/katz.py", + "algorithms/centrality/laplacian.py", + "algorithms/centrality/second_order.py", + "algorithms/centrality/subgraph_alg.py", + "algorithms/communicability_alg.py", + "algorithms/community/divisive.py", + "algorithms/distance_measures.py", + "algorithms/link_analysis/hits_alg.py", + "algorithms/link_analysis/pagerank_alg.py", + "algorithms/node_classification.py", + "algorithms/similarity.py", + "algorithms/tree/mst.py", + "algorithms/walks.py", + "convert_matrix.py", + "drawing/layout.py", + "drawing/nx_pylab.py", + "generators/spectral_graph_forge.py", + "generators/expanders.py", + "linalg/algebraicconnectivity.py", + "linalg/attrmatrix.py", + "linalg/bethehessianmatrix.py", + "linalg/graphmatrix.py", + "linalg/laplacianmatrix.py", + "linalg/modularitymatrix.py", + "linalg/spectrum.py", + "utils/rcm.py", +] +needs_matplotlib = ["drawing/nx_pylab.py"] +needs_pandas = ["convert_matrix.py"] +needs_pygraphviz = ["drawing/nx_agraph.py"] +needs_pydot = ["drawing/nx_pydot.py"] +needs_sympy = ["algorithms/polynomials.py"] + +if not has_numpy: + collect_ignore += needs_numpy +if not has_scipy: + collect_ignore += needs_scipy +if not has_matplotlib: + collect_ignore += needs_matplotlib +if not has_pandas: + collect_ignore += needs_pandas +if not has_pygraphviz: + collect_ignore += needs_pygraphviz +if not has_pydot: + collect_ignore += needs_pydot +if not has_sympy: + collect_ignore += needs_sympy diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/convert.py b/env-llmeval/lib/python3.10/site-packages/networkx/convert.py new file mode 100644 index 0000000000000000000000000000000000000000..7cc8fe401261a0af9b8dc8ad261293a735782272 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/convert.py @@ -0,0 +1,494 @@ +"""Functions to convert NetworkX graphs to and from other formats. + +The preferred way of converting data to a NetworkX graph is through the +graph constructor. 
The constructor calls the to_networkx_graph() function +which attempts to guess the input type and convert it automatically. + +Examples +-------- +Create a graph with a single edge from a dictionary of dictionaries + +>>> d = {0: {1: 1}} # dict-of-dicts single edge (0,1) +>>> G = nx.Graph(d) + +See Also +-------- +nx_agraph, nx_pydot +""" +import warnings +from collections.abc import Collection, Generator, Iterator + +import networkx as nx + +__all__ = [ + "to_networkx_graph", + "from_dict_of_dicts", + "to_dict_of_dicts", + "from_dict_of_lists", + "to_dict_of_lists", + "from_edgelist", + "to_edgelist", +] + + +def to_networkx_graph(data, create_using=None, multigraph_input=False): + """Make a NetworkX graph from a known data structure. + + The preferred way to call this is automatically + from the class constructor + + >>> d = {0: {1: {"weight": 1}}} # dict-of-dicts single edge (0,1) + >>> G = nx.Graph(d) + + instead of the equivalent + + >>> G = nx.from_dict_of_dicts(d) + + Parameters + ---------- + data : object to be converted + + Current known types are: + any NetworkX graph + dict-of-dicts + dict-of-lists + container (e.g. set, list, tuple) of edges + iterator (e.g. itertools.chain) that produces edges + generator of edges + Pandas DataFrame (row per edge) + 2D numpy array + scipy sparse array + pygraphviz agraph + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + multigraph_input : bool (default False) + If True and data is a dict_of_dicts, + try to create a multigraph assuming dict_of_dict_of_lists. + If data and create_using are both multigraphs then create + a multigraph from a multigraph. + + """ + # NX graph + if hasattr(data, "adj"): + try: + result = from_dict_of_dicts( + data.adj, + create_using=create_using, + multigraph_input=data.is_multigraph(), + ) + # data.graph should be dict-like + result.graph.update(data.graph) + # data.nodes should be dict-like + # result.add_node_from(data.nodes.items()) possible but + # for custom node_attr_dict_factory which may be hashable + # will be unexpected behavior + for n, dd in data.nodes.items(): + result._node[n].update(dd) + return result + except Exception as err: + raise nx.NetworkXError("Input is not a correct NetworkX graph.") from err + + # pygraphviz agraph + if hasattr(data, "is_strict"): + try: + return nx.nx_agraph.from_agraph(data, create_using=create_using) + except Exception as err: + raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from err + + # dict of dicts/lists + if isinstance(data, dict): + try: + return from_dict_of_dicts( + data, create_using=create_using, multigraph_input=multigraph_input + ) + except Exception as err1: + if multigraph_input is True: + raise nx.NetworkXError( + f"converting multigraph_input raised:\n{type(err1)}: {err1}" + ) + try: + return from_dict_of_lists(data, create_using=create_using) + except Exception as err2: + raise TypeError("Input is not known type.") from err2 + + # Pandas DataFrame + try: + import pandas as pd + + if isinstance(data, pd.DataFrame): + if data.shape[0] == data.shape[1]: + try: + return nx.from_pandas_adjacency(data, create_using=create_using) + except Exception as err: + msg = "Input is not a correct Pandas DataFrame adjacency matrix." 
+ raise nx.NetworkXError(msg) from err + else: + try: + return nx.from_pandas_edgelist( + data, edge_attr=True, create_using=create_using + ) + except Exception as err: + msg = "Input is not a correct Pandas DataFrame edge-list." + raise nx.NetworkXError(msg) from err + except ImportError: + warnings.warn("pandas not found, skipping conversion test.", ImportWarning) + + # numpy array + try: + import numpy as np + + if isinstance(data, np.ndarray): + try: + return nx.from_numpy_array(data, create_using=create_using) + except Exception as err: + raise nx.NetworkXError( + f"Failed to interpret array as an adjacency matrix." + ) from err + except ImportError: + warnings.warn("numpy not found, skipping conversion test.", ImportWarning) + + # scipy sparse array - any format + try: + import scipy + + if hasattr(data, "format"): + try: + return nx.from_scipy_sparse_array(data, create_using=create_using) + except Exception as err: + raise nx.NetworkXError( + "Input is not a correct scipy sparse array type." + ) from err + except ImportError: + warnings.warn("scipy not found, skipping conversion test.", ImportWarning) + + # Note: most general check - should remain last in order of execution + # Includes containers (e.g. list, set, dict, etc.), generators, and + # iterators (e.g. itertools.chain) of edges + + if isinstance(data, Collection | Generator | Iterator): + try: + return from_edgelist(data, create_using=create_using) + except Exception as err: + raise nx.NetworkXError("Input is not a valid edge list") from err + + raise nx.NetworkXError("Input is not a known data type for conversion.") + + +@nx._dispatchable +def to_dict_of_lists(G, nodelist=None): + """Returns adjacency representation of graph as a dictionary of lists. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list + Use only nodes specified in nodelist + + Notes + ----- + Completely ignores edge data for MultiGraph and MultiDiGraph. + + """ + if nodelist is None: + nodelist = G + + d = {} + for n in nodelist: + d[n] = [nbr for nbr in G.neighbors(n) if nbr in nodelist] + return d + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_dict_of_lists(d, create_using=None): + """Returns a graph from a dictionary of lists. + + Parameters + ---------- + d : dictionary of lists + A dictionary of lists adjacency representation. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Examples + -------- + >>> dol = {0: [1]} # single edge (0,1) + >>> G = nx.from_dict_of_lists(dol) + + or + + >>> G = nx.Graph(dol) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_nodes_from(d) + if G.is_multigraph() and not G.is_directed(): + # a dict_of_lists can't show multiedges. BUT for undirected graphs, + # each edge shows up twice in the dict_of_lists. + # So we need to treat this case separately. + seen = {} + for node, nbrlist in d.items(): + for nbr in nbrlist: + if nbr not in seen: + G.add_edge(node, nbr) + seen[node] = 1 # don't allow reverse edge to show up + else: + G.add_edges_from( + ((node, nbr) for node, nbrlist in d.items() for nbr in nbrlist) + ) + return G + + +def to_dict_of_dicts(G, nodelist=None, edge_data=None): + """Returns adjacency representation of graph as a dictionary of dictionaries. 
+ + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list + Use only nodes specified in nodelist + + edge_data : scalar, optional + If provided, the value of the dictionary will be set to `edge_data` for + all edges. Usual values could be `1` or `True`. If `edge_data` is + `None` (the default), the edgedata in `G` is used, resulting in a + dict-of-dict-of-dicts. If `G` is a MultiGraph, the result will be a + dict-of-dict-of-dict-of-dicts. See Notes for an approach to customize + handling edge data. `edge_data` should *not* be a container. + + Returns + ------- + dod : dict + A nested dictionary representation of `G`. Note that the level of + nesting depends on the type of `G` and the value of `edge_data` + (see Examples). + + See Also + -------- + from_dict_of_dicts, to_dict_of_lists + + Notes + ----- + For a more custom approach to handling edge data, try:: + + dod = { + n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()} + for n, nbrdict in G.adj.items() + } + + where `custom` returns the desired edge data for each edge between `n` and + `nbr`, given existing edge data `dd`. + + Examples + -------- + >>> G = nx.path_graph(3) + >>> nx.to_dict_of_dicts(G) + {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}} + + Edge data is preserved by default (``edge_data=None``), resulting + in dict-of-dict-of-dicts where the innermost dictionary contains the + edge data: + + >>> G = nx.Graph() + >>> G.add_edges_from( + ... [ + ... (0, 1, {"weight": 1.0}), + ... (1, 2, {"weight": 2.0}), + ... (2, 0, {"weight": 1.0}), + ... ] + ... ) + >>> d = nx.to_dict_of_dicts(G) + >>> d # doctest: +SKIP + {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}}, + 1: {0: {'weight': 1.0}, 2: {'weight': 2.0}}, + 2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}} + >>> d[1][2]["weight"] + 2.0 + + If `edge_data` is not `None`, edge data in the original graph (if any) is + replaced: + + >>> d = nx.to_dict_of_dicts(G, edge_data=1) + >>> d + {0: {1: 1, 2: 1}, 1: {0: 1, 2: 1}, 2: {1: 1, 0: 1}} + >>> d[1][2] + 1 + + This also applies to MultiGraphs: edge data is preserved by default: + + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1, key="a", weight=1.0) + 'a' + >>> G.add_edge(0, 1, key="b", weight=5.0) + 'b' + >>> d = nx.to_dict_of_dicts(G) + >>> d # doctest: +SKIP + {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}, + 1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}} + >>> d[0][1]["b"]["weight"] + 5.0 + + But multi edge data is lost if `edge_data` is not `None`: + + >>> d = nx.to_dict_of_dicts(G, edge_data=10) + >>> d + {0: {1: 10}, 1: {0: 10}} + """ + dod = {} + if nodelist is None: + if edge_data is None: + for u, nbrdict in G.adjacency(): + dod[u] = nbrdict.copy() + else: # edge_data is not None + for u, nbrdict in G.adjacency(): + dod[u] = dod.fromkeys(nbrdict, edge_data) + else: # nodelist is not None + if edge_data is None: + for u in nodelist: + dod[u] = {} + for v, data in ((v, data) for v, data in G[u].items() if v in nodelist): + dod[u][v] = data + else: # nodelist and edge_data are not None + for u in nodelist: + dod[u] = {} + for v in (v for v in G[u] if v in nodelist): + dod[u][v] = edge_data + return dod + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_dict_of_dicts(d, create_using=None, multigraph_input=False): + """Returns a graph from a dictionary of dictionaries. + + Parameters + ---------- + d : dictionary of dictionaries + A dictionary of dictionaries adjacency representation. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. 
If graph instance, then cleared before populated. + + multigraph_input : bool (default False) + When True, the dict `d` is assumed + to be a dict-of-dict-of-dict-of-dict structure keyed by + node to neighbor to edge keys to edge data for multi-edges. + Otherwise this routine assumes dict-of-dict-of-dict keyed by + node to neighbor to edge data. + + Examples + -------- + >>> dod = {0: {1: {"weight": 1}}} # single edge (0,1) + >>> G = nx.from_dict_of_dicts(dod) + + or + + >>> G = nx.Graph(dod) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_nodes_from(d) + # does dict d represent a MultiGraph or MultiDiGraph? + if multigraph_input: + if G.is_directed(): + if G.is_multigraph(): + G.add_edges_from( + (u, v, key, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) + else: + G.add_edges_from( + (u, v, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) + else: # Undirected + if G.is_multigraph(): + seen = set() # don't add both directions of undirected graph + for u, nbrs in d.items(): + for v, datadict in nbrs.items(): + if (u, v) not in seen: + G.add_edges_from( + (u, v, key, data) for key, data in datadict.items() + ) + seen.add((v, u)) + else: + seen = set() # don't add both directions of undirected graph + for u, nbrs in d.items(): + for v, datadict in nbrs.items(): + if (u, v) not in seen: + G.add_edges_from( + (u, v, data) for key, data in datadict.items() + ) + seen.add((v, u)) + + else: # not a multigraph to multigraph transfer + if G.is_multigraph() and not G.is_directed(): + # d can have both representations u-v, v-u in dict. Only add one. + # We don't need this check for digraphs since we add both directions, + # or for Graph() since it is done implicitly (parallel edges not allowed) + seen = set() + for u, nbrs in d.items(): + for v, data in nbrs.items(): + if (u, v) not in seen: + G.add_edge(u, v, key=0) + G[u][v][0].update(data) + seen.add((v, u)) + else: + G.add_edges_from( + ((u, v, data) for u, nbrs in d.items() for v, data in nbrs.items()) + ) + return G + + +@nx._dispatchable(preserve_edge_attrs=True) +def to_edgelist(G, nodelist=None): + """Returns a list of edges in the graph. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list + Use only nodes specified in nodelist + + """ + if nodelist is None: + return G.edges(data=True) + return G.edges(nodelist, data=True) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_edgelist(edgelist, create_using=None): + """Returns a graph from a list of edges. + + Parameters + ---------- + edgelist : list or iterator + Edge tuples + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
+ + Examples + -------- + >>> edgelist = [(0, 1)] # single edge (0,1) + >>> G = nx.from_edgelist(edgelist) + + or + + >>> G = nx.Graph(edgelist) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_edges_from(edgelist) + return G diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/convert_matrix.py b/env-llmeval/lib/python3.10/site-packages/networkx/convert_matrix.py new file mode 100644 index 0000000000000000000000000000000000000000..6165ac18e31e1aadb85676095e1110889dfead51 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/convert_matrix.py @@ -0,0 +1,1202 @@ +"""Functions to convert NetworkX graphs to and from common data containers +like numpy arrays, scipy sparse arrays, and pandas DataFrames. + +The preferred way of converting data to a NetworkX graph is through the +graph constructor. The constructor calls the `~networkx.convert.to_networkx_graph` +function which attempts to guess the input type and convert it automatically. + +Examples +-------- +Create a 10 node random graph from a numpy array + +>>> import numpy as np +>>> rng = np.random.default_rng() +>>> a = rng.integers(low=0, high=2, size=(10, 10)) +>>> DG = nx.from_numpy_array(a, create_using=nx.DiGraph) + +or equivalently: + +>>> DG = nx.DiGraph(a) + +which calls `from_numpy_array` internally based on the type of ``a``. + +See Also +-------- +nx_agraph, nx_pydot +""" + +import itertools +from collections import defaultdict + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "from_pandas_adjacency", + "to_pandas_adjacency", + "from_pandas_edgelist", + "to_pandas_edgelist", + "from_scipy_sparse_array", + "to_scipy_sparse_array", + "from_numpy_array", + "to_numpy_array", +] + + +@nx._dispatchable(edge_attrs="weight") +def to_pandas_adjacency( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a Pandas DataFrame. + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the Pandas DataFrame. + + nodelist : list, optional + The rows and columns are ordered according to the nodes in `nodelist`. + If `nodelist` is None, then the ordering is produced by G.nodes(). + + multigraph_weight : {sum, min, max}, optional + An operator that determines how weights in multigraphs are handled. + The default is to sum the weights of the multiple edges. + + weight : string or None, optional + The edge attribute that holds the numerical value used for + the edge weight. If an edge does not have that attribute, then the + value 1 is used instead. + + nonedge : float, optional + The matrix values corresponding to nonedges are typically set to zero. + However, this could be undesirable if there are matrix values + corresponding to actual edges that also have the value zero. If so, + one might prefer nonedges to have some other value, such as nan. + + Returns + ------- + df : Pandas DataFrame + Graph adjacency matrix + + Notes + ----- + For directed graphs, entry i,j corresponds to an edge from i to j. + + The DataFrame entries are assigned to the weight edge attribute. When + an edge does not have a weight attribute, the value of the entry is set to + the number 1. For multiple (parallel) edges, the values of the entries + are determined by the 'multigraph_weight' parameter. The default is to + sum the weight attributes for each of the parallel edges. 
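+
+    For example (a minimal sketch), passing ``multigraph_weight=min`` keeps
+    the smallest of the parallel edge weights instead::
+
+        >>> MG = nx.MultiGraph()
+        >>> MG.add_edge(0, 1, weight=3)
+        0
+        >>> MG.add_edge(0, 1, weight=7)
+        1
+        >>> nx.to_pandas_adjacency(MG, multigraph_weight=min, dtype=int)
+           0  1
+        0  0  3
+        1  3  0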
+ + When `nodelist` does not contain every node in `G`, the matrix is built + from the subgraph of `G` that is induced by the nodes in `nodelist`. + + The convention used for self-loop edges in graphs is to assign the + diagonal matrix entry value to the weight attribute of the edge + (or the number 1 if the edge has no weight attribute). If the + alternate convention of doubling the edge weight is desired the + resulting Pandas DataFrame can be modified as follows:: + + >>> import pandas as pd + >>> G = nx.Graph([(1, 1), (2, 2)]) + >>> df = nx.to_pandas_adjacency(G) + >>> df + 1 2 + 1 1.0 0.0 + 2 0.0 1.0 + >>> diag_idx = list(range(len(df))) + >>> df.iloc[diag_idx, diag_idx] *= 2 + >>> df + 1 2 + 1 2.0 0.0 + 2 0.0 2.0 + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1, weight=2) + 0 + >>> G.add_edge(1, 0) + 0 + >>> G.add_edge(2, 2, weight=3) + 0 + >>> G.add_edge(2, 2) + 1 + >>> nx.to_pandas_adjacency(G, nodelist=[0, 1, 2], dtype=int) + 0 1 2 + 0 0 2 0 + 1 1 0 0 + 2 0 0 4 + + """ + import pandas as pd + + M = to_numpy_array( + G, + nodelist=nodelist, + dtype=dtype, + order=order, + multigraph_weight=multigraph_weight, + weight=weight, + nonedge=nonedge, + ) + if nodelist is None: + nodelist = list(G) + return pd.DataFrame(data=M, index=nodelist, columns=nodelist) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pandas_adjacency(df, create_using=None): + r"""Returns a graph from Pandas DataFrame. + + The Pandas DataFrame is interpreted as an adjacency matrix for the graph. + + Parameters + ---------- + df : Pandas DataFrame + An adjacency matrix representation of a graph + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of df corresponds to an edge from i to j. + + If `df` has a single data type for each entry it will be converted to an + appropriate Python data type. + + If you have node attributes stored in a separate dataframe `df_nodes`, + you can load those attributes to the graph `G` using the following code: + + ``` + df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]}) + G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows()) + ``` + + If `df` has a user-specified compound data type the names + of the data fields will be used as attribute keys in the resulting + NetworkX graph. + + See Also + -------- + to_pandas_adjacency + + Examples + -------- + Simple integer weights on edges: + + >>> import pandas as pd + >>> pd.options.display.max_columns = 20 + >>> df = pd.DataFrame([[1, 1], [2, 1]]) + >>> df + 0 1 + 0 1 1 + 1 2 1 + >>> G = nx.from_pandas_adjacency(df) + >>> G.name = "Graph from pandas adjacency matrix" + >>> print(G) + Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges + """ + + try: + df = df[df.index] + except Exception as err: + missing = list(set(df.index).difference(set(df.columns))) + msg = f"{missing} not in columns" + raise nx.NetworkXError("Columns must match Indices.", msg) from err + + A = df.values + G = from_numpy_array(A, create_using=create_using) + + nx.relabel.relabel_nodes(G, dict(enumerate(df.columns)), copy=False) + return G + + +@nx._dispatchable(preserve_edge_attrs=True) +def to_pandas_edgelist( + G, + source="source", + target="target", + nodelist=None, + dtype=None, + edge_key=None, +): + """Returns the graph edge list as a Pandas DataFrame. 
+ + Parameters + ---------- + G : graph + The NetworkX graph used to construct the Pandas DataFrame. + + source : str or int, optional + A valid column name (string or integer) for the source nodes (for the + directed case). + + target : str or int, optional + A valid column name (string or integer) for the target nodes (for the + directed case). + + nodelist : list, optional + Use only nodes specified in nodelist + + dtype : dtype, default None + Use to create the DataFrame. Data type to force. + Only a single dtype is allowed. If None, infer. + + edge_key : str or int or None, optional (default=None) + A valid column name (string or integer) for the edge keys (for the + multigraph case). If None, edge keys are not stored in the DataFrame. + + Returns + ------- + df : Pandas DataFrame + Graph edge list + + Examples + -------- + >>> G = nx.Graph( + ... [ + ... ("A", "B", {"cost": 1, "weight": 7}), + ... ("C", "E", {"cost": 9, "weight": 10}), + ... ] + ... ) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"]) + >>> df[["source", "target", "cost", "weight"]] + source target cost weight + 0 A B 1 7 + 1 C E 9 10 + + >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})]) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey") + >>> df[["source", "target", "cost", "ekey"]] + source target cost ekey + 0 A B 1 0 + 1 A B 9 1 + + """ + import pandas as pd + + if nodelist is None: + edgelist = G.edges(data=True) + else: + edgelist = G.edges(nodelist, data=True) + source_nodes = [s for s, _, _ in edgelist] + target_nodes = [t for _, t, _ in edgelist] + + all_attrs = set().union(*(d.keys() for _, _, d in edgelist)) + if source in all_attrs: + raise nx.NetworkXError(f"Source name {source!r} is an edge attr name") + if target in all_attrs: + raise nx.NetworkXError(f"Target name {target!r} is an edge attr name") + + nan = float("nan") + edge_attr = {k: [d.get(k, nan) for _, _, d in edgelist] for k in all_attrs} + + if G.is_multigraph() and edge_key is not None: + if edge_key in all_attrs: + raise nx.NetworkXError(f"Edge key name {edge_key!r} is an edge attr name") + edge_keys = [k for _, _, k in G.edges(keys=True)] + edgelistdict = {source: source_nodes, target: target_nodes, edge_key: edge_keys} + else: + edgelistdict = {source: source_nodes, target: target_nodes} + + edgelistdict.update(edge_attr) + return pd.DataFrame(edgelistdict, dtype=dtype) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pandas_edgelist( + df, + source="source", + target="target", + edge_attr=None, + create_using=None, + edge_key=None, +): + """Returns a graph from Pandas DataFrame containing an edge list. + + The Pandas DataFrame should contain at least two columns of node names and + zero or more columns of edge attributes. Each row will be processed as one + edge instance. + + Note: This function iterates over DataFrame.values, which is not + guaranteed to retain the data type across columns in the row. This is only + a problem if your row is entirely numeric and a mix of ints and floats. In + that case, all values will be returned as floats. See the + DataFrame.iterrows documentation for an example. + + Parameters + ---------- + df : Pandas DataFrame + An edge list representation of a graph + + source : str or int + A valid column name (string or integer) for the source nodes (for the + directed case). + + target : str or int + A valid column name (string or integer) for the target nodes (for the + directed case). 
+ + edge_attr : str or int, iterable, True, or None + A valid column name (str or int) or iterable of column names that are + used to retrieve items and add them to the graph as edge attributes. + If `True`, all of the remaining columns will be added. + If `None`, no edge attributes are added to the graph. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_key : str or None, optional (default=None) + A valid column name for the edge keys (for a MultiGraph). The values in + this column are used for the edge keys when adding edges if create_using + is a multigraph. + + If you have node attributes stored in a separate dataframe `df_nodes`, + you can load those attributes to the graph `G` using the following code: + + ``` + df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]}) + G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows()) + ``` + + See Also + -------- + to_pandas_edgelist + + Examples + -------- + Simple integer weights on edges: + + >>> import pandas as pd + >>> pd.options.display.max_columns = 20 + >>> import numpy as np + >>> rng = np.random.RandomState(seed=5) + >>> ints = rng.randint(1, 11, size=(3, 2)) + >>> a = ["A", "B", "C"] + >>> b = ["D", "A", "E"] + >>> df = pd.DataFrame(ints, columns=["weight", "cost"]) + >>> df[0] = a + >>> df["b"] = b + >>> df[["weight", "cost", 0, "b"]] + weight cost 0 b + 0 4 7 A D + 1 7 1 B A + 2 10 9 C E + >>> G = nx.from_pandas_edgelist(df, 0, "b", ["weight", "cost"]) + >>> G["E"]["C"]["weight"] + 10 + >>> G["E"]["C"]["cost"] + 9 + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2], + ... "target": [2, 2, 3], + ... "weight": [3, 4, 5], + ... "color": ["red", "blue", "blue"], + ... } + ... ) + >>> G = nx.from_pandas_edgelist(edges, edge_attr=True) + >>> G[0][2]["color"] + 'red' + + Build multigraph with custom keys: + + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2, 0], + ... "target": [2, 2, 3, 2], + ... "my_edge_key": ["A", "B", "C", "D"], + ... "weight": [3, 4, 5, 6], + ... "color": ["red", "blue", "blue", "blue"], + ... } + ... ) + >>> G = nx.from_pandas_edgelist( + ... edges, + ... edge_key="my_edge_key", + ... edge_attr=["weight", "color"], + ... create_using=nx.MultiGraph(), + ... 
) + >>> G[0][2] + AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}}) + + + """ + g = nx.empty_graph(0, create_using) + + if edge_attr is None: + g.add_edges_from(zip(df[source], df[target])) + return g + + reserved_columns = [source, target] + + # Additional columns requested + attr_col_headings = [] + attribute_data = [] + if edge_attr is True: + attr_col_headings = [c for c in df.columns if c not in reserved_columns] + elif isinstance(edge_attr, list | tuple): + attr_col_headings = edge_attr + else: + attr_col_headings = [edge_attr] + if len(attr_col_headings) == 0: + raise nx.NetworkXError( + f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}" + ) + + try: + attribute_data = zip(*[df[col] for col in attr_col_headings]) + except (KeyError, TypeError) as err: + msg = f"Invalid edge_attr argument: {edge_attr}" + raise nx.NetworkXError(msg) from err + + if g.is_multigraph(): + # => append the edge keys from the df to the bundled data + if edge_key is not None: + try: + multigraph_edge_keys = df[edge_key] + attribute_data = zip(attribute_data, multigraph_edge_keys) + except (KeyError, TypeError) as err: + msg = f"Invalid edge_key argument: {edge_key}" + raise nx.NetworkXError(msg) from err + + for s, t, attrs in zip(df[source], df[target], attribute_data): + if edge_key is not None: + attrs, multigraph_edge_key = attrs + key = g.add_edge(s, t, key=multigraph_edge_key) + else: + key = g.add_edge(s, t) + + g[s][t][key].update(zip(attr_col_headings, attrs)) + else: + for s, t, attrs in zip(df[source], df[target], attribute_data): + g.add_edge(s, t) + g[s][t].update(zip(attr_col_headings, attrs)) + + return g + + +@nx._dispatchable(edge_attrs="weight") +def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"): + """Returns the graph adjacency matrix as a SciPy sparse array. + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the sparse matrix. + + nodelist : list, optional + The rows and columns are ordered according to the nodes in `nodelist`. + If `nodelist` is None, then the ordering is produced by G.nodes(). + + dtype : NumPy data-type, optional + A valid NumPy dtype used to initialize the array. If None, then the + NumPy default is used. + + weight : string or None optional (default='weight') + The edge attribute that holds the numerical value used for + the edge weight. If None then all edge weights are 1. + + format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'} + The type of the matrix to be returned (default 'csr'). For + some algorithms different implementations of sparse matrices + can perform better. See [1]_ for details. + + Returns + ------- + A : SciPy sparse array + Graph adjacency matrix. + + Notes + ----- + For directed graphs, matrix entry i,j corresponds to an edge from i to j. + + The matrix entries are populated using the edge attribute held in + parameter weight. When an edge does not have that attribute, the + value of the entry is 1. + + For multiple edges the matrix values are the sums of the edge weights. + + When `nodelist` does not contain every node in `G`, the adjacency matrix + is built from the subgraph of `G` that is induced by the nodes in + `nodelist`. + + The convention used for self-loop edges in graphs is to assign the + diagonal matrix entry value to the weight attribute of the edge + (or the number 1 if the edge has no weight attribute). 
If the + alternate convention of doubling the edge weight is desired the + resulting SciPy sparse array can be modified as follows: + + >>> G = nx.Graph([(1, 1)]) + >>> A = nx.to_scipy_sparse_array(G) + >>> print(A.todense()) + [[1]] + >>> A.setdiag(A.diagonal() * 2) + >>> print(A.toarray()) + [[2]] + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1, weight=2) + 0 + >>> G.add_edge(1, 0) + 0 + >>> G.add_edge(2, 2, weight=3) + 0 + >>> G.add_edge(2, 2) + 1 + >>> S = nx.to_scipy_sparse_array(G, nodelist=[0, 1, 2]) + >>> print(S.toarray()) + [[0 2 0] + [1 0 0] + [0 0 4]] + + References + ---------- + .. [1] Scipy Dev. References, "Sparse Matrices", + https://docs.scipy.org/doc/scipy/reference/sparse.html + """ + import scipy as sp + + if len(G) == 0: + raise nx.NetworkXError("Graph has no nodes or edges") + + if nodelist is None: + nodelist = list(G) + nlen = len(G) + else: + nlen = len(nodelist) + if nlen == 0: + raise nx.NetworkXError("nodelist has no nodes") + nodeset = set(G.nbunch_iter(nodelist)) + if nlen != len(nodeset): + for n in nodelist: + if n not in G: + raise nx.NetworkXError(f"Node {n} in nodelist is not in G") + raise nx.NetworkXError("nodelist contains duplicates.") + if nlen < len(G): + G = G.subgraph(nodelist) + + index = dict(zip(nodelist, range(nlen))) + coefficients = zip( + *((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1)) + ) + try: + row, col, data = coefficients + except ValueError: + # there is no edge in the subgraph + row, col, data = [], [], [] + + if G.is_directed(): + A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype) + else: + # symmetrize matrix + d = data + data + r = row + col + c = col + row + # selfloop entries get double counted when symmetrizing + # so we subtract the data on the diagonal + selfloops = list(nx.selfloop_edges(G, data=weight, default=1)) + if selfloops: + diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops)) + d += diag_data + r += diag_index + c += diag_index + A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype) + try: + return A.asformat(format) + except ValueError as err: + raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err + + +def _csr_gen_triples(A): + """Converts a SciPy sparse array in **Compressed Sparse Row** format to + an iterable of weighted edge triples. + + """ + nrows = A.shape[0] + indptr, dst_indices, data = A.indptr, A.indices, A.data + import numpy as np + + src_indices = np.repeat(np.arange(nrows), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) + + +def _csc_gen_triples(A): + """Converts a SciPy sparse array in **Compressed Sparse Column** format to + an iterable of weighted edge triples. + + """ + ncols = A.shape[1] + indptr, src_indices, data = A.indptr, A.indices, A.data + import numpy as np + + dst_indices = np.repeat(np.arange(ncols), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) + + +def _coo_gen_triples(A): + """Converts a SciPy sparse array in **Coordinate** format to an iterable + of weighted edge triples. + + """ + return zip(A.row.tolist(), A.col.tolist(), A.data.tolist()) + + +def _dok_gen_triples(A): + """Converts a SciPy sparse array in **Dictionary of Keys** format to an + iterable of weighted edge triples. 
+ + """ + for (r, c), v in A.items(): + # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar + yield int(r), int(c), v.item() + + +def _generate_weighted_edges(A): + """Returns an iterable over (u, v, w) triples, where u and v are adjacent + vertices and w is the weight of the edge joining u and v. + + `A` is a SciPy sparse array (in any format). + + """ + if A.format == "csr": + return _csr_gen_triples(A) + if A.format == "csc": + return _csc_gen_triples(A) + if A.format == "dok": + return _dok_gen_triples(A) + # If A is in any other format (including COO), convert it to COO format. + return _coo_gen_triples(A.tocoo()) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_scipy_sparse_array( + A, parallel_edges=False, create_using=None, edge_attribute="weight" +): + """Creates a new graph from an adjacency matrix given as a SciPy sparse + array. + + Parameters + ---------- + A: scipy.sparse array + An adjacency matrix representation of a graph + + parallel_edges : Boolean + If this is True, `create_using` is a multigraph, and `A` is an + integer matrix, then entry *(i, j)* in the matrix is interpreted as the + number of parallel edges joining vertices *i* and *j* in the graph. + If it is False, then the entries in the matrix are interpreted as + the weight of a single edge joining the vertices. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_attribute: string + Name of edge attribute to store matrix numeric value. The data will + have the same type as the matrix entry (int, float, (real,imag)). + + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or + :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the + entries of `A` are of type :class:`int`, then this function returns a + multigraph (constructed from `create_using`) with parallel edges. + In this case, `edge_attribute` will be ignored. + + If `create_using` indicates an undirected multigraph, then only the edges + indicated by the upper triangle of the matrix `A` will be added to the + graph. + + Examples + -------- + >>> import scipy as sp + >>> A = sp.sparse.eye(2, 2, 1) + >>> G = nx.from_scipy_sparse_array(A) + + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): + + >>> A = sp.sparse.csr_array([[1, 1], [1, 2]]) + >>> G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph) + >>> G[1][1] + AtlasView({0: {'weight': 2}}) + + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: + + >>> A = sp.sparse.csr_array([[1, 1], [1, 2]]) + >>> G = nx.from_scipy_sparse_array(A, parallel_edges=True, create_using=nx.MultiGraph) + >>> G[1][1] + AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) + + """ + G = nx.empty_graph(0, create_using) + n, m = A.shape + if n != m: + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") + # Make sure we get even the isolated nodes of the graph. 
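+    # (Nodes are labeled 0..n-1 from the matrix shape, so rows/columns with
+    # no nonzero entries still appear as isolated nodes in G.)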
+ G.add_nodes_from(range(n)) + # Create an iterable over (u, v, w) triples and for each triple, add an + # edge from u to v with weight w. + triples = _generate_weighted_edges(A) + # If the entries in the adjacency matrix are integers, the graph is a + # multigraph, and parallel_edges is True, then create parallel edges, each + # with weight 1, for each entry in the adjacency matrix. Otherwise, create + # one edge for each positive entry in the adjacency matrix and set the + # weight of that edge to be the entry in the matrix. + if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges: + chain = itertools.chain.from_iterable + # The following line is equivalent to: + # + # for (u, v) in edges: + # for d in range(A[u, v]): + # G.add_edge(u, v, weight=1) + # + triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples) + # If we are creating an undirected multigraph, only add the edges from the + # upper triangle of the matrix. Otherwise, add all the edges. This relies + # on the fact that the vertices created in the + # `_generated_weighted_edges()` function are actually the row/column + # indices for the matrix `A`. + # + # Without this check, we run into a problem where each edge is added twice + # when `G.add_weighted_edges_from()` is invoked below. + if G.is_multigraph() and not G.is_directed(): + triples = ((u, v, d) for u, v, d in triples if u <= v) + G.add_weighted_edges_from(triples, weight=edge_attribute) + return G + + +@nx._dispatchable(edge_attrs="weight") # edge attrs may also be obtained from `dtype` +def to_numpy_array( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a NumPy array. + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the NumPy array. + + nodelist : list, optional + The rows and columns are ordered according to the nodes in `nodelist`. + If `nodelist` is ``None``, then the ordering is produced by ``G.nodes()``. + + dtype : NumPy data type, optional + A NumPy data type used to initialize the array. If None, then the NumPy + default is used. The dtype can be structured if `weight=None`, in which + case the dtype field names are used to look up edge attributes. The + result is a structured array where each named field in the dtype + corresponds to the adjacency for that edge attribute. See examples for + details. + + order : {'C', 'F'}, optional + Whether to store multidimensional data in C- or Fortran-contiguous + (row- or column-wise) order in memory. If None, then the NumPy default + is used. + + multigraph_weight : callable, optional + An function that determines how weights in multigraphs are handled. + The function should accept a sequence of weights and return a single + value. The default is to sum the weights of the multiple edges. + + weight : string or None optional (default = 'weight') + The edge attribute that holds the numerical value used for + the edge weight. If an edge does not have that attribute, then the + value 1 is used instead. `weight` must be ``None`` if a structured + dtype is used. + + nonedge : array_like (default = 0.0) + The value used to represent non-edges in the adjacency matrix. + The array values corresponding to nonedges are typically set to zero. + However, this could be undesirable if there are array values + corresponding to actual edges that also have the value zero. If so, + one might prefer nonedges to have some other value, such as ``nan``. 
+ + Returns + ------- + A : NumPy ndarray + Graph adjacency matrix + + Raises + ------ + NetworkXError + If `dtype` is a structured dtype and `G` is a multigraph + ValueError + If `dtype` is a structured dtype and `weight` is not `None` + + See Also + -------- + from_numpy_array + + Notes + ----- + For directed graphs, entry ``i, j`` corresponds to an edge from ``i`` to ``j``. + + Entries in the adjacency matrix are given by the `weight` edge attribute. + When an edge does not have a weight attribute, the value of the entry is + set to the number 1. For multiple (parallel) edges, the values of the + entries are determined by the `multigraph_weight` parameter. The default is + to sum the weight attributes for each of the parallel edges. + + When `nodelist` does not contain every node in `G`, the adjacency matrix is + built from the subgraph of `G` that is induced by the nodes in `nodelist`. + + The convention used for self-loop edges in graphs is to assign the + diagonal array entry value to the weight attribute of the edge + (or the number 1 if the edge has no weight attribute). If the + alternate convention of doubling the edge weight is desired the + resulting NumPy array can be modified as follows: + + >>> import numpy as np + >>> G = nx.Graph([(1, 1)]) + >>> A = nx.to_numpy_array(G) + >>> A + array([[1.]]) + >>> A[np.diag_indices_from(A)] *= 2 + >>> A + array([[2.]]) + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1, weight=2) + 0 + >>> G.add_edge(1, 0) + 0 + >>> G.add_edge(2, 2, weight=3) + 0 + >>> G.add_edge(2, 2) + 1 + >>> nx.to_numpy_array(G, nodelist=[0, 1, 2]) + array([[0., 2., 0.], + [1., 0., 0.], + [0., 0., 4.]]) + + When `nodelist` argument is used, nodes of `G` which do not appear in the `nodelist` + and their edges are not included in the adjacency matrix. Here is an example: + + >>> G = nx.Graph() + >>> G.add_edge(3, 1) + >>> G.add_edge(2, 0) + >>> G.add_edge(2, 1) + >>> G.add_edge(3, 0) + >>> nx.to_numpy_array(G, nodelist=[1, 2, 3]) + array([[0., 1., 1.], + [1., 0., 0.], + [1., 0., 0.]]) + + This function can also be used to create adjacency matrices for multiple + edge attributes with structured dtypes: + + >>> G = nx.Graph() + >>> G.add_edge(0, 1, weight=10) + >>> G.add_edge(1, 2, cost=5) + >>> G.add_edge(2, 3, weight=3, cost=-4.0) + >>> dtype = np.dtype([("weight", int), ("cost", float)]) + >>> A = nx.to_numpy_array(G, dtype=dtype, weight=None) + >>> A["weight"] + array([[ 0, 10, 0, 0], + [10, 0, 1, 0], + [ 0, 1, 0, 3], + [ 0, 0, 3, 0]]) + >>> A["cost"] + array([[ 0., 1., 0., 0.], + [ 1., 0., 5., 0.], + [ 0., 5., 0., -4.], + [ 0., 0., -4., 0.]]) + + As stated above, the argument "nonedge" is useful especially when there are + actually edges with weight 0 in the graph. Setting a nonedge value different than 0, + makes it much clearer to differentiate such 0-weighted edges and actual nonedge values. 
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(3, 1, weight=2)
+    >>> G.add_edge(2, 0, weight=0)
+    >>> G.add_edge(2, 1, weight=0)
+    >>> G.add_edge(3, 0, weight=1)
+    >>> nx.to_numpy_array(G, nonedge=-1.0)
+    array([[-1.,  2., -1.,  1.],
+           [ 2., -1.,  0., -1.],
+           [-1.,  0., -1.,  0.],
+           [ 1., -1.,  0., -1.]])
+    """
+    import numpy as np
+
+    if nodelist is None:
+        nodelist = list(G)
+    nlen = len(nodelist)
+
+    # Input validation
+    nodeset = set(nodelist)
+    if nodeset - set(G):
+        raise nx.NetworkXError(f"Nodes {nodeset - set(G)} in nodelist are not in G")
+    if len(nodeset) < nlen:
+        raise nx.NetworkXError("nodelist contains duplicates.")
+
+    A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order)
+
+    # Corner cases: empty nodelist or graph without any edges
+    if nlen == 0 or G.number_of_edges() == 0:
+        return A
+
+    # If dtype is structured and weight is None, use dtype field names as
+    # edge attributes
+    edge_attrs = None  # Only single edge attribute by default
+    if A.dtype.names:
+        if weight is None:
+            edge_attrs = dtype.names
+        else:
+            raise ValueError(
+                "Specifying `weight` not supported for structured dtypes.\n"
+                "To create adjacency matrices from structured dtypes, use `weight=None`."
+            )
+
+    # Map nodes to row/col in matrix
+    idx = dict(zip(nodelist, range(nlen)))
+    if len(nodelist) < len(G):
+        G = G.subgraph(nodelist).copy()
+
+    # Collect all edge weights and reduce with `multigraph_weight`
+    if G.is_multigraph():
+        if edge_attrs:
+            raise nx.NetworkXError(
+                "Structured arrays are not supported for MultiGraphs"
+            )
+        d = defaultdict(list)
+        for u, v, wt in G.edges(data=weight, default=1.0):
+            d[(idx[u], idx[v])].append(wt)
+        i, j = np.array(list(d.keys())).T  # indices
+        wts = [multigraph_weight(ws) for ws in d.values()]  # reduced weights
+    else:
+        i, j, wts = [], [], []
+
+        # Special branch: multi-attr adjacency from structured dtypes
+        if edge_attrs:
+            # Extract edges with all data
+            for u, v, data in G.edges(data=True):
+                i.append(idx[u])
+                j.append(idx[v])
+                wts.append(data)
+            # Map each attribute to the appropriate named field in the
+            # structured dtype
+            for attr in edge_attrs:
+                attr_data = [wt.get(attr, 1.0) for wt in wts]
+                A[attr][i, j] = attr_data
+                if not G.is_directed():
+                    A[attr][j, i] = attr_data
+            return A
+
+        for u, v, wt in G.edges(data=weight, default=1.0):
+            i.append(idx[u])
+            j.append(idx[v])
+            wts.append(wt)
+
+    # Set array values with advanced indexing
+    A[i, j] = wts
+    if not G.is_directed():
+        A[j, i] = wts
+
+    return A
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_numpy_array(A, parallel_edges=False, create_using=None, edge_attr="weight"):
+    """Returns a graph from a 2D NumPy array.
+
+    The 2D NumPy array is interpreted as an adjacency matrix for the graph.
+
+    Parameters
+    ----------
+    A : a 2D numpy.ndarray
+        An adjacency matrix representation of a graph
+
+    parallel_edges : Boolean
+        If this is True, `create_using` is a multigraph, and `A` is an
+        integer array, then entry *(i, j)* in the array is interpreted as the
+        number of parallel edges joining vertices *i* and *j* in the graph.
+        If it is False, then the entries in the array are interpreted as
+        the weight of a single edge joining the vertices.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    edge_attr : String, optional (default="weight")
+        The attribute to which the array values are assigned on each edge. If
+        it is None, edge attributes will not be assigned.
+ + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or + :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the + entries of `A` are of type :class:`int`, then this function returns a + multigraph (of the same type as `create_using`) with parallel edges. + + If `create_using` indicates an undirected multigraph, then only the edges + indicated by the upper triangle of the array `A` will be added to the + graph. + + If `edge_attr` is Falsy (False or None), edge attributes will not be + assigned, and the array data will be treated like a binary mask of + edge presence or absence. Otherwise, the attributes will be assigned + as follows: + + If the NumPy array has a single data type for each array entry it + will be converted to an appropriate Python data type. + + If the NumPy array has a user-specified compound data type the names + of the data fields will be used as attribute keys in the resulting + NetworkX graph. + + See Also + -------- + to_numpy_array + + Examples + -------- + Simple integer weights on edges: + + >>> import numpy as np + >>> A = np.array([[1, 1], [2, 1]]) + >>> G = nx.from_numpy_array(A) + >>> G.edges(data=True) + EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), (1, 1, {'weight': 1})]) + + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): + + >>> A = np.array([[1, 1], [1, 2]]) + >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph) + >>> G[1][1] + AtlasView({0: {'weight': 2}}) + + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: + + >>> A = np.array([[1, 1], [1, 2]]) + >>> temp = nx.MultiGraph() + >>> G = nx.from_numpy_array(A, parallel_edges=True, create_using=temp) + >>> G[1][1] + AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) + + User defined compound data type on edges: + + >>> dt = [("weight", float), ("cost", int)] + >>> A = np.array([[(1.0, 2)]], dtype=dt) + >>> G = nx.from_numpy_array(A) + >>> G.edges() + EdgeView([(0, 0)]) + >>> G[0][0]["cost"] + 2 + >>> G[0][0]["weight"] + 1.0 + + """ + kind_to_python_type = { + "f": float, + "i": int, + "u": int, + "b": bool, + "c": complex, + "S": str, + "U": str, + "V": "void", + } + G = nx.empty_graph(0, create_using) + if A.ndim != 2: + raise nx.NetworkXError(f"Input array must be 2D, not {A.ndim}") + n, m = A.shape + if n != m: + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") + dt = A.dtype + try: + python_type = kind_to_python_type[dt.kind] + except Exception as err: + raise TypeError(f"Unknown numpy data type: {dt}") from err + + # Make sure we get even the isolated nodes of the graph. + G.add_nodes_from(range(n)) + # Get a list of all the entries in the array with nonzero entries. These + # coordinates become edges in the graph. (convert to int from np.int64) + edges = ((int(e[0]), int(e[1])) for e in zip(*A.nonzero())) + # handle numpy constructed data type + if python_type == "void": + # Sort the fields by their offset, then by dtype, then by name. 
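+        # (This keeps the field metadata in the same order as the values
+        # yielded by the structured scalar ``A[u, v]``, which the ``zip``
+        # in the comprehension below relies on.)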
+ fields = sorted( + (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items() + ) + triples = ( + ( + u, + v, + {} + if edge_attr in [False, None] + else { + name: kind_to_python_type[dtype.kind](val) + for (_, dtype, name), val in zip(fields, A[u, v]) + }, + ) + for u, v in edges + ) + # If the entries in the adjacency matrix are integers, the graph is a + # multigraph, and parallel_edges is True, then create parallel edges, each + # with weight 1, for each entry in the adjacency matrix. Otherwise, create + # one edge for each positive entry in the adjacency matrix and set the + # weight of that edge to be the entry in the matrix. + elif python_type is int and G.is_multigraph() and parallel_edges: + chain = itertools.chain.from_iterable + # The following line is equivalent to: + # + # for (u, v) in edges: + # for d in range(A[u, v]): + # G.add_edge(u, v, weight=1) + # + if edge_attr in [False, None]: + triples = chain(((u, v, {}) for d in range(A[u, v])) for (u, v) in edges) + else: + triples = chain( + ((u, v, {edge_attr: 1}) for d in range(A[u, v])) for (u, v) in edges + ) + else: # basic data type + if edge_attr in [False, None]: + triples = ((u, v, {}) for u, v in edges) + else: + triples = ((u, v, {edge_attr: python_type(A[u, v])}) for u, v in edges) + # If we are creating an undirected multigraph, only add the edges from the + # upper triangle of the matrix. Otherwise, add all the edges. This relies + # on the fact that the vertices created in the + # `_generated_weighted_edges()` function are actually the row/column + # indices for the matrix `A`. + # + # Without this check, we run into a problem where each edge is added twice + # when `G.add_edges_from()` is invoked below. + if G.is_multigraph() and not G.is_directed(): + triples = ((u, v, d) for u, v, d in triples if u <= v) + G.add_edges_from(triples) + return G diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/drawing/__init__.py b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0f53309d4da23a445bcce8cb7570a6de364452b5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/__init__.py @@ -0,0 +1,7 @@ +# graph drawing and interface to graphviz + +from .layout import * +from .nx_latex import * +from .nx_pylab import * +from . import nx_agraph +from . import nx_pydot diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_agraph.py b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_agraph.py new file mode 100644 index 0000000000000000000000000000000000000000..f91031fcae6a0ab9edeb3139a890220aa7db6d3b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_agraph.py @@ -0,0 +1,465 @@ +""" +*************** +Graphviz AGraph +*************** + +Interface to pygraphviz AGraph class. + +Examples +-------- +>>> G = nx.complete_graph(5) +>>> A = nx.nx_agraph.to_agraph(G) +>>> H = nx.nx_agraph.from_agraph(A) + +See Also +-------- + - Pygraphviz: http://pygraphviz.github.io/ + - Graphviz: https://www.graphviz.org + - DOT Language: http://www.graphviz.org/doc/info/lang.html +""" +import os +import tempfile + +import networkx as nx + +__all__ = [ + "from_agraph", + "to_agraph", + "write_dot", + "read_dot", + "graphviz_layout", + "pygraphviz_layout", + "view_pygraphviz", +] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_agraph(A, create_using=None): + """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph. 
+
+    Parameters
+    ----------
+    A : PyGraphviz AGraph
+        A graph created with PyGraphviz
+
+    create_using : NetworkX graph constructor, optional (default=None)
+        Graph type to create. If graph instance, then cleared before populated.
+        If `None`, then the appropriate Graph type is inferred from `A`.
+
+    Examples
+    --------
+    >>> K5 = nx.complete_graph(5)
+    >>> A = nx.nx_agraph.to_agraph(K5)
+    >>> G = nx.nx_agraph.from_agraph(A)
+
+    Notes
+    -----
+    The default graphviz attributes for graphs, nodes and edges are
+    stored in the dictionaries G.graph["graph"], G.graph["node"] and
+    G.graph["edge"].
+
+    Per-node attributes are returned as node data in G, keyed by node.
+
+    Edge attributes are returned as edge data in G.
+
+    """
+    if create_using is None:
+        if A.is_directed():
+            if A.is_strict():
+                create_using = nx.DiGraph
+            else:
+                create_using = nx.MultiDiGraph
+        else:
+            if A.is_strict():
+                create_using = nx.Graph
+            else:
+                create_using = nx.MultiGraph
+
+    # assign defaults
+    N = nx.empty_graph(0, create_using)
+    if A.name is not None:
+        N.name = A.name
+
+    # add graph attributes
+    N.graph.update(A.graph_attr)
+
+    # add nodes, attributes to N.node_attr
+    for n in A.nodes():
+        str_attr = {str(k): v for k, v in n.attr.items()}
+        N.add_node(str(n), **str_attr)
+
+    # add edges, assign edge data as dictionary of attributes
+    for e in A.edges():
+        u, v = str(e[0]), str(e[1])
+        attr = dict(e.attr)
+        str_attr = {str(k): v for k, v in attr.items()}
+        if not N.is_multigraph():
+            if e.name is not None:
+                str_attr["key"] = e.name
+            N.add_edge(u, v, **str_attr)
+        else:
+            N.add_edge(u, v, key=e.name, **str_attr)
+
+    # add default attributes for graph, nodes, and edges
+    # hang them on N.graph_attr
+    N.graph["graph"] = dict(A.graph_attr)
+    N.graph["node"] = dict(A.node_attr)
+    N.graph["edge"] = dict(A.edge_attr)
+    return N
+
+
+def to_agraph(N):
+    """Returns a pygraphviz graph from a NetworkX graph N.
+
+    Parameters
+    ----------
+    N : NetworkX graph
+        A graph created with NetworkX
+
+    Examples
+    --------
+    >>> K5 = nx.complete_graph(5)
+    >>> A = nx.nx_agraph.to_agraph(K5)
+
+    Notes
+    -----
+    If N has a dict N.graph_attr, an attempt will first be made
+    to copy properties attached to the graph (see from_agraph)
+    and then update them with any calling arguments.
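+
+    For example, default attribute blocks can be supplied under the reserved
+    keys "graph", "node" and "edge" of ``N.graph``; a minimal sketch::
+
+        >>> G = nx.path_graph(2)
+        >>> G.graph["node"] = {"shape": "box"}
+        >>> A = nx.nx_agraph.to_agraph(G)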
+ + """ + try: + import pygraphviz + except ImportError as err: + raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err + directed = N.is_directed() + strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() + + for node in N: + if "pos" in N.nodes[node]: + N.nodes[node]["pos"] = "{},{}!".format( + N.nodes[node]["pos"][0], N.nodes[node]["pos"][1] + ) + + A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed) + + # default graph attributes + A.graph_attr.update(N.graph.get("graph", {})) + A.node_attr.update(N.graph.get("node", {})) + A.edge_attr.update(N.graph.get("edge", {})) + + A.graph_attr.update( + (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge") + ) + + # add nodes + for n, nodedata in N.nodes(data=True): + A.add_node(n) + # Add node data + a = A.get_node(n) + a.attr.update({k: str(v) for k, v in nodedata.items()}) + + # loop over edges + if N.is_multigraph(): + for u, v, key, edgedata in N.edges(data=True, keys=True): + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} + A.add_edge(u, v, key=str(key)) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) + + else: + for u, v, edgedata in N.edges(data=True): + str_edgedata = {k: str(v) for k, v in edgedata.items()} + A.add_edge(u, v) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) + + return A + + +def write_dot(G, path): + """Write NetworkX graph G to Graphviz dot format on path. + + Parameters + ---------- + G : graph + A networkx graph + path : filename + Filename or file handle to write + + Notes + ----- + To use a specific graph layout, call ``A.layout`` prior to `write_dot`. + Note that some graphviz layouts are not guaranteed to be deterministic, + see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info. + """ + A = to_agraph(G) + A.write(path) + A.clear() + return + + +@nx._dispatchable(name="agraph_read_dot", graphs=None, returns_graph=True) +def read_dot(path): + """Returns a NetworkX graph from a dot file on path. + + Parameters + ---------- + path : file or string + File name or file handle to read. + """ + try: + import pygraphviz + except ImportError as err: + raise ImportError( + "read_dot() requires pygraphviz http://pygraphviz.github.io/" + ) from err + A = pygraphviz.AGraph(file=path) + gr = from_agraph(A) + A.clear() + return gr + + +def graphviz_layout(G, prog="neato", root=None, args=""): + """Create node positions for G using Graphviz. + + Parameters + ---------- + G : NetworkX graph + A graph created with NetworkX + prog : string + Name of Graphviz layout program + root : string, optional + Root node for twopi layout + args : string, optional + Extra arguments to Graphviz layout program + + Returns + ------- + Dictionary of x, y, positions keyed by node. + + Examples + -------- + >>> G = nx.petersen_graph() + >>> pos = nx.nx_agraph.graphviz_layout(G) + >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot") + + Notes + ----- + This is a wrapper for pygraphviz_layout. + + Note that some graphviz layouts are not guaranteed to be deterministic, + see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info. + """ + return pygraphviz_layout(G, prog=prog, root=root, args=args) + + +def pygraphviz_layout(G, prog="neato", root=None, args=""): + """Create node positions for G using Graphviz. 
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph created with NetworkX
+    prog : string
+        Name of Graphviz layout program
+    root : string, optional
+        Root node for twopi layout
+    args : string, optional
+        Extra arguments to Graphviz layout program
+
+    Returns
+    -------
+    node_pos : dict
+        Dictionary of x, y, positions keyed by node.
+
+    Examples
+    --------
+    >>> G = nx.petersen_graph()
+    >>> pos = nx.nx_agraph.graphviz_layout(G)
+    >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
+
+    Notes
+    -----
+    If you use complex node objects, they may have the same string
+    representation and GraphViz could treat them as the same node.
+    The layout may assign both nodes a single location. See Issue #1568.
+    If this occurs in your case, consider relabeling the nodes just
+    for the layout computation using something similar to::
+
+        >>> H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
+        >>> H_layout = nx.nx_agraph.pygraphviz_layout(H, prog="dot")
+        >>> G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}
+
+    Note that some graphviz layouts are not guaranteed to be deterministic,
+    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
+    """
+    try:
+        import pygraphviz
+    except ImportError as err:
+        raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err
+    if root is not None:
+        args += f" -Groot={root}"
+    A = to_agraph(G)
+    A.layout(prog=prog, args=args)
+    node_pos = {}
+    for n in G:
+        node = pygraphviz.Node(A, n)
+        try:
+            xs = node.attr["pos"].split(",")
+            node_pos[n] = tuple(float(x) for x in xs)
+        except (KeyError, ValueError, AttributeError):
+            print("no position for node", n)
+            node_pos[n] = (0.0, 0.0)
+    return node_pos
+
+
+@nx.utils.open_file(5, "w+b")
+def view_pygraphviz(
+    G, edgelabel=None, prog="dot", args="", suffix="", path=None, show=True
+):
+    """Views the graph G using the specified layout algorithm.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph to draw.
+    edgelabel : str, callable, None
+        If a string, then it specifies the edge attribute to be displayed
+        on the edge labels. If a callable, then it is called for each
+        edge and it should return the string to be displayed on the edges.
+        The function signature of `edgelabel` should be edgelabel(data),
+        where `data` is the edge attribute dictionary.
+    prog : string
+        Name of Graphviz layout program.
+    args : str
+        Additional arguments to pass to the Graphviz layout program.
+    suffix : str
+        If `path` is None, we save to a temporary file. The value of
+        `suffix` will appear at the tail end of the temporary filename.
+    path : str, None
+        The filename used to save the image. If None, save to a temporary
+        file. File formats are the same as those from pygraphviz.agraph.draw.
+    show : bool, default = True
+        Whether to display the graph with :mod:`PIL.Image.show`,
+        default is `True`. If `False`, the rendered graph is still available
+        at `path`.
+
+    Returns
+    -------
+    path : str
+        The filename of the generated image.
+    A : PyGraphviz graph
+        The PyGraphviz graph instance used to generate the image.
+
+    Notes
+    -----
+    If this function is called in succession too quickly, sometimes the
+    image is not displayed. So you might consider time.sleep(.5) between
+    calls if you experience problems.
+
+    Note that some graphviz layouts are not guaranteed to be deterministic,
+    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
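+
+    A minimal sketch of a callable `edgelabel` (assuming the edges carry a
+    "weight" attribute)::
+
+        def weight_label(data):
+            # `data` is the edge attribute dictionary for one edge
+            return f"w={data['weight']}"
+
+        path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel=weight_label)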
+ + """ + if not len(G): + raise nx.NetworkXException("An empty graph cannot be drawn.") + + # If we are providing default values for graphviz, these must be set + # before any nodes or edges are added to the PyGraphviz graph object. + # The reason for this is that default values only affect incoming objects. + # If you change the default values after the objects have been added, + # then they inherit no value and are set only if explicitly set. + + # to_agraph() uses these values. + attrs = ["edge", "node", "graph"] + for attr in attrs: + if attr not in G.graph: + G.graph[attr] = {} + + # These are the default values. + edge_attrs = {"fontsize": "10"} + node_attrs = { + "style": "filled", + "fillcolor": "#0000FF40", + "height": "0.75", + "width": "0.75", + "shape": "circle", + } + graph_attrs = {} + + def update_attrs(which, attrs): + # Update graph attributes. Return list of those which were added. + added = [] + for k, v in attrs.items(): + if k not in G.graph[which]: + G.graph[which][k] = v + added.append(k) + + def clean_attrs(which, added): + # Remove added attributes + for attr in added: + del G.graph[which][attr] + if not G.graph[which]: + del G.graph[which] + + # Update all default values + update_attrs("edge", edge_attrs) + update_attrs("node", node_attrs) + update_attrs("graph", graph_attrs) + + # Convert to agraph, so we inherit default values + A = to_agraph(G) + + # Remove the default values we added to the original graph. + clean_attrs("edge", edge_attrs) + clean_attrs("node", node_attrs) + clean_attrs("graph", graph_attrs) + + # If the user passed in an edgelabel, we update the labels for all edges. + if edgelabel is not None: + if not callable(edgelabel): + + def func(data): + return "".join([" ", str(data[edgelabel]), " "]) + + else: + func = edgelabel + + # update all the edge labels + if G.is_multigraph(): + for u, v, key, data in G.edges(keys=True, data=True): + # PyGraphviz doesn't convert the key to a string. See #339 + edge = A.get_edge(u, v, str(key)) + edge.attr["label"] = str(func(data)) + else: + for u, v, data in G.edges(data=True): + edge = A.get_edge(u, v) + edge.attr["label"] = str(func(data)) + + if path is None: + ext = "png" + if suffix: + suffix = f"_{suffix}.{ext}" + else: + suffix = f".{ext}" + path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) + else: + # Assume the decorator worked and it is a file-object. + pass + + # Write graph to file + A.draw(path=path, format=None, prog=prog, args=args) + path.close() + + # Show graph in a new window (depends on platform configuration) + if show: + from PIL import Image + + Image.open(path.name).show() + + return path.name, A diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_latex.py b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_latex.py new file mode 100644 index 0000000000000000000000000000000000000000..8bc6ba2195499541a863e6d562f992148c54a1c2 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_latex.py @@ -0,0 +1,571 @@ +r""" +***** +LaTeX +***** + +Export NetworkX graphs in LaTeX format using the TikZ library within TeX/LaTeX. +Usually, you will want the drawing to appear in a figure environment so +you use ``to_latex(G, caption="A caption")``. If you want the raw +drawing commands without a figure environment use :func:`to_latex_raw`. +And if you want to write to a file instead of just returning the latex +code as a string, use ``write_latex(G, "filename.tex", caption="A caption")``. 
+
+To construct a figure with subfigures for each graph to be shown, provide
+``to_latex`` or ``write_latex`` a list of graphs, a list of subcaptions,
+and a number of rows of subfigures inside the figure.
+
+To be able to refer to the figures or subfigures in LaTeX using ``\\ref``,
+the keyword ``latex_label`` is available for figures and ``sub_labels`` for
+a list of labels, one for each subfigure.
+
+We intend to eventually provide an interface to the TikZ Graph
+features which include e.g. layout algorithms.
+
+Let us know via GitHub what you'd like to see available, or better yet
+give us some code to do it, or even better make a GitHub pull request
+to add the feature.
+
+The TikZ approach
+=================
+Drawing options can be stored on the graph as node/edge attributes, or
+can be provided as dicts keyed by node/edge to a string of the options
+for that node/edge. Similarly a label can be shown for each node/edge
+by specifying the labels as graph node/edge attributes or by providing
+a dict keyed by node/edge to the text to be written for that node/edge.
+
+Options for the tikzpicture environment (e.g. "[scale=2]") can be provided
+via a keyword argument. Similarly default node and edge options can be
+provided through keyword arguments. The default node options are applied
+to the single TikZ "path" that draws all nodes (and no edges). The default edge
+options are applied to a TikZ "scope" which contains a path for each edge.
+
+Examples
+========
+>>> G = nx.path_graph(3)
+>>> nx.write_latex(G, "just_my_figure.tex", as_document=True)
+>>> nx.write_latex(G, "my_figure.tex", caption="A path graph", latex_label="fig1")
+>>> latex_code = nx.to_latex(G)  # a string rather than a file
+
+You can change many features of the nodes and edges.
+
+>>> G = nx.path_graph(4, create_using=nx.DiGraph)
+>>> pos = {n: (n, n) for n in G}  # nodes set on a line
+
+>>> G.nodes[0]["style"] = "blue"
+>>> G.nodes[2]["style"] = "line width=3,draw"
+>>> G.nodes[3]["label"] = "Stop"
+>>> G.edges[(0, 1)]["label"] = "1st Step"
+>>> G.edges[(0, 1)]["label_opts"] = "near start"
+>>> G.edges[(1, 2)]["style"] = "line width=3"
+>>> G.edges[(1, 2)]["label"] = "2nd Step"
+>>> G.edges[(2, 3)]["style"] = "green"
+>>> G.edges[(2, 3)]["label"] = "3rd Step"
+>>> G.edges[(2, 3)]["label_opts"] = "near end"
+
+>>> nx.write_latex(G, "latex_graph.tex", pos=pos, as_document=True)
+
+Then compile the LaTeX using something like ``pdflatex latex_graph.tex``
+and view the pdf file created: ``latex_graph.pdf``.
+
+If you want **subfigures** each containing one graph, you can input a list of graphs.
+ +>>> H1 = nx.path_graph(4) +>>> H2 = nx.complete_graph(4) +>>> H3 = nx.path_graph(8) +>>> H4 = nx.complete_graph(8) +>>> graphs = [H1, H2, H3, H4] +>>> caps = ["Path 4", "Complete graph 4", "Path 8", "Complete graph 8"] +>>> lbls = ["fig2a", "fig2b", "fig2c", "fig2d"] +>>> nx.write_latex(graphs, "subfigs.tex", n_rows=2, sub_captions=caps, sub_labels=lbls) +>>> latex_code = nx.to_latex(graphs, n_rows=2, sub_captions=caps, sub_labels=lbls) + +>>> node_color = {0: "red", 1: "orange", 2: "blue", 3: "gray!90"} +>>> edge_width = {e: "line width=1.5" for e in H3.edges} +>>> pos = nx.circular_layout(H3) +>>> latex_code = nx.to_latex(H3, pos, node_options=node_color, edge_options=edge_width) +>>> print(latex_code) +\documentclass{report} +\usepackage{tikz} +\usepackage{subcaption} + +\begin{document} +\begin{figure} + \begin{tikzpicture} + \draw + (1.0, 0.0) node[red] (0){0} + (0.707, 0.707) node[orange] (1){1} + (-0.0, 1.0) node[blue] (2){2} + (-0.707, 0.707) node[gray!90] (3){3} + (-1.0, -0.0) node (4){4} + (-0.707, -0.707) node (5){5} + (0.0, -1.0) node (6){6} + (0.707, -0.707) node (7){7}; + \begin{scope}[-] + \draw[line width=1.5] (0) to (1); + \draw[line width=1.5] (1) to (2); + \draw[line width=1.5] (2) to (3); + \draw[line width=1.5] (3) to (4); + \draw[line width=1.5] (4) to (5); + \draw[line width=1.5] (5) to (6); + \draw[line width=1.5] (6) to (7); + \end{scope} + \end{tikzpicture} +\end{figure} +\end{document} + +Notes +----- +If you want to change the preamble/postamble of the figure/document/subfigure +environment, use the keyword arguments: `figure_wrapper`, `document_wrapper`, +`subfigure_wrapper`. The default values are stored in private variables +e.g. ``nx.nx_layout._DOCUMENT_WRAPPER`` + +References +---------- +TikZ: https://tikz.dev/ + +TikZ options details: https://tikz.dev/tikz-actions +""" +import numbers +import os + +import networkx as nx + +__all__ = [ + "to_latex_raw", + "to_latex", + "write_latex", +] + + +@nx.utils.not_implemented_for("multigraph") +def to_latex_raw( + G, + pos="pos", + tikz_options="", + default_node_options="", + node_options="node_options", + node_label="label", + default_edge_options="", + edge_options="edge_options", + edge_label="label", + edge_label_options="edge_label_options", +): + """Return a string of the LaTeX/TikZ code to draw `G` + + This function produces just the code for the tikzpicture + without any enclosing environment. + + Parameters + ========== + G : NetworkX graph + The NetworkX graph to be drawn + pos : string or dict (default "pos") + The name of the node attribute on `G` that holds the position of each node. + Positions can be sequences of length 2 with numbers for (x,y) coordinates. + They can also be strings to denote positions in TikZ style, such as (x, y) + or (angle:radius). + If a dict, it should be keyed by node to a position. + If an empty dict, a circular layout is computed by TikZ. + tikz_options : string + The tikzpicture options description defining the options for the picture. + Often large scale options like `[scale=2]`. + default_node_options : string + The draw options for a path of nodes. Individual node options override these. + node_options : string or dict + The name of the node attribute on `G` that holds the options for each node. + Or a dict keyed by node to a string holding the options for that node. + node_label : string or dict + The name of the node attribute on `G` that holds the node label (text) + displayed for each node. 
If the attribute is "" or not present, the node + itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed. + Or a dict keyed by node to a string holding the label for that node. + default_edge_options : string + The options for the scope drawing all edges. The default is "[-]" for + undirected graphs and "[->]" for directed graphs. + edge_options : string or dict + The name of the edge attribute on `G` that holds the options for each edge. + If the edge is a self-loop and ``"loop" not in edge_options`` the option + "loop," is added to the options for the self-loop edge. Hence you can + use "[loop above]" explicitly, but the default is "[loop]". + Or a dict keyed by edge to a string holding the options for that edge. + edge_label : string or dict + The name of the edge attribute on `G` that holds the edge label (text) + displayed for each edge. If the attribute is "" or not present, no edge + label is drawn. + Or a dict keyed by edge to a string holding the label for that edge. + edge_label_options : string or dict + The name of the edge attribute on `G` that holds the label options for + each edge. For example, "[sloped,above,blue]". The default is no options. + Or a dict keyed by edge to a string holding the label options for that edge. + + Returns + ======= + latex_code : string + The text string which draws the desired graph(s) when compiled by LaTeX. + + See Also + ======== + to_latex + write_latex + """ + i4 = "\n " + i8 = "\n " + + # set up position dict + # TODO allow pos to be None and use a nice TikZ default + if not isinstance(pos, dict): + pos = nx.get_node_attributes(G, pos) + if not pos: + # circular layout with radius 2 + pos = {n: f"({round(360.0 * i / len(G), 3)}:2)" for i, n in enumerate(G)} + for node in G: + if node not in pos: + raise nx.NetworkXError(f"node {node} has no specified pos {pos}") + posnode = pos[node] + if not isinstance(posnode, str): + try: + posx, posy = posnode + pos[node] = f"({round(posx, 3)}, {round(posy, 3)})" + except (TypeError, ValueError): + msg = f"position pos[{node}] is not 2-tuple or a string: {posnode}" + raise nx.NetworkXError(msg) + + # set up all the dicts + if not isinstance(node_options, dict): + node_options = nx.get_node_attributes(G, node_options) + if not isinstance(node_label, dict): + node_label = nx.get_node_attributes(G, node_label) + if not isinstance(edge_options, dict): + edge_options = nx.get_edge_attributes(G, edge_options) + if not isinstance(edge_label, dict): + edge_label = nx.get_edge_attributes(G, edge_label) + if not isinstance(edge_label_options, dict): + edge_label_options = nx.get_edge_attributes(G, edge_label_options) + + # process default options (add brackets or not) + topts = "" if tikz_options == "" else f"[{tikz_options.strip('[]')}]" + defn = "" if default_node_options == "" else f"[{default_node_options.strip('[]')}]" + linestyle = f"{'->' if G.is_directed() else '-'}" + if default_edge_options == "": + defe = "[" + linestyle + "]" + elif "-" in default_edge_options: + defe = default_edge_options + else: + defe = f"[{linestyle},{default_edge_options.strip('[]')}]" + + # Construct the string line by line + result = " \\begin{tikzpicture}" + topts + result += i4 + " \\draw" + defn + # load the nodes + for n in G: + # node options goes inside square brackets + nopts = f"[{node_options[n].strip('[]')}]" if n in node_options else "" + # node text goes inside curly brackets {} + ntext = f"{{{node_label[n]}}}" if n in node_label else f"{{{n}}}" + + result += i8 + f"{pos[n]} node{nopts} 
({n}){ntext}" + result += ";\n" + + # load the edges + result += " \\begin{scope}" + defe + for edge in G.edges: + u, v = edge[:2] + e_opts = f"{edge_options[edge]}".strip("[]") if edge in edge_options else "" + # add loop options for selfloops if not present + if u == v and "loop" not in e_opts: + e_opts = "loop," + e_opts + e_opts = f"[{e_opts}]" if e_opts != "" else "" + # TODO -- handle bending of multiedges + + els = edge_label_options[edge] if edge in edge_label_options else "" + # edge label options goes inside square brackets [] + els = f"[{els.strip('[]')}]" + # edge text is drawn using the TikZ node command inside curly brackets {} + e_label = f" node{els} {{{edge_label[edge]}}}" if edge in edge_label else "" + + result += i8 + f"\\draw{e_opts} ({u}) to{e_label} ({v});" + + result += "\n \\end{scope}\n \\end{tikzpicture}\n" + return result + + +_DOC_WRAPPER_TIKZ = r"""\documentclass{{report}} +\usepackage{{tikz}} +\usepackage{{subcaption}} + +\begin{{document}} +{content} +\end{{document}}""" + + +_FIG_WRAPPER = r"""\begin{{figure}} +{content}{caption}{label} +\end{{figure}}""" + + +_SUBFIG_WRAPPER = r""" \begin{{subfigure}}{{{size}\textwidth}} +{content}{caption}{label} + \end{{subfigure}}""" + + +def to_latex( + Gbunch, + pos="pos", + tikz_options="", + default_node_options="", + node_options="node_options", + node_label="node_label", + default_edge_options="", + edge_options="edge_options", + edge_label="edge_label", + edge_label_options="edge_label_options", + caption="", + latex_label="", + sub_captions=None, + sub_labels=None, + n_rows=1, + as_document=True, + document_wrapper=_DOC_WRAPPER_TIKZ, + figure_wrapper=_FIG_WRAPPER, + subfigure_wrapper=_SUBFIG_WRAPPER, +): + """Return latex code to draw the graph(s) in `Gbunch` + + The TikZ drawing utility in LaTeX is used to draw the graph(s). + If `Gbunch` is a graph, it is drawn in a figure environment. + If `Gbunch` is an iterable of graphs, each is drawn in a subfigure environment + within a single figure environment. + + If `as_document` is True, the figure is wrapped inside a document environment + so that the resulting string is ready to be compiled by LaTeX. Otherwise, + the string is ready for inclusion in a larger tex document using ``\\include`` + or ``\\input`` statements. + + Parameters + ========== + Gbunch : NetworkX graph or iterable of NetworkX graphs + The NetworkX graph to be drawn or an iterable of graphs + to be drawn inside subfigures of a single figure. + pos : string or list of strings + The name of the node attribute on `G` that holds the position of each node. + Positions can be sequences of length 2 with numbers for (x,y) coordinates. + They can also be strings to denote positions in TikZ style, such as (x, y) + or (angle:radius). + If a dict, it should be keyed by node to a position. + If an empty dict, a circular layout is computed by TikZ. + If you are drawing many graphs in subfigures, use a list of position dicts. + tikz_options : string + The tikzpicture options description defining the options for the picture. + Often large scale options like `[scale=2]`. + default_node_options : string + The draw options for a path of nodes. Individual node options override these. + node_options : string or dict + The name of the node attribute on `G` that holds the options for each node. + Or a dict keyed by node to a string holding the options for that node. + node_label : string or dict + The name of the node attribute on `G` that holds the node label (text) + displayed for each node. 
If the attribute is "" or not present, the node
+        itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
+        Or a dict keyed by node to a string holding the label for that node.
+    default_edge_options : string
+        The options for the scope drawing all edges. The default is "[-]" for
+        undirected graphs and "[->]" for directed graphs.
+    edge_options : string or dict
+        The name of the edge attribute on `G` that holds the options for each edge.
+        If the edge is a self-loop and ``"loop" not in edge_options`` the option
+        "loop," is added to the options for the self-loop edge. Hence you can
+        use "[loop above]" explicitly, but the default is "[loop]".
+        Or a dict keyed by edge to a string holding the options for that edge.
+    edge_label : string or dict
+        The name of the edge attribute on `G` that holds the edge label (text)
+        displayed for each edge. If the attribute is "" or not present, no edge
+        label is drawn.
+        Or a dict keyed by edge to a string holding the label for that edge.
+    edge_label_options : string or dict
+        The name of the edge attribute on `G` that holds the label options for
+        each edge. For example, "[sloped,above,blue]". The default is no options.
+        Or a dict keyed by edge to a string holding the label options for that edge.
+    caption : string
+        The caption string for the figure environment
+    latex_label : string
+        The latex label used for the figure for easy referral from the main text
+    sub_captions : list of strings
+        The sub_caption string for each subfigure in the figure
+    sub_labels : list of strings
+        The latex label for each subfigure in the figure
+    n_rows : int
+        The number of rows of subfigures to arrange for multiple graphs
+    as_document : bool
+        Whether to wrap the latex code in a document environment for compiling
+    document_wrapper : formatted text string with variable ``content``
+        This text is formatted with the variable ``content`` to embed the figure
+        in a document environment with a preamble setting up TikZ.
+    figure_wrapper : formatted text string
+        This text is evaluated with variables ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+    subfigure_wrapper : formatted text string
+        This text is evaluated with variables ``size``, ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+        The size is the width of each subfigure as a fraction of ``\textwidth``,
+        computed as ``1/n_rows``.
+
+    Returns
+    =======
+    latex_code : string
+        The text string which draws the desired graph(s) when compiled by LaTeX.
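+
+    Examples
+    ========
+    A small usage sketch (the graph and caption are illustrative only;
+    compiling the output requires a LaTeX installation with the tikz and
+    subcaption packages used in the default preamble):
+
+    >>> G = nx.path_graph(3)
+    >>> doc = nx.to_latex(G)  # a complete LaTeX document
+    >>> fig_only = nx.to_latex(G, caption="A path graph", as_document=False)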
+ + See Also + ======== + write_latex + to_latex_raw + """ + if hasattr(Gbunch, "adj"): + raw = to_latex_raw( + Gbunch, + pos, + tikz_options, + default_node_options, + node_options, + node_label, + default_edge_options, + edge_options, + edge_label, + edge_label_options, + ) + else: # iterator of graphs + sbf = subfigure_wrapper + size = 1 / n_rows + + N = len(Gbunch) + if isinstance(pos, str | dict): + pos = [pos] * N + if sub_captions is None: + sub_captions = [""] * N + if sub_labels is None: + sub_labels = [""] * N + if not (len(Gbunch) == len(pos) == len(sub_captions) == len(sub_labels)): + raise nx.NetworkXError( + "length of Gbunch, sub_captions and sub_figures must agree" + ) + + raw = "" + for G, pos, subcap, sublbl in zip(Gbunch, pos, sub_captions, sub_labels): + subraw = to_latex_raw( + G, + pos, + tikz_options, + default_node_options, + node_options, + node_label, + default_edge_options, + edge_options, + edge_label, + edge_label_options, + ) + cap = f" \\caption{{{subcap}}}" if subcap else "" + lbl = f"\\label{{{sublbl}}}" if sublbl else "" + raw += sbf.format(size=size, content=subraw, caption=cap, label=lbl) + raw += "\n" + + # put raw latex code into a figure environment and optionally into a document + raw = raw[:-1] + cap = f"\n \\caption{{{caption}}}" if caption else "" + lbl = f"\\label{{{latex_label}}}" if latex_label else "" + fig = figure_wrapper.format(content=raw, caption=cap, label=lbl) + if as_document: + return document_wrapper.format(content=fig) + return fig + + +@nx.utils.open_file(1, mode="w") +def write_latex(Gbunch, path, **options): + """Write the latex code to draw the graph(s) onto `path`. + + This convenience function creates the latex drawing code as a string + and writes that to a file ready to be compiled when `as_document` is True + or ready to be ``import`` ed or ``include`` ed into your main LaTeX document. + + The `path` argument can be a string filename or a file handle to write to. + + Parameters + ---------- + Gbunch : NetworkX graph or iterable of NetworkX graphs + If Gbunch is a graph, it is drawn in a figure environment. + If Gbunch is an iterable of graphs, each is drawn in a subfigure + environment within a single figure environment. + path : filename + Filename or file handle to write to + options : dict + By default, TikZ is used with options: (others are ignored):: + + pos : string or dict or list + The name of the node attribute on `G` that holds the position of each node. + Positions can be sequences of length 2 with numbers for (x,y) coordinates. + They can also be strings to denote positions in TikZ style, such as (x, y) + or (angle:radius). + If a dict, it should be keyed by node to a position. + If an empty dict, a circular layout is computed by TikZ. + If you are drawing many graphs in subfigures, use a list of position dicts. + tikz_options : string + The tikzpicture options description defining the options for the picture. + Often large scale options like `[scale=2]`. + default_node_options : string + The draw options for a path of nodes. Individual node options override these. + node_options : string or dict + The name of the node attribute on `G` that holds the options for each node. + Or a dict keyed by node to a string holding the options for that node. + node_label : string or dict + The name of the node attribute on `G` that holds the node label (text) + displayed for each node. If the attribute is "" or not present, the node + itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed. 
+
+            Or a dict keyed by node to a string holding the label for that node.
+        default_edge_options : string
+            The options for the scope drawing all edges. The default is "[-]" for
+            undirected graphs and "[->]" for directed graphs.
+        edge_options : string or dict
+            The name of the edge attribute on `G` that holds the options for each edge.
+            If the edge is a self-loop and ``"loop" not in edge_options`` the option
+            "loop," is added to the options for the self-loop edge. Hence you can
+            use "[loop above]" explicitly, but the default is "[loop]".
+            Or a dict keyed by edge to a string holding the options for that edge.
+        edge_label : string or dict
+            The name of the edge attribute on `G` that holds the edge label (text)
+            displayed for each edge. If the attribute is "" or not present, no edge
+            label is drawn.
+            Or a dict keyed by edge to a string holding the label for that edge.
+        edge_label_options : string or dict
+            The name of the edge attribute on `G` that holds the label options for
+            each edge. For example, "[sloped,above,blue]". The default is no options.
+            Or a dict keyed by edge to a string holding the label options for that edge.
+        caption : string
+            The caption string for the figure environment
+        latex_label : string
+            The latex label used for the figure for easy referral from the main text
+        sub_captions : list of strings
+            The sub_caption string for each subfigure in the figure
+        sub_labels : list of strings
+            The latex label for each subfigure in the figure
+        n_rows : int
+            The number of rows of subfigures to arrange for multiple graphs
+        as_document : bool
+            Whether to wrap the latex code in a document environment for compiling
+        document_wrapper : formatted text string with variable ``content``
+            This text is formatted with the variable ``content`` to embed the figure
+            in a document environment with a preamble setting up the TikZ syntax.
+        figure_wrapper : formatted text string
+            This text is evaluated with variables ``content``, ``caption`` and ``label``.
+            It wraps the content and if a caption is provided, adds the latex code for
+            that caption, and if a label is provided, adds the latex code for a label.
+        subfigure_wrapper : formatted text string
+            This text is evaluated with variables ``size``, ``content``, ``caption`` and ``label``.
+            It wraps the content and if a caption is provided, adds the latex code for
+            that caption, and if a label is provided, adds the latex code for a label.
+            The size is the width of each subfigure as a fraction of ``\textwidth``,
+            computed as ``1/n_rows``.
+
+    See Also
+    ========
+    to_latex
+    """
+    path.write(to_latex(Gbunch, **options))
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pydot.py b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pydot.py
new file mode 100644
index 0000000000000000000000000000000000000000..92c5f333e1ce28127c4b5545843fba5292a4b64b
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pydot.py
@@ -0,0 +1,411 @@
+"""
+*****
+Pydot
+*****
+
+Import and export NetworkX graphs in Graphviz dot format using pydot.
+
+Either this module or nx_agraph can be used to interface with graphviz.
+ +Examples +-------- +>>> G = nx.complete_graph(5) +>>> PG = nx.nx_pydot.to_pydot(G) +>>> H = nx.nx_pydot.from_pydot(PG) + +See Also +-------- + - pydot: https://github.com/erocarrera/pydot + - Graphviz: https://www.graphviz.org + - DOT Language: http://www.graphviz.org/doc/info/lang.html +""" +from locale import getpreferredencoding + +import networkx as nx +from networkx.utils import open_file + +__all__ = [ + "write_dot", + "read_dot", + "graphviz_layout", + "pydot_layout", + "to_pydot", + "from_pydot", +] + + +@open_file(1, mode="w") +def write_dot(G, path): + """Write NetworkX graph G to Graphviz dot format on path. + + Path can be a string or a file handle. + """ + P = to_pydot(G) + path.write(P.to_string()) + return + + +@open_file(0, mode="r") +@nx._dispatchable(name="pydot_read_dot", graphs=None, returns_graph=True) +def read_dot(path): + """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the + dot file with the passed path. + + If this file contains multiple graphs, only the first such graph is + returned. All graphs _except_ the first are silently ignored. + + Parameters + ---------- + path : str or file + Filename or file handle. + + Returns + ------- + G : MultiGraph or MultiDiGraph + A :class:`MultiGraph` or :class:`MultiDiGraph`. + + Notes + ----- + Use `G = nx.Graph(nx.nx_pydot.read_dot(path))` to return a :class:`Graph` instead of a + :class:`MultiGraph`. + """ + import pydot + + data = path.read() + + # List of one or more "pydot.Dot" instances deserialized from this file. + P_list = pydot.graph_from_dot_data(data) + + # Convert only the first such instance into a NetworkX graph. + return from_pydot(P_list[0]) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pydot(P): + """Returns a NetworkX graph from a Pydot graph. + + Parameters + ---------- + P : Pydot graph + A graph created with Pydot + + Returns + ------- + G : NetworkX multigraph + A MultiGraph or MultiDiGraph. 
+ + Examples + -------- + >>> K5 = nx.complete_graph(5) + >>> A = nx.nx_pydot.to_pydot(K5) + >>> G = nx.nx_pydot.from_pydot(A) # return MultiGraph + + # make a Graph instead of MultiGraph + >>> G = nx.Graph(nx.nx_pydot.from_pydot(A)) + + """ + + if P.get_strict(None): # pydot bug: get_strict() shouldn't take argument + multiedges = False + else: + multiedges = True + + if P.get_type() == "graph": # undirected + if multiedges: + N = nx.MultiGraph() + else: + N = nx.Graph() + else: + if multiedges: + N = nx.MultiDiGraph() + else: + N = nx.DiGraph() + + # assign defaults + name = P.get_name().strip('"') + if name != "": + N.name = name + + # add nodes, attributes to N.node_attr + for p in P.get_node_list(): + n = p.get_name().strip('"') + if n in ("node", "graph", "edge"): + continue + N.add_node(n, **p.get_attributes()) + + # add edges + for e in P.get_edge_list(): + u = e.get_source() + v = e.get_destination() + attr = e.get_attributes() + s = [] + d = [] + + if isinstance(u, str): + s.append(u.strip('"')) + else: + for unodes in u["nodes"]: + s.append(unodes.strip('"')) + + if isinstance(v, str): + d.append(v.strip('"')) + else: + for vnodes in v["nodes"]: + d.append(vnodes.strip('"')) + + for source_node in s: + for destination_node in d: + N.add_edge(source_node, destination_node, **attr) + + # add default attributes for graph, nodes, edges + pattr = P.get_attributes() + if pattr: + N.graph["graph"] = pattr + try: + N.graph["node"] = P.get_node_defaults()[0] + except (IndexError, TypeError): + pass # N.graph['node']={} + try: + N.graph["edge"] = P.get_edge_defaults()[0] + except (IndexError, TypeError): + pass # N.graph['edge']={} + return N + + +def _check_colon_quotes(s): + # A quick helper function to check if a string has a colon in it + # and if it is quoted properly with double quotes. + # refer https://github.com/pydot/pydot/issues/258 + return ":" in s and (s[0] != '"' or s[-1] != '"') + + +def to_pydot(N): + """Returns a pydot graph from a NetworkX graph N. + + Parameters + ---------- + N : NetworkX graph + A graph created with NetworkX + + Examples + -------- + >>> K5 = nx.complete_graph(5) + >>> P = nx.nx_pydot.to_pydot(K5) + + Notes + ----- + + """ + import pydot + + # set Graphviz graph type + if N.is_directed(): + graph_type = "digraph" + else: + graph_type = "graph" + strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() + + name = N.name + graph_defaults = N.graph.get("graph", {}) + if name == "": + P = pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults) + else: + P = pydot.Dot( + f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults + ) + try: + P.set_node_defaults(**N.graph["node"]) + except KeyError: + pass + try: + P.set_edge_defaults(**N.graph["edge"]) + except KeyError: + pass + + for n, nodedata in N.nodes(data=True): + str_nodedata = {str(k): str(v) for k, v in nodedata.items()} + # Explicitly catch nodes with ":" in node names or nodedata. 
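+        # An unquoted ":" would otherwise be read by Graphviz as the
+        # node:port separator; see https://github.com/pydot/pydot/issues/258.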
+ n = str(n) + raise_error = _check_colon_quotes(n) or ( + any( + (_check_colon_quotes(k) or _check_colon_quotes(v)) + for k, v in str_nodedata.items() + ) + ) + if raise_error: + raise ValueError( + f'Node names and attributes should not contain ":" unless they are quoted with "".\ + For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\ + Please refer https://github.com/pydot/pydot/issues/258' + ) + p = pydot.Node(n, **str_nodedata) + P.add_node(p) + + if N.is_multigraph(): + for u, v, key, edgedata in N.edges(data=True, keys=True): + str_edgedata = {str(k): str(v) for k, v in edgedata.items() if k != "key"} + u, v = str(u), str(v) + raise_error = ( + _check_colon_quotes(u) + or _check_colon_quotes(v) + or ( + any( + (_check_colon_quotes(k) or _check_colon_quotes(val)) + for k, val in str_edgedata.items() + ) + ) + ) + if raise_error: + raise ValueError( + f'Node names and attributes should not contain ":" unless they are quoted with "".\ + For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\ + Please refer https://github.com/pydot/pydot/issues/258' + ) + edge = pydot.Edge(u, v, key=str(key), **str_edgedata) + P.add_edge(edge) + + else: + for u, v, edgedata in N.edges(data=True): + str_edgedata = {str(k): str(v) for k, v in edgedata.items()} + u, v = str(u), str(v) + raise_error = ( + _check_colon_quotes(u) + or _check_colon_quotes(v) + or ( + any( + (_check_colon_quotes(k) or _check_colon_quotes(val)) + for k, val in str_edgedata.items() + ) + ) + ) + if raise_error: + raise ValueError( + f'Node names and attributes should not contain ":" unless they are quoted with "".\ + For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\ + Please refer https://github.com/pydot/pydot/issues/258' + ) + edge = pydot.Edge(u, v, **str_edgedata) + P.add_edge(edge) + return P + + +def graphviz_layout(G, prog="neato", root=None): + """Create node positions using Pydot and Graphviz. + + Returns a dictionary of positions keyed by node. + + Parameters + ---------- + G : NetworkX Graph + The graph for which the layout is computed. + prog : string (default: 'neato') + The name of the GraphViz program to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. + + Returns + ------- + Dictionary of (x, y) positions keyed by node. + + Examples + -------- + >>> G = nx.complete_graph(4) + >>> pos = nx.nx_pydot.graphviz_layout(G) + >>> pos = nx.nx_pydot.graphviz_layout(G, prog="dot") + + Notes + ----- + This is a wrapper for pydot_layout. + """ + return pydot_layout(G=G, prog=prog, root=root) + + +def pydot_layout(G, prog="neato", root=None): + """Create node positions using :mod:`pydot` and Graphviz. + + Parameters + ---------- + G : Graph + NetworkX graph to be laid out. + prog : string (default: 'neato') + Name of the GraphViz command to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. + + Returns + ------- + dict + Dictionary of positions keyed by node. 
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(4)
+    >>> pos = nx.nx_pydot.pydot_layout(G)
+    >>> pos = nx.nx_pydot.pydot_layout(G, prog="dot")
+
+    Notes
+    -----
+    If you use complex node objects, they may have the same string
+    representation and GraphViz could treat them as the same node.
+    The layout may assign both nodes a single location. See Issue #1568.
+    If this occurs in your case, consider relabeling the nodes just
+    for the layout computation using something similar to::
+
+        H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
+        H_layout = nx.nx_pydot.pydot_layout(H, prog="dot")
+        G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}
+
+    """
+    import pydot
+
+    P = to_pydot(G)
+    if root is not None:
+        P.set("root", str(root))
+
+    # List of low-level bytes comprising a string in the dot language converted
+    # from the passed graph with the passed external GraphViz command.
+    D_bytes = P.create_dot(prog=prog)
+
+    # Unique string decoded from these bytes with the preferred locale encoding
+    D = str(D_bytes, encoding=getpreferredencoding())
+
+    if D == "":  # no data returned
+        print(f"Graphviz layout with {prog} failed")
+        print()
+        print("To debug what happened try:")
+        print("P = nx.nx_pydot.to_pydot(G)")
+        print('P.write_dot("file.dot")')
+        print(f"And then run {prog} on file.dot")
+        return
+
+    # List of one or more "pydot.Dot" instances deserialized from this string.
+    Q_list = pydot.graph_from_dot_data(D)
+    assert len(Q_list) == 1
+
+    # The first and only such instance, as guaranteed by the above assertion.
+    Q = Q_list[0]
+
+    node_pos = {}
+    for n in G.nodes():
+        str_n = str(n)
+        # Explicitly catch nodes with ":" in node names or nodedata.
+        if _check_colon_quotes(str_n):
+            raise ValueError(
+                f'Node names and node attributes should not contain ":" unless they are quoted with "".\
+                For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
+                Please refer https://github.com/pydot/pydot/issues/258'
+            )
+        pydot_node = pydot.Node(str_n).get_name()
+        node = Q.get_node(pydot_node)
+
+        if isinstance(node, list):
+            node = node[0]
+        pos = node.get_pos()
+        if pos is not None:
+            pos = pos[1:-1]  # strip leading and trailing double quotes
+            xx, yy = pos.split(",")
+            node_pos[n] = (float(xx), float(yy))
+    return node_pos
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pylab.py b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pylab.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c25f63bf4764da72236beb9b6a028ad48b82230
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pylab.py
@@ -0,0 +1,1871 @@
+"""
+**********
+Matplotlib
+**********
+
+Draw networks with matplotlib.
+ +Examples +-------- +>>> G = nx.complete_graph(5) +>>> nx.draw(G) + +See Also +-------- + - :doc:`matplotlib ` + - :func:`matplotlib.pyplot.scatter` + - :obj:`matplotlib.patches.FancyArrowPatch` +""" +import collections +import itertools +from numbers import Number + +import networkx as nx +from networkx.drawing.layout import ( + circular_layout, + kamada_kawai_layout, + planar_layout, + random_layout, + shell_layout, + spectral_layout, + spring_layout, +) + +__all__ = [ + "draw", + "draw_networkx", + "draw_networkx_nodes", + "draw_networkx_edges", + "draw_networkx_labels", + "draw_networkx_edge_labels", + "draw_circular", + "draw_kamada_kawai", + "draw_random", + "draw_spectral", + "draw_spring", + "draw_planar", + "draw_shell", +] + + +def draw(G, pos=None, ax=None, **kwds): + """Draw the graph G with Matplotlib. + + Draw the graph as a simple representation with no node + labels or edge labels and using the full Matplotlib figure area + and no axis labels by default. See draw_networkx() for more + full-featured drawing that allows title, axis labels etc. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary, optional + A dictionary with nodes as keys and positions as values. + If not specified a spring layout positioning will be computed. + See :py:mod:`networkx.drawing.layout` for functions that + compute node positions. + + ax : Matplotlib Axes object, optional + Draw the graph in specified Matplotlib axes. + + kwds : optional keywords + See networkx.draw_networkx() for a description of optional keywords. + + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> nx.draw(G) + >>> nx.draw(G, pos=nx.spring_layout(G)) # use spring layout + + See Also + -------- + draw_networkx + draw_networkx_nodes + draw_networkx_edges + draw_networkx_labels + draw_networkx_edge_labels + + Notes + ----- + This function has the same name as pylab.draw and pyplot.draw + so beware when using `from networkx import *` + + since you might overwrite the pylab.draw function. + + With pyplot use + + >>> import matplotlib.pyplot as plt + >>> G = nx.dodecahedral_graph() + >>> nx.draw(G) # networkx draw() + >>> plt.draw() # pyplot draw() + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + """ + import matplotlib.pyplot as plt + + if ax is None: + cf = plt.gcf() + else: + cf = ax.get_figure() + cf.set_facecolor("w") + if ax is None: + if cf.axes: + ax = cf.gca() + else: + ax = cf.add_axes((0, 0, 1, 1)) + + if "with_labels" not in kwds: + kwds["with_labels"] = "labels" in kwds + + draw_networkx(G, pos=pos, ax=ax, **kwds) + ax.set_axis_off() + plt.draw_if_interactive() + return + + +def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds): + r"""Draw the graph G using Matplotlib. + + Draw the graph with Matplotlib with options for node positions, + labeling, titles, and many other drawing features. + See draw() for simple drawing without labels or axes. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary, optional + A dictionary with nodes as keys and positions as values. + If not specified a spring layout positioning will be computed. + See :py:mod:`networkx.drawing.layout` for functions that + compute node positions. + + arrows : bool or None, optional (default=None) + If `None`, directed graphs draw arrowheads with + `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw edges + via `~matplotlib.collections.LineCollection` for speed. 
+
+        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
+        If `False`, draw edges using LineCollection (linear and fast).
+        For directed graphs, if True draw arrowheads.
+        Note: Arrows will be the same color as edges.
+
+    arrowstyle : str (default='-\|>' for directed graphs)
+        For directed graphs, choose the style of the arrowheads.
+        For undirected graphs default to '-'
+
+        See `matplotlib.patches.ArrowStyle` for more options.
+
+    arrowsize : int or list (default=10)
+        For directed graphs, choose the size of the arrow head's length and
+        width. A list of values can be passed in to assign a different size for arrow head's length and width.
+        See `matplotlib.patches.FancyArrowPatch` for attribute `mutation_scale`
+        for more info.
+
+    with_labels : bool (default=True)
+        Set to True to draw labels on the nodes.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    nodelist : list (default=list(G))
+        Draw only specified nodes
+
+    edgelist : list (default=list(G.edges()))
+        Draw only specified edges
+
+    node_size : scalar or array (default=300)
+        Size of nodes. If an array is specified it must be the
+        same length as nodelist.
+
+    node_color : color or array of colors (default='#1f78b4')
+        Node color. Can be a single color or a sequence of colors with the same
+        length as nodelist. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1. If numeric values are specified they will be
+        mapped to colors using the cmap and vmin,vmax parameters. See
+        matplotlib.scatter for more details.
+
+    node_shape : string (default='o')
+        The shape of the node. Specification is as matplotlib.scatter
+        marker, one of 'so^>v<dph8'.
+
+    alpha : float or None (default=None)
+        The node and edge transparency
+
+    cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of nodes
+
+    vmin,vmax : float, optional
+        Minimum and maximum for node colormap scaling
+
+    linewidths : scalar or sequence (default=1.0)
+        Line width of symbol border
+
+    width : float or array of floats (default=1.0)
+        Line width of edges
+
+    edge_color : color or array of colors (default='k')
+        Edge color. Can be a single color or a sequence of colors with the same
+        length as edgelist. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1. If numeric values are specified they will be
+        mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.
+
+    edge_cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of edges
+
+    edge_vmin,edge_vmax : floats, optional
+        Minimum and maximum for edge colormap scaling
+
+    style : string (default=solid line)
+        Edge line style e.g.: '-', '--', '-.', ':'
+        or words like 'solid' or 'dashed'.
+
+    labels : dictionary (default=None)
+        Node labels in a dictionary of text labels keyed by node
+
+    font_size : int (default=12)
+        Font size for text labels
+
+    font_color : color (default='k' black)
+        Font color string. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1.
+
+    font_weight : string (default='normal')
+        Font weight
+
+    font_family : string (default='sans-serif')
+        Font family
+
+    label : string, optional
+        Label for graph legend
+
+    kwds : optional keywords
+        See networkx.draw_networkx_nodes(), networkx.draw_networkx_edges(), and
+        networkx.draw_networkx_labels() for a description of optional keywords;
+        unrecognized keywords raise a ``ValueError``.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> nx.draw(G)
+    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout
+
+    >>> import matplotlib.pyplot as plt
+    >>> limits = plt.axis("off")  # turn off axis
+
+    Also see the NetworkX drawing examples at
+    https://networkx.org/documentation/latest/auto_examples/index.html
+
+    See Also
+    --------
+    draw
+    draw_networkx_nodes
+    draw_networkx_edges
+    draw_networkx_labels
+    draw_networkx_edge_labels
+    """
+    from inspect import signature
+
+    import matplotlib.pyplot as plt
+
+    # Get all valid keywords by inspecting the signatures of draw_networkx_nodes,
+    # draw_networkx_edges, draw_networkx_labels
+
+    valid_node_kwds = signature(draw_networkx_nodes).parameters.keys()
+    valid_edge_kwds = signature(draw_networkx_edges).parameters.keys()
+    valid_label_kwds = signature(draw_networkx_labels).parameters.keys()
+
+    # Create a set with all valid keywords across the three functions and
+    # remove the arguments of this function (draw_networkx)
+    valid_kwds = (valid_node_kwds | valid_edge_kwds | valid_label_kwds) - {
+        "G",
+        "pos",
+        "arrows",
+        "with_labels",
+    }
+
+    if any(k not in valid_kwds for k in kwds):
+        invalid_args = ", ".join([k for k in kwds if k not in valid_kwds])
+        raise ValueError(f"Received invalid argument(s): {invalid_args}")
+
+    node_kwds = {k: v for k, v in kwds.items() if k in valid_node_kwds}
+    edge_kwds = {k: v for k, v in kwds.items() if k in valid_edge_kwds}
+    label_kwds = {k: v for k, v in kwds.items() if k in valid_label_kwds}
+
+    if pos is None:
+        pos = nx.drawing.spring_layout(G)  # default to spring layout
+
+    draw_networkx_nodes(G, pos, **node_kwds)
+    draw_networkx_edges(G, pos, arrows=arrows, **edge_kwds)
+    if with_labels:
+        draw_networkx_labels(G, pos, **label_kwds)
+    plt.draw_if_interactive()
+
+
+def draw_networkx_nodes(
+    G,
+    pos,
+    nodelist=None,
+    
node_size=300,
+    node_color="#1f78b4",
+    node_shape="o",
+    alpha=None,
+    cmap=None,
+    vmin=None,
+    vmax=None,
+    ax=None,
+    linewidths=None,
+    edgecolors=None,
+    label=None,
+    margins=None,
+    hide_ticks=True,
+):
+    """Draw the nodes of the graph G.
+
+    This draws only the nodes of the graph G.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary
+        A dictionary with nodes as keys and positions as values.
+        Positions should be sequences of length 2.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    nodelist : list (default list(G))
+        Draw only specified nodes
+
+    node_size : scalar or array (default=300)
+        Size of nodes. If an array it must be the same length as nodelist.
+
+    node_color : color or array of colors (default='#1f78b4')
+        Node color. Can be a single color or a sequence of colors with the same
+        length as nodelist. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1. If numeric values are specified they will be
+        mapped to colors using the cmap and vmin,vmax parameters. See
+        matplotlib.scatter for more details.
+
+    node_shape : string (default='o')
+        The shape of the node. Specification is as matplotlib.scatter
+        marker, one of 'so^>v<dph8'.
+
+    alpha : float or array of floats (default=None)
+        The node transparency. This can be a single alpha value, in which
+        case it is applied to all the nodes, or a sequence of values applied
+        to the node colors in order.
+
+    cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of nodes
+
+    vmin,vmax : floats, optional
+        Minimum and maximum for node colormap scaling
+
+    linewidths : scalar or sequence (default=1.0)
+        Line width of symbol border
+
+    edgecolors : color or sequence of colors (default = node_color)
+        Colors of node borders
+
+    label : None or string
+        Label for legend
+
+    margins : float or 2-tuple, optional
+        Sets the padding for axis autoscaling. Increase margin to prevent
+        clipping for nodes that are near the edges of an image. Values should
+        be in the range ``[0, 1]``. See :meth:`matplotlib.axes.Axes.margins`
+        for details. The default is `None`, which uses the Matplotlib default.
+
+    hide_ticks : bool, optional
+        Hide ticks of axes. When `True` (the default), ticks and ticklabels
+        are removed from the axes. To set ticks and tick labels to the pyplot default,
+        use ``hide_ticks=False``.
+
+    Returns
+    -------
+    matplotlib.collections.PathCollection
+        `PathCollection` of the nodes.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> nodes = nx.draw_networkx_nodes(G, pos=nx.spring_layout(G))
+
+    Also see the NetworkX drawing examples at
+    https://networkx.org/documentation/latest/auto_examples/index.html
+
+    See Also
+    --------
+    draw
+    draw_networkx
+    draw_networkx_edges
+    draw_networkx_labels
+    draw_networkx_edge_labels
+    """
+    from collections.abc import Iterable
+
+    import matplotlib as mpl
+    import matplotlib.collections  # call as mpl.collections
+    import matplotlib.pyplot as plt
+    import numpy as np
+
+    if ax is None:
+        ax = plt.gca()
+
+    if nodelist is None:
+        nodelist = list(G)
+
+    if len(nodelist) == 0:  # empty nodelist, no drawing
+        return mpl.collections.PathCollection(None)
+
+    try:
+        xy = np.asarray([pos[v] for v in nodelist])
+    except KeyError as err:
+        raise nx.NetworkXError(f"Node {err} has no position.") from err
+
+    if isinstance(alpha, Iterable):
+        node_color = apply_alpha(node_color, alpha, nodelist, cmap, vmin, vmax)
+        alpha = None
+
+    node_collection = ax.scatter(
+        xy[:, 0],
+        xy[:, 1],
+        s=node_size,
+        c=node_color,
+        marker=node_shape,
+        cmap=cmap,
+        vmin=vmin,
+        vmax=vmax,
+        alpha=alpha,
+        linewidths=linewidths,
+        edgecolors=edgecolors,
+        label=label,
+    )
+    if hide_ticks:
+        ax.tick_params(
+            axis="both",
+            which="both",
+            bottom=False,
+            left=False,
+            labelbottom=False,
+            labelleft=False,
+        )
+
+    if margins is not None:
+        if isinstance(margins, Iterable):
+            ax.margins(*margins)
+        else:
+            ax.margins(margins)
+
+    node_collection.set_zorder(2)
+    return node_collection
+
+
+class FancyArrowFactory:
+    """Draw arrows with `matplotlib.patches.FancyArrowPatch`"""
+
+    class ConnectionStyleFactory:
+        def __init__(self, connectionstyles, selfloop_height, ax=None):
+            import matplotlib as mpl
+            import matplotlib.path  # call as mpl.path
+            import numpy as np
+
+            self.ax = ax
+            self.mpl = mpl
+            self.np = np
+            self.base_connection_styles = [
+                mpl.patches.ConnectionStyle(cs) for cs in connectionstyles
+            ]
+            self.n = len(self.base_connection_styles)
+            self.selfloop_height = selfloop_height
+
+        def curved(self, edge_index):
+            return self.base_connection_styles[edge_index % self.n]
+
+        def self_loop(self, edge_index):
+            def self_loop_connection(posA, posB, *args, **kwargs):
+                if not self.np.all(posA == posB):
+                    raise nx.NetworkXError(
+                        "`self_loop` 
connection style method" + "is only to be used for self-loops" + ) + # this is called with _screen space_ values + # so convert back to data space + data_loc = self.ax.transData.inverted().transform(posA) + v_shift = 0.1 * self.selfloop_height + h_shift = v_shift * 0.5 + # put the top of the loop first so arrow is not hidden by node + path = self.np.asarray( + [ + # 1 + [0, v_shift], + # 4 4 4 + [h_shift, v_shift], + [h_shift, 0], + [0, 0], + # 4 4 4 + [-h_shift, 0], + [-h_shift, v_shift], + [0, v_shift], + ] + ) + # Rotate self loop 90 deg. if more than 1 + # This will allow for maximum of 4 visible self loops + if edge_index % 4: + x, y = path.T + for _ in range(edge_index % 4): + x, y = y, -x + path = self.np.array([x, y]).T + return self.mpl.path.Path( + self.ax.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4] + ) + + return self_loop_connection + + def __init__( + self, + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle="arc3", + node_shape="o", + arrowstyle="-", + arrowsize=10, + edge_color="k", + alpha=None, + linewidth=1.0, + style="solid", + min_source_margin=0, + min_target_margin=0, + ax=None, + ): + import matplotlib as mpl + import matplotlib.patches # call as mpl.patches + import matplotlib.pyplot as plt + import numpy as np + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "ConnectionStyleFactory arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + self.ax = ax + self.mpl = mpl + self.np = np + self.edge_pos = edge_pos + self.edgelist = edgelist + self.nodelist = nodelist + self.node_shape = node_shape + self.min_source_margin = min_source_margin + self.min_target_margin = min_target_margin + self.edge_indices = edge_indices + self.node_size = node_size + self.connectionstyle_factory = self.ConnectionStyleFactory( + connectionstyle, selfloop_height, ax + ) + self.arrowstyle = arrowstyle + self.arrowsize = arrowsize + self.arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha) + self.linewidth = linewidth + self.style = style + if isinstance(arrowsize, list) and len(arrowsize) != len(edge_pos): + raise ValueError("arrowsize should have the same length as edgelist") + + def __call__(self, i): + (x1, y1), (x2, y2) = self.edge_pos[i] + shrink_source = 0 # space from source to tail + shrink_target = 0 # space from head to target + if self.np.iterable(self.node_size): # many node sizes + source, target = self.edgelist[i][:2] + source_node_size = self.node_size[self.nodelist.index(source)] + target_node_size = self.node_size[self.nodelist.index(target)] + shrink_source = self.to_marker_edge(source_node_size, self.node_shape) + shrink_target = self.to_marker_edge(target_node_size, self.node_shape) + else: + shrink_source = self.to_marker_edge(self.node_size, self.node_shape) + shrink_target = shrink_source + shrink_source = max(shrink_source, self.min_source_margin) + shrink_target = max(shrink_target, self.min_target_margin) + + # scale factor of arrow head + if isinstance(self.arrowsize, list): + mutation_scale = self.arrowsize[i] + else: + mutation_scale = self.arrowsize + + if len(self.arrow_colors) > i: + arrow_color = self.arrow_colors[i] + elif len(self.arrow_colors) == 1: + arrow_color = self.arrow_colors[0] + else: # Cycle through colors + arrow_color = self.arrow_colors[i % len(self.arrow_colors)] + + if self.np.iterable(self.linewidth): + if 
len(self.linewidth) > i:
+                linewidth = self.linewidth[i]
+            else:
+                linewidth = self.linewidth[i % len(self.linewidth)]
+        else:
+            linewidth = self.linewidth
+
+        if (
+            self.np.iterable(self.style)
+            and not isinstance(self.style, str)
+            and not isinstance(self.style, tuple)
+        ):
+            if len(self.style) > i:
+                linestyle = self.style[i]
+            else:  # Cycle through styles
+                linestyle = self.style[i % len(self.style)]
+        else:
+            linestyle = self.style
+
+        if x1 == x2 and y1 == y2:
+            connectionstyle = self.connectionstyle_factory.self_loop(
+                self.edge_indices[i]
+            )
+        else:
+            connectionstyle = self.connectionstyle_factory.curved(self.edge_indices[i])
+        return self.mpl.patches.FancyArrowPatch(
+            (x1, y1),
+            (x2, y2),
+            arrowstyle=self.arrowstyle,
+            shrinkA=shrink_source,
+            shrinkB=shrink_target,
+            mutation_scale=mutation_scale,
+            color=arrow_color,
+            linewidth=linewidth,
+            connectionstyle=connectionstyle,
+            linestyle=linestyle,
+            zorder=1,  # arrows go behind nodes
+        )
+
+    def to_marker_edge(self, marker_size, marker):
+        # approximate the distance from the node center to its boundary;
+        # "large" markers need extra space
+        if marker in "s^>v<d":
+            return self.np.sqrt(2 * marker_size) / 2
+        else:
+            return self.np.sqrt(marker_size) / 2
+
+
+def draw_networkx_edges(
+    G,
+    pos,
+    edgelist=None,
+    width=1.0,
+    edge_color="k",
+    style="solid",
+    alpha=None,
+    arrowstyle=None,
+    arrowsize=10,
+    edge_cmap=None,
+    edge_vmin=None,
+    edge_vmax=None,
+    ax=None,
+    arrows=None,
+    label=None,
+    node_size=300,
+    nodelist=None,
+    node_shape="o",
+    connectionstyle="arc3",
+    min_source_margin=0,
+    min_target_margin=0,
+    hide_ticks=True,
+):
+    """Draw the edges of the graph G.
+
+    This draws only the edges of the graph G.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary
+        A dictionary with nodes as keys and positions as values.
+        Positions should be sequences of length 2.
+
+    edgelist : collection of edge tuples (default=G.edges())
+        Draw only specified edges
+
+    width : float or array of floats (default=1.0)
+        Line width of edges
+
+    edge_color : color or array of colors (default='k')
+        Edge color. Can be a single color or a sequence of colors with the
+        same length as edgelist.
+
+    style : string or array of strings (default='solid')
+        Edge line style e.g.: '-', '--', '-.', ':'
+        or words like 'solid' or 'dashed'.
+
+    alpha : float or array of floats (default=None)
+        The edge transparency. This can be a single alpha value, in which
+        case it is applied to all specified edges, or a sequence of values
+        applied to the edge colors in order.
+
+    edge_cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of edges
+
+    edge_vmin,edge_vmax : floats, optional
+        Minimum and maximum for edge colormap scaling
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    arrows : bool or None, optional (default=None)
+        If `None`, directed graphs draw arrowheads with FancyArrowPatches
+        and undirected graphs draw edges via LineCollection for speed.
+        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
+        If `False`, draw edges using LineCollection (linear and fast).
+
+        Note: Arrowheads will be the same color as edges.
+
+    arrowstyle : str (default='-\|>' for directed graphs)
+        For directed graphs and ``arrows==True`` defaults to '-\|>',
+        For undirected graphs default to '-'.
+
+        See `matplotlib.patches.ArrowStyle` for more options.
+
+    arrowsize : int (default=10)
+        For directed graphs, choose the size of the arrow head's length and
+        width. See `matplotlib.patches.FancyArrowPatch` for attribute
+        `mutation_scale` for more info.
+
+    connectionstyle : string or iterable of strings (default="arc3")
+        Pass the connectionstyle parameter to create curved arc of rounding
+        radius rad. For example, connectionstyle='arc3,rad=0.2'.
+        See `matplotlib.patches.ConnectionStyle` and
+        `matplotlib.patches.FancyArrowPatch` for more info.
+        If Iterable, index indicates i'th edge key of MultiGraph
+
+    node_size : scalar or array (default=300)
+        Size of nodes. Though the nodes are not drawn with this function, the
+        node size is used in determining edge positioning.
+
+    nodelist : list, optional (default=G.nodes())
+        This provides the node order for the `node_size` array (if it is an array).
+
+    node_shape : string (default='o')
+        The marker used for nodes, used in determining edge positioning.
+        Specification is as a `matplotlib.markers` marker, e.g. one of 'so^>v<dph8'.
+
+    label : None or string
+        Label for legend
+
+    min_source_margin : int (default=0)
+        The minimum margin (gap) at the source of the edge.
+
+    min_target_margin : int (default=0)
+        The minimum margin (gap) at the destination of the edge.
+
+    hide_ticks : bool, optional
+        Hide ticks of axes. When `True` (the default), ticks and ticklabels
+        are removed from the axes. To set ticks and tick labels to the pyplot default,
+        use ``hide_ticks=False``.
+
+    Returns
+    -------
+    matplotlib.collections.LineCollection or a list of matplotlib.patches.FancyArrowPatch
+        If ``arrows=True``, a list of FancyArrowPatches is returned.
+        If ``arrows=False``, a LineCollection is returned.
+        If ``arrows=None`` (the default), a LineCollection is returned for
+        undirected graphs and a list of FancyArrowPatches for directed graphs.
+
+    Notes
+    -----
+    For directed graphs, arrows are drawn at the head end. Arrows can be
+    turned off with keyword arrows=False. Self-loops are always drawn with
+    FancyArrowPatches regardless of the value of `arrows` or whether `G`
+    is directed.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> edges = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(1, 2), (1, 3), (2, 3)])
+    >>> arcs = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
+    >>> alphas = [0.3, 0.4, 0.5]
+    >>> for i, arc in enumerate(arcs):  # change alpha values of arcs
+    ...     arc.set_alpha(alphas[i])
+
+    The FancyArrowPatches corresponding to self-loops are not always
+    returned, but can always be accessed via the ``patches`` attribute of the
+    `matplotlib.Axes` object.
+ + >>> import matplotlib.pyplot as plt + >>> fig, ax = plt.subplots() + >>> G = nx.Graph([(0, 1), (0, 0)]) # Self-loop at node 0 + >>> edge_collection = nx.draw_networkx_edges(G, pos=nx.circular_layout(G), ax=ax) + >>> self_loop_fap = ax.patches[0] + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_labels + draw_networkx_edge_labels + + """ + import warnings + + import matplotlib as mpl + import matplotlib.collections # call as mpl.collections + import matplotlib.colors # call as mpl.colors + import matplotlib.pyplot as plt + import numpy as np + + # The default behavior is to use LineCollection to draw edges for + # undirected graphs (for performance reasons) and use FancyArrowPatches + # for directed graphs. + # The `arrows` keyword can be used to override the default behavior + if arrows is None: + use_linecollection = not (G.is_directed() or G.is_multigraph()) + else: + if not isinstance(arrows, bool): + raise TypeError("Argument `arrows` must be of type bool or None") + use_linecollection = not arrows + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "draw_networkx_edges arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + + # Some kwargs only apply to FancyArrowPatches. Warn users when they use + # non-default values for these kwargs when LineCollection is being used + # instead of silently ignoring the specified option + if use_linecollection: + msg = ( + "\n\nThe {0} keyword argument is not applicable when drawing edges\n" + "with LineCollection.\n\n" + "To make this warning go away, either specify `arrows=True` to\n" + "force FancyArrowPatches or use the default values.\n" + "Note that using FancyArrowPatches may be slow for large graphs.\n" + ) + if arrowstyle is not None: + warnings.warn(msg.format("arrowstyle"), category=UserWarning, stacklevel=2) + if arrowsize != 10: + warnings.warn(msg.format("arrowsize"), category=UserWarning, stacklevel=2) + if min_source_margin != 0: + warnings.warn( + msg.format("min_source_margin"), category=UserWarning, stacklevel=2 + ) + if min_target_margin != 0: + warnings.warn( + msg.format("min_target_margin"), category=UserWarning, stacklevel=2 + ) + if any(cs != "arc3" for cs in connectionstyle): + warnings.warn( + msg.format("connectionstyle"), category=UserWarning, stacklevel=2 + ) + + # NOTE: Arrowstyle modification must occur after the warnings section + if arrowstyle is None: + arrowstyle = "-|>" if G.is_directed() else "-" + + if ax is None: + ax = plt.gca() + + if edgelist is None: + edgelist = list(G.edges) # (u, v, k) for multigraph (u, v) otherwise + + if len(edgelist): + if G.is_multigraph(): + key_count = collections.defaultdict(lambda: itertools.count(0)) + edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist] + else: + edge_indices = [0] * len(edgelist) + else: # no edges! + return [] + + if nodelist is None: + nodelist = list(G.nodes()) + + # FancyArrowPatch handles color=None different from LineCollection + if edge_color is None: + edge_color = "k" + + # set edge positions + edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist]) + + # Check if edge_color is an array of floats and map to edge_cmap. 
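+    # (numeric values are normalized with edge_vmin/edge_vmax and then
+    # looked up in edge_cmap)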
+ # This is the only case handled differently from matplotlib + if ( + np.iterable(edge_color) + and (len(edge_color) == len(edge_pos)) + and np.all([isinstance(c, Number) for c in edge_color]) + ): + if edge_cmap is not None: + assert isinstance(edge_cmap, mpl.colors.Colormap) + else: + edge_cmap = plt.get_cmap() + if edge_vmin is None: + edge_vmin = min(edge_color) + if edge_vmax is None: + edge_vmax = max(edge_color) + color_normal = mpl.colors.Normalize(vmin=edge_vmin, vmax=edge_vmax) + edge_color = [edge_cmap(color_normal(e)) for e in edge_color] + + # compute initial view + minx = np.amin(np.ravel(edge_pos[:, :, 0])) + maxx = np.amax(np.ravel(edge_pos[:, :, 0])) + miny = np.amin(np.ravel(edge_pos[:, :, 1])) + maxy = np.amax(np.ravel(edge_pos[:, :, 1])) + w = maxx - minx + h = maxy - miny + + # Self-loops are scaled by view extent, except in cases the extent + # is 0, e.g. for a single node. In this case, fall back to scaling + # by the maximum node size + selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max() + fancy_arrow_factory = FancyArrowFactory( + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle, + node_shape, + arrowstyle, + arrowsize, + edge_color, + alpha, + width, + style, + min_source_margin, + min_target_margin, + ax=ax, + ) + + # Draw the edges + if use_linecollection: + edge_collection = mpl.collections.LineCollection( + edge_pos, + colors=edge_color, + linewidths=width, + antialiaseds=(1,), + linestyle=style, + alpha=alpha, + ) + edge_collection.set_cmap(edge_cmap) + edge_collection.set_clim(edge_vmin, edge_vmax) + edge_collection.set_zorder(1) # edges go behind nodes + edge_collection.set_label(label) + ax.add_collection(edge_collection) + edge_viz_obj = edge_collection + + # Make sure selfloop edges are also drawn + # --------------------------------------- + selfloops_to_draw = [loop for loop in nx.selfloop_edges(G) if loop in edgelist] + if selfloops_to_draw: + edgelist_tuple = list(map(tuple, edgelist)) + arrow_collection = [] + for loop in selfloops_to_draw: + i = edgelist_tuple.index(loop) + arrow = fancy_arrow_factory(i) + arrow_collection.append(arrow) + ax.add_patch(arrow) + else: + edge_viz_obj = [] + for i in range(len(edgelist)): + arrow = fancy_arrow_factory(i) + ax.add_patch(arrow) + edge_viz_obj.append(arrow) + + # update view after drawing + padx, pady = 0.05 * w, 0.05 * h + corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady) + ax.update_datalim(corners) + ax.autoscale_view() + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + return edge_viz_obj + + +def draw_networkx_labels( + G, + pos, + labels=None, + font_size=12, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + clip_on=True, + hide_ticks=True, +): + """Draw node labels on the graph G. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary + A dictionary with nodes as keys and positions as values. + Positions should be sequences of length 2. + + labels : dictionary (default={n: n for n in G}) + Node labels in a dictionary of text labels keyed by node. + Node-keys in labels should appear as keys in `pos`. 
+ If needed use: `{n:lab for n,lab in labels.items() if n in pos}` + + font_size : int (default=12) + Font size for text labels + + font_color : color (default='k' black) + Font color string. Color can be string or rgb (or rgba) tuple of + floats from 0-1. + + font_weight : string (default='normal') + Font weight + + font_family : string (default='sans-serif') + Font family + + alpha : float or None (default=None) + The text transparency + + bbox : Matplotlib bbox, (default is Matplotlib's ax.text default) + Specify text box properties (e.g. shape, color etc.) for node labels. + + horizontalalignment : string (default='center') + Horizontal alignment {'center', 'right', 'left'} + + verticalalignment : string (default='center') + Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'} + + ax : Matplotlib Axes object, optional + Draw the graph in the specified Matplotlib axes. + + clip_on : bool (default=True) + Turn on clipping of node labels at axis boundaries + + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + + Returns + ------- + dict + `dict` of labels keyed on the nodes + + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> labels = nx.draw_networkx_labels(G, pos=nx.spring_layout(G)) + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_edges + draw_networkx_edge_labels + """ + import matplotlib.pyplot as plt + + if ax is None: + ax = plt.gca() + + if labels is None: + labels = {n: n for n in G.nodes()} + + text_items = {} # there is no text collection so we'll fake one + for n, label in labels.items(): + (x, y) = pos[n] + if not isinstance(label, str): + label = str(label) # this makes "1" and 1 labeled the same + t = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + transform=ax.transData, + bbox=bbox, + clip_on=clip_on, + ) + text_items[n] = t + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + return text_items + + +def draw_networkx_edge_labels( + G, + pos, + edge_labels=None, + label_pos=0.5, + font_size=10, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + rotate=True, + clip_on=True, + node_size=300, + nodelist=None, + connectionstyle="arc3", + hide_ticks=True, +): + """Draw edge labels. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary + A dictionary with nodes as keys and positions as values. + Positions should be sequences of length 2. + + edge_labels : dictionary (default=None) + Edge labels in a dictionary of labels keyed by edge two-tuple. + Only labels for the keys in the dictionary are drawn. + + label_pos : float (default=0.5) + Position of edge label along edge (0=head, 0.5=center, 1=tail) + + font_size : int (default=10) + Font size for text labels + + font_color : color (default='k' black) + Font color string. Color can be string or rgb (or rgba) tuple of + floats from 0-1. 
+ + font_weight : string (default='normal') + Font weight + + font_family : string (default='sans-serif') + Font family + + alpha : float or None (default=None) + The text transparency + + bbox : Matplotlib bbox, optional + Specify text box properties (e.g. shape, color etc.) for edge labels. + Default is {boxstyle='round', ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0)}. + + horizontalalignment : string (default='center') + Horizontal alignment {'center', 'right', 'left'} + + verticalalignment : string (default='center') + Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'} + + ax : Matplotlib Axes object, optional + Draw the graph in the specified Matplotlib axes. + + rotate : bool (default=True) + Rotate edge labels to lie parallel to edges + + clip_on : bool (default=True) + Turn on clipping of edge labels at axis boundaries + + node_size : scalar or array (default=300) + Size of nodes. If an array it must be the same length as nodelist. + + nodelist : list, optional (default=G.nodes()) + This provides the node order for the `node_size` array (if it is an array). + + connectionstyle : string or iterable of strings (default="arc3") + Pass the connectionstyle parameter to create curved arc of rounding + radius rad. For example, connectionstyle='arc3,rad=0.2'. + See `matplotlib.patches.ConnectionStyle` and + `matplotlib.patches.FancyArrowPatch` for more info. + If Iterable, index indicates i'th edge key of MultiGraph + + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + + Returns + ------- + dict + `dict` of labels keyed by edge + + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> edge_labels = nx.draw_networkx_edge_labels(G, pos=nx.spring_layout(G)) + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_edges + draw_networkx_labels + """ + import matplotlib as mpl + import matplotlib.pyplot as plt + import numpy as np + + class CurvedArrowText(mpl.text.Text): + def __init__( + self, + arrow, + *args, + label_pos=0.5, + labels_horizontal=False, + ax=None, + **kwargs, + ): + # Bind to FancyArrowPatch + self.arrow = arrow + # how far along the text should be on the curve, + # 0 is at start, 1 is at end etc. 
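+            # (set from the `label_pos` argument of draw_networkx_edge_labels)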
+ self.label_pos = label_pos + self.labels_horizontal = labels_horizontal + if ax is None: + ax = plt.gca() + self.ax = ax + self.x, self.y, self.angle = self._update_text_pos_angle(arrow) + + # Create text object + super().__init__(self.x, self.y, *args, rotation=self.angle, **kwargs) + # Bind to axis + self.ax.add_artist(self) + + def _get_arrow_path_disp(self, arrow): + """ + This is part of FancyArrowPatch._get_path_in_displaycoord + It omits the second part of the method where path is converted + to polygon based on width + The transform is taken from ax, not the object, as the object + has not been added yet, and doesn't have transform + """ + dpi_cor = arrow._dpi_cor + # trans_data = arrow.get_transform() + trans_data = self.ax.transData + if arrow._posA_posB is not None: + posA = arrow._convert_xy_units(arrow._posA_posB[0]) + posB = arrow._convert_xy_units(arrow._posA_posB[1]) + (posA, posB) = trans_data.transform((posA, posB)) + _path = arrow.get_connectionstyle()( + posA, + posB, + patchA=arrow.patchA, + patchB=arrow.patchB, + shrinkA=arrow.shrinkA * dpi_cor, + shrinkB=arrow.shrinkB * dpi_cor, + ) + else: + _path = trans_data.transform_path(arrow._path_original) + # Return is in display coordinates + return _path + + def _update_text_pos_angle(self, arrow): + # Fractional label position + path_disp = self._get_arrow_path_disp(arrow) + (x1, y1), (cx, cy), (x2, y2) = path_disp.vertices + # Text position at a proportion t along the line in display coords + # default is 0.5 so text appears at the halfway point + t = self.label_pos + tt = 1 - t + x = tt**2 * x1 + 2 * t * tt * cx + t**2 * x2 + y = tt**2 * y1 + 2 * t * tt * cy + t**2 * y2 + if self.labels_horizontal: + # Horizontal text labels + angle = 0 + else: + # Labels parallel to curve + change_x = 2 * tt * (cx - x1) + 2 * t * (x2 - cx) + change_y = 2 * tt * (cy - y1) + 2 * t * (y2 - cy) + angle = (np.arctan2(change_y, change_x) / (2 * np.pi)) * 360 + # Text is "right way up" + if angle > 90: + angle -= 180 + if angle < -90: + angle += 180 + (x, y) = self.ax.transData.inverted().transform((x, y)) + return x, y, angle + + def draw(self, renderer): + # recalculate the text position and angle + self.x, self.y, self.angle = self._update_text_pos_angle(self.arrow) + self.set_position((self.x, self.y)) + self.set_rotation(self.angle) + # redraw text + super().draw(renderer) + + # use default box of white with white border + if bbox is None: + bbox = {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)} + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + raise nx.NetworkXError( + "draw_networkx_edges arg `connectionstyle` must be" + "string or iterable of strings" + ) + + if ax is None: + ax = plt.gca() + + if edge_labels is None: + kwds = {"keys": True} if G.is_multigraph() else {} + edge_labels = {tuple(edge): d for *edge, d in G.edges(data=True, **kwds)} + # NOTHING TO PLOT + if not edge_labels: + return {} + edgelist, labels = zip(*edge_labels.items()) + + if nodelist is None: + nodelist = list(G.nodes()) + + # set edge positions + edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist]) + + if G.is_multigraph(): + key_count = collections.defaultdict(lambda: itertools.count(0)) + edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist] + else: + edge_indices = [0] * len(edgelist) + + # Used to determine self loop mid-point + # Note, that this will not be accurate, + # if not drawing 
edge_labels for all edges drawn + h = 0 + if edge_labels: + miny = np.amin(np.ravel(edge_pos[:, :, 1])) + maxy = np.amax(np.ravel(edge_pos[:, :, 1])) + h = maxy - miny + selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max() + fancy_arrow_factory = FancyArrowFactory( + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle, + ax=ax, + ) + + text_items = {} + for i, (edge, label) in enumerate(zip(edgelist, labels)): + if not isinstance(label, str): + label = str(label) # this makes "1" and 1 labeled the same + + n1, n2 = edge[:2] + arrow = fancy_arrow_factory(i) + if n1 == n2: + connectionstyle_obj = arrow.get_connectionstyle() + posA = ax.transData.transform(pos[n1]) + path_disp = connectionstyle_obj(posA, posA) + path_data = ax.transData.inverted().transform_path(path_disp) + x, y = path_data.vertices[0] + text_items[edge] = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + rotation=0, + transform=ax.transData, + bbox=bbox, + zorder=1, + clip_on=clip_on, + ) + else: + text_items[edge] = CurvedArrowText( + arrow, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + transform=ax.transData, + bbox=bbox, + zorder=1, + clip_on=clip_on, + label_pos=label_pos, + labels_horizontal=not rotate, + ax=ax, + ) + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + return text_items + + +def draw_circular(G, **kwargs): + """Draw the graph `G` with a circular layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.circular_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. For + repeated drawing it is much more efficient to call + `~networkx.drawing.layout.circular_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.circular_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_circular(G) + + See Also + -------- + :func:`~networkx.drawing.layout.circular_layout` + """ + draw(G, circular_layout(G), **kwargs) + + +def draw_kamada_kawai(G, **kwargs): + """Draw the graph `G` with a Kamada-Kawai force-directed layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.kamada_kawai_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. 
+ For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.kamada_kawai_layout` directly and reuse the + result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.kamada_kawai_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_kamada_kawai(G) + + See Also + -------- + :func:`~networkx.drawing.layout.kamada_kawai_layout` + """ + draw(G, kamada_kawai_layout(G), **kwargs) + + +def draw_random(G, **kwargs): + """Draw the graph `G` with a random layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.random_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.random_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.random_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> nx.draw_random(G) + + See Also + -------- + :func:`~networkx.drawing.layout.random_layout` + """ + draw(G, random_layout(G), **kwargs) + + +def draw_spectral(G, **kwargs): + """Draw the graph `G` with a spectral 2D layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.spectral_layout(G), **kwargs) + + For more information about how node positions are determined, see + `~networkx.drawing.layout.spectral_layout`. + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.spectral_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.spectral_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_spectral(G) + + See Also + -------- + :func:`~networkx.drawing.layout.spectral_layout` + """ + draw(G, spectral_layout(G), **kwargs) + + +def draw_spring(G, **kwargs): + """Draw the graph `G` with a spring layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.spring_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + `~networkx.drawing.layout.spring_layout` is also the default layout for + `draw`, so this function is equivalent to `draw`. + + The layout is computed each time this function is called. 
+ For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.spring_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.spring_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(20) + >>> nx.draw_spring(G) + + See Also + -------- + draw + :func:`~networkx.drawing.layout.spring_layout` + """ + draw(G, spring_layout(G), **kwargs) + + +def draw_shell(G, nlist=None, **kwargs): + """Draw networkx graph `G` with shell layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.shell_layout(G, nlist=nlist), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + nlist : list of list of nodes, optional + A list containing lists of nodes representing the shells. + Default is `None`, meaning all nodes are in a single shell. + See `~networkx.drawing.layout.shell_layout` for details. + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.shell_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.shell_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(4) + >>> shells = [[0], [1, 2, 3]] + >>> nx.draw_shell(G, nlist=shells) + + See Also + -------- + :func:`~networkx.drawing.layout.shell_layout` + """ + draw(G, shell_layout(G, nlist=nlist), **kwargs) + + +def draw_planar(G, **kwargs): + """Draw a planar networkx graph `G` with planar layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.planar_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A planar networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Raises + ------ + NetworkXException + When `G` is not planar + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.planar_layout` directly and reuse the result:: + + >>> G = nx.path_graph(5) + >>> pos = nx.planar_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.draw_planar(G) + + See Also + -------- + :func:`~networkx.drawing.layout.planar_layout` + """ + draw(G, planar_layout(G), **kwargs) + + +def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): + """Apply an alpha (or list of alphas) to the colors provided. + + Parameters + ---------- + + colors : color string or array of floats (default='r') + Color of element. Can be a single color format string, + or a sequence of colors with the same length as nodelist. + If numeric values are specified they will be mapped to + colors using the cmap and vmin,vmax parameters. See + matplotlib.scatter for more details. + + alpha : float or array of floats + Alpha values for elements. 
This can be a single alpha value, in + which case it will be applied to all the elements of color. Otherwise, + if it is an array, the elements of alpha will be applied to the colors + in order (cycling through alpha multiple times if necessary). + + elem_list : array of networkx objects + The list of elements which are being colored. These could be nodes, + edges or labels. + + cmap : matplotlib colormap + Color map for use if colors is a list of floats corresponding to points + on a color mapping. + + vmin, vmax : float + Minimum and maximum values for normalizing colors if a colormap is used + + Returns + ------- + + rgba_colors : numpy ndarray + Array containing RGBA format values for each of the node colours. + + """ + from itertools import cycle, islice + + import matplotlib as mpl + import matplotlib.cm # call as mpl.cm + import matplotlib.colors # call as mpl.colors + import numpy as np + + # If we have been provided with a list of numbers as long as elem_list, + # apply the color mapping. + if len(colors) == len(elem_list) and isinstance(colors[0], Number): + mapper = mpl.cm.ScalarMappable(cmap=cmap) + mapper.set_clim(vmin, vmax) + rgba_colors = mapper.to_rgba(colors) + # Otherwise, convert colors to matplotlib's RGB using the colorConverter + # object. These are converted to numpy ndarrays to be consistent with the + # to_rgba method of ScalarMappable. + else: + try: + rgba_colors = np.array([mpl.colors.colorConverter.to_rgba(colors)]) + except ValueError: + rgba_colors = np.array( + [mpl.colors.colorConverter.to_rgba(color) for color in colors] + ) + # Set the final column of the rgba_colors to have the relevant alpha values + try: + # If alpha is longer than the number of colors, resize to the number of + # elements. Also, if rgba_colors.size (the number of elements of + # rgba_colors) is the same as the number of elements, resize the array, + # to avoid it being interpreted as a colormap by scatter() + if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list): + rgba_colors = np.resize(rgba_colors, (len(elem_list), 4)) + rgba_colors[1:, 0] = rgba_colors[0, 0] + rgba_colors[1:, 1] = rgba_colors[0, 1] + rgba_colors[1:, 2] = rgba_colors[0, 2] + rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) + except TypeError: + rgba_colors[:, -1] = alpha + return rgba_colors diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/exception.py b/env-llmeval/lib/python3.10/site-packages/networkx/exception.py new file mode 100644 index 0000000000000000000000000000000000000000..96694cc32dcfbb8307cf99b0fa939e2fa0f5a46d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/exception.py @@ -0,0 +1,125 @@ +""" +********** +Exceptions +********** + +Base exceptions and errors for NetworkX. +""" + +__all__ = [ + "HasACycle", + "NodeNotFound", + "PowerIterationFailedConvergence", + "ExceededMaxIterations", + "AmbiguousSolution", + "NetworkXAlgorithmError", + "NetworkXException", + "NetworkXError", + "NetworkXNoCycle", + "NetworkXNoPath", + "NetworkXNotImplemented", + "NetworkXPointlessConcept", + "NetworkXUnbounded", + "NetworkXUnfeasible", +] + + +class NetworkXException(Exception): + """Base class for exceptions in NetworkX.""" + + +class NetworkXError(NetworkXException): + """Exception for a serious error in NetworkX""" + + +class NetworkXPointlessConcept(NetworkXException): + """Raised when a null graph is provided as input to an algorithm + that cannot use it. 
+
+    The null graph is sometimes considered a pointless concept [1]_,
+    thus the name of the exception.
+
+    References
+    ----------
+    .. [1] Harary, F. and Read, R. "Is the Null Graph a Pointless
+       Concept?" In Graphs and Combinatorics Conference, George
+       Washington University. New York: Springer-Verlag, 1973.
+
+    """
+
+
+class NetworkXAlgorithmError(NetworkXException):
+    """Exception for unexpected termination of algorithms."""
+
+
+class NetworkXUnfeasible(NetworkXAlgorithmError):
+    """Exception raised by algorithms trying to solve a problem
+    instance that has no feasible solution."""
+
+
+class NetworkXNoPath(NetworkXUnfeasible):
+    """Exception for algorithms that should return a path when running
+    on graphs where such a path does not exist."""
+
+
+class NetworkXNoCycle(NetworkXUnfeasible):
+    """Exception for algorithms that should return a cycle when running
+    on graphs where such a cycle does not exist."""
+
+
+class HasACycle(NetworkXException):
+    """Raised if a graph has a cycle when an algorithm expects that it
+    will have no cycles.
+
+    """
+
+
+class NetworkXUnbounded(NetworkXAlgorithmError):
+    """Exception raised by algorithms trying to solve a maximization
+    or a minimization problem instance that is unbounded."""
+
+
+class NetworkXNotImplemented(NetworkXException):
+    """Exception raised by algorithms not implemented for a type of graph."""
+
+
+class NodeNotFound(NetworkXException):
+    """Exception raised if a requested node is not present in the graph."""
+
+
+class AmbiguousSolution(NetworkXException):
+    """Raised if more than one valid solution exists for an intermediary step
+    of an algorithm.
+
+    In the face of ambiguity, refuse the temptation to guess.
+    This may occur, for example, when trying to determine the
+    bipartite node sets in a disconnected bipartite graph when
+    computing bipartite matchings.
+
+    """
+
+
+class ExceededMaxIterations(NetworkXException):
+    """Raised if a loop iterates too many times without breaking.
+
+    This may occur, for example, in an algorithm that computes
+    progressively better approximations to a value but exceeds an
+    iteration bound specified by the user.
+
+    """
+
+
+class PowerIterationFailedConvergence(ExceededMaxIterations):
+    """Raised when the power iteration method fails to converge within a
+    specified iteration limit.
+
+    `num_iterations` is the number of iterations that have been
+    completed when this exception was raised.
+
+    """
+
+    def __init__(self, num_iterations, *args, **kw):
+        # `super().__init__` is already bound to this instance, so `self`
+        # must not be passed again as an explicit argument.
+        msg = f"power iteration failed to converge within {num_iterations} iterations"
+        super().__init__(msg, *args, **kw)
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/lazy_imports.py b/env-llmeval/lib/python3.10/site-packages/networkx/lazy_imports.py
new file mode 100644
index 0000000000000000000000000000000000000000..396404ba38f5885bfcc65af36d7b4655e94ccc27
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/lazy_imports.py
@@ -0,0 +1,188 @@
+import importlib
+import importlib.util
+import inspect
+import os
+import sys
+import types
+
+__all__ = ["attach", "_lazy_import"]
+
+
+def attach(module_name, submodules=None, submod_attrs=None):
+    """Attach lazily loaded submodules, and functions or other attributes.
+ + Typically, modules import submodules and attributes as follows:: + + import mysubmodule + import anothersubmodule + + from .foo import someattr + + The idea of this function is to replace the `__init__.py` + module's `__getattr__`, `__dir__`, and `__all__` attributes such that + all imports work exactly the way they normally would, except that the + actual import is delayed until the resulting module object is first used. + + The typical way to call this function, replacing the above imports, is:: + + __getattr__, __lazy_dir__, __all__ = lazy.attach( + __name__, ["mysubmodule", "anothersubmodule"], {"foo": "someattr"} + ) + + This functionality requires Python 3.7 or higher. + + Parameters + ---------- + module_name : str + Typically use __name__. + submodules : set + List of submodules to lazily import. + submod_attrs : dict + Dictionary of submodule -> list of attributes / functions. + These attributes are imported as they are used. + + Returns + ------- + __getattr__, __dir__, __all__ + + """ + if submod_attrs is None: + submod_attrs = {} + + if submodules is None: + submodules = set() + else: + submodules = set(submodules) + + attr_to_modules = { + attr: mod for mod, attrs in submod_attrs.items() for attr in attrs + } + + __all__ = list(submodules | attr_to_modules.keys()) + + def __getattr__(name): + if name in submodules: + return importlib.import_module(f"{module_name}.{name}") + elif name in attr_to_modules: + submod = importlib.import_module(f"{module_name}.{attr_to_modules[name]}") + return getattr(submod, name) + else: + raise AttributeError(f"No {module_name} attribute {name}") + + def __dir__(): + return __all__ + + if os.environ.get("EAGER_IMPORT", ""): + for attr in set(attr_to_modules.keys()) | submodules: + __getattr__(attr) + + return __getattr__, __dir__, list(__all__) + + +class DelayedImportErrorModule(types.ModuleType): + def __init__(self, frame_data, *args, **kwargs): + self.__frame_data = frame_data + super().__init__(*args, **kwargs) + + def __getattr__(self, x): + if x in ("__class__", "__file__", "__frame_data"): + super().__getattr__(x) + else: + fd = self.__frame_data + raise ModuleNotFoundError( + f"No module named '{fd['spec']}'\n\n" + "This error is lazily reported, having originally occurred in\n" + f' File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n' + f'----> {"".join(fd["code_context"] or "").strip()}' + ) + + +def _lazy_import(fullname): + """Return a lazily imported proxy for a module or library. + + Warning + ------- + Importing using this function can currently cause trouble + when the user tries to import from a subpackage of a module before + the package is fully imported. In particular, this idiom may not work: + + np = lazy_import("numpy") + from numpy.lib import recfunctions + + This is due to a difference in the way Python's LazyLoader handles + subpackage imports compared to the normal import process. Hopefully + we will get Python's LazyLoader to fix this, or find a workaround. + In the meantime, this is a potential problem. + + The workaround is to import numpy before importing from the subpackage. + + Notes + ----- + We often see the following pattern:: + + def myfunc(): + import scipy as sp + sp.argmin(...) + .... + + This is to prevent a library, in this case `scipy`, from being + imported at function definition time, since that can be slow. + + This function provides a proxy module that, upon access, imports + the actual module. 
So the idiom equivalent to the above example is::
+
+        sp = lazy.load("scipy")
+
+        def myfunc():
+            sp.argmin(...)
+            ....
+
+    The initial import time is fast because the actual import is delayed
+    until the first attribute is requested. The overall import time may
+    decrease as well for users that don't make use of large portions
+    of the library.
+
+    Parameters
+    ----------
+    fullname : str
+        The full name of the package or subpackage to import. For example::
+
+            sp = lazy.load("scipy")  # import scipy as sp
+            spla = lazy.load("scipy.linalg")  # import scipy.linalg as spla
+
+    Returns
+    -------
+    pm : importlib.util._LazyModule
+        Proxy module. Can be used like any regularly imported module.
+        Actual loading of the module occurs upon first attribute request.
+
+    """
+    try:
+        return sys.modules[fullname]
+    except KeyError:
+        pass
+
+    # Not previously loaded -- look it up
+    spec = importlib.util.find_spec(fullname)
+
+    if spec is None:
+        try:
+            parent = inspect.stack()[1]
+            frame_data = {
+                "spec": fullname,
+                "filename": parent.filename,
+                "lineno": parent.lineno,
+                "function": parent.function,
+                "code_context": parent.code_context,
+            }
+            return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule")
+        finally:
+            del parent
+
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[fullname] = module
+
+    loader = importlib.util.LazyLoader(spec.loader)
+    loader.exec_module(module)
+
+    return module
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/relabel.py b/env-llmeval/lib/python3.10/site-packages/networkx/relabel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b870f726ef42e0bcaa7bf724e2ae6ab4145f288
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/relabel.py
@@ -0,0 +1,285 @@
+import networkx as nx
+
+__all__ = ["convert_node_labels_to_integers", "relabel_nodes"]
+
+
+@nx._dispatchable(
+    preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True
+)
+def relabel_nodes(G, mapping, copy=True):
+    """Relabel the nodes of the graph G according to a given mapping.
+
+    The original node ordering may not be preserved if `copy` is `False` and the
+    mapping includes overlap between old and new labels.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph
+
+    mapping : dictionary
+        A dictionary with the old labels as keys and new labels as values.
+        A partial mapping is allowed. Mapping two nodes to a single node is allowed.
+        Any non-node keys in the mapping are ignored.
+
+    copy : bool (optional, default=True)
+        If True return a copy, or if False relabel the nodes in place.
+ + Examples + -------- + To create a new graph with nodes relabeled according to a given + dictionary: + + >>> G = nx.path_graph(3) + >>> sorted(G) + [0, 1, 2] + >>> mapping = {0: "a", 1: "b", 2: "c"} + >>> H = nx.relabel_nodes(G, mapping) + >>> sorted(H) + ['a', 'b', 'c'] + + Nodes can be relabeled with any hashable object, including numbers + and strings: + + >>> import string + >>> G = nx.path_graph(26) # nodes are integers 0 through 25 + >>> sorted(G)[:3] + [0, 1, 2] + >>> mapping = dict(zip(G, string.ascii_lowercase)) + >>> G = nx.relabel_nodes(G, mapping) # nodes are characters a through z + >>> sorted(G)[:3] + ['a', 'b', 'c'] + >>> mapping = dict(zip(G, range(1, 27))) + >>> G = nx.relabel_nodes(G, mapping) # nodes are integers 1 through 26 + >>> sorted(G)[:3] + [1, 2, 3] + + To perform a partial in-place relabeling, provide a dictionary + mapping only a subset of the nodes, and set the `copy` keyword + argument to False: + + >>> G = nx.path_graph(3) # nodes 0-1-2 + >>> mapping = {0: "a", 1: "b"} # 0->'a' and 1->'b' + >>> G = nx.relabel_nodes(G, mapping, copy=False) + >>> sorted(G, key=str) + [2, 'a', 'b'] + + A mapping can also be given as a function: + + >>> G = nx.path_graph(3) + >>> H = nx.relabel_nodes(G, lambda x: x**2) + >>> list(H) + [0, 1, 4] + + In a multigraph, relabeling two or more nodes to the same new node + will retain all edges, but may change the edge keys in the process: + + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1, value="a") # returns the key for this edge + 0 + >>> G.add_edge(0, 2, value="b") + 0 + >>> G.add_edge(0, 3, value="c") + 0 + >>> mapping = {1: 4, 2: 4, 3: 4} + >>> H = nx.relabel_nodes(G, mapping, copy=True) + >>> print(H[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + + This works for in-place relabeling too: + + >>> G = nx.relabel_nodes(G, mapping, copy=False) + >>> print(G[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + + Notes + ----- + Only the nodes specified in the mapping will be relabeled. + Any non-node keys in the mapping are ignored. + + The keyword setting copy=False modifies the graph in place. + Relabel_nodes avoids naming collisions by building a + directed graph from ``mapping`` which specifies the order of + relabelings. Naming collisions, such as a->b, b->c, are ordered + such that "b" gets renamed to "c" before "a" gets renamed "b". + In cases of circular mappings (e.g. a->b, b->a), modifying the + graph is not possible in-place and an exception is raised. + In that case, use copy=True. + + If a relabel operation on a multigraph would cause two or more + edges to have the same source, target and key, the second edge must + be assigned a new key to retain all edges. The new key is set + to the lowest non-negative integer not already used as a key + for edges between these two nodes. Note that this means non-numeric + keys may be replaced by numeric keys. + + See Also + -------- + convert_node_labels_to_integers + """ + # you can pass any callable e.g. f(old_label) -> new_label or + # e.g. str(old_label) -> new_label, but we'll just make a dictionary here regardless + m = {n: mapping(n) for n in G} if callable(mapping) else mapping + + if copy: + return _relabel_copy(G, m) + else: + return _relabel_inplace(G, m) + + +def _relabel_inplace(G, mapping): + if len(mapping.keys() & mapping.values()) > 0: + # labels sets overlap + # can we topological sort and still do the relabeling? 
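+        # Sketch with a hypothetical mapping (not part of the algorithm):
+        # mapping = {"a": "b", "b": "c"} gives D the edges a->b and b->c, so a
+        # topological order is [a, b, c]; iterating in reverse visits "c"
+        # first (skipped, since it is not a key of the mapping), then renames
+        # b->c, and only then a->b, avoiding any label collision.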
+ D = nx.DiGraph(list(mapping.items())) + D.remove_edges_from(nx.selfloop_edges(D)) + try: + nodes = reversed(list(nx.topological_sort(D))) + except nx.NetworkXUnfeasible as err: + raise nx.NetworkXUnfeasible( + "The node label sets are overlapping and no ordering can " + "resolve the mapping. Use copy=True." + ) from err + else: + # non-overlapping label sets, sort them in the order of G nodes + nodes = [n for n in G if n in mapping] + + multigraph = G.is_multigraph() + directed = G.is_directed() + + for old in nodes: + # Test that old is in both mapping and G, otherwise ignore. + try: + new = mapping[old] + G.add_node(new, **G.nodes[old]) + except KeyError: + continue + if new == old: + continue + if multigraph: + new_edges = [ + (new, new if old == target else target, key, data) + for (_, target, key, data) in G.edges(old, data=True, keys=True) + ] + if directed: + new_edges += [ + (new if old == source else source, new, key, data) + for (source, _, key, data) in G.in_edges(old, data=True, keys=True) + ] + # Ensure new edges won't overwrite existing ones + seen = set() + for i, (source, target, key, data) in enumerate(new_edges): + if target in G[source] and key in G[source][target]: + new_key = 0 if not isinstance(key, int | float) else key + while new_key in G[source][target] or (target, new_key) in seen: + new_key += 1 + new_edges[i] = (source, target, new_key, data) + seen.add((target, new_key)) + else: + new_edges = [ + (new, new if old == target else target, data) + for (_, target, data) in G.edges(old, data=True) + ] + if directed: + new_edges += [ + (new if old == source else source, new, data) + for (source, _, data) in G.in_edges(old, data=True) + ] + G.remove_node(old) + G.add_edges_from(new_edges) + return G + + +def _relabel_copy(G, mapping): + H = G.__class__() + H.add_nodes_from(mapping.get(n, n) for n in G) + H._node.update((mapping.get(n, n), d.copy()) for n, d in G.nodes.items()) + if G.is_multigraph(): + new_edges = [ + (mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy()) + for (n1, n2, k, d) in G.edges(keys=True, data=True) + ] + + # check for conflicting edge-keys + undirected = not G.is_directed() + seen_edges = set() + for i, (source, target, key, data) in enumerate(new_edges): + while (source, target, key) in seen_edges: + if not isinstance(key, int | float): + key = 0 + key += 1 + seen_edges.add((source, target, key)) + if undirected: + seen_edges.add((target, source, key)) + new_edges[i] = (source, target, key, data) + + H.add_edges_from(new_edges) + else: + H.add_edges_from( + (mapping.get(n1, n1), mapping.get(n2, n2), d.copy()) + for (n1, n2, d) in G.edges(data=True) + ) + H.graph.update(G.graph) + return H + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def convert_node_labels_to_integers( + G, first_label=0, ordering="default", label_attribute=None +): + """Returns a copy of the graph G with the nodes relabeled using + consecutive integers. + + Parameters + ---------- + G : graph + A NetworkX graph + + first_label : int, optional (default=0) + An integer specifying the starting offset in numbering nodes. + The new integer labels are numbered first_label, ..., n-1+first_label. + + ordering : string + "default" : inherit node ordering from G.nodes() + "sorted" : inherit node ordering from sorted(G.nodes()) + "increasing degree" : nodes are sorted by increasing degree + "decreasing degree" : nodes are sorted by decreasing degree + + label_attribute : string, optional (default=None) + Name of node attribute to store old label. 
If None no attribute + is created. + + Notes + ----- + Node and edge attribute data are copied to the new (relabeled) graph. + + There is no guarantee that the relabeling of nodes to integers will + give the same two integers for two (even identical graphs). + Use the `ordering` argument to try to preserve the order. + + See Also + -------- + relabel_nodes + """ + N = G.number_of_nodes() + first_label + if ordering == "default": + mapping = dict(zip(G.nodes(), range(first_label, N))) + elif ordering == "sorted": + nlist = sorted(G.nodes()) + mapping = dict(zip(nlist, range(first_label, N))) + elif ordering == "increasing degree": + dv_pairs = [(d, n) for (n, d) in G.degree()] + dv_pairs.sort() # in-place sort from lowest to highest degree + mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N))) + elif ordering == "decreasing degree": + dv_pairs = [(d, n) for (n, d) in G.degree()] + dv_pairs.sort() # in-place sort from lowest to highest degree + dv_pairs.reverse() + mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N))) + else: + raise nx.NetworkXError(f"Unknown node ordering: {ordering}") + H = relabel_nodes(G, mapping) + # create node attribute with the old label + if label_attribute is not None: + nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, label_attribute) + return H diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__init__.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..96ef984a13f71e4cab975c48274d3d98b09a3d34 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__init__.py @@ -0,0 +1,8 @@ +from networkx.utils.misc import * +from networkx.utils.decorators import * +from networkx.utils.random_sequence import * +from networkx.utils.union_find import * +from networkx.utils.rcm import * +from networkx.utils.heaps import * +from networkx.utils.backends import * +from networkx.utils.configs import * diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8a91388669d1810c51cb705c044c2d777ca7b8a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/backends.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/backends.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..222ea7b62190748e8b99aea96b8a4377a3c3bf47 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/backends.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/decorators.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/decorators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0add7fe1bc05ef83115fcd4ad3de0b431bd16d8 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/decorators.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/misc.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/misc.cpython-310.pyc new file mode 100644 
index 0000000000000000000000000000000000000000..233daf7fa9743bbb1bd63c61514a79d7f6e4974b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/misc.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/random_sequence.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/random_sequence.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10fa13dc84617436ff0de0e58d67609a27ff37d9 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/random_sequence.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/rcm.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/rcm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52164ddb1953b1733d669f17b68b3e049d28a10e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/rcm.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/union_find.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/union_find.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fff5f1b9c065485b2cfd8b724fe1d4f823ee43eb Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/union_find.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/backends.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/backends.py new file mode 100644 index 0000000000000000000000000000000000000000..b48798d8023659ba9bbc241bdb4df84d6bb68dc1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/backends.py @@ -0,0 +1,1553 @@ +""" +NetworkX utilizes a plugin-dispatch architecture, which means we can plug in and +out of backends with minimal code changes. A valid NetworkX backend specifies +`entry points `_, +named ``networkx.backends`` and an optional ``networkx.backend_info`` when it is +installed (not imported). This allows NetworkX to dispatch (redirect) function calls +to the backend so the execution flows to the designated backend +implementation, similar to how plugging a charger into a socket redirects the +electricity to your phone. This design enhances flexibility and integration, making +NetworkX more adaptable and efficient. + +There are three main ways to use a backend after the package is installed. +You can set environment variables and run the exact same code you run for +NetworkX. You can use a keyword argument ``backend=...`` with the NetworkX +function. Or, you can convert the NetworkX Graph to a backend graph type and +call a NetworkX function supported by that backend. Environment variables +and backend keywords automatically convert your NetworkX Graph to the +backend type. Manually converting it yourself allows you to use that same +backend graph for more than one function call, reducing conversion time. 
+ +For example, you can set an environment variable before starting python to request +all dispatchable functions automatically dispatch to the given backend:: + + bash> NETWORKX_AUTOMATIC_BACKENDS=cugraph python my_networkx_script.py + +or you can specify the backend as a kwarg:: + + nx.betweenness_centrality(G, k=10, backend="parallel") + +or you can convert the NetworkX Graph object ``G`` into a Graph-like +object specific to the backend and then pass that in the NetworkX function:: + + H = nx_parallel.ParallelGraph(G) + nx.betweenness_centrality(H, k=10) + +How it works: You might have seen the ``@nx._dispatchable`` decorator on +many of the NetworkX functions in the codebase. It decorates the function +with code that redirects execution to the function's backend implementation. +The code also manages any ``backend_kwargs`` you provide to the backend +version of the function. The code looks for the environment variable or +a ``backend`` keyword argument and if found, converts the input NetworkX +graph to the backend format before calling the backend's version of the +function. If no environment variable or backend keyword are found, the +dispatching code checks the input graph object for an attribute +called ``__networkx_backend__`` which tells it which backend provides this +graph type. That backend's version of the function is then called. +The backend system relies on Python ``entry_point`` system to signal +NetworkX that a backend is installed (even if not imported yet). Thus no +code needs to be changed between running with NetworkX and running with +a backend to NetworkX. The attribute ``__networkx_backend__`` holds a +string with the name of the ``entry_point``. If none of these options +are being used, the decorator code simply calls the NetworkX function +on the NetworkX graph as usual. + +The NetworkX library does not need to know that a backend exists for it +to work. So long as the backend package creates the entry_point, and +provides the correct interface, it will be called when the user requests +it using one of the three approaches described above. Some backends have +been working with the NetworkX developers to ensure smooth operation. +They are the following:: + +- `graphblas `_ +- `cugraph `_ +- `parallel `_ +- ``loopback`` is for testing purposes only and is not a real backend. + +Note that the ``backend_name`` is e.g. ``parallel``, the package installed +is ``nx-parallel``, and we use ``nx_parallel`` while importing the package. + +Creating a Custom backend +------------------------- + +1. To be a valid backend that is discoverable by NetworkX, your package must + register an `entry-point `_ + ``networkx.backends`` in the package's metadata, with a `key pointing to your + dispatch object `_ . + For example, if you are using ``setuptools`` to manage your backend package, + you can `add the following to your pyproject.toml file `_:: + + [project.entry-points."networkx.backends"] + backend_name = "your_dispatcher_class" + + You can also add the ``backend_info`` entry-point. It points towards the ``get_info`` + function that returns all the backend information, which is then used to build the + "Additional Backend Implementation" box at the end of algorithm's documentation + page (e.g. 
`nx-cugraph's get_info function `_):: + + [project.entry-points."networkx.backend_info"] + backend_name = "your_get_info_function" + + Note that this would only work if your backend is a trusted backend of NetworkX, + and is present in the `.circleci/config.yml` and + `.github/workflows/deploy-docs.yml` files in the NetworkX repository. + +2. The backend must create an ``nx.Graph``-like object which contains an attribute + ``__networkx_backend__`` with a value of the entry point name:: + + class BackendGraph: + __networkx_backend__ = "backend_name" + ... + + +Testing the Custom backend +-------------------------- + +To test your custom backend, you can run the NetworkX test suite with your backend. +This also ensures that the custom backend is compatible with NetworkX's API. + +Testing Environment Setup +~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable automatic testing with your custom backend, follow these steps: + +1. Set Backend Environment Variables: + - ``NETWORKX_TEST_BACKEND`` : Setting this to your registered backend key will let + the NetworkX's dispatch machinery automatically convert a regular NetworkX + ``Graph``, ``DiGraph``, ``MultiGraph``, etc. to their backend equivalents, using + ``your_dispatcher_class.convert_from_nx(G, ...)`` function. + - ``NETWORKX_FALLBACK_TO_NX`` (default=False) : Setting this variable to `True` will + instruct tests to use a NetworkX ``Graph`` for algorithms not implemented by your + custom backend. Setting this to `False` will only run the tests for algorithms + implemented by your custom backend and tests for other algorithms will ``xfail``. + +2. Defining ``convert_from_nx`` and ``convert_to_nx`` methods: + The arguments to ``convert_from_nx`` are: + + - ``G`` : NetworkX Graph + - ``edge_attrs`` : dict, optional + Dictionary mapping edge attributes to default values if missing in ``G``. + If None, then no edge attributes will be converted and default may be 1. + - ``node_attrs``: dict, optional + Dictionary mapping node attributes to default values if missing in ``G``. + If None, then no node attributes will be converted. + - ``preserve_edge_attrs`` : bool + Whether to preserve all edge attributes. + - ``preserve_node_attrs`` : bool + Whether to preserve all node attributes. + - ``preserve_graph_attrs`` : bool + Whether to preserve all graph attributes. + - ``preserve_all_attrs`` : bool + Whether to preserve all graph, node, and edge attributes. + - ``name`` : str + The name of the algorithm. + - ``graph_name`` : str + The name of the graph argument being converted. + +Running Tests +~~~~~~~~~~~~~ + +You can invoke NetworkX tests for your custom backend with the following commands:: + + NETWORKX_TEST_BACKEND= + NETWORKX_FALLBACK_TO_NX=True # or False + pytest --pyargs networkx + +Conversions while running tests : + +- Convert NetworkX graphs using ``.convert_from_nx(G, ...)`` into + the backend graph. +- Pass the backend graph objects to the backend implementation of the algorithm. +- Convert the result back to a form expected by NetworkX tests using + ``.convert_to_nx(result, ...)``. + +Notes +~~~~~ + +- Dispatchable algorithms that are not implemented by the backend + will cause a ``pytest.xfail``, giving some indication that not all + tests are running, while avoiding causing an explicit failure. + +- If a backend only partially implements some algorithms, it can define + a ``can_run(name, args, kwargs)`` function that returns True or False + indicating whether it can run the algorithm with the given arguments. 
+ It may also return a string indicating why the algorithm can't be run; + this string may be used in the future to give helpful info to the user. + +- A backend may also define ``should_run(name, args, kwargs)`` that is similar + to ``can_run``, but answers whether the backend *should* be run (converting + if necessary). Like ``can_run``, it receives the original arguments so it + can decide whether it should be run by inspecting the arguments. ``can_run`` + runs before ``should_run``, so ``should_run`` may assume ``can_run`` is True. + If not implemented by the backend, ``can_run`` and ``should_run`` are + assumed to always return True if the backend implements the algorithm. + +- A special ``on_start_tests(items)`` function may be defined by the backend. + It will be called with the list of NetworkX tests discovered. Each item + is a test object that can be marked as xfail if the backend does not support + the test using ``item.add_marker(pytest.mark.xfail(reason=...))``. + +- A backend graph instance may have a ``G.__networkx_cache__`` dict to enable + caching, and care should be taken to clear the cache when appropriate. +""" + +import inspect +import itertools +import os +import warnings +from functools import partial +from importlib.metadata import entry_points + +import networkx as nx + +from .decorators import argmap + +__all__ = ["_dispatchable"] + + +def _do_nothing(): + """This does nothing at all, yet it helps turn `_dispatchable` into functions.""" + + +def _get_backends(group, *, load_and_call=False): + """ + Retrieve NetworkX ``backends`` and ``backend_info`` from the entry points. + + Parameters + ----------- + group : str + The entry_point to be retrieved. + load_and_call : bool, optional + If True, load and call the backend. Defaults to False. + + Returns + -------- + dict + A dictionary mapping backend names to their respective backend objects. + + Notes + ------ + If a backend is defined more than once, a warning is issued. + The `nx-loopback` backend is removed if it exists, as it is only available during testing. + A warning is displayed if an error occurs while loading a backend. + """ + items = entry_points(group=group) + rv = {} + for ep in items: + if ep.name in rv: + warnings.warn( + f"networkx backend defined more than once: {ep.name}", + RuntimeWarning, + stacklevel=2, + ) + elif load_and_call: + try: + rv[ep.name] = ep.load()() + except Exception as exc: + warnings.warn( + f"Error encountered when loading info for backend {ep.name}: {exc}", + RuntimeWarning, + stacklevel=2, + ) + else: + rv[ep.name] = ep + rv.pop("nx-loopback", None) + return rv + + +backends = _get_backends("networkx.backends") +backend_info = _get_backends("networkx.backend_info", load_and_call=True) + +# We must import from config after defining `backends` above +from .configs import Config, config + +# Get default configuration from environment variables at import time +config.backend_priority = [ + x.strip() + for x in os.environ.get( + "NETWORKX_BACKEND_PRIORITY", + os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", ""), + ).split(",") + if x.strip() +] +# Initialize default configuration for backends +config.backends = Config( + **{ + backend: ( + cfg if isinstance(cfg := info["default_config"], Config) else Config(**cfg) + ) + if "default_config" in info + else Config() + for backend, info in backend_info.items() + } +) +type(config.backends).__doc__ = "All installed NetworkX backends and their configs." 
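+# For example (hypothetical value), NETWORKX_BACKEND_PRIORITY="cugraph, parallel"
+# results in config.backend_priority == ["cugraph", "parallel"]; if neither
+# environment variable is set, the priority list is empty.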
+ +# Load and cache backends on-demand +_loaded_backends = {} # type: ignore[var-annotated] + + +def _always_run(name, args, kwargs): + return True + + +def _load_backend(backend_name): + if backend_name in _loaded_backends: + return _loaded_backends[backend_name] + rv = _loaded_backends[backend_name] = backends[backend_name].load() + if not hasattr(rv, "can_run"): + rv.can_run = _always_run + if not hasattr(rv, "should_run"): + rv.should_run = _always_run + return rv + + +_registered_algorithms = {} + + +class _dispatchable: + """Allow any of the following decorator forms: + - @_dispatchable + - @_dispatchable() + - @_dispatchable(name="override_name") + - @_dispatchable(graphs="graph") + - @_dispatchable(edge_attrs="weight") + - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) + + These class attributes are currently used to allow backends to run networkx tests. + For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx` + Future work: add configuration to control these. + """ + + _is_testing = False + _fallback_to_nx = ( + os.environ.get("NETWORKX_FALLBACK_TO_NX", "true").strip().lower() == "true" + ) + + def __new__( + cls, + func=None, + *, + name=None, + graphs="G", + edge_attrs=None, + node_attrs=None, + preserve_edge_attrs=False, + preserve_node_attrs=False, + preserve_graph_attrs=False, + preserve_all_attrs=False, + mutates_input=False, + returns_graph=False, + ): + """A decorator that makes certain input graph types dispatch to ``func``'s + backend implementation. + + Usage can be any of the following decorator forms: + - @_dispatchable + - @_dispatchable() + - @_dispatchable(name="override_name") + - @_dispatchable(graphs="graph_var_name") + - @_dispatchable(edge_attrs="weight") + - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"}) + with 0 and 1 giving the position in the signature function for graph objects. + When edge_attrs is a dict, keys are keyword names and values are defaults. + + The class attributes are used to allow backends to run networkx tests. + For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx` + Future work: add configuration to control these. + + Parameters + ---------- + func : callable, optional + The function to be decorated. If ``func`` is not provided, returns a + partial object that can be used to decorate a function later. If ``func`` + is provided, returns a new callable object that dispatches to a backend + algorithm based on input graph types. + + name : str, optional + The name of the algorithm to use for dispatching. If not provided, + the name of ``func`` will be used. ``name`` is useful to avoid name + conflicts, as all dispatched algorithms live in a single namespace. + For example, ``tournament.is_strongly_connected`` had a name conflict + with the standard ``nx.is_strongly_connected``, so we used + ``@_dispatchable(name="tournament_is_strongly_connected")``. + + graphs : str or dict or None, default "G" + If a string, the parameter name of the graph, which must be the first + argument of the wrapped function. If more than one graph is required + for the algorithm (or if the graph is not the first argument), provide + a dict of parameter name to argument position for each graph argument. + For example, ``@_dispatchable(graphs={"G": 0, "auxiliary?": 4})`` + indicates the 0th parameter ``G`` of the function is a required graph, + and the 4th parameter ``auxiliary`` is an optional graph. + To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``. 
+ Use ``graphs=None`` if *no* arguments are NetworkX graphs such as for + graph generators, readers, and conversion functions. + + edge_attrs : str or dict, optional + ``edge_attrs`` holds information about edge attribute arguments + and default values for those edge attributes. + If a string, ``edge_attrs`` holds the function argument name that + indicates a single edge attribute to include in the converted graph. + The default value for this attribute is 1. To indicate that an argument + is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``. + If a dict, ``edge_attrs`` holds a dict keyed by argument names, with + values that are either the default value or, if a string, the argument + name that indicates the default value. + + node_attrs : str or dict, optional + Like ``edge_attrs``, but for node attributes. + + preserve_edge_attrs : bool or str or dict, optional + For bool, whether to preserve all edge attributes. + For str, the parameter name that may indicate (with ``True`` or a + callable argument) whether all edge attributes should be preserved + when converting. + For dict of ``{graph_name: {attr: default}}``, indicate pre-determined + edge attributes (and defaults) to preserve for input graphs. + + preserve_node_attrs : bool or str or dict, optional + Like ``preserve_edge_attrs``, but for node attributes. + + preserve_graph_attrs : bool or set + For bool, whether to preserve all graph attributes. + For set, which input graph arguments to preserve graph attributes. + + preserve_all_attrs : bool + Whether to preserve all edge, node and graph attributes. + This overrides all the other preserve_*_attrs. + + mutates_input : bool or dict, default False + For bool, whether the functions mutates an input graph argument. + For dict of ``{arg_name: arg_pos}``, arguments that indicates whether an + input graph will be mutated, and ``arg_name`` may begin with ``"not "`` + to negate the logic (for example, this is used by ``copy=`` arguments). + By default, dispatching doesn't convert input graphs to a different + backend for functions that mutate input graphs. + + returns_graph : bool, default False + Whether the function can return or yield a graph object. By default, + dispatching doesn't convert input graphs to a different backend for + functions that return graphs. 
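+
+        For example, a hypothetical decoration (illustrative only, not a
+        function from this codebase) combining several of these parameters::
+
+            @_dispatchable(graphs={"G": 0, "H": 1}, preserve_all_attrs=True, returns_graph=True)
+            def my_binary_op(G, H):
+                ...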
+ """ + if func is None: + return partial( + _dispatchable, + name=name, + graphs=graphs, + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + preserve_graph_attrs=preserve_graph_attrs, + preserve_all_attrs=preserve_all_attrs, + mutates_input=mutates_input, + returns_graph=returns_graph, + ) + if isinstance(func, str): + raise TypeError("'name' and 'graphs' must be passed by keyword") from None + # If name not provided, use the name of the function + if name is None: + name = func.__name__ + + self = object.__new__(cls) + + # standard function-wrapping stuff + # __annotations__ not used + self.__name__ = func.__name__ + # self.__doc__ = func.__doc__ # __doc__ handled as cached property + self.__defaults__ = func.__defaults__ + # We "magically" add `backend=` keyword argument to allow backend to be specified + if func.__kwdefaults__: + self.__kwdefaults__ = {**func.__kwdefaults__, "backend": None} + else: + self.__kwdefaults__ = {"backend": None} + self.__module__ = func.__module__ + self.__qualname__ = func.__qualname__ + self.__dict__.update(func.__dict__) + self.__wrapped__ = func + + # Supplement docstring with backend info; compute and cache when needed + self._orig_doc = func.__doc__ + self._cached_doc = None + + self.orig_func = func + self.name = name + self.edge_attrs = edge_attrs + self.node_attrs = node_attrs + self.preserve_edge_attrs = preserve_edge_attrs or preserve_all_attrs + self.preserve_node_attrs = preserve_node_attrs or preserve_all_attrs + self.preserve_graph_attrs = preserve_graph_attrs or preserve_all_attrs + self.mutates_input = mutates_input + # Keep `returns_graph` private for now, b/c we may extend info on return types + self._returns_graph = returns_graph + + if edge_attrs is not None and not isinstance(edge_attrs, str | dict): + raise TypeError( + f"Bad type for edge_attrs: {type(edge_attrs)}. Expected str or dict." + ) from None + if node_attrs is not None and not isinstance(node_attrs, str | dict): + raise TypeError( + f"Bad type for node_attrs: {type(node_attrs)}. Expected str or dict." + ) from None + if not isinstance(self.preserve_edge_attrs, bool | str | dict): + raise TypeError( + f"Bad type for preserve_edge_attrs: {type(self.preserve_edge_attrs)}." + " Expected bool, str, or dict." + ) from None + if not isinstance(self.preserve_node_attrs, bool | str | dict): + raise TypeError( + f"Bad type for preserve_node_attrs: {type(self.preserve_node_attrs)}." + " Expected bool, str, or dict." + ) from None + if not isinstance(self.preserve_graph_attrs, bool | set): + raise TypeError( + f"Bad type for preserve_graph_attrs: {type(self.preserve_graph_attrs)}." + " Expected bool or set." + ) from None + if not isinstance(self.mutates_input, bool | dict): + raise TypeError( + f"Bad type for mutates_input: {type(self.mutates_input)}." + " Expected bool or dict." + ) from None + if not isinstance(self._returns_graph, bool): + raise TypeError( + f"Bad type for returns_graph: {type(self._returns_graph)}." + " Expected bool." + ) from None + + if isinstance(graphs, str): + graphs = {graphs: 0} + elif graphs is None: + pass + elif not isinstance(graphs, dict): + raise TypeError( + f"Bad type for graphs: {type(graphs)}. Expected str or dict." + ) from None + elif len(graphs) == 0: + raise KeyError("'graphs' must contain at least one variable name") from None + + # This dict comprehension is complicated for better performance; equivalent shown below. 
+ self.optional_graphs = set() + self.list_graphs = set() + if graphs is None: + self.graphs = {} + else: + self.graphs = { + self.optional_graphs.add(val := k[:-1]) or val + if (last := k[-1]) == "?" + else self.list_graphs.add(val := k[1:-1]) or val + if last == "]" + else k: v + for k, v in graphs.items() + } + # The above is equivalent to: + # self.optional_graphs = {k[:-1] for k in graphs if k[-1] == "?"} + # self.list_graphs = {k[1:-1] for k in graphs if k[-1] == "]"} + # self.graphs = {k[:-1] if k[-1] == "?" else k: v for k, v in graphs.items()} + + # Compute and cache the signature on-demand + self._sig = None + + # Which backends implement this function? + self.backends = { + backend + for backend, info in backend_info.items() + if "functions" in info and name in info["functions"] + } + + if name in _registered_algorithms: + raise KeyError( + f"Algorithm already exists in dispatch registry: {name}" + ) from None + # Use the magic of `argmap` to turn `self` into a function. This does result + # in small additional overhead compared to calling `_dispatchable` directly, + # but `argmap` has the magical property that it can stack with other `argmap` + # decorators "for free". Being a function is better for REPRs and type-checkers. + self = argmap(_do_nothing)(self) + _registered_algorithms[name] = self + return self + + @property + def __doc__(self): + """If the cached documentation exists, it is returned. + Otherwise, the documentation is generated using _make_doc() method, + cached, and then returned.""" + + if (rv := self._cached_doc) is not None: + return rv + rv = self._cached_doc = self._make_doc() + return rv + + @__doc__.setter + def __doc__(self, val): + """Sets the original documentation to the given value and resets the + cached documentation.""" + + self._orig_doc = val + self._cached_doc = None + + @property + def __signature__(self): + """Return the signature of the original function, with the addition of + the `backend` and `backend_kwargs` parameters.""" + + if self._sig is None: + sig = inspect.signature(self.orig_func) + # `backend` is now a reserved argument used by dispatching. 
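+            # For example (an illustrative sketch, not executed here): a function
+            # defined as `def square_clustering(G, nodes=None)` would be presented
+            # to callers as `square_clustering(G, nodes=None, *, backend=None,
+            # **backend_kwargs)` after the two parameters below are appended.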
+ # assert "backend" not in sig.parameters + if not any( + p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values() + ): + sig = sig.replace( + parameters=[ + *sig.parameters.values(), + inspect.Parameter( + "backend", inspect.Parameter.KEYWORD_ONLY, default=None + ), + inspect.Parameter( + "backend_kwargs", inspect.Parameter.VAR_KEYWORD + ), + ] + ) + else: + *parameters, var_keyword = sig.parameters.values() + sig = sig.replace( + parameters=[ + *parameters, + inspect.Parameter( + "backend", inspect.Parameter.KEYWORD_ONLY, default=None + ), + var_keyword, + ] + ) + self._sig = sig + return self._sig + + def __call__(self, /, *args, backend=None, **kwargs): + """Returns the result of the original function, or the backend function if + the backend is specified and that backend implements `func`.""" + + if not backends: + # Fast path if no backends are installed + return self.orig_func(*args, **kwargs) + + # Use `backend_name` in this function instead of `backend` + backend_name = backend + if backend_name is not None and backend_name not in backends: + raise ImportError(f"Unable to load backend: {backend_name}") + + graphs_resolved = {} + for gname, pos in self.graphs.items(): + if pos < len(args): + if gname in kwargs: + raise TypeError(f"{self.name}() got multiple values for {gname!r}") + val = args[pos] + elif gname in kwargs: + val = kwargs[gname] + elif gname not in self.optional_graphs: + raise TypeError( + f"{self.name}() missing required graph argument: {gname}" + ) + else: + continue + if val is None: + if gname not in self.optional_graphs: + raise TypeError( + f"{self.name}() required graph argument {gname!r} is None; must be a graph" + ) + else: + graphs_resolved[gname] = val + + # Alternative to the above that does not check duplicated args or missing required graphs. + # graphs_resolved = { + # val + # for gname, pos in self.graphs.items() + # if (val := args[pos] if pos < len(args) else kwargs.get(gname)) is not None + # } + + # Check if any graph comes from a backend + if self.list_graphs: + # Make sure we don't lose values by consuming an iterator + args = list(args) + for gname in self.list_graphs & graphs_resolved.keys(): + val = list(graphs_resolved[gname]) + graphs_resolved[gname] = val + if gname in kwargs: + kwargs[gname] = val + else: + args[self.graphs[gname]] = val + + has_backends = any( + hasattr(g, "__networkx_backend__") + if gname not in self.list_graphs + else any(hasattr(g2, "__networkx_backend__") for g2 in g) + for gname, g in graphs_resolved.items() + ) + if has_backends: + graph_backend_names = { + getattr(g, "__networkx_backend__", "networkx") + for gname, g in graphs_resolved.items() + if gname not in self.list_graphs + } + for gname in self.list_graphs & graphs_resolved.keys(): + graph_backend_names.update( + getattr(g, "__networkx_backend__", "networkx") + for g in graphs_resolved[gname] + ) + else: + has_backends = any( + hasattr(g, "__networkx_backend__") for g in graphs_resolved.values() + ) + if has_backends: + graph_backend_names = { + getattr(g, "__networkx_backend__", "networkx") + for g in graphs_resolved.values() + } + + backend_priority = config.backend_priority + if self._is_testing and backend_priority and backend_name is None: + # Special path if we are running networkx tests with a backend. + # This even runs for (and handles) functions that mutate input graphs. 
+            return self._convert_and_call_for_tests(
+                backend_priority[0],
+                args,
+                kwargs,
+                fallback_to_nx=self._fallback_to_nx,
+            )
+
+        if has_backends:
+            # Dispatchable graphs found! Dispatch to backend function.
+            # We don't handle calls with different backend graphs yet,
+            # but we may be able to convert additional networkx graphs.
+            backend_names = graph_backend_names - {"networkx"}
+            if len(backend_names) != 1:
+                # Future work: convert between backends and run if multiple backends found
+                raise TypeError(
+                    f"{self.name}() graphs must all be from the same backend, found {backend_names}"
+                )
+            [graph_backend_name] = backend_names
+            if backend_name is not None and backend_name != graph_backend_name:
+                # Future work: convert between backends to `backend_name` backend
+                raise TypeError(
+                    f"{self.name}() is unable to convert graph from backend {graph_backend_name!r} "
+                    f"to the specified backend {backend_name!r}."
+                )
+            if graph_backend_name not in backends:
+                raise ImportError(f"Unable to load backend: {graph_backend_name}")
+            if (
+                "networkx" in graph_backend_names
+                and graph_backend_name not in backend_priority
+            ):
+                # Not configured to convert networkx graphs to this backend
+                raise TypeError(
+                    f"Unable to convert inputs and run {self.name}. "
+                    f"{self.name}() has networkx and {graph_backend_name} graphs, but NetworkX is not "
+                    f"configured to automatically convert graphs from networkx to {graph_backend_name}."
+                )
+            backend = _load_backend(graph_backend_name)
+            if hasattr(backend, self.name):
+                if "networkx" in graph_backend_names:
+                    # We need to convert networkx graphs to backend graphs.
+                    # There is currently no need to check `self.mutates_input` here.
+                    return self._convert_and_call(
+                        graph_backend_name,
+                        args,
+                        kwargs,
+                        fallback_to_nx=self._fallback_to_nx,
+                    )
+                # All graphs are backend graphs--no need to convert!
+                return getattr(backend, self.name)(*args, **kwargs)
+            # Future work: try to convert and run with other backends in backend_priority
+            raise nx.NetworkXNotImplemented(
+                f"'{self.name}' not implemented by {graph_backend_name}"
+            )
+
+        # If backend was explicitly given by the user, we need to use it no matter what
+        if backend_name is not None:
+            return self._convert_and_call(
+                backend_name, args, kwargs, fallback_to_nx=False
+            )
+
+        # Only networkx graphs; try to convert and run with a backend with automatic
+        # conversion, but don't do this by default for graph generators or loaders,
+        # or if the function mutates an input graph or returns a graph.
+        # Only convert and run if `backend.should_run(...)` returns True.
+        if (
+            not self._returns_graph
+            and (
+                not self.mutates_input
+                or isinstance(self.mutates_input, dict)
+                # If `mutates_input` begins with "not ", then assume the argument is boolean,
+                # otherwise treat it as a node or edge attribute if it's not None.
+                and any(
+                    not (
+                        args[arg_pos]
+                        if len(args) > arg_pos
+                        else kwargs.get(arg_name[4:], True)
+                    )
+                    if arg_name.startswith("not ")
+                    else (
+                        args[arg_pos] if len(args) > arg_pos else kwargs.get(arg_name)
+                    )
+                    is not None
+                    for arg_name, arg_pos in self.mutates_input.items()
+                )
+            )
+        ):
+            # Should we warn or log if we don't convert b/c the input will be mutated?
+            for backend_name in backend_priority:
+                if self._should_backend_run(backend_name, *args, **kwargs):
+                    return self._convert_and_call(
+                        backend_name,
+                        args,
+                        kwargs,
+                        fallback_to_nx=self._fallback_to_nx,
+                    )
+        # Default: run with networkx on networkx inputs
+        return self.orig_func(*args, **kwargs)
+
+    def _can_backend_run(self, backend_name, /, *args, **kwargs):
+        """Can the specified backend run this algorithm with these arguments?"""
+        backend = _load_backend(backend_name)
+        # `backend.can_run` and `backend.should_run` may return strings that describe
+        # why they can't or shouldn't be run. We plan to use the strings in the future.
+        return (
+            hasattr(backend, self.name)
+            and (can_run := backend.can_run(self.name, args, kwargs))
+            and not isinstance(can_run, str)
+        )
+
+    def _should_backend_run(self, backend_name, /, *args, **kwargs):
+        """Can/should the specified backend run this algorithm with these arguments?"""
+        backend = _load_backend(backend_name)
+        # `backend.can_run` and `backend.should_run` may return strings that describe
+        # why they can't or shouldn't be run. We plan to use the strings in the future.
+        return (
+            hasattr(backend, self.name)
+            and (can_run := backend.can_run(self.name, args, kwargs))
+            and not isinstance(can_run, str)
+            and (should_run := backend.should_run(self.name, args, kwargs))
+            and not isinstance(should_run, str)
+        )
+
+    def _convert_arguments(self, backend_name, args, kwargs, *, use_cache):
+        """Convert graph arguments to the specified backend.
+
+        Returns
+        -------
+        args tuple and kwargs dict
+        """
+        bound = self.__signature__.bind(*args, **kwargs)
+        bound.apply_defaults()
+        if not self.graphs:
+            bound_kwargs = bound.kwargs
+            del bound_kwargs["backend"]
+            return bound.args, bound_kwargs
+        # Convert graphs into backend graph-like object
+        # Include the edge and/or node labels if provided to the algorithm
+        preserve_edge_attrs = self.preserve_edge_attrs
+        edge_attrs = self.edge_attrs
+        if preserve_edge_attrs is False:
+            # e.g. `preserve_edge_attrs=False`
+            pass
+        elif preserve_edge_attrs is True:
+            # e.g. `preserve_edge_attrs=True`
+            edge_attrs = None
+        elif isinstance(preserve_edge_attrs, str):
+            if bound.arguments[preserve_edge_attrs] is True or callable(
+                bound.arguments[preserve_edge_attrs]
+            ):
+                # e.g. `preserve_edge_attrs="attr"` and `func(attr=True)`
+                # e.g. `preserve_edge_attrs="attr"` and `func(attr=myfunc)`
+                preserve_edge_attrs = True
+                edge_attrs = None
+            elif bound.arguments[preserve_edge_attrs] is False and (
+                isinstance(edge_attrs, str)
+                and edge_attrs == preserve_edge_attrs
+                or isinstance(edge_attrs, dict)
+                and preserve_edge_attrs in edge_attrs
+            ):
+                # e.g. `preserve_edge_attrs="attr"` and `func(attr=False)`
+                # Treat `False` argument as meaning "preserve_edge_data=False"
+                # and not `False` as the edge attribute to use.
+                preserve_edge_attrs = False
+                edge_attrs = None
+            else:
+                # e.g. `preserve_edge_attrs="attr"` and `func(attr="weight")`
+                preserve_edge_attrs = False
+        # Else: e.g. `preserve_edge_attrs={"G": {"weight": 1}}`
+
+        if edge_attrs is None:
+            # May have been set to None above b/c all attributes are preserved
+            pass
+        elif isinstance(edge_attrs, str):
+            if edge_attrs[0] == "[":
+                # e.g. `edge_attrs="[edge_attributes]"` (argument of list of attributes)
+                # e.g. `func(edge_attributes=["foo", "bar"])`
+                edge_attrs = {
+                    edge_attr: 1 for edge_attr in bound.arguments[edge_attrs[1:-1]]
+                }
+            elif callable(bound.arguments[edge_attrs]):
+                # e.g. `edge_attrs="weight"` and `func(weight=myfunc)`
+                preserve_edge_attrs = True
+                edge_attrs = None
+            elif bound.arguments[edge_attrs] is not None:
+                # e.g. `edge_attrs="weight"` and `func(weight="foo")` (default of 1)
+                edge_attrs = {bound.arguments[edge_attrs]: 1}
+            elif self.name == "to_numpy_array" and hasattr(
+                bound.arguments["dtype"], "names"
+            ):
+                # Custom handling: attributes may be obtained from `dtype`
+                edge_attrs = {
+                    edge_attr: 1 for edge_attr in bound.arguments["dtype"].names
+                }
+            else:
+                # e.g. `edge_attrs="weight"` and `func(weight=None)`
+                edge_attrs = None
+        else:
+            # e.g. `edge_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
+            # e.g. `edge_attrs={"attr": 0}` and `func(attr="foo")`
+            edge_attrs = {
+                edge_attr: bound.arguments.get(val, 1) if isinstance(val, str) else val
+                for key, val in edge_attrs.items()
+                if (edge_attr := bound.arguments[key]) is not None
+            }
+
+        preserve_node_attrs = self.preserve_node_attrs
+        node_attrs = self.node_attrs
+        if preserve_node_attrs is False:
+            # e.g. `preserve_node_attrs=False`
+            pass
+        elif preserve_node_attrs is True:
+            # e.g. `preserve_node_attrs=True`
+            node_attrs = None
+        elif isinstance(preserve_node_attrs, str):
+            if bound.arguments[preserve_node_attrs] is True or callable(
+                bound.arguments[preserve_node_attrs]
+            ):
+                # e.g. `preserve_node_attrs="attr"` and `func(attr=True)`
+                # e.g. `preserve_node_attrs="attr"` and `func(attr=myfunc)`
+                preserve_node_attrs = True
+                node_attrs = None
+            elif bound.arguments[preserve_node_attrs] is False and (
+                isinstance(node_attrs, str)
+                and node_attrs == preserve_node_attrs
+                or isinstance(node_attrs, dict)
+                and preserve_node_attrs in node_attrs
+            ):
+                # e.g. `preserve_node_attrs="attr"` and `func(attr=False)`
+                # Treat `False` argument as meaning "preserve_node_data=False"
+                # and not `False` as the node attribute to use. Is this used?
+                preserve_node_attrs = False
+                node_attrs = None
+            else:
+                # e.g. `preserve_node_attrs="attr"` and `func(attr="weight")`
+                preserve_node_attrs = False
+        # Else: e.g. `preserve_node_attrs={"G": {"pos": None}}`
+
+        if node_attrs is None:
+            # May have been set to None above b/c all attributes are preserved
+            pass
+        elif isinstance(node_attrs, str):
+            if node_attrs[0] == "[":
+                # e.g. `node_attrs="[node_attributes]"` (argument of list of attributes)
+                # e.g. `func(node_attributes=["foo", "bar"])`
+                node_attrs = {
+                    node_attr: None for node_attr in bound.arguments[node_attrs[1:-1]]
+                }
+            elif callable(bound.arguments[node_attrs]):
+                # e.g. `node_attrs="weight"` and `func(weight=myfunc)`
+                preserve_node_attrs = True
+                node_attrs = None
+            elif bound.arguments[node_attrs] is not None:
+                # e.g. `node_attrs="weight"` and `func(weight="foo")`
+                node_attrs = {bound.arguments[node_attrs]: None}
+            else:
+                # e.g. `node_attrs="weight"` and `func(weight=None)`
+                node_attrs = None
+        else:
+            # e.g. `node_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
+            # e.g. `node_attrs={"attr": 0}` and `func(attr="foo")`
+            node_attrs = {
+                node_attr: bound.arguments.get(val) if isinstance(val, str) else val
+                for key, val in node_attrs.items()
+                if (node_attr := bound.arguments[key]) is not None
+            }
+
+        preserve_graph_attrs = self.preserve_graph_attrs
+
+        # It should be safe to assume that we either have networkx graphs or backend graphs.
+        # Future work: allow conversions between backends.
+ for gname in self.graphs: + if gname in self.list_graphs: + bound.arguments[gname] = [ + self._convert_graph( + backend_name, + g, + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + preserve_graph_attrs=preserve_graph_attrs, + graph_name=gname, + use_cache=use_cache, + ) + if getattr(g, "__networkx_backend__", "networkx") == "networkx" + else g + for g in bound.arguments[gname] + ] + else: + graph = bound.arguments[gname] + if graph is None: + if gname in self.optional_graphs: + continue + raise TypeError( + f"Missing required graph argument `{gname}` in {self.name} function" + ) + if isinstance(preserve_edge_attrs, dict): + preserve_edges = False + edges = preserve_edge_attrs.get(gname, edge_attrs) + else: + preserve_edges = preserve_edge_attrs + edges = edge_attrs + if isinstance(preserve_node_attrs, dict): + preserve_nodes = False + nodes = preserve_node_attrs.get(gname, node_attrs) + else: + preserve_nodes = preserve_node_attrs + nodes = node_attrs + if isinstance(preserve_graph_attrs, set): + preserve_graph = gname in preserve_graph_attrs + else: + preserve_graph = preserve_graph_attrs + if getattr(graph, "__networkx_backend__", "networkx") == "networkx": + bound.arguments[gname] = self._convert_graph( + backend_name, + graph, + edge_attrs=edges, + node_attrs=nodes, + preserve_edge_attrs=preserve_edges, + preserve_node_attrs=preserve_nodes, + preserve_graph_attrs=preserve_graph, + graph_name=gname, + use_cache=use_cache, + ) + bound_kwargs = bound.kwargs + del bound_kwargs["backend"] + return bound.args, bound_kwargs + + def _convert_graph( + self, + backend_name, + graph, + *, + edge_attrs, + node_attrs, + preserve_edge_attrs, + preserve_node_attrs, + preserve_graph_attrs, + graph_name, + use_cache, + ): + if ( + use_cache + and (nx_cache := getattr(graph, "__networkx_cache__", None)) is not None + ): + cache = nx_cache.setdefault("backends", {}).setdefault(backend_name, {}) + # edge_attrs: dict | None + # node_attrs: dict | None + # preserve_edge_attrs: bool (False if edge_attrs is not None) + # preserve_node_attrs: bool (False if node_attrs is not None) + # preserve_graph_attrs: bool + key = edge_key, node_key, graph_key = ( + frozenset(edge_attrs.items()) + if edge_attrs is not None + else preserve_edge_attrs, + frozenset(node_attrs.items()) + if node_attrs is not None + else preserve_node_attrs, + preserve_graph_attrs, + ) + if cache: + warning_message = ( + f"Using cached graph for {backend_name!r} backend in " + f"call to {self.name}.\n\nFor the cache to be consistent " + "(i.e., correct), the input graph must not have been " + "manually mutated since the cached graph was created. " + "Examples of manually mutating the graph data structures " + "resulting in an inconsistent cache include:\n\n" + " >>> G[u][v][key] = val\n\n" + "and\n\n" + " >>> for u, v, d in G.edges(data=True):\n" + " ... d[key] = val\n\n" + "Using methods such as `G.add_edge(u, v, weight=val)` " + "will correctly clear the cache to keep it consistent. " + "You may also use `G.__networkx_cache__.clear()` to " + "manually clear the cache, or set `G.__networkx_cache__` " + "to None to disable caching for G. Enable or disable " + "caching via `nx.config.cache_converted_graphs` config." + ) + # Do a simple search for a cached graph with compatible data. + # For example, if we need a single attribute, then it's okay + # to use a cached graph that preserved all attributes. + # This looks for an exact match first. 
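+            # As an illustrative sketch of the key structure: a request to keep
+            # only the "weight" edge attribute with no node attributes is keyed
+            # as (frozenset({("weight", 1)}), False, True) -- the edge spec, the
+            # node spec, and whether graph attributes are preserved -- and it may
+            # be satisfied by a cached entry such as (True, True, True), which
+            # preserved everything.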
+                for compat_key in itertools.product(
+                    (edge_key, True) if edge_key is not True else (True,),
+                    (node_key, True) if node_key is not True else (True,),
+                    (graph_key, True) if graph_key is not True else (True,),
+                ):
+                    if (rv := cache.get(compat_key)) is not None:
+                        warnings.warn(warning_message)
+                        return rv
+                if edge_key is not True and node_key is not True:
+                    # Iterate over the items in `cache` to see if any are compatible.
+                    # For example, if no edge attributes are needed, then a graph
+                    # with any edge attribute will suffice. We use the same logic
+                    # below (but switched) to clear unnecessary items from the cache.
+                    # Use `list(cache.items())` to be thread-safe.
+                    for (ekey, nkey, gkey), val in list(cache.items()):
+                        if edge_key is False or ekey is True:
+                            pass
+                        elif (
+                            edge_key is True
+                            or ekey is False
+                            or not edge_key.issubset(ekey)
+                        ):
+                            continue
+                        if node_key is False or nkey is True:
+                            pass
+                        elif (
+                            node_key is True
+                            or nkey is False
+                            or not node_key.issubset(nkey)
+                        ):
+                            continue
+                        if graph_key and not gkey:
+                            continue
+                        warnings.warn(warning_message)
+                        return val
+
+        backend = _load_backend(backend_name)
+        rv = backend.convert_from_nx(
+            graph,
+            edge_attrs=edge_attrs,
+            node_attrs=node_attrs,
+            preserve_edge_attrs=preserve_edge_attrs,
+            preserve_node_attrs=preserve_node_attrs,
+            preserve_graph_attrs=preserve_graph_attrs,
+            name=self.name,
+            graph_name=graph_name,
+        )
+        if use_cache and nx_cache is not None:
+            # Remove old cached items that are no longer necessary since they
+            # are dominated/subsumed/outdated by what was just calculated.
+            # This uses the same logic as above, but with keys switched.
+            cache[key] = rv  # Set at beginning to be thread-safe
+            for cur_key in list(cache):
+                if cur_key == key:
+                    continue
+                ekey, nkey, gkey = cur_key
+                if ekey is False or edge_key is True:
+                    pass
+                elif ekey is True or edge_key is False or not ekey.issubset(edge_key):
+                    continue
+                if nkey is False or node_key is True:
+                    pass
+                elif nkey is True or node_key is False or not nkey.issubset(node_key):
+                    continue
+                if gkey and not graph_key:
+                    continue
+                cache.pop(cur_key, None)  # Use pop instead of del to be thread-safe
+
+        return rv
+
+    def _convert_and_call(self, backend_name, args, kwargs, *, fallback_to_nx=False):
+        """Call this dispatchable function with a backend, converting graphs if necessary."""
+        backend = _load_backend(backend_name)
+        if not self._can_backend_run(backend_name, *args, **kwargs):
+            if fallback_to_nx:
+                return self.orig_func(*args, **kwargs)
+            msg = f"'{self.name}' not implemented by {backend_name}"
+            if hasattr(backend, self.name):
+                msg += " with the given arguments"
+            raise RuntimeError(msg)
+
+        try:
+            converted_args, converted_kwargs = self._convert_arguments(
+                backend_name, args, kwargs, use_cache=config.cache_converted_graphs
+            )
+            result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
+        except (NotImplementedError, nx.NetworkXNotImplemented) as exc:
+            if fallback_to_nx:
+                return self.orig_func(*args, **kwargs)
+            raise
+
+        return result
+
+    def _convert_and_call_for_tests(
+        self, backend_name, args, kwargs, *, fallback_to_nx=False
+    ):
+        """Call this dispatchable function with a backend; for use with testing."""
+        backend = _load_backend(backend_name)
+        if not self._can_backend_run(backend_name, *args, **kwargs):
+            if fallback_to_nx or not self.graphs:
+                return self.orig_func(*args, **kwargs)
+
+            import pytest
+
+            msg = f"'{self.name}' not implemented by {backend_name}"
+            if hasattr(backend, self.name):
+                msg += " with the given arguments"
+            pytest.xfail(msg)
+
+        from collections.abc import Iterable, Iterator, Mapping
+        from copy import copy
+        from io import BufferedReader, BytesIO, StringIO, TextIOWrapper
+        from itertools import tee
+        from random import Random
+
+        import numpy as np
+        from numpy.random import Generator, RandomState
+        from scipy.sparse import sparray
+
+        # We sometimes compare the backend result to the original result,
+        # so we need two sets of arguments. We tee iterators and copy
+        # random state so that they may be used twice.
+        if not args:
+            args1 = args2 = args
+        else:
+            args1, args2 = zip(
+                *(
+                    (arg, copy(arg))
+                    if isinstance(
+                        arg, BytesIO | StringIO | Random | Generator | RandomState
+                    )
+                    else tee(arg)
+                    if isinstance(arg, Iterator)
+                    and not isinstance(arg, BufferedReader | TextIOWrapper)
+                    else (arg, arg)
+                    for arg in args
+                )
+            )
+        if not kwargs:
+            kwargs1 = kwargs2 = kwargs
+        else:
+            kwargs1, kwargs2 = zip(
+                *(
+                    ((k, v), (k, copy(v)))
+                    if isinstance(
+                        v, BytesIO | StringIO | Random | Generator | RandomState
+                    )
+                    else ((k, (teed := tee(v))[0]), (k, teed[1]))
+                    if isinstance(v, Iterator)
+                    and not isinstance(v, BufferedReader | TextIOWrapper)
+                    else ((k, v), (k, v))
+                    for k, v in kwargs.items()
+                )
+            )
+            kwargs1 = dict(kwargs1)
+            kwargs2 = dict(kwargs2)
+        try:
+            converted_args, converted_kwargs = self._convert_arguments(
+                backend_name, args1, kwargs1, use_cache=False
+            )
+            result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
+        except (NotImplementedError, nx.NetworkXNotImplemented) as exc:
+            if fallback_to_nx:
+                return self.orig_func(*args2, **kwargs2)
+            import pytest
+
+            pytest.xfail(
+                exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}"
+            )
+        # Verify that `self._returns_graph` is correct. This compares the return type
+        # to the type expected from `self._returns_graph`. This handles tuple and list
+        # return types, but *does not* catch functions that yield graphs.
+        if (
+            self._returns_graph
+            != (
+                isinstance(result, nx.Graph)
+                or hasattr(result, "__networkx_backend__")
+                or isinstance(result, tuple | list)
+                and any(
+                    isinstance(x, nx.Graph) or hasattr(x, "__networkx_backend__")
+                    for x in result
+                )
+            )
+            and not (
+                # May return Graph or None
+                self.name in {"check_planarity", "check_planarity_recursive"}
+                and any(x is None for x in result)
+            )
+            and not (
+                # May return Graph or dict
+                self.name in {"held_karp_ascent"}
+                and any(isinstance(x, dict) for x in result)
+            )
+            and self.name
+            not in {
+                # yields graphs
+                "all_triads",
+                "general_k_edge_subgraphs",
+                # yields graphs or arrays
+                "nonisomorphic_trees",
+            }
+        ):
+            raise RuntimeError(f"`returns_graph` is incorrect for {self.name}")
+
+        def check_result(val, depth=0):
+            if isinstance(val, np.number):
+                raise RuntimeError(
+                    f"{self.name} returned a numpy scalar {val} ({type(val)}, depth={depth})"
+                )
+            if isinstance(val, np.ndarray | sparray):
+                return
+            if isinstance(val, nx.Graph):
+                check_result(val._node, depth=depth + 1)
+                check_result(val._adj, depth=depth + 1)
+                return
+            if isinstance(val, Iterator):
+                raise NotImplementedError
+            if isinstance(val, Iterable) and not isinstance(val, str):
+                for x in val:
+                    check_result(x, depth=depth + 1)
+            if isinstance(val, Mapping):
+                for x in val.values():
+                    check_result(x, depth=depth + 1)
+
+        def check_iterator(it):
+            for val in it:
+                try:
+                    check_result(val)
+                except RuntimeError as exc:
+                    raise RuntimeError(
+                        f"{self.name} returned a numpy scalar {val} ({type(val)})"
+                    ) from exc
+                yield val
+
+        if self.name in {"from_edgelist"}:
+            # numpy scalars are explicitly given as values in some tests
+            pass
+        elif isinstance(result, Iterator):
+            result = check_iterator(result)
+        else:
+            try:
+                check_result(result)
+            except RuntimeError as exc:
+                raise RuntimeError(
+                    f"{self.name} returned a numpy scalar {result} ({type(result)})"
+                ) from exc
+
+        if self.name in {
+            "edmonds_karp",
+            "barycenter",
+            "contracted_edge",
+            "contracted_nodes",
+            "stochastic_graph",
+            "relabel_nodes",
+            "maximum_branching",
+            "incremental_closeness_centrality",
+            "minimal_branching",
+            "minimum_spanning_arborescence",
+            "recursive_simple_cycles",
+            "connected_double_edge_swap",
+        }:
+            # Special-case algorithms that mutate input graphs
+            bound = self.__signature__.bind(*converted_args, **converted_kwargs)
+            bound.apply_defaults()
+            bound2 = self.__signature__.bind(*args2, **kwargs2)
+            bound2.apply_defaults()
+            if self.name in {
+                "minimal_branching",
+                "minimum_spanning_arborescence",
+                "recursive_simple_cycles",
+                "connected_double_edge_swap",
+            }:
+                G1 = backend.convert_to_nx(bound.arguments["G"])
+                G2 = bound2.arguments["G"]
+                G2._adj = G1._adj
+                nx._clear_cache(G2)
+            elif self.name == "edmonds_karp":
+                R1 = backend.convert_to_nx(bound.arguments["residual"])
+                R2 = bound2.arguments["residual"]
+                if R1 is not None and R2 is not None:
+                    for k, v in R1.edges.items():
+                        R2.edges[k]["flow"] = v["flow"]
+                    R2.graph.update(R1.graph)
+                    nx._clear_cache(R2)
+            elif self.name == "barycenter" and bound.arguments["attr"] is not None:
+                G1 = backend.convert_to_nx(bound.arguments["G"])
+                G2 = bound2.arguments["G"]
+                attr = bound.arguments["attr"]
+                for k, v in G1.nodes.items():
+                    G2.nodes[k][attr] = v[attr]
+                nx._clear_cache(G2)
+            elif (
+                self.name in {"contracted_nodes", "contracted_edge"}
+                and not bound.arguments["copy"]
+            ):
+                # Edges and nodes changed; node "contraction" and edge "weight" attrs
+                G1 = backend.convert_to_nx(bound.arguments["G"])
+                G2 = bound2.arguments["G"]
+                G2.__dict__.update(G1.__dict__)
+                nx._clear_cache(G2)
+            elif self.name == "stochastic_graph" and not bound.arguments["copy"]:
+                G1 = backend.convert_to_nx(bound.arguments["G"])
+                G2 = bound2.arguments["G"]
+                for k, v in G1.edges.items():
+                    G2.edges[k]["weight"] = v["weight"]
+                nx._clear_cache(G2)
+            elif (
+                self.name == "relabel_nodes"
+                and not bound.arguments["copy"]
+                or self.name in {"incremental_closeness_centrality"}
+            ):
+                G1 = backend.convert_to_nx(bound.arguments["G"])
+                G2 = bound2.arguments["G"]
+                if G1 is G2:
+                    return G2
+                G2._node.clear()
+                G2._node.update(G1._node)
+                G2._adj.clear()
+                G2._adj.update(G1._adj)
+                if hasattr(G1, "_pred") and hasattr(G2, "_pred"):
+                    G2._pred.clear()
+                    G2._pred.update(G1._pred)
+                if hasattr(G1, "_succ") and hasattr(G2, "_succ"):
+                    G2._succ.clear()
+                    G2._succ.update(G1._succ)
+                nx._clear_cache(G2)
+                if self.name == "relabel_nodes":
+                    return G2
+            return backend.convert_to_nx(result)
+
+        converted_result = backend.convert_to_nx(result)
+        if isinstance(converted_result, nx.Graph) and self.name not in {
+            "boykov_kolmogorov",
+            "preflow_push",
+            "quotient_graph",
+            "shortest_augmenting_path",
+            "spectral_graph_forge",
+            # We don't handle tempfile.NamedTemporaryFile arguments
+            "read_gml",
+            "read_graph6",
+            "read_sparse6",
+            # We don't handle io.BufferedReader or io.TextIOWrapper arguments
+            "bipartite_read_edgelist",
+            "read_adjlist",
+            "read_edgelist",
+            "read_graphml",
+            "read_multiline_adjlist",
+            "read_pajek",
+            "from_pydot",
+            "pydot_read_dot",
+            "agraph_read_dot",
+            # graph comparison fails b/c of nan values
+            "read_gexf",
+        }:
+            # For graph return types (e.g. generators), we compare that results are
+            # the same between the backend and networkx, then return the original
+            # networkx result so the iteration order will be consistent in tests.
+ G = self.orig_func(*args2, **kwargs2) + if not nx.utils.graphs_equal(G, converted_result): + assert G.number_of_nodes() == converted_result.number_of_nodes() + assert G.number_of_edges() == converted_result.number_of_edges() + assert G.graph == converted_result.graph + assert G.nodes == converted_result.nodes + assert G.adj == converted_result.adj + assert type(G) is type(converted_result) + raise AssertionError("Graphs are not equal") + return G + return converted_result + + def _make_doc(self): + """Generate the backends section at the end for functions having an alternate + backend implementation(s) using the `backend_info` entry-point.""" + + if not self.backends: + return self._orig_doc + lines = [ + "Backends", + "--------", + ] + for backend in sorted(self.backends): + info = backend_info[backend] + if "short_summary" in info: + lines.append(f"{backend} : {info['short_summary']}") + else: + lines.append(backend) + if "functions" not in info or self.name not in info["functions"]: + lines.append("") + continue + + func_info = info["functions"][self.name] + + # Renaming extra_docstring to additional_docs + if func_docs := ( + func_info.get("additional_docs") or func_info.get("extra_docstring") + ): + lines.extend( + f" {line}" if line else line for line in func_docs.split("\n") + ) + add_gap = True + else: + add_gap = False + + # Renaming extra_parameters to additional_parameters + if extra_parameters := ( + func_info.get("extra_parameters") + or func_info.get("additional_parameters") + ): + if add_gap: + lines.append("") + lines.append(" Additional parameters:") + for param in sorted(extra_parameters): + lines.append(f" {param}") + if desc := extra_parameters[param]: + lines.append(f" {desc}") + lines.append("") + else: + lines.append("") + + if func_url := func_info.get("url"): + lines.append(f"[`Source <{func_url}>`_]") + lines.append("") + + lines.pop() # Remove last empty line + to_add = "\n ".join(lines) + return f"{self._orig_doc.rstrip()}\n\n {to_add}" + + def __reduce__(self): + """Allow this object to be serialized with pickle. + + This uses the global registry `_registered_algorithms` to deserialize. + """ + return _restore_dispatchable, (self.name,) + + +def _restore_dispatchable(name): + return _registered_algorithms[name] + + +if os.environ.get("_NETWORKX_BUILDING_DOCS_"): + # When building docs with Sphinx, use the original function with the + # dispatched __doc__, b/c Sphinx renders normal Python functions better. + # This doesn't show e.g. `*, backend=None, **backend_kwargs` in the + # signatures, which is probably okay. It does allow the docstring to be + # updated based on the installed backends. 
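+    # That is, during a docs build `_dispatchable` degrades to a thin decorator
+    # that only copies the dispatcher-generated docstring (including its
+    # "Backends" section) onto the plain function and returns it unchanged.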
+ _orig_dispatchable = _dispatchable + + def _dispatchable(func=None, **kwargs): # type: ignore[no-redef] + if func is None: + return partial(_dispatchable, **kwargs) + dispatched_func = _orig_dispatchable(func, **kwargs) + func.__doc__ = dispatched_func.__doc__ + return func + + _dispatchable.__doc__ = _orig_dispatchable.__new__.__doc__ # type: ignore[method-assign,assignment] + _sig = inspect.signature(_orig_dispatchable.__new__) + _dispatchable.__signature__ = _sig.replace( # type: ignore[method-assign,assignment] + parameters=[v for k, v in _sig.parameters.items() if k != "cls"] + ) diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/configs.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/configs.py new file mode 100644 index 0000000000000000000000000000000000000000..e61741e0a5e8f3a2431a00ec9a5ef24524eff373 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/configs.py @@ -0,0 +1,260 @@ +import collections +import os +import typing +from dataclasses import dataclass + +__all__ = ["Config", "config"] + + +@dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False) +class Config: + """The base class for NetworkX configuration. + + There are two ways to use this to create configurations. The first is to + simply pass the initial configuration as keyword arguments to ``Config``: + + >>> cfg = Config(eggs=1, spam=5) + >>> cfg + Config(eggs=1, spam=5) + + The second--and preferred--way is to subclass ``Config`` with docs and annotations. + + >>> class MyConfig(Config): + ... '''Breakfast!''' + ... + ... eggs: int + ... spam: int + ... + ... def _check_config(self, key, value): + ... assert isinstance(value, int) and value >= 0 + >>> cfg = MyConfig(eggs=1, spam=5) + + Once defined, config items may be modified, but can't be added or deleted by default. + ``Config`` is a ``Mapping``, and can get and set configs via attributes or brackets: + + >>> cfg.eggs = 2 + >>> cfg.eggs + 2 + >>> cfg["spam"] = 42 + >>> cfg["spam"] + 42 + + Subclasses may also define ``_check_config`` (as done in the example above) + to ensure the value being assigned is valid: + + >>> cfg.spam = -1 + Traceback (most recent call last): + ... + AssertionError + + If a more flexible configuration object is needed that allows adding and deleting + configurations, then pass ``strict=False`` when defining the subclass: + + >>> class FlexibleConfig(Config, strict=False): + ... default_greeting: str = "Hello" + >>> flexcfg = FlexibleConfig() + >>> flexcfg.name = "Mr. Anderson" + >>> flexcfg + FlexibleConfig(default_greeting='Hello', name='Mr. Anderson') + """ + + def __init_subclass__(cls, strict=True): + cls._strict = strict + + def __new__(cls, **kwargs): + orig_class = cls + if cls is Config: + # Enable the "simple" case of accepting config definition as keywords + cls = type( + cls.__name__, + (cls,), + {"__annotations__": {key: typing.Any for key in kwargs}}, + ) + cls = dataclass( + eq=False, + repr=cls._strict, + slots=cls._strict, + kw_only=True, + match_args=False, + )(cls) + if not cls._strict: + cls.__repr__ = _flexible_repr + cls._orig_class = orig_class # Save original class so we can pickle + instance = object.__new__(cls) + instance.__init__(**kwargs) + return instance + + def _check_config(self, key, value): + """Check whether config value is valid. 
This is useful for subclasses.""" + + # Control behavior of attributes + def __dir__(self): + return self.__dataclass_fields__.keys() + + def __setattr__(self, key, value): + if self._strict and key not in self.__dataclass_fields__: + raise AttributeError(f"Invalid config name: {key!r}") + self._check_config(key, value) + object.__setattr__(self, key, value) + + def __delattr__(self, key): + if self._strict: + raise TypeError( + f"Configuration items can't be deleted (can't delete {key!r})." + ) + object.__delattr__(self, key) + + # Be a `collection.abc.Collection` + def __contains__(self, key): + return ( + key in self.__dataclass_fields__ if self._strict else key in self.__dict__ + ) + + def __iter__(self): + return iter(self.__dataclass_fields__ if self._strict else self.__dict__) + + def __len__(self): + return len(self.__dataclass_fields__ if self._strict else self.__dict__) + + def __reversed__(self): + return reversed(self.__dataclass_fields__ if self._strict else self.__dict__) + + # Add dunder methods for `collections.abc.Mapping` + def __getitem__(self, key): + try: + return getattr(self, key) + except AttributeError as err: + raise KeyError(*err.args) from None + + def __setitem__(self, key, value): + try: + self.__setattr__(key, value) + except AttributeError as err: + raise KeyError(*err.args) from None + + def __delitem__(self, key): + try: + self.__delattr__(key) + except AttributeError as err: + raise KeyError(*err.args) from None + + _ipython_key_completions_ = __dir__ # config[" + + # Go ahead and make it a `collections.abc.Mapping` + def get(self, key, default=None): + return getattr(self, key, default) + + def items(self): + return collections.abc.ItemsView(self) + + def keys(self): + return collections.abc.KeysView(self) + + def values(self): + return collections.abc.ValuesView(self) + + # dataclass can define __eq__ for us, but do it here so it works after pickling + def __eq__(self, other): + if not isinstance(other, Config): + return NotImplemented + return self._orig_class == other._orig_class and self.items() == other.items() + + # Make pickle work + def __reduce__(self): + return self._deserialize, (self._orig_class, dict(self)) + + @staticmethod + def _deserialize(cls, kwargs): + return cls(**kwargs) + + +def _flexible_repr(self): + return ( + f"{self.__class__.__qualname__}(" + + ", ".join(f"{key}={val!r}" for key, val in self.__dict__.items()) + + ")" + ) + + +# Register, b/c `Mapping.__subclasshook__` returns `NotImplemented` +collections.abc.Mapping.register(Config) + + +class NetworkXConfig(Config): + """Configuration for NetworkX that controls behaviors such as how to use backends. + + Attribute and bracket notation are supported for getting and setting configurations: + + >>> nx.config.backend_priority == nx.config["backend_priority"] + True + + Parameters + ---------- + backend_priority : list of backend names + Enable automatic conversion of graphs to backend graphs for algorithms + implemented by the backend. Priority is given to backends listed earlier. + Default is empty list. + + backends : Config mapping of backend names to backend Config + The keys of the Config mapping are names of all installed NetworkX backends, + and the values are their configurations as Config mappings. + + cache_converted_graphs : bool + If True, then save converted graphs to the cache of the input graph. Graph + conversion may occur when automatically using a backend from `backend_priority` + or when using the `backend=` keyword argument to a function call. 
Caching can + improve performance by avoiding repeated conversions, but it uses more memory. + Care should be taken to not manually mutate a graph that has cached graphs; for + example, ``G[u][v][k] = val`` changes the graph, but does not clear the cache. + Using methods such as ``G.add_edge(u, v, weight=val)`` will clear the cache to + keep it consistent. ``G.__networkx_cache__.clear()`` manually clears the cache. + Default is False. + + Notes + ----- + Environment variables may be used to control some default configurations: + + - NETWORKX_BACKEND_PRIORITY: set `backend_priority` from comma-separated names. + - NETWORKX_CACHE_CONVERTED_GRAPHS: set `cache_converted_graphs` to True if nonempty. + + This is a global configuration. Use with caution when using from multiple threads. + """ + + backend_priority: list[str] + backends: Config + cache_converted_graphs: bool + + def _check_config(self, key, value): + from .backends import backends + + if key == "backend_priority": + if not (isinstance(value, list) and all(isinstance(x, str) for x in value)): + raise TypeError( + f"{key!r} config must be a list of backend names; got {value!r}" + ) + if missing := {x for x in value if x not in backends}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + elif key == "backends": + if not ( + isinstance(value, Config) + and all(isinstance(key, str) for key in value) + and all(isinstance(val, Config) for val in value.values()) + ): + raise TypeError( + f"{key!r} config must be a Config of backend configs; got {value!r}" + ) + if missing := {x for x in value if x not in backends}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + elif key == "cache_converted_graphs": + if not isinstance(value, bool): + raise TypeError(f"{key!r} config must be True or False; got {value!r}") + + +# Backend configuration will be updated in backends.py +config = NetworkXConfig( + backend_priority=[], + backends=Config(), + cache_converted_graphs=bool(os.environ.get("NETWORKX_CACHE_CONVERTED_GRAPHS", "")), +) diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/decorators.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..205bf50054314a4f9b8d722cfa33c43dadcba6e3 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/decorators.py @@ -0,0 +1,1295 @@ +import bz2 +import collections +import gzip +import inspect +import itertools +import re +import warnings +from collections import defaultdict +from contextlib import contextmanager +from functools import wraps +from inspect import Parameter, signature +from os.path import splitext +from pathlib import Path + +import networkx as nx +from networkx.utils import create_py_random_state, create_random_state + +__all__ = [ + "not_implemented_for", + "open_file", + "nodes_or_number", + "np_random_state", + "py_random_state", + "argmap", + "deprecate_positional_args", +] + + +def not_implemented_for(*graph_types): + """Decorator to mark algorithms as not implemented + + Parameters + ---------- + graph_types : container of strings + Entries must be one of "directed", "undirected", "multigraph", or "graph". + + Returns + ------- + _require : function + The decorated function. 
+ + Raises + ------ + NetworkXNotImplemented + If any of the packages cannot be imported + + Notes + ----- + Multiple types are joined logically with "and". + For "or" use multiple @not_implemented_for() lines. + + Examples + -------- + Decorate functions like this:: + + @not_implemented_for("directed") + def sp_function(G): + pass + + + # rule out MultiDiGraph + @not_implemented_for("directed", "multigraph") + def sp_np_function(G): + pass + + + # rule out all except DiGraph + @not_implemented_for("undirected") + @not_implemented_for("multigraph") + def sp_np_function(G): + pass + """ + if ("directed" in graph_types) and ("undirected" in graph_types): + raise ValueError("Function not implemented on directed AND undirected graphs?") + if ("multigraph" in graph_types) and ("graph" in graph_types): + raise ValueError("Function not implemented on graph AND multigraphs?") + if not set(graph_types) < {"directed", "undirected", "multigraph", "graph"}: + raise KeyError( + "use one or more of directed, undirected, multigraph, graph. " + f"You used {graph_types}" + ) + + # 3-way logic: True if "directed" input, False if "undirected" input, else None + dval = ("directed" in graph_types) or "undirected" not in graph_types and None + mval = ("multigraph" in graph_types) or "graph" not in graph_types and None + errmsg = f"not implemented for {' '.join(graph_types)} type" + + def _not_implemented_for(g): + if (mval is None or mval == g.is_multigraph()) and ( + dval is None or dval == g.is_directed() + ): + raise nx.NetworkXNotImplemented(errmsg) + + return g + + return argmap(_not_implemented_for, 0) + + +# To handle new extensions, define a function accepting a `path` and `mode`. +# Then add the extension to _dispatch_dict. +fopeners = { + ".gz": gzip.open, + ".gzip": gzip.open, + ".bz2": bz2.BZ2File, +} +_dispatch_dict = defaultdict(lambda: open, **fopeners) + + +def open_file(path_arg, mode="r"): + """Decorator to ensure clean opening and closing of files. + + Parameters + ---------- + path_arg : string or int + Name or index of the argument that is a path. + + mode : str + String for opening mode. + + Returns + ------- + _open_file : function + Function which cleanly executes the io. + + Examples + -------- + Decorate functions like this:: + + @open_file(0, "r") + def read_function(pathname): + pass + + + @open_file(1, "w") + def write_function(G, pathname): + pass + + + @open_file(1, "w") + def write_function(G, pathname="graph.dot"): + pass + + + @open_file("pathname", "w") + def write_function(G, pathname="graph.dot"): + pass + + + @open_file("path", "w+") + def another_function(arg, **kwargs): + path = kwargs["path"] + pass + + Notes + ----- + Note that this decorator solves the problem when a path argument is + specified as a string, but it does not handle the situation when the + function wants to accept a default of None (and then handle it). + + Here is an example of how to handle this case:: + + @open_file("path") + def some_function(arg1, arg2, path=None): + if path is None: + fobj = tempfile.NamedTemporaryFile(delete=False) + else: + # `path` could have been a string or file object or something + # similar. In any event, the decorator has given us a file object + # and it will close it for us, if it should. + fobj = path + + try: + fobj.write("blah") + finally: + if path is None: + fobj.close() + + Normally, we'd want to use "with" to ensure that fobj gets closed. + However, the decorator will make `path` a file object for us, + and using "with" would undesirably close that file object. 
+ Instead, we use a try block, as shown above. + When we exit the function, fobj will be closed, if it should be, by the decorator. + """ + + def _open_file(path): + # Now we have the path_arg. There are two types of input to consider: + # 1) string representing a path that should be opened + # 2) an already opened file object + if isinstance(path, str): + ext = splitext(path)[1] + elif isinstance(path, Path): + # path is a pathlib reference to a filename + ext = path.suffix + path = str(path) + else: + # could be None, or a file handle, in which case the algorithm will deal with it + return path, lambda: None + + fobj = _dispatch_dict[ext](path, mode=mode) + return fobj, lambda: fobj.close() + + return argmap(_open_file, path_arg, try_finally=True) + + +def nodes_or_number(which_args): + """Decorator to allow number of nodes or container of nodes. + + With this decorator, the specified argument can be either a number or a container + of nodes. If it is a number, the nodes used are `range(n)`. + This allows `nx.complete_graph(50)` in place of `nx.complete_graph(list(range(50)))`. + And it also allows `nx.complete_graph(any_list_of_nodes)`. + + Parameters + ---------- + which_args : string or int or sequence of strings or ints + If string, the name of the argument to be treated. + If int, the index of the argument to be treated. + If more than one node argument is allowed, can be a list of locations. + + Returns + ------- + _nodes_or_numbers : function + Function which replaces int args with ranges. + + Examples + -------- + Decorate functions like this:: + + @nodes_or_number("nodes") + def empty_graph(nodes): + # nodes is converted to a list of nodes + + @nodes_or_number(0) + def empty_graph(nodes): + # nodes is converted to a list of nodes + + @nodes_or_number(["m1", "m2"]) + def grid_2d_graph(m1, m2, periodic=False): + # m1 and m2 are each converted to a list of nodes + + @nodes_or_number([0, 1]) + def grid_2d_graph(m1, m2, periodic=False): + # m1 and m2 are each converted to a list of nodes + + @nodes_or_number(1) + def full_rary_tree(r, n) + # presumably r is a number. It is not handled by this decorator. + # n is converted to a list of nodes + """ + + def _nodes_or_number(n): + try: + nodes = list(range(n)) + except TypeError: + nodes = tuple(n) + else: + if n < 0: + raise nx.NetworkXError(f"Negative number of nodes not valid: {n}") + return (n, nodes) + + try: + iter_wa = iter(which_args) + except TypeError: + iter_wa = (which_args,) + + return argmap(_nodes_or_number, *iter_wa) + + +def np_random_state(random_state_argument): + """Decorator to generate a numpy RandomState or Generator instance. + + The decorator processes the argument indicated by `random_state_argument` + using :func:`nx.utils.create_random_state`. + The argument value can be a seed (integer), or a `numpy.random.RandomState` + or `numpy.random.RandomState` instance or (`None` or `numpy.random`). + The latter two options use the global random number generator for `numpy.random`. + + The returned instance is a `numpy.random.RandomState` or `numpy.random.Generator`. + + Parameters + ---------- + random_state_argument : string or int + The name or index of the argument to be converted + to a `numpy.random.RandomState` instance. + + Returns + ------- + _random_state : function + Function whose random_state keyword argument is a RandomState instance. 
+ + Examples + -------- + Decorate functions like this:: + + @np_random_state("seed") + def random_float(seed=None): + return seed.rand() + + + @np_random_state(0) + def random_float(rng=None): + return rng.rand() + + + @np_random_state(1) + def random_array(dims, random_state=1): + return random_state.rand(*dims) + + See Also + -------- + py_random_state + """ + return argmap(create_random_state, random_state_argument) + + +def py_random_state(random_state_argument): + """Decorator to generate a random.Random instance (or equiv). + + This decorator processes `random_state_argument` using + :func:`nx.utils.create_py_random_state`. + The input value can be a seed (integer), or a random number generator:: + + If int, return a random.Random instance set with seed=int. + If random.Random instance, return it. + If None or the `random` package, return the global random number + generator used by `random`. + If np.random package, or the default numpy RandomState instance, + return the default numpy random number generator wrapped in a + `PythonRandomViaNumpyBits` class. + If np.random.Generator instance, return it wrapped in a + `PythonRandomViaNumpyBits` class. + + # Legacy options + If np.random.RandomState instance, return it wrapped in a + `PythonRandomInterface` class. + If a `PythonRandomInterface` instance, return it + + Parameters + ---------- + random_state_argument : string or int + The name of the argument or the index of the argument in args that is + to be converted to the random.Random instance or numpy.random.RandomState + instance that mimics basic methods of random.Random. + + Returns + ------- + _random_state : function + Function whose random_state_argument is converted to a Random instance. + + Examples + -------- + Decorate functions like this:: + + @py_random_state("random_state") + def random_float(random_state=None): + return random_state.rand() + + + @py_random_state(0) + def random_float(rng=None): + return rng.rand() + + + @py_random_state(1) + def random_array(dims, seed=12345): + return seed.rand(*dims) + + See Also + -------- + np_random_state + """ + + return argmap(create_py_random_state, random_state_argument) + + +class argmap: + """A decorator to apply a map to arguments before calling the function + + This class provides a decorator that maps (transforms) arguments of the function + before the function is called. Thus for example, we have similar code + in many functions to determine whether an argument is the number of nodes + to be created, or a list of nodes to be handled. The decorator provides + the code to accept either -- transforming the indicated argument into a + list of nodes before the actual function is called. + + This decorator class allows us to process single or multiple arguments. + The arguments to be processed can be specified by string, naming the argument, + or by index, specifying the item in the args list. + + Parameters + ---------- + func : callable + The function to apply to arguments + + *args : iterable of (int, str or tuple) + A list of parameters, specified either as strings (their names), ints + (numerical indices) or tuples, which may contain ints, strings, and + (recursively) tuples. Each indicates which parameters the decorator + should map. Tuples indicate that the map function takes (and returns) + multiple parameters in the same order and nested structure as indicated + here. 
+ + try_finally : bool (default: False) + When True, wrap the function call in a try-finally block with code + for the finally block created by `func`. This is used when the map + function constructs an object (like a file handle) that requires + post-processing (like closing). + + Note: try_finally decorators cannot be used to decorate generator + functions. + + Examples + -------- + Most of these examples use `@argmap(...)` to apply the decorator to + the function defined on the next line. + In the NetworkX codebase however, `argmap` is used within a function to + construct a decorator. That is, the decorator defines a mapping function + and then uses `argmap` to build and return a decorated function. + A simple example is a decorator that specifies which currency to report money. + The decorator (named `convert_to`) would be used like:: + + @convert_to("US_Dollars", "income") + def show_me_the_money(name, income): + print(f"{name} : {income}") + + And the code to create the decorator might be:: + + def convert_to(currency, which_arg): + def _convert(amount): + if amount.currency != currency: + amount = amount.to_currency(currency) + return amount + + return argmap(_convert, which_arg) + + Despite this common idiom for argmap, most of the following examples + use the `@argmap(...)` idiom to save space. + + Here's an example use of argmap to sum the elements of two of the functions + arguments. The decorated function:: + + @argmap(sum, "xlist", "zlist") + def foo(xlist, y, zlist): + return xlist - y + zlist + + is syntactic sugar for:: + + def foo(xlist, y, zlist): + x = sum(xlist) + z = sum(zlist) + return x - y + z + + and is equivalent to (using argument indexes):: + + @argmap(sum, "xlist", 2) + def foo(xlist, y, zlist): + return xlist - y + zlist + + or:: + + @argmap(sum, "zlist", 0) + def foo(xlist, y, zlist): + return xlist - y + zlist + + Transforming functions can be applied to multiple arguments, such as:: + + def swap(x, y): + return y, x + + # the 2-tuple tells argmap that the map `swap` has 2 inputs/outputs. + @argmap(swap, ("a", "b")): + def foo(a, b, c): + return a / b * c + + is equivalent to:: + + def foo(a, b, c): + a, b = swap(a, b) + return a / b * c + + More generally, the applied arguments can be nested tuples of strings or ints. + The syntax `@argmap(some_func, ("a", ("b", "c")))` would expect `some_func` to + accept 2 inputs with the second expected to be a 2-tuple. It should then return + 2 outputs with the second a 2-tuple. The returns values would replace input "a" + "b" and "c" respectively. Similarly for `@argmap(some_func, (0, ("b", 2)))`. + + Also, note that an index larger than the number of named parameters is allowed + for variadic functions. For example:: + + def double(a): + return 2 * a + + + @argmap(double, 3) + def overflow(a, *args): + return a, args + + + print(overflow(1, 2, 3, 4, 5, 6)) # output is 1, (2, 3, 8, 5, 6) + + **Try Finally** + + Additionally, this `argmap` class can be used to create a decorator that + initiates a try...finally block. The decorator must be written to return + both the transformed argument and a closing function. + This feature was included to enable the `open_file` decorator which might + need to close the file or not depending on whether it had to open that file. + This feature uses the keyword-only `try_finally` argument to `@argmap`. 
+ + For example this map opens a file and then makes sure it is closed:: + + def open_file(fn): + f = open(fn) + return f, lambda: f.close() + + The decorator applies that to the function `foo`:: + + @argmap(open_file, "file", try_finally=True) + def foo(file): + print(file.read()) + + is syntactic sugar for:: + + def foo(file): + file, close_file = open_file(file) + try: + print(file.read()) + finally: + close_file() + + and is equivalent to (using indexes):: + + @argmap(open_file, 0, try_finally=True) + def foo(file): + print(file.read()) + + Here's an example of the try_finally feature used to create a decorator:: + + def my_closing_decorator(which_arg): + def _opener(path): + if path is None: + path = open(path) + fclose = path.close + else: + # assume `path` handles the closing + fclose = lambda: None + return path, fclose + + return argmap(_opener, which_arg, try_finally=True) + + which can then be used as:: + + @my_closing_decorator("file") + def fancy_reader(file=None): + # this code doesn't need to worry about closing the file + print(file.read()) + + Decorators with try_finally = True cannot be used with generator functions, + because the `finally` block is evaluated before the generator is exhausted:: + + @argmap(open_file, "file", try_finally=True) + def file_to_lines(file): + for line in file.readlines(): + yield line + + is equivalent to:: + + def file_to_lines_wrapped(file): + for line in file.readlines(): + yield line + + + def file_to_lines_wrapper(file): + try: + file = open_file(file) + return file_to_lines_wrapped(file) + finally: + file.close() + + which behaves similarly to:: + + def file_to_lines_whoops(file): + file = open_file(file) + file.close() + for line in file.readlines(): + yield line + + because the `finally` block of `file_to_lines_wrapper` is executed before + the caller has a chance to exhaust the iterator. + + Notes + ----- + An object of this class is callable and intended to be used when + defining a decorator. Generally, a decorator takes a function as input + and constructs a function as output. Specifically, an `argmap` object + returns the input function decorated/wrapped so that specified arguments + are mapped (transformed) to new values before the decorated function is called. + + As an overview, the argmap object returns a new function with all the + dunder values of the original function (like `__doc__`, `__name__`, etc). + Code for this decorated function is built based on the original function's + signature. It starts by mapping the input arguments to potentially new + values. Then it calls the decorated function with these new values in place + of the indicated arguments that have been mapped. The return value of the + original function is then returned. This new function is the function that + is actually called by the user. + + Three additional features are provided. + 1) The code is lazily compiled. That is, the new function is returned + as an object without the code compiled, but with all information + needed so it can be compiled upon it's first invocation. This saves + time on import at the cost of additional time on the first call of + the function. Subsequent calls are then just as fast as normal. + + 2) If the "try_finally" keyword-only argument is True, a try block + follows each mapped argument, matched on the other side of the wrapped + call, by a finally block closing that mapping. We expect func to return + a 2-tuple: the mapped value and a function to be called in the finally + clause. 
This feature was included so the `open_file` decorator could
+        provide a file handle to the decorated function and close the file
+        handle after the function call. It even keeps track of whether to
+        close the file handle or not based on whether it had to open the file
+        or the input was already open. So, the decorated function does not
+        need to include any code to open or close files.
+
+        3) The maps applied can process multiple arguments. For example,
+        you could swap two arguments using a mapping, or transform
+        them to their sum and their difference. This was included to allow
+        a decorator in the `quality.py` module that checks that an input
+        `partition` is a valid partition of the nodes of the input graph `G`.
+        In this example, the map has inputs `(G, partition)`. After checking
+        for a valid partition, the map either raises an exception or leaves
+        the inputs unchanged. Thus many functions that make this check can
+        use the decorator rather than copy the checking code into each function.
+        More complicated nested argument structures are described below.
+
+    The remaining notes describe the code structure and methods for this
+    class in broad terms to aid in understanding how to use it.
+
+    Instantiating an `argmap` object simply stores the mapping function and
+    the input identifiers of which arguments to map. The resulting decorator
+    is ready to use this map to decorate any function. When that object is
+    called (`argmap.__call__`, usually via the `@my_decorator` syntax), a
+    lazily compiled thin wrapper of the decorated function is constructed,
+    wrapped with the necessary function dunder attributes like `__doc__`
+    and `__name__`. That thinly wrapped function is returned as the
+    decorated function. When that decorated function is called, the thin
+    wrapper of code calls `argmap._lazy_compile` which compiles the decorated
+    function (using `argmap.compile`) and replaces the code of the thin
+    wrapper with the newly compiled code. This saves the compilation step on
+    every import of networkx, at the cost of compiling upon the first call
+    to the decorated function.
+
+    When the decorated function is compiled, the code is recursively assembled
+    using the `argmap.assemble` method. The recursive nature is needed in
+    case of nested decorators. The result of the assembly is a number of
+    useful objects.
+
+      sig : the function signature of the original decorated function as
+          constructed by :func:`argmap.signature`. This is constructed
+          using `inspect.signature` but enhanced with attribute
+          strings `def_sig` and `call_sig`, and other information
+          specific to mapping arguments of this function.
+          This information is used to construct a string of code defining
+          the new decorated function.
+
+      wrapped_name : a unique internally used name constructed by argmap
+          for the decorated function.
+
+      functions : a dict of the functions used inside the code of this
+          decorated function, to be used as `globals` in `exec`.
+          This dict is recursively updated to allow for nested decorating.
+
+      mapblock : code (as a list of strings) to map the incoming argument
+          values to their mapped values.
+
+      finallys : code (as a list of strings) to provide the possibly nested
+          set of finally clauses if needed.
+
+      mutable_args : a bool indicating whether the `sig.args` tuple should be
+          converted to a list so mutation can occur.
+
+    After this recursive assembly process, the `argmap.compile` method
+    constructs code (as strings) to convert the tuple `sig.args` to a list
+    if needed.
It joins the defining code with appropriate indents and
+    compiles the result. Finally, this code is evaluated and the original
+    wrapper's implementation is replaced with the compiled version (see
+    `argmap._lazy_compile` for more details).
+
+    Other `argmap` methods include `_name` and `_count` which allow internally
+    generated names to be unique within a Python session.
+    The methods `_flatten` and `_indent` process the nested lists of strings
+    into properly indented Python code ready to be compiled.
+
+    More complicated nested tuples of arguments are also allowed, though
+    usually not used. For the simple 2 argument case, the argmap
+    input ("a", "b") implies the mapping function will take 2 arguments
+    and return a 2-tuple of mapped values. A more complicated example
+    with argmap input `("a", ("b", "c"))` requires the mapping function to
+    take 2 inputs, with the second being a 2-tuple. It then must output
+    the 3 mapped values in the same nested structure `(newa, (newb, newc))`.
+    This level of generality is not often needed, but was convenient
+    to implement when handling the multiple arguments.
+
+    See Also
+    --------
+    not_implemented_for
+    open_file
+    nodes_or_number
+    py_random_state
+    networkx.algorithms.community.quality.require_partition
+
+    """
+
+    def __init__(self, func, *args, try_finally=False):
+        self._func = func
+        self._args = args
+        self._finally = try_finally
+
+    @staticmethod
+    def _lazy_compile(func):
+        """Compile the source of a wrapped function
+
+        Assemble and compile the decorated function, and intrusively replace its
+        code with the compiled version's. The thinly wrapped function becomes
+        the decorated function.
+
+        Parameters
+        ----------
+        func : callable
+            A function returned by argmap.__call__ which is in the process
+            of being called for the first time.
+
+        Returns
+        -------
+        func : callable
+            The same function, with a new __code__ object.
+
+        Notes
+        -----
+        It was observed in NetworkX issue #4732 [1] that the import time of
+        NetworkX was significantly bloated by the use of decorators: over half
+        of the import time was being spent decorating functions. This was
+        somewhat improved by a change made to the `decorator` library, at the
+        cost of a relatively heavy-weight call to `inspect.Signature.bind`
+        for each call to the decorated function.
+
+        The workaround we arrived at is to do minimal work at the time of
+        decoration. When the decorated function is called for the first time,
+        we compile a function with the same function signature as the wrapped
+        function. The resulting decorated function is faster than one made by
+        the `decorator` library, so that the overhead of the first call is
+        'paid off' after a small number of calls.
+
+        References
+        ----------
+
+        [1] https://github.com/networkx/networkx/issues/4732
+
+        """
+        real_func = func.__argmap__.compile(func.__wrapped__)
+        func.__code__ = real_func.__code__
+        func.__globals__.update(real_func.__globals__)
+        func.__dict__.update(real_func.__dict__)
+        return func
+
+    def __call__(self, f):
+        """Construct a lazily decorated wrapper of f.
+
+        The decorated function will be compiled when it is called for the first time,
+        and it will replace its own __code__ object so subsequent calls are fast.
+
+        Parameters
+        ----------
+        f : callable
+            A function to be decorated.
+
+        Returns
+        -------
+        func : callable
+            The decorated function.
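+
+        Raises
+        ------
+        NetworkXError
+            If this argmap was created with `try_finally=True` and `f` is a
+            generator function; the generated ``finally`` block would run
+            before the generator is exhausted.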
+ + See Also + -------- + argmap._lazy_compile + """ + + def func(*args, __wrapper=None, **kwargs): + return argmap._lazy_compile(__wrapper)(*args, **kwargs) + + # standard function-wrapping stuff + func.__name__ = f.__name__ + func.__doc__ = f.__doc__ + func.__defaults__ = f.__defaults__ + func.__kwdefaults__.update(f.__kwdefaults__ or {}) + func.__module__ = f.__module__ + func.__qualname__ = f.__qualname__ + func.__dict__.update(f.__dict__) + func.__wrapped__ = f + + # now that we've wrapped f, we may have picked up some __dict__ or + # __kwdefaults__ items that were set by a previous argmap. Thus, we set + # these values after those update() calls. + + # If we attempt to access func from within itself, that happens through + # a closure -- which trips an error when we replace func.__code__. The + # standard workaround for functions which can't see themselves is to use + # a Y-combinator, as we do here. + func.__kwdefaults__["_argmap__wrapper"] = func + + # this self-reference is here because functools.wraps preserves + # everything in __dict__, and we don't want to mistake a non-argmap + # wrapper for an argmap wrapper + func.__self__ = func + + # this is used to variously call self.assemble and self.compile + func.__argmap__ = self + + if hasattr(f, "__argmap__"): + func.__is_generator = f.__is_generator + else: + func.__is_generator = inspect.isgeneratorfunction(f) + + if self._finally and func.__is_generator: + raise nx.NetworkXError("argmap cannot decorate generators with try_finally") + + return func + + __count = 0 + + @classmethod + def _count(cls): + """Maintain a globally-unique identifier for function names and "file" names + + Note that this counter is a class method reporting a class variable + so the count is unique within a Python session. It could differ from + session to session for a specific decorator depending on the order + that the decorators are created. But that doesn't disrupt `argmap`. + + This is used in two places: to construct unique variable names + in the `_name` method and to construct unique fictitious filenames + in the `_compile` method. + + Returns + ------- + count : int + An integer unique to this Python session (simply counts from zero) + """ + cls.__count += 1 + return cls.__count + + _bad_chars = re.compile("[^a-zA-Z0-9_]") + + @classmethod + def _name(cls, f): + """Mangle the name of a function to be unique but somewhat human-readable + + The names are unique within a Python session and set using `_count`. + + Parameters + ---------- + f : str or object + + Returns + ------- + name : str + The mangled version of `f.__name__` (if `f.__name__` exists) or `f` + + """ + f = f.__name__ if hasattr(f, "__name__") else f + fname = re.sub(cls._bad_chars, "_", f) + return f"argmap_{fname}_{cls._count()}" + + def compile(self, f): + """Compile the decorated function. + + Called once for a given decorated function -- collects the code from all + argmap decorators in the stack, and compiles the decorated function. + + Much of the work done here uses the `assemble` method to allow recursive + treatment of multiple argmap decorators on a single decorated function. + That flattens the argmap decorators, collects the source code to construct + a single decorated function, then compiles/executes/returns that function. + + The source code for the decorated function is stored as an attribute + `_code` on the function object itself. 
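+        Because `_lazy_compile` copies the compiled function's `__dict__` onto
+        the thin wrapper, that source is also available as ``_code`` on the
+        decorated function after its first call, which can help with debugging.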
+
+        Note that Python's `compile` function requires a filename, but this
+        code is constructed without a file, so a fictitious filename is used
+        to describe where the function comes from. The name is something like:
+        "argmap compilation 4".
+
+        Parameters
+        ----------
+        f : callable
+            The function to be decorated
+
+        Returns
+        -------
+        func : callable
+            The decorated function
+
+        """
+        sig, wrapped_name, functions, mapblock, finallys, mutable_args = self.assemble(
+            f
+        )
+
+        call = f"{sig.call_sig.format(wrapped_name)}#"
+        mut_args = f"{sig.args} = list({sig.args})" if mutable_args else ""
+        body = argmap._indent(sig.def_sig, mut_args, mapblock, call, finallys)
+        code = "\n".join(body)
+
+        locl = {}
+        globl = dict(functions.values())
+        filename = f"{self.__class__} compilation {self._count()}"
+        compiled = compile(code, filename, "exec")
+        exec(compiled, globl, locl)
+        func = locl[sig.name]
+        func._code = code
+        return func
+
+    def assemble(self, f):
+        """Collects components of the source for the decorated function wrapping f.
+
+        If `f` has multiple argmap decorators, we recursively assemble the stack of
+        decorators into a single flattened function.
+
+        This method is part of the `compile` method's process yet separated
+        from that method to allow recursive processing. The outputs are
+        strings, dictionaries and lists that collect needed info to
+        flatten any nested argmap-decoration.
+
+        Parameters
+        ----------
+        f : callable
+            The function to be decorated. If f is argmapped, we assemble it.
+
+        Returns
+        -------
+        sig : argmap.Signature
+            The function signature as an `argmap.Signature` object.
+        wrapped_name : str
+            The mangled name used to represent the wrapped function in the code
+            being assembled.
+        functions : dict
+            A dictionary mapping id(g) -> (mangled_name(g), g) for functions g
+            referred to in the code being assembled. These need to be present
+            in the ``globals`` scope of ``exec`` when defining the decorated
+            function.
+        mapblock : list of lists and/or strings
+            Code that implements mapping of parameters including any try blocks
+            if needed. This code will precede the decorated function call.
+        finallys : list of lists and/or strings
+            Code that implements the finally blocks to post-process the
+            arguments (usually close any files if needed) after the
+            decorated function is called.
+        mutable_args : bool
+            True if the decorator needs to modify positional arguments
+            via their indices. The compile method then turns the argument
+            tuple into a list so that the arguments can be modified.
+        """
+
+        # first, we check if f is already argmapped -- if that's the case,
+        # build up the function recursively.
+        # > mapblock is generally a list of function calls of the sort
+        #     arg = func(arg)
+        # in addition to some try-blocks if needed.
+        # > finallys is a recursive list of finally blocks of the sort
+        #         finally:
+        #             close_func_1()
+        #     finally:
+        #         close_func_2()
+        # > functions is a dict of functions used in the scope of our decorated
+        # function. It will be used to construct globals used in compilation.
+        # We make functions[id(f)] = name_of_f, f to ensure that a given
+        # function is stored and named exactly once even if called by
+        # nested decorators.
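+        #
+        # As a rough illustration (the mangled names vary per session), a
+        # single @argmap(sum, "x") decoration of  def foo(x, y)  assembles
+        # into source like
+        #
+        #     def argmap_foo_1(x, y):
+        #      x = argmap_sum_2(x)
+        #      return argmap_foo_3(x, y)#
+        #
+        # where the trailing `#` tells _indent to close an indentation level.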
+ if hasattr(f, "__argmap__") and f.__self__ is f: + ( + sig, + wrapped_name, + functions, + mapblock, + finallys, + mutable_args, + ) = f.__argmap__.assemble(f.__wrapped__) + functions = dict(functions) # shallow-copy just in case + else: + sig = self.signature(f) + wrapped_name = self._name(f) + mapblock, finallys = [], [] + functions = {id(f): (wrapped_name, f)} + mutable_args = False + + if id(self._func) in functions: + fname, _ = functions[id(self._func)] + else: + fname, _ = functions[id(self._func)] = self._name(self._func), self._func + + # this is a bit complicated -- we can call functions with a variety of + # nested arguments, so long as their input and output are tuples with + # the same nested structure. e.g. ("a", "b") maps arguments a and b. + # A more complicated nesting like (0, (3, 4)) maps arguments 0, 3, 4 + # expecting the mapping to output new values in the same nested shape. + # The ability to argmap multiple arguments was necessary for + # the decorator `nx.algorithms.community.quality.require_partition`, and + # while we're not taking full advantage of the ability to handle + # multiply-nested tuples, it was convenient to implement this in + # generality because the recursive call to `get_name` is necessary in + # any case. + applied = set() + + def get_name(arg, first=True): + nonlocal mutable_args + if isinstance(arg, tuple): + name = ", ".join(get_name(x, False) for x in arg) + return name if first else f"({name})" + if arg in applied: + raise nx.NetworkXError(f"argument {arg} is specified multiple times") + applied.add(arg) + if arg in sig.names: + return sig.names[arg] + elif isinstance(arg, str): + if sig.kwargs is None: + raise nx.NetworkXError( + f"name {arg} is not a named parameter and this function doesn't have kwargs" + ) + return f"{sig.kwargs}[{arg!r}]" + else: + if sig.args is None: + raise nx.NetworkXError( + f"index {arg} not a parameter index and this function doesn't have args" + ) + mutable_args = True + return f"{sig.args}[{arg - sig.n_positional}]" + + if self._finally: + # here's where we handle try_finally decorators. Such a decorator + # returns a mapped argument and a function to be called in a + # finally block. This feature was required by the open_file + # decorator. The below generates the code + # + # name, final = func(name) #<--append to mapblock + # try: #<--append to mapblock + # ... more argmapping and try blocks + # return WRAPPED_FUNCTION(...) + # ... more finally blocks + # finally: #<--prepend to finallys + # final() #<--prepend to finallys + # + for a in self._args: + name = get_name(a) + final = self._name(name) + mapblock.append(f"{name}, {final} = {fname}({name})") + mapblock.append("try:") + finallys = ["finally:", f"{final}()#", "#", finallys] + else: + mapblock.extend( + f"{name} = {fname}({name})" for name in map(get_name, self._args) + ) + + return sig, wrapped_name, functions, mapblock, finallys, mutable_args + + @classmethod + def signature(cls, f): + r"""Construct a Signature object describing `f` + + Compute a Signature so that we can write a function wrapping f with + the same signature and call-type. 
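+
+        For illustration (the mangled name varies per session), for a function
+        ``def f(a, b=1, *args, c, **kw)`` the generated strings are roughly::
+
+            def_sig  = "def argmap_f_1(a, b, *args, c, **kw):"
+            call_sig = "return {}(a, b, *args, c = c, **kw)"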
+
+        Parameters
+        ----------
+        f : callable
+            A function to be decorated
+
+        Returns
+        -------
+        sig : argmap.Signature
+            The Signature of f
+
+        Notes
+        -----
+        The Signature is a namedtuple with names:
+
+            name : a unique version of the name of the decorated function
+            signature : the inspect.signature of the decorated function
+            def_sig : a string used as code to define the new function
+            call_sig : a string used as code to call the decorated function
+            names : a dict keyed by argument name and index to the argument's name
+            n_positional : the number of positional arguments in the signature
+            args : the name of the VAR_POSITIONAL argument if any, i.e. \*theseargs
+            kwargs : the name of the VAR_KEYWORDS argument if any, i.e. \*\*kwargs
+
+        These named attributes of the signature are used in `assemble` and `compile`
+        to construct a string of source code for the decorated function.
+
+        """
+        sig = inspect.signature(f, follow_wrapped=False)
+        def_sig = []
+        call_sig = []
+        names = {}
+
+        kind = None
+        args = None
+        kwargs = None
+        npos = 0
+        for i, param in enumerate(sig.parameters.values()):
+            # parameters can be position-only, keyword-or-position, keyword-only
+            # in any combination, but only in the order as above. we do edge
+            # detection to add the appropriate punctuation
+            prev = kind
+            kind = param.kind
+            if prev == param.POSITIONAL_ONLY != kind:
+                # the last token was position-only, but this one isn't
+                def_sig.append("/")
+            if (
+                param.VAR_POSITIONAL
+                != prev
+                != param.KEYWORD_ONLY
+                == kind
+                != param.VAR_POSITIONAL
+            ):
+                # param is the first keyword-only arg and isn't starred
+                def_sig.append("*")
+
+            # star arguments as appropriate
+            if kind == param.VAR_POSITIONAL:
+                name = "*" + param.name
+                args = param.name
+                count = 0
+            elif kind == param.VAR_KEYWORD:
+                name = "**" + param.name
+                kwargs = param.name
+                count = 0
+            else:
+                names[i] = names[param.name] = param.name
+                name = param.name
+                count = 1
+
+            # assign to keyword-only args in the function call
+            if kind == param.KEYWORD_ONLY:
+                call_sig.append(f"{name} = {name}")
+            else:
+                npos += count
+                call_sig.append(name)
+
+            def_sig.append(name)
+
+        fname = cls._name(f)
+        def_sig = f'def {fname}({", ".join(def_sig)}):'
+
+        call_sig = f"return {{}}({', '.join(call_sig)})"
+
+        return cls.Signature(fname, sig, def_sig, call_sig, names, npos, args, kwargs)
+
+    Signature = collections.namedtuple(
+        "Signature",
+        [
+            "name",
+            "signature",
+            "def_sig",
+            "call_sig",
+            "names",
+            "n_positional",
+            "args",
+            "kwargs",
+        ],
+    )
+
+    @staticmethod
+    def _flatten(nestlist, visited):
+        """flattens a recursive list of lists that doesn't have cyclic references
+
+        Parameters
+        ----------
+        nestlist : iterable
+            A recursive list of objects to be flattened into a single iterable
+
+        visited : set
+            A set of object ids which have been walked -- initialize with an
+            empty set
+
+        Yields
+        ------
+        Non-list objects contained in nestlist
+
+        """
+        for thing in nestlist:
+            if isinstance(thing, list):
+                if id(thing) in visited:
+                    raise ValueError("A cycle was found in nestlist. Be a tree.")
+                else:
+                    visited.add(id(thing))
+                    yield from argmap._flatten(thing, visited)
+            else:
+                yield thing
+
+    _tabs = " " * 64
+
+    @staticmethod
+    def _indent(*lines):
+        """Indent list of code lines to make executable Python code
+
+        Indents a tree-recursive list of strings, following the rule that one
+        space is added to the tab after a line that ends in a colon, and one is
+        removed after a line that ends in a hashmark.
+
+        Parameters
+        ----------
+        *lines : lists and/or strings
+            A recursive list of strings to be assembled into properly indented
+            code.
+
+        Returns
+        -------
+        code : str
+
+        Examples
+        --------
+
+            argmap._indent(*["try:", "try:", "pass#", "finally:", "pass#", "#",
+                             "finally:", "pass#"])
+
+        renders to
+
+            '''try:
+             try:
+              pass#
+             finally:
+              pass#
+             #
+            finally:
+             pass#'''
+        """
+        depth = 0
+        for line in argmap._flatten(lines, set()):
+            yield f"{argmap._tabs[:depth]}{line}"
+            depth += (line[-1:] == ":") - (line[-1:] == "#")
+
+
+# Vendored in from https://github.com/scikit-learn/scikit-learn/blob/8ed0270b99344cee9bb253cbfa1d986561ea6cd7/sklearn/utils/validation.py#L37C1-L90C44
+def deprecate_positional_args(func=None, *, version):
+    """Decorator that issues warnings for arguments passed positionally.
+
+    Using the keyword-only argument syntax in PEP 3102, arguments after the
+    * will issue a warning when passed as a positional argument.
+
+    Parameters
+    ----------
+    func : callable, default=None
+        Function to check arguments on.
+    version : str
+        The version when passing these positional arguments will result in an
+        error.
+    """
+
+    def _inner_deprecate_positional_args(f):
+        sig = signature(f)
+        kwonly_args = []
+        all_args = []
+
+        for name, param in sig.parameters.items():
+            if param.kind == Parameter.POSITIONAL_OR_KEYWORD:
+                all_args.append(name)
+            elif param.kind == Parameter.KEYWORD_ONLY:
+                kwonly_args.append(name)
+
+        @wraps(f)
+        def inner_f(*args, **kwargs):
+            extra_args = len(args) - len(all_args)
+            if extra_args <= 0:
+                return f(*args, **kwargs)
+
+            # extra_args > 0
+            args_msg = [
+                f"{name}={arg}"
+                for name, arg in zip(kwonly_args[:extra_args], args[-extra_args:])
+            ]
+            args_msg = ", ".join(args_msg)
+            warnings.warn(
+                (
+                    f"Pass {args_msg} as keyword args. From NetworkX version "
+                    f"{version} passing these as positional arguments "
+                    "will result in an error"
+                ),
+                FutureWarning,
+            )
+            kwargs.update(zip(sig.parameters, args))
+            return f(**kwargs)
+
+        return inner_f
+
+    if func is not None:
+        return _inner_deprecate_positional_args(func)
+
+    return _inner_deprecate_positional_args
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/heaps.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/heaps.py
new file mode 100644
index 0000000000000000000000000000000000000000..3db27906314924380a8a87f2dfd3a81292ffbb9f
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/heaps.py
@@ -0,0 +1,340 @@
+"""
+Min-heaps.
+"""
+
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+
+__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
+
+
+class MinHeap:
+    """Base class for min-heaps.
+
+    A MinHeap stores a collection of key-value pairs ordered by their values.
+    It supports querying the minimum pair, inserting a new pair, decreasing the
+    value in an existing pair and deleting the minimum pair.
+    """
+
+    class _Item:
+        """Used by subclasses to represent a key-value pair."""
+
+        __slots__ = ("key", "value")
+
+        def __init__(self, key, value):
+            self.key = key
+            self.value = value
+
+        def __repr__(self):
+            return repr((self.key, self.value))
+
+    def __init__(self):
+        """Initialize a new min-heap."""
+        self._dict = {}
+
+    def min(self):
+        """Query the minimum key-value pair.
+
+        Returns
+        -------
+        key, value : tuple
+            The key-value pair with the minimum value in the heap.
+
+        Raises
+        ------
+        NetworkXError
+            If the heap is empty.
+        """
+        raise NotImplementedError
+
+    def pop(self):
+        """Delete the minimum pair in the heap.
+
+        Returns
+        -------
+        key, value : tuple
+            The key-value pair with the minimum value in the heap.
+
+        Raises
+        ------
+        NetworkXError
+            If the heap is empty.
+        """
+        raise NotImplementedError
+
+    def get(self, key, default=None):
+        """Returns the value associated with a key.
+
+        Parameters
+        ----------
+        key : hashable object
+            The key to be looked up.
+
+        default : object
+            Default value to return if the key is not present in the heap.
+            Default value: None.
+
+        Returns
+        -------
+        value : object.
+            The value associated with the key.
+        """
+        raise NotImplementedError
+
+    def insert(self, key, value, allow_increase=False):
+        """Insert a new key-value pair or modify the value in an existing
+        pair.
+
+        Parameters
+        ----------
+        key : hashable object
+            The key.
+
+        value : object comparable with existing values.
+            The value.
+
+        allow_increase : bool
+            Whether the value is allowed to increase. If False, attempts to
+            increase an existing value have no effect. Default value: False.
+
+        Returns
+        -------
+        decreased : bool
+            True if a pair is inserted or the existing value is decreased.
+        """
+        raise NotImplementedError
+
+    def __nonzero__(self):
+        """Returns whether the heap is nonempty."""
+        return bool(self._dict)
+
+    def __bool__(self):
+        """Returns whether the heap is nonempty."""
+        return bool(self._dict)
+
+    def __len__(self):
+        """Returns the number of key-value pairs in the heap."""
+        return len(self._dict)
+
+    def __contains__(self, key):
+        """Returns whether a key exists in the heap.
+
+        Parameters
+        ----------
+        key : any hashable object.
+            The key to be looked up.
+        """
+        return key in self._dict
+
+
+class PairingHeap(MinHeap):
+    """A pairing heap."""
+
+    class _Node(MinHeap._Item):
+        """A node in a pairing heap.
+
+        A tree in a pairing heap is stored using the left-child, right-sibling
+        representation.
+        """
+
+        __slots__ = ("left", "next", "prev", "parent")
+
+        def __init__(self, key, value):
+            super().__init__(key, value)
+            # The leftmost child.
+            self.left = None
+            # The next sibling.
+            self.next = None
+            # The previous sibling.
+            self.prev = None
+            # The parent.
+            self.parent = None
+
+    def __init__(self):
+        """Initialize a pairing heap."""
+        super().__init__()
+        self._root = None
+
+    def min(self):
+        if self._root is None:
+            raise nx.NetworkXError("heap is empty.")
+        return (self._root.key, self._root.value)
+
+    def pop(self):
+        if self._root is None:
+            raise nx.NetworkXError("heap is empty.")
+        min_node = self._root
+        self._root = self._merge_children(self._root)
+        del self._dict[min_node.key]
+        return (min_node.key, min_node.value)
+
+    def get(self, key, default=None):
+        node = self._dict.get(key)
+        return node.value if node is not None else default
+
+    def insert(self, key, value, allow_increase=False):
+        node = self._dict.get(key)
+        root = self._root
+        if node is not None:
+            if value < node.value:
+                node.value = value
+                if node is not root and value < node.parent.value:
+                    self._cut(node)
+                    self._root = self._link(root, node)
+                return True
+            elif allow_increase and value > node.value:
+                node.value = value
+                child = self._merge_children(node)
+                # Nonstandard step: Link the merged subtree with the root. See
+                # below for the standard step.
+                if child is not None:
+                    self._root = self._link(self._root, child)
+                # Standard step: Perform a decrease followed by a pop as if the
+                # value were the smallest in the heap. Then insert the new
+                # value into the heap.
+                # if node is not root:
+                #     self._cut(node)
+                #     if child is not None:
+                #         root = self._link(root, child)
+                #     self._root = self._link(root, node)
+                # else:
+                #     self._root = (self._link(node, child)
+                #                   if child is not None else node)
+                return False
+        else:
+            # Insert a new key.
+            node = self._Node(key, value)
+            self._dict[key] = node
+            self._root = self._link(root, node) if root is not None else node
+            return True
+
+    def _link(self, root, other):
+        """Link two nodes, making the one with the smaller value the parent of
+        the other.
+        """
+        if other.value < root.value:
+            root, other = other, root
+        next = root.left
+        other.next = next
+        if next is not None:
+            next.prev = other
+        other.prev = None
+        root.left = other
+        other.parent = root
+        return root
+
+    def _merge_children(self, root):
+        """Merge the subtrees of the root using the standard two-pass method.
+        The resulting subtree is detached from the root.
+        """
+        node = root.left
+        root.left = None
+        if node is not None:
+            link = self._link
+            # Pass 1: Merge pairs of consecutive subtrees from left to right.
+            # At the end of the pass, only the prev pointers of the resulting
+            # subtrees have meaningful values. The other pointers will be fixed
+            # in pass 2.
+            prev = None
+            while True:
+                next = node.next
+                if next is None:
+                    node.prev = prev
+                    break
+                next_next = next.next
+                node = link(node, next)
+                node.prev = prev
+                prev = node
+                if next_next is None:
+                    break
+                node = next_next
+            # Pass 2: Successively merge the subtrees produced by pass 1 from
+            # right to left with the rightmost one.
+            prev = node.prev
+            while prev is not None:
+                prev_prev = prev.prev
+                node = link(prev, node)
+                prev = prev_prev
+            # Now node can become the new root. It has no parent or siblings.
+            node.prev = None
+            node.next = None
+            node.parent = None
+        return node
+
+    def _cut(self, node):
+        """Cut a node from its parent."""
+        prev = node.prev
+        next = node.next
+        if prev is not None:
+            prev.next = next
+        else:
+            node.parent.left = next
+        node.prev = None
+        if next is not None:
+            next.prev = prev
+        node.next = None
+        node.parent = None
+
+
+class BinaryHeap(MinHeap):
+    """A binary heap."""
+
+    def __init__(self):
+        """Initialize a binary heap."""
+        super().__init__()
+        self._heap = []
+        self._count = count()
+
+    def min(self):
+        dict = self._dict
+        if not dict:
+            raise nx.NetworkXError("heap is empty")
+        heap = self._heap
+        pop = heappop
+        # Repeatedly remove stale key-value pairs until an up-to-date one is
+        # found.
+        while True:
+            value, _, key = heap[0]
+            if key in dict and value == dict[key]:
+                break
+            pop(heap)
+        return (key, value)
+
+    def pop(self):
+        dict = self._dict
+        if not dict:
+            raise nx.NetworkXError("heap is empty")
+        heap = self._heap
+        pop = heappop
+        # Repeatedly remove stale key-value pairs until an up-to-date one is
+        # found.
+        while True:
+            value, _, key = heap[0]
+            pop(heap)
+            if key in dict and value == dict[key]:
+                break
+        del dict[key]
+        return (key, value)
+
+    def get(self, key, default=None):
+        return self._dict.get(key, default)
+
+    def insert(self, key, value, allow_increase=False):
+        dict = self._dict
+        if key in dict:
+            old_value = dict[key]
+            if value < old_value or (allow_increase and value > old_value):
+                # Since there is no way to efficiently obtain the location of a
+                # key-value pair in the heap, insert a new pair even if ones
+                # with the same key may already be present. Deem the old ones
+                # as stale and skip them when the minimum pair is queried.
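+                # This lazy-deletion scheme keeps insert at O(log n); stale
+                # entries linger in self._heap until min()/pop() discard them
+                # when they surface at the top of the heap.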
+ dict[key] = value + heappush(self._heap, (value, next(self._count), key)) + return value < old_value + return False + else: + dict[key] = value + heappush(self._heap, (value, next(self._count), key)) + return True diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/mapped_queue.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/mapped_queue.py new file mode 100644 index 0000000000000000000000000000000000000000..afb97404cbe6ea16eb3392264ccca9c1d01dbd60 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/mapped_queue.py @@ -0,0 +1,298 @@ +"""Priority queue class with updatable priorities. +""" + +import heapq + +__all__ = ["MappedQueue"] + + +class _HeapElement: + """This proxy class separates the heap element from its priority. + + The idea is that using a 2-tuple (priority, element) works + for sorting, but not for dict lookup because priorities are + often floating point values so round-off can mess up equality. + + So, we need inequalities to look at the priority (for sorting) + and equality (and hash) to look at the element to enable + updates to the priority. + + Unfortunately, this class can be tricky to work with if you forget that + `__lt__` compares the priority while `__eq__` compares the element. + In `greedy_modularity_communities()` the following code is + used to check that two _HeapElements differ in either element or priority: + + if d_oldmax != row_max or d_oldmax.priority != row_max.priority: + + If the priorities are the same, this implementation uses the element + as a tiebreaker. This provides compatibility with older systems that + use tuples to combine priority and elements. + """ + + __slots__ = ["priority", "element", "_hash"] + + def __init__(self, priority, element): + self.priority = priority + self.element = element + self._hash = hash(element) + + def __lt__(self, other): + try: + other_priority = other.priority + except AttributeError: + return self.priority < other + # assume comparing to another _HeapElement + if self.priority == other_priority: + try: + return self.element < other.element + except TypeError as err: + raise TypeError( + "Consider using a tuple, with a priority value that can be compared." + ) + return self.priority < other_priority + + def __gt__(self, other): + try: + other_priority = other.priority + except AttributeError: + return self.priority > other + # assume comparing to another _HeapElement + if self.priority == other_priority: + try: + return self.element > other.element + except TypeError as err: + raise TypeError( + "Consider using a tuple, with a priority value that can be compared." + ) + return self.priority > other_priority + + def __eq__(self, other): + try: + return self.element == other.element + except AttributeError: + return self.element == other + + def __hash__(self): + return self._hash + + def __getitem__(self, indx): + return self.priority if indx == 0 else self.element[indx - 1] + + def __iter__(self): + yield self.priority + try: + yield from self.element + except TypeError: + yield self.element + + def __repr__(self): + return f"_HeapElement({self.priority}, {self.element})" + + +class MappedQueue: + """The MappedQueue class implements a min-heap with removal and update-priority. + + The min heap uses heapq as well as custom written _siftup and _siftdown + methods to allow the heap positions to be tracked by an additional dict + keyed by element to position. 
The smallest element can be read in O(1) time and
+    popped in O(log n) time; new elements can be pushed in O(log n) time, and
+    any element can be removed or updated in O(log n) time. The queue cannot
+    contain duplicate elements and an attempt to push an element already in
+    the queue will have no effect.
+
+    MappedQueue complements the heapq package from the Python standard
+    library. While MappedQueue is designed for maximum compatibility with
+    heapq, it adds element removal, lookup, and priority update.
+
+    Parameters
+    ----------
+    data : dict or iterable
+
+    Examples
+    --------
+
+    A `MappedQueue` can be created empty, or optionally, given a dictionary
+    of initial elements and priorities. The methods `push`, `pop`,
+    `remove`, and `update` operate on the queue.
+
+    >>> colors_nm = {"red": 665, "blue": 470, "green": 550}
+    >>> q = MappedQueue(colors_nm)
+    >>> q.remove("red")
+    >>> q.update("green", "violet", 400)
+    >>> q.push("indigo", 425)
+    True
+    >>> [q.pop().element for i in range(len(q.heap))]
+    ['violet', 'indigo', 'blue']
+
+    A `MappedQueue` can also be initialized with a list or other iterable.
+    Each element is then used as its own priority, so the pop order is the
+    sort order of the elements.
+
+    >>> q = MappedQueue([916, 50, 4609, 493, 237])
+    >>> q.remove(493)
+    >>> q.update(237, 1117)
+    >>> [q.pop() for i in range(len(q.heap))]
+    [50, 916, 1117, 4609]
+
+    An exception is raised if the elements are not comparable.
+
+    >>> q = MappedQueue([100, "a"])
+    Traceback (most recent call last):
+        ...
+    TypeError: '<' not supported between instances of 'int' and 'str'
+
+    To avoid the exception, use a dictionary to assign priorities to the elements.
+
+    >>> q = MappedQueue({100: 0, "a": 1})
+
+    References
+    ----------
+    .. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
+       Introduction to Algorithms, second edition.
+    .. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3).
+       Pearson Education.
+ """ + + def __init__(self, data=None): + """Priority queue class with updatable priorities.""" + if data is None: + self.heap = [] + elif isinstance(data, dict): + self.heap = [_HeapElement(v, k) for k, v in data.items()] + else: + self.heap = list(data) + self.position = {} + self._heapify() + + def _heapify(self): + """Restore heap invariant and recalculate map.""" + heapq.heapify(self.heap) + self.position = {elt: pos for pos, elt in enumerate(self.heap)} + if len(self.heap) != len(self.position): + raise AssertionError("Heap contains duplicate elements") + + def __len__(self): + return len(self.heap) + + def push(self, elt, priority=None): + """Add an element to the queue.""" + if priority is not None: + elt = _HeapElement(priority, elt) + # If element is already in queue, do nothing + if elt in self.position: + return False + # Add element to heap and dict + pos = len(self.heap) + self.heap.append(elt) + self.position[elt] = pos + # Restore invariant by sifting down + self._siftdown(0, pos) + return True + + def pop(self): + """Remove and return the smallest element in the queue.""" + # Remove smallest element + elt = self.heap[0] + del self.position[elt] + # If elt is last item, remove and return + if len(self.heap) == 1: + self.heap.pop() + return elt + # Replace root with last element + last = self.heap.pop() + self.heap[0] = last + self.position[last] = 0 + # Restore invariant by sifting up + self._siftup(0) + # Return smallest element + return elt + + def update(self, elt, new, priority=None): + """Replace an element in the queue with a new one.""" + if priority is not None: + new = _HeapElement(priority, new) + # Replace + pos = self.position[elt] + self.heap[pos] = new + del self.position[elt] + self.position[new] = pos + # Restore invariant by sifting up + self._siftup(pos) + + def remove(self, elt): + """Remove an element from the queue.""" + # Find and remove element + try: + pos = self.position[elt] + del self.position[elt] + except KeyError: + # Not in queue + raise + # If elt is last item, remove and return + if pos == len(self.heap) - 1: + self.heap.pop() + return + # Replace elt with last element + last = self.heap.pop() + self.heap[pos] = last + self.position[last] = pos + # Restore invariant by sifting up + self._siftup(pos) + + def _siftup(self, pos): + """Move smaller child up until hitting a leaf. + + Built to mimic code for heapq._siftup + only updating position dict too. + """ + heap, position = self.heap, self.position + end_pos = len(heap) + startpos = pos + newitem = heap[pos] + # Shift up the smaller child until hitting a leaf + child_pos = (pos << 1) + 1 # start with leftmost child position + while child_pos < end_pos: + # Set child_pos to index of smaller child. + child = heap[child_pos] + right_pos = child_pos + 1 + if right_pos < end_pos: + right = heap[right_pos] + if not child < right: + child = right + child_pos = right_pos + # Move the smaller child up. + heap[pos] = child + position[child] = pos + pos = child_pos + child_pos = (pos << 1) + 1 + # pos is a leaf position. Put newitem there, and bubble it up + # to its final resting place (by sifting its parents down). + while pos > 0: + parent_pos = (pos - 1) >> 1 + parent = heap[parent_pos] + if not newitem < parent: + break + heap[pos] = parent + position[parent] = pos + pos = parent_pos + heap[pos] = newitem + position[newitem] = pos + + def _siftdown(self, start_pos, pos): + """Restore invariant. keep swapping with parent until smaller. 
+ + Built to mimic code for heapq._siftdown + only updating position dict too. + """ + heap, position = self.heap, self.position + newitem = heap[pos] + # Follow the path to the root, moving parents down until finding a place + # newitem fits. + while pos > start_pos: + parent_pos = (pos - 1) >> 1 + parent = heap[parent_pos] + if not newitem < parent: + break + heap[pos] = parent + position[parent] = pos + pos = parent_pos + heap[pos] = newitem + position[newitem] = pos diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/misc.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..096e46ab6ae7bd4f1967ba5be92522be7ea2958d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/misc.py @@ -0,0 +1,601 @@ +""" +Miscellaneous Helpers for NetworkX. + +These are not imported into the base networkx namespace but +can be accessed, for example, as + +>>> import networkx +>>> networkx.utils.make_list_of_ints({1, 2, 3}) +[1, 2, 3] +>>> networkx.utils.arbitrary_element({5, 1, 7}) # doctest: +SKIP +1 +""" + +import random +import sys +import uuid +import warnings +from collections import defaultdict, deque +from collections.abc import Iterable, Iterator, Sized +from itertools import chain, tee + +import networkx as nx + +__all__ = [ + "flatten", + "make_list_of_ints", + "dict_to_numpy_array", + "arbitrary_element", + "pairwise", + "groups", + "create_random_state", + "create_py_random_state", + "PythonRandomInterface", + "PythonRandomViaNumpyBits", + "nodes_equal", + "edges_equal", + "graphs_equal", + "_clear_cache", +] + + +# some cookbook stuff +# used in deciding whether something is a bunch of nodes, edges, etc. +# see G.add_nodes and others in Graph Class in networkx/base.py + + +def flatten(obj, result=None): + """Return flattened version of (possibly nested) iterable object.""" + if not isinstance(obj, Iterable | Sized) or isinstance(obj, str): + return obj + if result is None: + result = [] + for item in obj: + if not isinstance(item, Iterable | Sized) or isinstance(item, str): + result.append(item) + else: + flatten(item, result) + return tuple(result) + + +def make_list_of_ints(sequence): + """Return list of ints from sequence of integral numbers. + + All elements of the sequence must satisfy int(element) == element + or a ValueError is raised. Sequence is iterated through once. + + If sequence is a list, the non-int values are replaced with ints. + So, no new list is created + """ + if not isinstance(sequence, list): + result = [] + for i in sequence: + errmsg = f"sequence is not all integers: {i}" + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + result.append(ii) + return result + # original sequence is a list... in-place conversion to ints + for indx, i in enumerate(sequence): + errmsg = f"sequence is not all integers: {i}" + if isinstance(i, int): + continue + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + sequence[indx] = ii + return sequence + + +def dict_to_numpy_array(d, mapping=None): + """Convert a dictionary of dictionaries to a numpy array + with optional mapping.""" + try: + return _dict_to_numpy_array2(d, mapping) + except (AttributeError, TypeError): + # AttributeError is when no mapping was provided and v.keys() fails. + # TypeError is when a mapping was provided and d[k1][k2] fails. 
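+        # Fall back to the 1d conversion, treating `d` as a flat
+        # {key: number} mapping rather than a dict of dicts.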
+ return _dict_to_numpy_array1(d, mapping) + + +def _dict_to_numpy_array2(d, mapping=None): + """Convert a dictionary of dictionaries to a 2d numpy array + with optional mapping. + + """ + import numpy as np + + if mapping is None: + s = set(d.keys()) + for k, v in d.items(): + s.update(v.keys()) + mapping = dict(zip(s, range(len(s)))) + n = len(mapping) + a = np.zeros((n, n)) + for k1, i in mapping.items(): + for k2, j in mapping.items(): + try: + a[i, j] = d[k1][k2] + except KeyError: + pass + return a + + +def _dict_to_numpy_array1(d, mapping=None): + """Convert a dictionary of numbers to a 1d numpy array with optional mapping.""" + import numpy as np + + if mapping is None: + s = set(d.keys()) + mapping = dict(zip(s, range(len(s)))) + n = len(mapping) + a = np.zeros(n) + for k1, i in mapping.items(): + i = mapping[k1] + a[i] = d[k1] + return a + + +def arbitrary_element(iterable): + """Returns an arbitrary element of `iterable` without removing it. + + This is most useful for "peeking" at an arbitrary element of a set, + but can be used for any list, dictionary, etc., as well. + + Parameters + ---------- + iterable : `abc.collections.Iterable` instance + Any object that implements ``__iter__``, e.g. set, dict, list, tuple, + etc. + + Returns + ------- + The object that results from ``next(iter(iterable))`` + + Raises + ------ + ValueError + If `iterable` is an iterator (because the current implementation of + this function would consume an element from the iterator). + + Examples + -------- + Arbitrary elements from common Iterable objects: + + >>> nx.utils.arbitrary_element([1, 2, 3]) # list + 1 + >>> nx.utils.arbitrary_element((1, 2, 3)) # tuple + 1 + >>> nx.utils.arbitrary_element({1, 2, 3}) # set + 1 + >>> d = {k: v for k, v in zip([1, 2, 3], [3, 2, 1])} + >>> nx.utils.arbitrary_element(d) # dict_keys + 1 + >>> nx.utils.arbitrary_element(d.values()) # dict values + 3 + + `str` is also an Iterable: + + >>> nx.utils.arbitrary_element("hello") + 'h' + + :exc:`ValueError` is raised if `iterable` is an iterator: + + >>> iterator = iter([1, 2, 3]) # Iterator, *not* Iterable + >>> nx.utils.arbitrary_element(iterator) + Traceback (most recent call last): + ... + ValueError: cannot return an arbitrary item from an iterator + + Notes + ----- + This function does not return a *random* element. If `iterable` is + ordered, sequential calls will return the same value:: + + >>> l = [1, 2, 3] + >>> nx.utils.arbitrary_element(l) + 1 + >>> nx.utils.arbitrary_element(l) + 1 + + """ + if isinstance(iterable, Iterator): + raise ValueError("cannot return an arbitrary item from an iterator") + # Another possible implementation is ``for x in iterable: return x``. + return next(iter(iterable)) + + +# Recipe from the itertools documentation. +def pairwise(iterable, cyclic=False): + "s -> (s0, s1), (s1, s2), (s2, s3), ..." + a, b = tee(iterable) + first = next(b, None) + if cyclic is True: + return zip(a, chain(b, (first,))) + return zip(a, b) + + +def groups(many_to_one): + """Converts a many-to-one mapping into a one-to-many mapping. + + `many_to_one` must be a dictionary whose keys and values are all + :term:`hashable`. + + The return value is a dictionary mapping values from `many_to_one` + to sets of keys from `many_to_one` that have that value. 
+
+    Examples
+    --------
+    >>> from networkx.utils import groups
+    >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3}
+    >>> groups(many_to_one)  # doctest: +SKIP
+    {1: {'a', 'b'}, 2: {'c'}, 3: {'e', 'd'}}
+    """
+    one_to_many = defaultdict(set)
+    for v, k in many_to_one.items():
+        one_to_many[k].add(v)
+    return dict(one_to_many)
+
+
+def create_random_state(random_state=None):
+    """Returns a numpy.random.RandomState or numpy.random.Generator instance
+    depending on input.
+
+    Parameters
+    ----------
+    random_state : int or NumPy RandomState or Generator instance, optional (default=None)
+        If int, return a numpy.random.RandomState instance set with seed=int.
+        If `numpy.random.RandomState` instance, return it.
+        If `numpy.random.Generator` instance, return it.
+        If None or numpy.random, return the global random number generator used
+        by numpy.random.
+    """
+    import numpy as np
+
+    if random_state is None or random_state is np.random:
+        return np.random.mtrand._rand
+    if isinstance(random_state, np.random.RandomState):
+        return random_state
+    if isinstance(random_state, int):
+        return np.random.RandomState(random_state)
+    if isinstance(random_state, np.random.Generator):
+        return random_state
+    msg = (
+        f"{random_state} cannot be used to create a numpy.random.RandomState or\n"
+        "numpy.random.Generator instance"
+    )
+    raise ValueError(msg)
+
+
+class PythonRandomViaNumpyBits(random.Random):
+    """Provide the random.Random algorithms using a numpy.random bit generator
+
+    The intent is to allow people to contribute code that uses Python's random
+    library, but still allow users to provide a single easily controlled random
+    bit-stream for all work with NetworkX. This implementation is based on helpful
+    comments and code from Robert Kern on NumPy's GitHub Issue #24458.
+
+    This implementation supersedes that of `PythonRandomInterface` which rewrote
+    methods to account for subtle differences in API between `random` and
+    `numpy.random`. Instead this subclasses `random.Random` and overwrites
+    the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`.
+    It makes them use the rng values from an input numpy `RandomState` or `Generator`.
+    Those few methods allow the rest of the `random.Random` methods to provide
+    the API interface of `random.Random` while using randomness generated by
+    a numpy generator.
+    """
+
+    def __init__(self, rng=None):
+        try:
+            import numpy as np
+        except ImportError:
+            msg = "numpy not found, only random.random available."
+            warnings.warn(msg, ImportWarning)
+
+        if rng is None:
+            self._rng = np.random.mtrand._rand
+        else:
+            self._rng = rng
+
+        # Not strictly necessary, but gauss_next is in the superclass and
+        # nominally public, so initialize it here.
+        self.gauss_next = None
+
+    def random(self):
+        """Get the next random number in the range 0.0 <= X < 1.0."""
+        return self._rng.random()
+
+    def getrandbits(self, k):
+        """getrandbits(k) -> x.  Generates an int with k random bits."""
+        if k < 0:
+            raise ValueError("number of bits must be non-negative")
+        numbytes = (k + 7) // 8  # bits / 8 and rounded up
+        x = int.from_bytes(self._rng.bytes(numbytes), "big")
+        return x >> (numbytes * 8 - k)  # trim excess bits
+
+    def getstate(self):
+        return self._rng.__getstate__()
+
+    def setstate(self, state):
+        self._rng.__setstate__(state)
+
+    def seed(self, *args, **kwds):
+        "Override that raises an error; seed the underlying numpy rng instead."
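+        # Seeding here is deliberately unsupported: the bit-stream comes from
+        # the numpy rng supplied at construction, so seed that rng instead.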
+        raise NotImplementedError("seed() not implemented in PythonRandomViaNumpyBits")
+
+
+##################################################################
+class PythonRandomInterface:
+    """PythonRandomInterface is included for backward compatibility.
+    New code should use PythonRandomViaNumpyBits instead.
+    """
+
+    def __init__(self, rng=None):
+        try:
+            import numpy as np
+        except ImportError:
+            msg = "numpy not found, only random.random available."
+            warnings.warn(msg, ImportWarning)
+
+        if rng is None:
+            self._rng = np.random.mtrand._rand
+        else:
+            self._rng = rng
+
+    def random(self):
+        return self._rng.random()
+
+    def uniform(self, a, b):
+        return a + (b - a) * self._rng.random()
+
+    def randrange(self, a, b=None):
+        import numpy as np
+
+        if b is None:
+            a, b = 0, a
+        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
+            tmp_rng = PythonRandomViaNumpyBits(self._rng)
+            return tmp_rng.randrange(a, b)
+
+        if isinstance(self._rng, np.random.Generator):
+            return self._rng.integers(a, b)
+        return self._rng.randint(a, b)
+
+    # NOTE: the numpy implementations of `choice` don't support strings, so
+    # this cannot be replaced with self._rng.choice
+    def choice(self, seq):
+        import numpy as np
+
+        if isinstance(self._rng, np.random.Generator):
+            idx = self._rng.integers(0, len(seq))
+        else:
+            idx = self._rng.randint(0, len(seq))
+        return seq[idx]
+
+    def gauss(self, mu, sigma):
+        return self._rng.normal(mu, sigma)
+
+    def shuffle(self, seq):
+        return self._rng.shuffle(seq)
+
+    # Some methods don't match the API for numpy RandomState.
+    # Commented out versions are not used by NetworkX
+
+    def sample(self, seq, k):
+        return self._rng.choice(list(seq), size=(k,), replace=False)
+
+    def randint(self, a, b):
+        import numpy as np
+
+        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
+            tmp_rng = PythonRandomViaNumpyBits(self._rng)
+            return tmp_rng.randint(a, b)
+
+        if isinstance(self._rng, np.random.Generator):
+            return self._rng.integers(a, b + 1)
+        return self._rng.randint(a, b + 1)
+
+    # exponential as expovariate with 1/argument,
+    def expovariate(self, scale):
+        return self._rng.exponential(1 / scale)
+
+    # pareto as paretovariate with 1/argument,
+    def paretovariate(self, shape):
+        return self._rng.pareto(shape)
+
+
+#    weibull as weibullvariate multiplied by beta,
+#    def weibullvariate(self, alpha, beta):
+#        return self._rng.weibull(alpha) * beta
+#
+#    def triangular(self, low, high, mode):
+#        return self._rng.triangular(low, mode, high)
+#
+#    def choices(self, seq, weights=None, cum_weights=None, k=1):
+#        return self._rng.choice(seq
+
+
+def create_py_random_state(random_state=None):
+    """Returns a random.Random instance depending on input.
+
+    Parameters
+    ----------
+    random_state : int or random number generator or None (default=None)
+        - If int, return a `random.Random` instance set with seed=int.
+        - If `random.Random` instance, return it.
+        - If None or the `random` package, return the global random number
+          generator used by the `random` package.
+        - If an `np.random.Generator` instance, or the `np.random` package, or
+          the global numpy random number generator, then return it wrapped in
+          a `PythonRandomViaNumpyBits` class.
+        - If a `PythonRandomViaNumpyBits` instance, return it.
+        - If a `PythonRandomInterface` instance, return it.
+        - If a `np.random.RandomState` instance and not the global numpy default,
+          return it wrapped in `PythonRandomInterface` for backward bit-stream
+          matching with legacy code.
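+
+    Returns
+    -------
+    random.Random
+        Either a true `random.Random` instance, a `PythonRandomViaNumpyBits`
+        instance (a `random.Random` subclass driven by numpy bits), or a
+        `PythonRandomInterface` instance (which mimics the `random.Random` API).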
+
+    Notes
+    -----
+    - A diagram intending to illustrate the relationships behind our support
+      for numpy random numbers is called
+      `NetworkX Numpy Random Numbers `_.
+    - More discussion about this support also appears in
+      `gh-6869#comment `_.
+    - Wrappers of numpy.random number generators allow them to mimic the Python random
+      number generation algorithms. For example, Python can create arbitrarily large
+      random ints, and the wrappers use Numpy bit-streams with CPython's random module
+      to choose arbitrarily large random integers too.
+    - We provide two wrapper classes:
+      `PythonRandomViaNumpyBits` is usually what you want and is always used for
+      `np.random.Generator` instances. But for users who need to recreate random
+      numbers produced in NetworkX 3.2 or earlier, we maintain the
+      `PythonRandomInterface` wrapper as well. We use it only if passed a
+      (non-default) `np.random.RandomState` instance pre-initialized from a seed.
+      Otherwise the newer wrapper is used.
+    """
+    if random_state is None or random_state is random:
+        return random._inst
+    if isinstance(random_state, random.Random):
+        return random_state
+    if isinstance(random_state, int):
+        return random.Random(random_state)
+
+    try:
+        import numpy as np
+    except ImportError:
+        pass
+    else:
+        if isinstance(random_state, PythonRandomInterface | PythonRandomViaNumpyBits):
+            return random_state
+        if isinstance(random_state, np.random.Generator):
+            return PythonRandomViaNumpyBits(random_state)
+        if random_state is np.random:
+            return PythonRandomViaNumpyBits(np.random.mtrand._rand)
+
+        if isinstance(random_state, np.random.RandomState):
+            if random_state is np.random.mtrand._rand:
+                return PythonRandomViaNumpyBits(random_state)
+            # Only need older interface if specially constructed RandomState used
+            return PythonRandomInterface(random_state)
+
+    msg = f"{random_state} cannot be used to generate a random.Random instance"
+    raise ValueError(msg)
+
+
+def nodes_equal(nodes1, nodes2):
+    """Check if nodes are equal.
+
+    Equality here means equal as Python objects.
+    Node data must match if included.
+    The order of nodes is not relevant.
+
+    Parameters
+    ----------
+    nodes1, nodes2 : iterables of nodes, or (node, datadict) tuples
+
+    Returns
+    -------
+    bool
+        True if nodes are equal, False otherwise.
+    """
+    nlist1 = list(nodes1)
+    nlist2 = list(nodes2)
+    try:
+        d1 = dict(nlist1)
+        d2 = dict(nlist2)
+    except (ValueError, TypeError):
+        d1 = dict.fromkeys(nlist1)
+        d2 = dict.fromkeys(nlist2)
+    return d1 == d2
+
+
+def edges_equal(edges1, edges2):
+    """Check if edges are equal.
+
+    Equality here means equal as Python objects.
+    Edge data must match if included.
+    The order of the edges is not relevant.
+
+    Parameters
+    ----------
+    edges1, edges2 : iterables of edge tuples, given as
+        (u, v) node pairs, or
+        (u, v, d) tuples with a data dict, or
+        (u, v, k, d) tuples with a key and a data dict
+
+    Returns
+    -------
+    bool
+        True if edges are equal, False otherwise.
+    """
+    from collections import defaultdict
+
+    d1 = defaultdict(dict)
+    d2 = defaultdict(dict)
+    c1 = 0
+    for c1, e in enumerate(edges1):
+        u, v = e[0], e[1]
+        data = [e[2:]]
+        if v in d1[u]:
+            data = d1[u][v] + data
+        d1[u][v] = data
+        d1[v][u] = data
+    c2 = 0
+    for c2, e in enumerate(edges2):
+        u, v = e[0], e[1]
+        data = [e[2:]]
+        if v in d2[u]:
+            data = d2[u][v] + data
+        d2[u][v] = data
+        d2[v][u] = data
+    if c1 != c2:
+        return False
+    # can check one direction because lengths are the same.
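+    # Compare the stored data lists as multisets: for each (node, neighbor)
+    # pair, every entry must occur the same number of times on both sides.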
+ for n, nbrdict in d1.items(): + for nbr, datalist in nbrdict.items(): + if n not in d2: + return False + if nbr not in d2[n]: + return False + d2datalist = d2[n][nbr] + for data in datalist: + if datalist.count(data) != d2datalist.count(data): + return False + return True + + +def graphs_equal(graph1, graph2): + """Check if graphs are equal. + + Equality here means equal as Python objects (not isomorphism). + Node, edge and graph data must match. + + Parameters + ---------- + graph1, graph2 : graph + + Returns + ------- + bool + True if graphs are equal, False otherwise. + """ + return ( + graph1.adj == graph2.adj + and graph1.nodes == graph2.nodes + and graph1.graph == graph2.graph + ) + + +def _clear_cache(G): + """Clear the cache of a graph (currently stores converted graphs). + + Caching is controlled via ``nx.config.cache_converted_graphs`` configuration. + """ + if cache := getattr(G, "__networkx_cache__", None): + cache.clear() diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/random_sequence.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/random_sequence.py new file mode 100644 index 0000000000000000000000000000000000000000..20a7b5e0a7fcc426ed9840f8bed2abf500e357e5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/random_sequence.py @@ -0,0 +1,164 @@ +""" +Utilities for generating random numbers, random sequences, and +random selections. +""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = [ + "powerlaw_sequence", + "zipf_rv", + "cumulative_distribution", + "discrete_sequence", + "random_weighted_sample", + "weighted_choice", +] + + +# The same helpers for choosing random sequences from distributions +# uses Python's random module +# https://docs.python.org/3/library/random.html + + +@py_random_state(2) +def powerlaw_sequence(n, exponent=2.0, seed=None): + """ + Return sample sequence of length n from a power law distribution. + """ + return [seed.paretovariate(exponent - 1) for i in range(n)] + + +@py_random_state(2) +def zipf_rv(alpha, xmin=1, seed=None): + r"""Returns a random value chosen from the Zipf distribution. + + The return value is an integer drawn from the probability distribution + + .. math:: + + p(x)=\frac{x^{-\alpha}}{\zeta(\alpha, x_{\min})}, + + where $\zeta(\alpha, x_{\min})$ is the Hurwitz zeta function. + + Parameters + ---------- + alpha : float + Exponent value of the distribution + xmin : int + Minimum value + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + x : int + Random value from Zipf distribution + + Raises + ------ + ValueError: + If xmin < 1 or + If alpha <= 1 + + Notes + ----- + The rejection algorithm generates random values for a the power-law + distribution in uniformly bounded expected time dependent on + parameters. See [1]_ for details on its operation. + + Examples + -------- + >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42) + 8 + + References + ---------- + .. [1] Luc Devroye, Non-Uniform Random Variate Generation, + Springer-Verlag, New York, 1986. 
+ """ + if xmin < 1: + raise ValueError("xmin < 1") + if alpha <= 1: + raise ValueError("a <= 1.0") + a1 = alpha - 1.0 + b = 2**a1 + while True: + u = 1.0 - seed.random() # u in (0,1] + v = seed.random() # v in [0,1) + x = int(xmin * u ** -(1.0 / a1)) + t = (1.0 + (1.0 / x)) ** a1 + if v * x * (t - 1.0) / (b - 1.0) <= t / b: + break + return x + + +def cumulative_distribution(distribution): + """Returns normalized cumulative distribution from discrete distribution.""" + + cdf = [0.0] + psum = sum(distribution) + for i in range(len(distribution)): + cdf.append(cdf[i] + distribution[i] / psum) + return cdf + + +@py_random_state(3) +def discrete_sequence(n, distribution=None, cdistribution=None, seed=None): + """ + Return sample sequence of length n from a given discrete distribution + or discrete cumulative distribution. + + One of the following must be specified. + + distribution = histogram of values, will be normalized + + cdistribution = normalized discrete cumulative distribution + + """ + import bisect + + if cdistribution is not None: + cdf = cdistribution + elif distribution is not None: + cdf = cumulative_distribution(distribution) + else: + raise nx.NetworkXError( + "discrete_sequence: distribution or cdistribution missing" + ) + + # get a uniform random number + inputseq = [seed.random() for i in range(n)] + + # choose from CDF + seq = [bisect.bisect_left(cdf, s) - 1 for s in inputseq] + return seq + + +@py_random_state(2) +def random_weighted_sample(mapping, k, seed=None): + """Returns k items without replacement from a weighted sample. + + The input is a dictionary of items with weights as values. + """ + if k > len(mapping): + raise ValueError("sample larger than population") + sample = set() + while len(sample) < k: + sample.add(weighted_choice(mapping, seed)) + return list(sample) + + +@py_random_state(1) +def weighted_choice(mapping, seed=None): + """Returns a single element from a weighted sample. + + The input is a dictionary of items with weights as values. + """ + # use roulette method + rnd = seed.random() * sum(mapping.values()) + for k, w in mapping.items(): + rnd -= w + if rnd < 0: + return k diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/rcm.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/rcm.py new file mode 100644 index 0000000000000000000000000000000000000000..f9e1bfee69d785a7ecaa4800a75616cbd8ac399b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/rcm.py @@ -0,0 +1,158 @@ +""" +Cuthill-McKee ordering of graph nodes to produce sparse matrices +""" +from collections import deque +from operator import itemgetter + +import networkx as nx + +from ..utils import arbitrary_element + +__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"] + + +def cuthill_mckee_ordering(G, heuristic=None): + """Generate an ordering (permutation) of the graph nodes to make + a sparse matrix. + + Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + heuristic : function, optional + Function to choose starting node for RCM algorithm. If None + a node from a pseudo-peripheral pair is used. A user-defined function + can be supplied that takes a graph object and returns a single node. + + Returns + ------- + nodes : generator + Generator of nodes in Cuthill-McKee ordering. 
+
+    Examples
+    --------
+    >>> from networkx.utils import cuthill_mckee_ordering
+    >>> G = nx.path_graph(4)
+    >>> rcm = list(cuthill_mckee_ordering(G))
+    >>> A = nx.adjacency_matrix(G, nodelist=rcm)
+
+    Smallest degree node as heuristic function:
+
+    >>> def smallest_degree(G):
+    ...     return min(G, key=G.degree)
+    >>> rcm = list(cuthill_mckee_ordering(G, heuristic=smallest_degree))
+
+
+    See Also
+    --------
+    reverse_cuthill_mckee_ordering
+
+    Notes
+    -----
+    Finding the ordering that optimally reduces the bandwidth is
+    NP-complete [2]_.
+
+
+    References
+    ----------
+    .. [1] E. Cuthill and J. McKee.
+       Reducing the bandwidth of sparse symmetric matrices,
+       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
+       http://doi.acm.org/10.1145/800195.805928
+    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
+       Springer-Verlag New York, Inc., New York, NY, USA.
+    """
+    for c in nx.connected_components(G):
+        yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic)
+
+
+def reverse_cuthill_mckee_ordering(G, heuristic=None):
+    """Generate an ordering (permutation) of the graph nodes to make
+    a sparse matrix.
+
+    Uses the reverse Cuthill-McKee heuristic (based on breadth-first search)
+    [1]_.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph
+
+    heuristic : function, optional
+        Function to choose starting node for RCM algorithm. If None
+        a node from a pseudo-peripheral pair is used. A user-defined function
+        can be supplied that takes a graph object and returns a single node.
+
+    Returns
+    -------
+    nodes : generator
+        Generator of nodes in reverse Cuthill-McKee ordering.
+
+    Examples
+    --------
+    >>> from networkx.utils import reverse_cuthill_mckee_ordering
+    >>> G = nx.path_graph(4)
+    >>> rcm = list(reverse_cuthill_mckee_ordering(G))
+    >>> A = nx.adjacency_matrix(G, nodelist=rcm)
+
+    Smallest degree node as heuristic function:
+
+    >>> def smallest_degree(G):
+    ...     return min(G, key=G.degree)
+    >>> rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
+
+
+    See Also
+    --------
+    cuthill_mckee_ordering
+
+    Notes
+    -----
+    Finding the ordering that optimally reduces the bandwidth is
+    NP-complete [2]_.
+
+    References
+    ----------
+    .. [1] E. Cuthill and J. McKee.
+       Reducing the bandwidth of sparse symmetric matrices,
+       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
+       http://doi.acm.org/10.1145/800195.805928
+    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
+       Springer-Verlag New York, Inc., New York, NY, USA.
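+
+    A quick check of the defining relationship (a sketch added here, not
+    upstream docs): the reverse ordering is exactly the Cuthill-McKee
+    ordering reversed.
+
+    >>> cm = list(nx.utils.cuthill_mckee_ordering(G))
+    >>> list(reverse_cuthill_mckee_ordering(G)) == cm[::-1]
+    True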
+ """ + return reversed(list(cuthill_mckee_ordering(G, heuristic=heuristic))) + + +def connected_cuthill_mckee_ordering(G, heuristic=None): + # the cuthill mckee algorithm for connected graphs + if heuristic is None: + start = pseudo_peripheral_node(G) + else: + start = heuristic(G) + visited = {start} + queue = deque([start]) + while queue: + parent = queue.popleft() + yield parent + nd = sorted(G.degree(set(G[parent]) - visited), key=itemgetter(1)) + children = [n for n, d in nd] + visited.update(children) + queue.extend(children) + + +def pseudo_peripheral_node(G): + # helper for cuthill-mckee to find a node in a "pseudo peripheral pair" + # to use as good starting node + u = arbitrary_element(G) + lp = 0 + v = u + while True: + spl = dict(nx.shortest_path_length(G, v)) + l = max(spl.values()) + if l <= lp: + break + lp = l + farthest = (n for n, dist in spl.items() if dist == l) + v, deg = min(G.degree(farthest), key=itemgetter(1)) + return v diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68be91de842476c056f81c57012ba3ef308b571e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe2474740403979887eeb3a1ffb2e0d1771aa0be Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c4f9a8e8d770e6e957648fff677c8c7f6e5afbc Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..271d2cf981462eeae2ccb2e3fe2bd7b028c9dece Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..725d775fcc4c0888814d611723e3495811c74944 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..cf802d704b3d6165da68df460fb04018a8c48303 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..888e9ef6d9a3c1a99975c882bb1fab74256025ec Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a12778411d150762f8a4785d2f850bc3019c86b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6216611882e537b4d05d55dfe593eca483e9830a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5ecc75d39d0f5c668b30b7aee875d8f707faf62 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test__init.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test__init.py new file mode 100644 index 0000000000000000000000000000000000000000..ecbcce36df7cd37781dd45879f63f7d6f55e5567 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test__init.py @@ -0,0 +1,11 @@ +import pytest + + +def test_utils_namespace(): + """Ensure objects are not unintentionally exposed in utils namespace.""" + with pytest.raises(ImportError): + from networkx.utils import nx + with pytest.raises(ImportError): + from networkx.utils import sys + with pytest.raises(ImportError): + from networkx.utils import defaultdict, deque diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_config.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_config.py new file mode 100644 index 0000000000000000000000000000000000000000..14a84485bf62f735eca43ae48b379150ca0da17e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_config.py @@ -0,0 +1,186 @@ +import collections +import pickle + +import pytest + +import networkx as nx +from networkx.utils.configs import Config + + +# Define this at module level so we can test pickling +class ExampleConfig(Config): + """Example configuration.""" + + x: int + y: str + + def _check_config(self, key, value): + if key 
== "x" and value <= 0: + raise ValueError("x must be positive") + if key == "y" and not isinstance(value, str): + raise TypeError("y must be a str") + + +class EmptyConfig(Config): + pass + + +@pytest.mark.parametrize("cfg", [EmptyConfig(), Config()]) +def test_config_empty(cfg): + assert dir(cfg) == [] + with pytest.raises(AttributeError): + cfg.x = 1 + with pytest.raises(KeyError): + cfg["x"] = 1 + with pytest.raises(AttributeError): + cfg.x + with pytest.raises(KeyError): + cfg["x"] + assert len(cfg) == 0 + assert "x" not in cfg + assert cfg == cfg + assert cfg.get("x", 2) == 2 + assert set(cfg.keys()) == set() + assert set(cfg.values()) == set() + assert set(cfg.items()) == set() + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert isinstance(cfg, collections.abc.Collection) + assert isinstance(cfg, collections.abc.Mapping) + + +def test_config_subclass(): + with pytest.raises(TypeError, match="missing 2 required keyword-only"): + ExampleConfig() + with pytest.raises(ValueError, match="x must be positive"): + ExampleConfig(x=0, y="foo") + with pytest.raises(TypeError, match="unexpected keyword"): + ExampleConfig(x=1, y="foo", z="bad config") + with pytest.raises(TypeError, match="unexpected keyword"): + EmptyConfig(z="bad config") + cfg = ExampleConfig(x=1, y="foo") + assert cfg.x == 1 + assert cfg["x"] == 1 + assert cfg["y"] == "foo" + assert cfg.y == "foo" + assert "x" in cfg + assert "y" in cfg + assert "z" not in cfg + assert len(cfg) == 2 + assert set(iter(cfg)) == {"x", "y"} + assert set(cfg.keys()) == {"x", "y"} + assert set(cfg.values()) == {1, "foo"} + assert set(cfg.items()) == {("x", 1), ("y", "foo")} + assert dir(cfg) == ["x", "y"] + cfg.x = 2 + cfg["y"] = "bar" + assert cfg["x"] == 2 + assert cfg.y == "bar" + with pytest.raises(TypeError, match="can't be deleted"): + del cfg.x + with pytest.raises(TypeError, match="can't be deleted"): + del cfg["y"] + assert cfg.x == 2 + assert cfg == cfg + assert cfg == ExampleConfig(x=2, y="bar") + assert cfg != ExampleConfig(x=3, y="baz") + assert cfg != Config(x=2, y="bar") + with pytest.raises(TypeError, match="y must be a str"): + cfg["y"] = 5 + with pytest.raises(ValueError, match="x must be positive"): + cfg.x = -5 + assert cfg.get("x", 10) == 2 + with pytest.raises(AttributeError): + cfg.z = 5 + with pytest.raises(KeyError): + cfg["z"] = 5 + with pytest.raises(AttributeError): + cfg.z + with pytest.raises(KeyError): + cfg["z"] + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert cfg.__doc__ == "Example configuration." + assert cfg2.__doc__ == "Example configuration." 
+ + +def test_config_defaults(): + class DefaultConfig(Config): + x: int = 0 + y: int + + cfg = DefaultConfig(y=1) + assert cfg.x == 0 + cfg = DefaultConfig(x=2, y=1) + assert cfg.x == 2 + + +def test_nxconfig(): + assert isinstance(nx.config.backend_priority, list) + assert isinstance(nx.config.backends, Config) + with pytest.raises(TypeError, match="must be a list of backend names"): + nx.config.backend_priority = "nx_loopback" + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backend_priority = ["this_almost_certainly_is_not_a_backend"] + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = {} + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = Config(plausible_backend_name={}) + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backends = Config(this_almost_certainly_is_not_a_backend=Config()) + with pytest.raises(TypeError, match="must be True or False"): + nx.config.cache_converted_graphs = "bad value" + + +def test_not_strict(): + class FlexibleConfig(Config, strict=False): + x: int + + cfg = FlexibleConfig(x=1) + assert "_strict" not in cfg + assert len(cfg) == 1 + assert list(cfg) == ["x"] + assert list(cfg.keys()) == ["x"] + assert list(cfg.values()) == [1] + assert list(cfg.items()) == [("x", 1)] + assert cfg.x == 1 + assert cfg["x"] == 1 + assert "x" in cfg + assert hasattr(cfg, "x") + assert "FlexibleConfig(x=1)" in repr(cfg) + assert cfg == FlexibleConfig(x=1) + del cfg.x + assert "FlexibleConfig()" in repr(cfg) + assert len(cfg) == 0 + assert not hasattr(cfg, "x") + assert "x" not in cfg + assert not hasattr(cfg, "y") + assert "y" not in cfg + cfg.y = 2 + assert len(cfg) == 1 + assert list(cfg) == ["y"] + assert list(cfg.keys()) == ["y"] + assert list(cfg.values()) == [2] + assert list(cfg.items()) == [("y", 2)] + assert cfg.y == 2 + assert cfg["y"] == 2 + assert hasattr(cfg, "y") + assert "y" in cfg + del cfg["y"] + assert len(cfg) == 0 + assert list(cfg) == [] + with pytest.raises(AttributeError, match="y"): + del cfg.y + with pytest.raises(KeyError, match="y"): + del cfg["y"] + with pytest.raises(TypeError, match="missing 1 required keyword-only"): + FlexibleConfig() + # Be strict when first creating the config object + with pytest.raises(TypeError, match="unexpected keyword argument 'y'"): + FlexibleConfig(x=1, y=2) + + class FlexibleConfigWithDefault(Config, strict=False): + x: int = 0 + + assert FlexibleConfigWithDefault().x == 0 + assert FlexibleConfigWithDefault(x=1)["x"] == 1 diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_decorators.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..0a4aeabfe0b016bec362eac628489f6f4244cc59 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_decorators.py @@ -0,0 +1,510 @@ +import os +import pathlib +import random +import tempfile + +import pytest + +import networkx as nx +from networkx.utils.decorators import ( + argmap, + not_implemented_for, + np_random_state, + open_file, + py_random_state, +) +from networkx.utils.misc import PythonRandomInterface, PythonRandomViaNumpyBits + + +def test_not_implemented_decorator(): + @not_implemented_for("directed") + def test_d(G): + pass + + test_d(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_d(nx.DiGraph()) + + 
@not_implemented_for("undirected") + def test_u(G): + pass + + test_u(nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_u(nx.Graph()) + + @not_implemented_for("multigraph") + def test_m(G): + pass + + test_m(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_m(nx.MultiGraph()) + + @not_implemented_for("graph") + def test_g(G): + pass + + test_g(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_g(nx.Graph()) + + # not MultiDiGraph (multiple arguments => AND) + @not_implemented_for("directed", "multigraph") + def test_not_md(G): + pass + + test_not_md(nx.Graph()) + test_not_md(nx.DiGraph()) + test_not_md(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_not_md(nx.MultiDiGraph()) + + # Graph only (multiple decorators => OR) + @not_implemented_for("directed") + @not_implemented_for("multigraph") + def test_graph_only(G): + pass + + test_graph_only(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.MultiDiGraph()) + + with pytest.raises(ValueError): + not_implemented_for("directed", "undirected") + + with pytest.raises(ValueError): + not_implemented_for("multigraph", "graph") + + +def test_not_implemented_decorator_key(): + with pytest.raises(KeyError): + + @not_implemented_for("foo") + def test1(G): + pass + + test1(nx.Graph()) + + +def test_not_implemented_decorator_raise(): + with pytest.raises(nx.NetworkXNotImplemented): + + @not_implemented_for("graph") + def test1(G): + pass + + test1(nx.Graph()) + + +class TestOpenFileDecorator: + def setup_method(self): + self.text = ["Blah... 
", "BLAH ", "BLAH!!!!"] + self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False) + self.name = self.fobj.name + + def teardown_method(self): + self.fobj.close() + os.unlink(self.name) + + def write(self, path): + for text in self.text: + path.write(text.encode("ascii")) + + @open_file(1, "r") + def read(self, path): + return path.readlines()[0] + + @staticmethod + @open_file(0, "wb") + def writer_arg0(path): + path.write(b"demo") + + @open_file(1, "wb+") + def writer_arg1(self, path): + self.write(path) + + @open_file(2, "wb") + def writer_arg2default(self, x, path=None): + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + @open_file(4, "wb") + def writer_arg4default(self, x, y, other="hello", path=None, **kwargs): + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + @open_file("path", "wb") + def writer_kwarg(self, **kwargs): + path = kwargs.get("path", None) + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + def test_writer_arg0_str(self): + self.writer_arg0(self.name) + + def test_writer_arg0_fobj(self): + self.writer_arg0(self.fobj) + + def test_writer_arg0_pathlib(self): + self.writer_arg0(pathlib.Path(self.name)) + + def test_writer_arg1_str(self): + self.writer_arg1(self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg1_fobj(self): + self.writer_arg1(self.fobj) + assert not self.fobj.closed + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_str(self): + self.writer_arg2default(0, path=None) + self.writer_arg2default(0, path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_fobj(self): + self.writer_arg2default(0, path=self.fobj) + assert not self.fobj.closed + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_fobj_path_none(self): + self.writer_arg2default(0, path=None) + + def test_writer_arg4default_fobj(self): + self.writer_arg4default(0, 1, dog="dog", other="other") + self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_str(self): + self.writer_kwarg(path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_fobj(self): + self.writer_kwarg(path=self.fobj) + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_path_none(self): + self.writer_kwarg(path=None) + + +class TestRandomState: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + @np_random_state(1) + def instantiate_np_random_state(self, random_state): + allowed = (np.random.RandomState, np.random.Generator) + assert isinstance(random_state, allowed) + return random_state.random() + + @py_random_state(1) + def instantiate_py_random_state(self, random_state): + allowed = (random.Random, PythonRandomInterface, PythonRandomViaNumpyBits) + assert isinstance(random_state, allowed) + return random_state.random() + + def test_random_state_None(self): + np.random.seed(42) + rv = np.random.random() + np.random.seed(42) + assert rv == self.instantiate_np_random_state(None) + + random.seed(42) + rv = random.random() + random.seed(42) + assert rv == self.instantiate_py_random_state(None) + + def test_random_state_np_random(self): + np.random.seed(42) 
+ rv = np.random.random() + np.random.seed(42) + assert rv == self.instantiate_np_random_state(np.random) + np.random.seed(42) + assert rv == self.instantiate_py_random_state(np.random) + + def test_random_state_int(self): + np.random.seed(42) + np_rv = np.random.random() + random.seed(42) + py_rv = random.random() + + np.random.seed(42) + seed = 1 + rval = self.instantiate_np_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + random.seed(42) + rval = self.instantiate_py_random_state(seed) + rval_expected = random.Random(seed).random() + assert rval == rval_expected + # test that global seed wasn't changed in function + assert py_rv == random.random() + + def test_random_state_np_random_Generator(self): + np.random.seed(42) + np_rv = np.random.random() + np.random.seed(42) + seed = 1 + + rng = np.random.default_rng(seed) + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.default_rng(seed).random() + assert rval == rval_expected + + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.default_rng(seed).random(size=2)[1] + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + def test_random_state_np_random_RandomState(self): + np.random.seed(42) + np_rv = np.random.random() + np.random.seed(42) + seed = 1 + + rng = np.random.RandomState(seed) + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.RandomState(seed).random() + assert rval == rval_expected + + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.RandomState(seed).random(size=2)[1] + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + def test_random_state_py_random(self): + seed = 1 + rng = random.Random(seed) + rv = self.instantiate_py_random_state(rng) + assert rv == random.Random(seed).random() + + pytest.raises(ValueError, self.instantiate_np_random_state, rng) + + +def test_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @np_random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @np_random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +class TestArgmap: + class ArgmapError(RuntimeError): + pass + + def test_trivial_function(self): + def do_not_call(x): + raise ArgmapError("do not call this function") + + @argmap(do_not_call) + def trivial_argmap(): + return 1 + + assert trivial_argmap() == 1 + + def test_trivial_iterator(self): + def do_not_call(x): + raise ArgmapError("do not call this function") + + @argmap(do_not_call) + def trivial_argmap(): + yield from (1, 2, 3) + + assert tuple(trivial_argmap()) == (1, 2, 3) + + def test_contextmanager(self): + container = [] + + def contextmanager(x): + nonlocal container + return x, lambda: container.append(x) + + @argmap(contextmanager, 0, 1, 2, try_finally=True) + 
def foo(x, y, z): + return x, y, z + + x, y, z = foo("a", "b", "c") + + # context exits are called in reverse + assert container == ["c", "b", "a"] + + def test_tryfinally_generator(self): + container = [] + + def singleton(x): + return (x,) + + with pytest.raises(nx.NetworkXError): + + @argmap(singleton, 0, 1, 2, try_finally=True) + def foo(x, y, z): + yield from (x, y, z) + + @argmap(singleton, 0, 1, 2) + def foo(x, y, z): + return x + y + z + + q = foo("a", "b", "c") + + assert q == ("a", "b", "c") + + def test_actual_vararg(self): + @argmap(lambda x: -x, 4) + def foo(x, y, *args): + return (x, y) + tuple(args) + + assert foo(1, 2, 3, 4, 5, 6) == (1, 2, 3, 4, -5, 6) + + def test_signature_destroying_intermediate_decorator(self): + def add_one_to_first_bad_decorator(f): + """Bad because it doesn't wrap the f signature (clobbers it)""" + + def decorated(a, *args, **kwargs): + return f(a + 1, *args, **kwargs) + + return decorated + + add_two_to_second = argmap(lambda b: b + 2, 1) + + @add_two_to_second + @add_one_to_first_bad_decorator + def add_one_and_two(a, b): + return a, b + + assert add_one_and_two(5, 5) == (6, 7) + + def test_actual_kwarg(self): + @argmap(lambda x: -x, "arg") + def foo(*, arg): + return arg + + assert foo(arg=3) == -3 + + def test_nested_tuple(self): + def xform(x, y): + u, v = y + return x + u + v, (x + u, x + v) + + # we're testing args and kwargs here, too + @argmap(xform, (0, ("t", 2))) + def foo(a, *args, **kwargs): + return a, args, kwargs + + a, args, kwargs = foo(1, 2, 3, t=4) + + assert a == 1 + 4 + 3 + assert args == (2, 1 + 3) + assert kwargs == {"t": 1 + 4} + + def test_flatten(self): + assert tuple(argmap._flatten([[[[[], []], [], []], [], [], []]], set())) == () + + rlist = ["a", ["b", "c"], [["d"], "e"], "f"] + assert "".join(argmap._flatten(rlist, set())) == "abcdef" + + def test_indent(self): + code = "\n".join( + argmap._indent( + *[ + "try:", + "try:", + "pass#", + "finally:", + "pass#", + "#", + "finally:", + "pass#", + ] + ) + ) + assert ( + code + == """try: + try: + pass# + finally: + pass# + # +finally: + pass#""" + ) + + def test_immediate_raise(self): + @not_implemented_for("directed") + def yield_nodes(G): + yield from G + + G = nx.Graph([(1, 2)]) + D = nx.DiGraph() + + # test first call (argmap is compiled and executed) + with pytest.raises(nx.NetworkXNotImplemented): + node_iter = yield_nodes(D) + + # test second call (argmap is only executed) + with pytest.raises(nx.NetworkXNotImplemented): + node_iter = yield_nodes(D) + + # ensure that generators still make generators + node_iter = yield_nodes(G) + next(node_iter) + next(node_iter) + with pytest.raises(StopIteration): + next(node_iter) diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_rcm.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_rcm.py new file mode 100644 index 0000000000000000000000000000000000000000..88702b3635dfa173f27eb283bc769d0930918e62 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_rcm.py @@ -0,0 +1,63 @@ +import networkx as nx +from networkx.utils import reverse_cuthill_mckee_ordering + + +def test_reverse_cuthill_mckee(): + # example graph from + # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp + G = nx.Graph( + [ + (0, 3), + (0, 5), + (1, 2), + (1, 4), + (1, 6), + (1, 9), + (2, 3), + (2, 4), + (3, 5), + (3, 8), + (4, 6), + (5, 6), + (5, 7), + (6, 7), + ] + ) + rcm = list(reverse_cuthill_mckee_ordering(G)) + assert rcm in [[0, 8, 5, 7, 3, 6, 
2, 4, 1, 9], [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]
+
+
+def test_rcm_alternate_heuristic():
+    # example from
+    G = nx.Graph(
+        [
+            (0, 0),
+            (0, 4),
+            (1, 1),
+            (1, 2),
+            (1, 5),
+            (1, 7),
+            (2, 2),
+            (2, 4),
+            (3, 3),
+            (3, 6),
+            (4, 4),
+            (5, 5),
+            (5, 7),
+            (6, 6),
+            (7, 7),
+        ]
+    )
+
+    answers = [
+        [6, 3, 5, 7, 1, 2, 4, 0],
+        [6, 3, 7, 5, 1, 2, 4, 0],
+        [7, 5, 1, 2, 4, 0, 6, 3],
+    ]
+
+    def smallest_degree(G):
+        deg, node = min((d, n) for n, d in G.degree())
+        return node
+
+    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
+    assert rcm in answers
diff --git a/env-llmeval/lib/python3.10/site-packages/networkx/utils/union_find.py b/env-llmeval/lib/python3.10/site-packages/networkx/utils/union_find.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a07129f5427cd8a3caf30095efee125bc3d853b
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/networkx/utils/union_find.py
@@ -0,0 +1,106 @@
+"""
+Union-find data structure.
+"""
+
+from networkx.utils import groups
+
+
+class UnionFind:
+    """Union-find data structure.
+
+    Each UnionFind instance X maintains a family of disjoint sets of
+    hashable objects, supporting the following two methods:
+
+    - X[item] returns a name for the set containing the given item.
+      Each set is named by an arbitrarily-chosen one of its members; as
+      long as the set remains unchanged it will keep the same name. If
+      the item is not yet part of a set in X, a new singleton set is
+      created for it.
+
+    - X.union(item1, item2, ...) merges the sets containing each item
+      into a single larger set. If any item is not yet part of a set
+      in X, it is added to X as one of the members of the merged set.
+
+    Based on Josiah Carlson's code,
+    https://code.activestate.com/recipes/215912/
+    with significant additional changes by D. Eppstein.
+    http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py
+
+    """
+
+    def __init__(self, elements=None):
+        """Create a new empty union-find structure.
+
+        If *elements* is an iterable, this structure will be initialized
+        with the discrete partition on the given set of elements.
+
+        """
+        if elements is None:
+            elements = ()
+        self.parents = {}
+        self.weights = {}
+        for x in elements:
+            self.weights[x] = 1
+            self.parents[x] = x
+
+    def __getitem__(self, object):
+        """Find and return the name of the set containing the object."""
+
+        # check for previously unknown object
+        if object not in self.parents:
+            self.parents[object] = object
+            self.weights[object] = 1
+            return object
+
+        # find path of objects leading to the root
+        path = []
+        root = self.parents[object]
+        while root != object:
+            path.append(object)
+            object = root
+            root = self.parents[object]
+
+        # compress the path and return
+        for ancestor in path:
+            self.parents[ancestor] = root
+        return root
+
+    def __iter__(self):
+        """Iterate through all items ever found or unioned by this structure."""
+        return iter(self.parents)
+
+    def to_sets(self):
+        """Iterates over the sets stored in this structure.
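+
+        Paths are fully compressed first (each element is looked up via
+        ``self[x]``), so every element points directly at its root before
+        the sets are grouped.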
+ + For example:: + + >>> partition = UnionFind("xyz") + >>> sorted(map(sorted, partition.to_sets())) + [['x'], ['y'], ['z']] + >>> partition.union("x", "y") + >>> sorted(map(sorted, partition.to_sets())) + [['x', 'y'], ['z']] + + """ + # Ensure fully pruned paths + for x in self.parents: + _ = self[x] # Evaluated for side-effect only + + yield from groups(self.parents).values() + + def union(self, *objects): + """Find the sets containing the objects and merge them all.""" + # Find the heaviest root according to its weight. + roots = iter( + sorted( + {self[x] for x in objects}, key=lambda r: self.weights[r], reverse=True + ) + ) + try: + root = next(roots) + except StopIteration: + return + + for r in roots: + self.weights[root] += self.weights[r] + self.parents[r] = root diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..106f16a7ab97511e4c868dd24f81ffbed0eac91d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_base.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e3367a8c5c35782a65a520cc7d0a61ac69c1c1a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_base.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_binary.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_binary.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..891e37a1c73151018233d81ef8dfe3fe69cf6c30 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_binary.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bool.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bool.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d8029c0499c10188b42b4c9a1ca230c141258d2a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bool.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bytes.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bytes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f738a15688717ef5dd21d67c19776bd4db7b8ec2 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_bytes.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_datetime.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_datetime.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..225613104d24d905d4594d211dc1a9630096c2b2 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_datetime.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_dictionary.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_dictionary.cpython-310.pyc new 
file mode 100644 index 0000000000000000000000000000000000000000..24c5e7cf0bdca8d102065cae2f2db0b13b65afb6 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_dictionary.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_infinity.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_infinity.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52d905d480035e53ff6fac968a14a541b065b2df Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_infinity.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_integer.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_integer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8251956112b429fe74ebafeafc8f90cb94f2d5b4 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_integer.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_ipaddress.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_ipaddress.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a5f1b78fc12eebc4b463fd8819794ef5141c606 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_ipaddress.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_list.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_list.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2d8e2169be193dce2bd9636b7d461d2257122c9a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_list.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_nan.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_nan.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c58cadea9f31846337a33a3de39d1ab55d98200 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_nan.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_none.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_none.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85f072d6df385d4e1f1cb17f096f46fffc8a162f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_none.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_realnumber.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_realnumber.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e35b1303c97986ff0e385d8baa75e5d52a260281 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_realnumber.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_string.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_string.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..368673bea8df918fd5f84119126858a612d4e554 Binary 
files /dev/null and b/env-llmeval/lib/python3.10/site-packages/typepy/type/__pycache__/_string.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_binary.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_binary.py new file mode 100644 index 0000000000000000000000000000000000000000..a60ae6951a060df7ad32cc4629e40e99f664193d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_binary.py @@ -0,0 +1,34 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ._base import AbstractType + + +class Binary(AbstractType): + """ + |result_matrix_desc| + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.STRING + + def __init__(self, value: Any, strict_level: int = 1, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + from ..checker import BytesTypeChecker + + return BytesTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + from ..converter import BytesConverter + + return BytesConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_datetime.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_datetime.py new file mode 100644 index 0000000000000000000000000000000000000000..c65d19d703584efe2df05cf8e1e5556b48cea56e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_datetime.py @@ -0,0 +1,34 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ..checker import DateTimeTypeChecker +from ..converter import DateTimeConverter +from ._base import AbstractType + + +class DateTime(AbstractType): + """ + |result_matrix_desc| + + .. include:: matrix_datetime_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.DATETIME + + def __init__(self, value: Any, strict_level: int = 2, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + return DateTimeTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + return DateTimeConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_infinity.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_infinity.py new file mode 100644 index 0000000000000000000000000000000000000000..687d6fa7ec6e64831f7a19404863f364e8e9bcc6 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_infinity.py @@ -0,0 +1,34 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ..checker import InfinityTypeChecker +from ..converter import FloatConverter +from ._base import AbstractType + + +class Infinity(AbstractType): + """ + |result_matrix_desc| + + .. 
include:: matrix_infinity_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.INFINITY + + def __init__(self, value: Any, strict_level: int = 1, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + return InfinityTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + return FloatConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_integer.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_integer.py new file mode 100644 index 0000000000000000000000000000000000000000..992b1f7a1cee4c03951df0fae593a4571be7ecba --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_integer.py @@ -0,0 +1,36 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ._base import AbstractType + + +class Integer(AbstractType): + """ + |result_matrix_desc| + + .. include:: matrix_integer_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.INTEGER + + def __init__(self, value: Any, strict_level: int = 1, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + from ..checker._integer import IntegerTypeChecker + + return IntegerTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + from ..converter._integer import IntegerConverter + + return IntegerConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_list.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_list.py new file mode 100644 index 0000000000000000000000000000000000000000..62d0d03fce0b6db9bb129a4438e5169d13454514 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_list.py @@ -0,0 +1,34 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ..checker import ListTypeChecker +from ..converter import ListConverter +from ._base import AbstractType + + +class List(AbstractType): + """ + |result_matrix_desc| + + .. include:: matrix_list_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.LIST + + def __init__(self, value: Any, strict_level: int = 1, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + return ListTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + return ListConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_nan.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_nan.py new file mode 100644 index 0000000000000000000000000000000000000000..fcf630f1e7a5b805b277ce655e46ca7109068dca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_nan.py @@ -0,0 +1,34 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ..checker import NanTypeChecker +from ..converter import FloatConverter +from ._base import AbstractType + + +class Nan(AbstractType): + """ + |result_matrix_desc| + + .. 
include:: matrix_nan_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.NAN + + def __init__(self, value: Any, strict_level: int = 1, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + return NanTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + return FloatConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_none.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_none.py new file mode 100644 index 0000000000000000000000000000000000000000..67a92e0442a0569472d4138b05597e56c2167115 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_none.py @@ -0,0 +1,37 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ..checker import NoneTypeChecker +from ..converter import NopConverter +from ._base import AbstractType + + +class NoneType(AbstractType): + """ + |result_matrix_desc| + + .. include:: matrix_none_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.NONE + + def __init__(self, value: Any, strict_level: int = 0, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def force_convert(self): + return None + + def _create_type_checker(self): + return NoneTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + return NopConverter(self._data, self._params) diff --git a/env-llmeval/lib/python3.10/site-packages/typepy/type/_realnumber.py b/env-llmeval/lib/python3.10/site-packages/typepy/type/_realnumber.py new file mode 100644 index 0000000000000000000000000000000000000000..7a9e660c7b63501430f78e572ec22c605d93cc15 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typepy/type/_realnumber.py @@ -0,0 +1,38 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +from typing import Any + +from .._typecode import Typecode +from ._base import AbstractType + + +class RealNumber(AbstractType): + """ + |result_matrix_desc| + + .. include:: matrix_realnumber_type.txt + + :py:attr:`.strict_level` + |strict_level| + """ + + @property + def typecode(self) -> Typecode: + return Typecode.REAL_NUMBER + + def __init__(self, value: Any, strict_level: int = 0, **kwargs) -> None: + super().__init__(value, strict_level, **kwargs) + + def _create_type_checker(self): + from ..checker._realnumber import RealNumberTypeChecker + + return RealNumberTypeChecker(self._data, self._strict_level) + + def _create_type_converter(self): + from ..converter._realnumber import FloatConverter + + converter = FloatConverter(self._data, self._params) + + return converter
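
A hedged usage sketch (not part of the diff) of the AbstractType pattern the
typepy files above implement. is_type(), convert(), and force_convert() are
typepy's public AbstractType methods (force_convert is visible in the NoneType
override above); the exact conversion results under each strict_level are an
assumption here.

    from typepy import Integer, RealNumber

    value = Integer("42")             # wrap a raw value (strict_level=1 by default)
    if value.is_type():               # uses the checker from _create_type_checker()
        print(value.convert())        # uses the converter from _create_type_converter()
    else:
        print(value.force_convert())  # best-effort conversion regardless of checks

    print(RealNumber(1.25).typecode)  # Typecode.REAL_NUMBER, per the property above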