Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- env-llmeval/lib/python3.10/site-packages/async_timeout/__init__.py +239 -0
- env-llmeval/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/async_timeout/py.typed +1 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/INSTALLER +1 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/LICENSE.txt +29 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/METADATA +165 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/RECORD +217 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/WHEEL +5 -0
- env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/top_level.txt +1 -0
- env-llmeval/lib/python3.10/site-packages/networkx/__init__.py +49 -0
- env-llmeval/lib/python3.10/site-packages/networkx/conftest.py +289 -0
- env-llmeval/lib/python3.10/site-packages/networkx/convert.py +494 -0
- env-llmeval/lib/python3.10/site-packages/networkx/convert_matrix.py +1202 -0
- env-llmeval/lib/python3.10/site-packages/networkx/drawing/__init__.py +7 -0
- env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_agraph.py +465 -0
- env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_latex.py +571 -0
- env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pydot.py +411 -0
- env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pylab.py +1871 -0
- env-llmeval/lib/python3.10/site-packages/networkx/exception.py +125 -0
- env-llmeval/lib/python3.10/site-packages/networkx/lazy_imports.py +188 -0
- env-llmeval/lib/python3.10/site-packages/networkx/relabel.py +285 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__init__.py +8 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/backends.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/decorators.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/misc.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/random_sequence.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/rcm.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/union_find.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/backends.py +1553 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/configs.py +260 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/decorators.py +1295 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/heaps.py +340 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/mapped_queue.py +298 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/misc.py +601 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/random_sequence.py +164 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/rcm.py +158 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test__init.py +11 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_config.py +186 -0
- env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_decorators.py +510 -0
env-llmeval/lib/python3.10/site-packages/async_timeout/__init__.py
ADDED
@@ -0,0 +1,239 @@
+import asyncio
+import enum
+import sys
+import warnings
+from types import TracebackType
+from typing import Optional, Type
+
+
+if sys.version_info >= (3, 8):
+    from typing import final
+else:
+    from typing_extensions import final
+
+
+if sys.version_info >= (3, 11):
+
+    def _uncancel_task(task: "asyncio.Task[object]") -> None:
+        task.uncancel()
+
+else:
+
+    def _uncancel_task(task: "asyncio.Task[object]") -> None:
+        pass
+
+
+__version__ = "4.0.3"
+
+
+__all__ = ("timeout", "timeout_at", "Timeout")
+
+
+def timeout(delay: Optional[float]) -> "Timeout":
+    """timeout context manager.
+
+    Useful in cases when you want to apply timeout logic around block
+    of code or in cases when asyncio.wait_for is not suitable. For example:
+
+    >>> async with timeout(0.001):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    delay - value in seconds or None to disable timeout logic
+    """
+    loop = asyncio.get_running_loop()
+    if delay is not None:
+        deadline = loop.time() + delay  # type: Optional[float]
+    else:
+        deadline = None
+    return Timeout(deadline, loop)
+
+
+def timeout_at(deadline: Optional[float]) -> "Timeout":
+    """Schedule the timeout at absolute time.
+
+    deadline argument points on the time in the same clock system
+    as loop.time().
+
+    Please note: it is not POSIX time but a time with
+    undefined starting base, e.g. the time of the system power on.
+
+    >>> async with timeout_at(loop.time() + 10):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    """
+    loop = asyncio.get_running_loop()
+    return Timeout(deadline, loop)
+
+
+class _State(enum.Enum):
+    INIT = "INIT"
+    ENTER = "ENTER"
+    TIMEOUT = "TIMEOUT"
+    EXIT = "EXIT"
+
+
+@final
+class Timeout:
+    # Internal class, please don't instantiate it directly
+    # Use timeout() and timeout_at() public factories instead.
+    #
+    # Implementation note: `async with timeout()` is preferred
+    # over `with timeout()`.
+    # While technically the Timeout class implementation
+    # doesn't need to be async at all,
+    # the `async with` statement explicitly points that
+    # the context manager should be used from async function context.
+    #
+    # This design allows to avoid many silly misusages.
+    #
+    # TimeoutError is raised immediately when scheduled
+    # if the deadline is passed.
+    # The purpose is to time out as soon as possible
+    # without waiting for the next await expression.
+
+    __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
+
+    def __init__(
+        self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+    ) -> None:
+        self._loop = loop
+        self._state = _State.INIT
+
+        self._task: Optional["asyncio.Task[object]"] = None
+        self._timeout_handler = None  # type: Optional[asyncio.Handle]
+        if deadline is None:
+            self._deadline = None  # type: Optional[float]
+        else:
+            self.update(deadline)
+
+    def __enter__(self) -> "Timeout":
+        warnings.warn(
+            "with timeout() is deprecated, use async with timeout() instead",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._do_enter()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    async def __aenter__(self) -> "Timeout":
+        self._do_enter()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    @property
+    def expired(self) -> bool:
+        """Is timeout expired during execution?"""
+        return self._state == _State.TIMEOUT
+
+    @property
+    def deadline(self) -> Optional[float]:
+        return self._deadline
+
+    def reject(self) -> None:
+        """Reject scheduled timeout if any."""
+        # cancel is maybe better name but
+        # task.cancel() raises CancelledError in asyncio world.
+        if self._state not in (_State.INIT, _State.ENTER):
+            raise RuntimeError(f"invalid state {self._state.value}")
+        self._reject()
+
+    def _reject(self) -> None:
+        self._task = None
+        if self._timeout_handler is not None:
+            self._timeout_handler.cancel()
+            self._timeout_handler = None
+
+    def shift(self, delay: float) -> None:
+        """Advance timeout on delay seconds.
+
+        The delay can be negative.
+
+        Raise RuntimeError if shift is called when deadline is not scheduled
+        """
+        deadline = self._deadline
+        if deadline is None:
+            raise RuntimeError("cannot shift timeout if deadline is not scheduled")
+        self.update(deadline + delay)
+
+    def update(self, deadline: float) -> None:
+        """Set deadline to absolute value.
+
+        deadline argument points on the time in the same clock system
+        as loop.time().
+
+        If new deadline is in the past the timeout is raised immediately.
+
+        Please note: it is not POSIX time but a time with
+        undefined starting base, e.g. the time of the system power on.
+        """
+        if self._state == _State.EXIT:
+            raise RuntimeError("cannot reschedule after exit from context manager")
+        if self._state == _State.TIMEOUT:
+            raise RuntimeError("cannot reschedule expired timeout")
+        if self._timeout_handler is not None:
+            self._timeout_handler.cancel()
+        self._deadline = deadline
+        if self._state != _State.INIT:
+            self._reschedule()
+
+    def _reschedule(self) -> None:
+        assert self._state == _State.ENTER
+        deadline = self._deadline
+        if deadline is None:
+            return
+
+        now = self._loop.time()
+        if self._timeout_handler is not None:
+            self._timeout_handler.cancel()
+
+        self._task = asyncio.current_task()
+        if deadline <= now:
+            self._timeout_handler = self._loop.call_soon(self._on_timeout)
+        else:
+            self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)
+
+    def _do_enter(self) -> None:
+        if self._state != _State.INIT:
+            raise RuntimeError(f"invalid state {self._state.value}")
+        self._state = _State.ENTER
+        self._reschedule()
+
+    def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
+        if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
+            assert self._task is not None
+            _uncancel_task(self._task)
+            self._timeout_handler = None
+            self._task = None
+            raise asyncio.TimeoutError
+        # timeout has not expired
+        self._state = _State.EXIT
+        self._reject()
+        return None
+
+    def _on_timeout(self) -> None:
+        assert self._task is not None
+        self._task.cancel()
+        self._state = _State.TIMEOUT
+        # drop the reference early
+        self._timeout_handler = None
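For orientation, the block below is an illustrative sketch (not part of the diff) of how the `timeout()` context manager defined in the file above is typically used; `asyncio.sleep` stands in for real awaitable work such as the aiohttp request shown in the docstring.

    import asyncio

    from async_timeout import timeout


    async def main() -> None:
        try:
            async with timeout(0.1) as cm:
                # Sleep longer than the 0.1 s budget so the deadline fires.
                await asyncio.sleep(1.0)
        except asyncio.TimeoutError:
            print("timed out, expired =", cm.expired)  # expired is True here


    asyncio.run(main())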
env-llmeval/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (6.57 kB).
env-llmeval/lib/python3.10/site-packages/async_timeout/py.typed
ADDED
@@ -0,0 +1 @@
+Placeholder
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
+pip
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/LICENSE.txt
ADDED
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2008-2021, The joblib developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/METADATA
ADDED
@@ -0,0 +1,165 @@
+Metadata-Version: 2.1
+Name: joblib
+Version: 1.4.0
+Summary: Lightweight pipelining with Python functions
+Author-email: Gael Varoquaux <[email protected]>
+License: BSD 3-Clause
+Project-URL: Homepage, https://joblib.readthedocs.io
+Project-URL: Source, https://github.com/joblib/joblib
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: Intended Audience :: Education
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Utilities
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+
+|PyPi| |Azure| |ReadTheDocs| |Codecov|
+
+.. |PyPi| image:: https://badge.fury.io/py/joblib.svg
+   :target: https://badge.fury.io/py/joblib
+   :alt: Joblib version
+
+.. |Azure| image:: https://dev.azure.com/joblib/joblib/_apis/build/status/joblib.joblib?branchName=master
+   :target: https://dev.azure.com/joblib/joblib/_build?definitionId=3&_a=summary&branchFilter=40
+   :alt: Azure CI status
+
+.. |ReadTheDocs| image:: https://readthedocs.org/projects/joblib/badge/?version=latest
+   :target: https://joblib.readthedocs.io/en/latest/?badge=latest
+   :alt: Documentation Status
+
+.. |Codecov| image:: https://codecov.io/gh/joblib/joblib/branch/master/graph/badge.svg
+   :target: https://codecov.io/gh/joblib/joblib
+   :alt: Codecov coverage
+
+
+The homepage of joblib with user documentation is located on:
+
+https://joblib.readthedocs.io
+
+Getting the latest code
+=======================
+
+To get the latest code using git, simply type::
+
+    git clone https://github.com/joblib/joblib.git
+
+If you don't have git installed, you can download a zip
+of the latest code: https://github.com/joblib/joblib/archive/refs/heads/master.zip
+
+Installing
+==========
+
+You can use `pip` to install joblib::
+
+    pip install joblib
+
+from any directory or::
+
+    python setup.py install
+
+from the source directory.
+
+Dependencies
+============
+
+- Joblib has no mandatory dependencies besides Python (supported versions are
+  3.8+).
+- Joblib has an optional dependency on Numpy (at least version 1.6.1) for array
+  manipulation.
+- Joblib includes its own vendored copy of
+  `loky <https://github.com/tomMoral/loky>`_ for process management.
+- Joblib can efficiently dump and load numpy arrays but does not require numpy
+  to be installed.
+- Joblib has an optional dependency on
+  `python-lz4 <https://pypi.python.org/pypi/lz4>`_ as a faster alternative to
+  zlib and gzip for compressed serialization.
+- Joblib has an optional dependency on psutil to mitigate memory leaks in
+  parallel worker processes.
+- Some examples require external dependencies such as pandas. See the
+  instructions in the `Building the docs`_ section for details.
+
+Workflow to contribute
+======================
+
+To contribute to joblib, first create an account on `github
+<https://github.com/>`_. Once this is done, fork the `joblib repository
+<https://github.com/joblib/joblib>`_ to have your own repository,
+clone it using 'git clone' on the computers where you want to work. Make
+your changes in your clone, push them to your github account, test them
+on several computers, and when you are happy with them, send a pull
+request to the main repository.
+
+Running the test suite
+======================
+
+To run the test suite, you need the pytest (version >= 3) and coverage modules.
+Run the test suite using::
+
+    pytest joblib
+
+from the root of the project.
+
+Building the docs
+=================
+
+To build the docs you need to have sphinx (>=1.4) and some dependencies
+installed::
+
+    pip install -U -r .readthedocs-requirements.txt
+
+The docs can then be built with the following command::
+
+    make doc
+
+The html docs are located in the ``doc/_build/html`` directory.
+
+
+Making a source tarball
+=======================
+
+To create a source tarball, eg for packaging or distributing, run the
+following command::
+
+    python setup.py sdist
+
+The tarball will be created in the `dist` directory. This command will
+compile the docs, and the resulting tarball can be installed with
+no extra dependencies than the Python standard library. You will need
+setuptool and sphinx.
+
+Making a release and uploading it to PyPI
+=========================================
+
+This command is only run by project manager, to make a release, and
+upload in to PyPI::
+
+    python setup.py sdist bdist_wheel
+    twine upload dist/*
+
+
+Note that the documentation should automatically get updated at each git
+push. If that is not the case, try building th doc locally and resolve
+any doc build error (in particular when running the examples).
+
+Updating the changelog
+======================
+
+Changes are listed in the CHANGES.rst file. They must be manually updated
+but, the following git command may be used to generate the lines::
+
+    git log --abbrev-commit --date=short --no-merges --sparse
+
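As a quick illustration of the "lightweight pipelining" this metadata describes, the sketch below (not part of the diff) uses joblib's public Parallel/delayed and Memory APIs; the "./joblib_cache" path is an arbitrary example location.

    from joblib import Memory, Parallel, delayed


    def square(x):
        return x * x


    if __name__ == "__main__":
        # Run a simple loop on two worker processes (loky backend by default).
        print(Parallel(n_jobs=2)(delayed(square)(i) for i in range(8)))

        # Cache results on disk so repeated calls are loaded instead of recomputed.
        memory = Memory("./joblib_cache", verbose=0)
        cached_square = memory.cache(square)
        print(cached_square(12))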
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/RECORD
ADDED
@@ -0,0 +1,217 @@
+joblib-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+joblib-1.4.0.dist-info/LICENSE.txt,sha256=QmEpEcGHLF5LQ_auDo7llGfNNQMyJBz3LOkGQCZPrmo,1527
+joblib-1.4.0.dist-info/METADATA,sha256=RvoJhBm8jAYOnpWDNhwlybOVwN5nFst_OYytoS6_xRQ,5370
+joblib-1.4.0.dist-info/RECORD,,
+joblib-1.4.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+joblib-1.4.0.dist-info/top_level.txt,sha256=P0LsoZ45gBL7ckL4lqQt7tdbrHD4xlVYhffmhHeeT_U,7
+joblib/__init__.py,sha256=ytsTJjbnDLtKYZ1il4QVYU8ReyTdKgKqd3zO_2FTv7Y,5132
+joblib/__pycache__/__init__.cpython-310.pyc,,
+joblib/__pycache__/_cloudpickle_wrapper.cpython-310.pyc,,
+joblib/__pycache__/_dask.cpython-310.pyc,,
+joblib/__pycache__/_memmapping_reducer.cpython-310.pyc,,
+joblib/__pycache__/_multiprocessing_helpers.cpython-310.pyc,,
+joblib/__pycache__/_parallel_backends.cpython-310.pyc,,
+joblib/__pycache__/_store_backends.cpython-310.pyc,,
+joblib/__pycache__/_utils.cpython-310.pyc,,
+joblib/__pycache__/backports.cpython-310.pyc,,
+joblib/__pycache__/compressor.cpython-310.pyc,,
+joblib/__pycache__/disk.cpython-310.pyc,,
+joblib/__pycache__/executor.cpython-310.pyc,,
+joblib/__pycache__/func_inspect.cpython-310.pyc,,
+joblib/__pycache__/hashing.cpython-310.pyc,,
+joblib/__pycache__/logger.cpython-310.pyc,,
+joblib/__pycache__/memory.cpython-310.pyc,,
+joblib/__pycache__/numpy_pickle.cpython-310.pyc,,
+joblib/__pycache__/numpy_pickle_compat.cpython-310.pyc,,
+joblib/__pycache__/numpy_pickle_utils.cpython-310.pyc,,
+joblib/__pycache__/parallel.cpython-310.pyc,,
+joblib/__pycache__/pool.cpython-310.pyc,,
+joblib/__pycache__/testing.cpython-310.pyc,,
+joblib/_cloudpickle_wrapper.py,sha256=-PBMUqgZCfd5EMaKn5FCQhWKiAJfbq4i2o6Z6KZ1ynE,417
+joblib/_dask.py,sha256=IhFjN6oMQVIcLC7bLHnA7KgO7nnEp7p6P4JH12934J4,13313
+joblib/_memmapping_reducer.py,sha256=4xclMWTgI5l-C6OFyt6FvamH-ToJu3DmopAXwt7a9F4,28092
+joblib/_multiprocessing_helpers.py,sha256=t7wIXfrLfzqFXjOeOYs4JP45tptxmYm5_yE8ylIRbR8,1925
+joblib/_parallel_backends.py,sha256=82qX4mJ5eHz4i0e5gbdHgvzST3P6LJ59hpfee0IyZCQ,25489
+joblib/_store_backends.py,sha256=PaoeHo4Mr7idPI3MvX766OqBQ_y_WkG_oZbE4MjbdKw,16683
+joblib/_utils.py,sha256=BBUs4ZHpDRxpgTsdrZSyBb39MqacM6JauI3dTRmnHm4,2076
+joblib/backports.py,sha256=aGh0cjs_xMhO2ym7RprpDjCBudiPtDhgD1vf5YMbMaI,5361
+joblib/compressor.py,sha256=WwRzsQhdQp0lbQvvEbAcaevCOCM2_Qyv_HJ4PVHtobQ,19768
+joblib/disk.py,sha256=PxUC63dBG2O1GriL1SLskHUjz7XzR-y6rqKEJHEY0jA,4389
+joblib/executor.py,sha256=PiaOwENhecRVnOdvHZLwGsr-RMKfk_F_Noy8y7nntJY,5136
+joblib/externals/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+joblib/externals/__pycache__/__init__.cpython-310.pyc,,
+joblib/externals/cloudpickle/__init__.py,sha256=vb2JCOn1EpLUdVyPe1ESyhLymcvh-Rk3ISHJ-52aDLw,308
+joblib/externals/cloudpickle/__pycache__/__init__.cpython-310.pyc,,
+joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-310.pyc,,
+joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-310.pyc,,
+joblib/externals/cloudpickle/cloudpickle.py,sha256=APCGMuIfVpWcelGsLlo2zRmwKRloaoiznQEOAoEWH9Y,55283
+joblib/externals/cloudpickle/cloudpickle_fast.py,sha256=1GqUD4nLKsv0vv9ty2La3eVLyeWNrPFlhUCN-aNI-30,322
+joblib/externals/loky/__init__.py,sha256=T20hgxG9YPps-AhsElBMzIJD73q0h3x0Hys49alTvq8,1104
+joblib/externals/loky/__pycache__/__init__.cpython-310.pyc,,
+joblib/externals/loky/__pycache__/_base.cpython-310.pyc,,
+joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-310.pyc,,
+joblib/externals/loky/__pycache__/initializers.cpython-310.pyc,,
+joblib/externals/loky/__pycache__/process_executor.cpython-310.pyc,,
+joblib/externals/loky/__pycache__/reusable_executor.cpython-310.pyc,,
+joblib/externals/loky/_base.py,sha256=LsQnEoKWKGhdeqGhMc68Aqwz4MrTnEs20KAYbFiUHzo,1057
+joblib/externals/loky/backend/__init__.py,sha256=Ix9KThV1CYk7-M5OQnJ_A_JrrrWJ-Jowa-HMMeGbp18,312
+joblib/externals/loky/backend/__pycache__/__init__.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/context.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/fork_exec.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/process.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/queues.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/reduction.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/spawn.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/synchronize.cpython-310.pyc,,
+joblib/externals/loky/backend/__pycache__/utils.cpython-310.pyc,,
+joblib/externals/loky/backend/_posix_reduction.py,sha256=xgCSrIaLI0k_MI0XNOBSp5e1ox1WN9idgrWbkWpMUr4,1776
+joblib/externals/loky/backend/_win_reduction.py,sha256=WmNB0NXtyJ_o_WzfPUEGh5dPhXIeI6FkEnFNXUxO2ws,683
+joblib/externals/loky/backend/context.py,sha256=GGBUGp4QDx1qvBuWDvyOSjNWYA79shxgAagsrcxA50E,13654
+joblib/externals/loky/backend/fork_exec.py,sha256=c3I22U_ewtT1T5Xn65SUXHrftspvllrGezGiv5KSRQY,1186
+joblib/externals/loky/backend/popen_loky_posix.py,sha256=ZFFs6H7Xp3CCQDE4oqmd_flWday6EdtlQ34Hkpa2PFQ,5580
+joblib/externals/loky/backend/popen_loky_win32.py,sha256=bYkhRA0w8qUcYFwoezeGwcnlCocEdheWXc6SZ-_rVxo,5325
+joblib/externals/loky/backend/process.py,sha256=4-Y94EoIrg4btsjTNxUBHAHhR96Nrugn_7_PGL6aU50,2018
+joblib/externals/loky/backend/queues.py,sha256=eETFvbPHwKfdoYyOgNQCyKq_Zlm-lzH3fwwpUIh-_4U,7322
+joblib/externals/loky/backend/reduction.py,sha256=CRu922R8xOxog2Snhop7y6fN1fPX9_h110brrICwZUE,7063
+joblib/externals/loky/backend/resource_tracker.py,sha256=421689XAmmxmNXktfkhNHNERIY3LbAcmWAsmRHPNUjg,14498
+joblib/externals/loky/backend/spawn.py,sha256=PVOHs8ou7IPcISb7t_Pp86FnUtPUw_KUAdxmDHOrAaU,8962
+joblib/externals/loky/backend/synchronize.py,sha256=nlDwBoLZB93m_l55qfZM_Ql-4L84PSYimoQqt5TzpDk,11768
+joblib/externals/loky/backend/utils.py,sha256=RVsxqyET4TJdbjc9uUHJmfhlQ2v4Uq-fiT_5b5rfC0s,5757
+joblib/externals/loky/cloudpickle_wrapper.py,sha256=9VfrXIfHiJcoXDqwgaHWbRsWipVA23oRJaxnXzSu7GM,3608
+joblib/externals/loky/initializers.py,sha256=dtKtRsJUmVwiJu0yZ-Ih0m8PvW_MxmouG7mShEcsStc,2567
+joblib/externals/loky/process_executor.py,sha256=eP5eVZFUsYPDhN91cxRHl23uEfqNWDeDQjrdNDB4c3o,51050
+joblib/externals/loky/reusable_executor.py,sha256=XVxLGMhCDij8CU6BDHbYKPxn8lG-rnqXaIfxO2nXGXg,10305
+joblib/func_inspect.py,sha256=Cwei03uwnZKJ9mbGmdCSjVGgq2_1lJNJSkeUn1GTvSo,14204
+joblib/hashing.py,sha256=4X9OOofkfDQr3N8NZKTsMOKWr8IiIV0kjBwqCUJCej4,10535
+joblib/logger.py,sha256=meT-hFPTZukfBRkeyCVCxhssPSr668_R5Nn25S-iXtc,5463
+joblib/memory.py,sha256=SDxiok3TiQOqy7Cd6A6yxrtWCZADWhMIiiWsBe97QRA,45990
+joblib/numpy_pickle.py,sha256=5_L7EbPg94T3iSODEdkikJNj_z2qvpZYaGei9HlMBWU,26886
+joblib/numpy_pickle_compat.py,sha256=U7zVNNF03an_7AgHOxhTxcOSbLJTWcGsnV_xbT3vRdY,8547
+joblib/numpy_pickle_utils.py,sha256=KccSp_MhMFsV41Mw9pSijchtGOCympDaHWH0VpIUeSs,8723
+joblib/parallel.py,sha256=foh2ez7ep8oIS5QInXU6qRSZ4xWWY_AeZjzDVDIOppc,84579
+joblib/pool.py,sha256=7jhFg7_qIuBbJ2URbKvyZtET9AxRKaYrYiBSes1VFxo,14411
+joblib/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+joblib/test/__pycache__/__init__.cpython-310.pyc,,
+joblib/test/__pycache__/common.cpython-310.pyc,,
+joblib/test/__pycache__/test_backports.cpython-310.pyc,,
+joblib/test/__pycache__/test_cloudpickle_wrapper.cpython-310.pyc,,
+joblib/test/__pycache__/test_config.cpython-310.pyc,,
+joblib/test/__pycache__/test_dask.cpython-310.pyc,,
+joblib/test/__pycache__/test_disk.cpython-310.pyc,,
+joblib/test/__pycache__/test_func_inspect.cpython-310.pyc,,
+joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-310.pyc,,
+joblib/test/__pycache__/test_hashing.cpython-310.pyc,,
+joblib/test/__pycache__/test_init.cpython-310.pyc,,
+joblib/test/__pycache__/test_logger.cpython-310.pyc,,
+joblib/test/__pycache__/test_memmapping.cpython-310.pyc,,
+joblib/test/__pycache__/test_memory.cpython-310.pyc,,
+joblib/test/__pycache__/test_memory_async.cpython-310.pyc,,
+joblib/test/__pycache__/test_missing_multiprocessing.cpython-310.pyc,,
+joblib/test/__pycache__/test_module.cpython-310.pyc,,
+joblib/test/__pycache__/test_numpy_pickle.cpython-310.pyc,,
+joblib/test/__pycache__/test_numpy_pickle_compat.cpython-310.pyc,,
+joblib/test/__pycache__/test_numpy_pickle_utils.cpython-310.pyc,,
+joblib/test/__pycache__/test_parallel.cpython-310.pyc,,
+joblib/test/__pycache__/test_store_backends.cpython-310.pyc,,
+joblib/test/__pycache__/test_testing.cpython-310.pyc,,
+joblib/test/__pycache__/test_utils.cpython-310.pyc,,
+joblib/test/__pycache__/testutils.cpython-310.pyc,,
+joblib/test/common.py,sha256=seNB39WwHwu0qfMo6qeV7y0m6wxCcJ1B-Q1MWLvn4Vk,2336
+joblib/test/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+joblib/test/data/__pycache__/__init__.cpython-310.pyc,,
+joblib/test/data/__pycache__/create_numpy_pickle.cpython-310.pyc,,
+joblib/test/data/create_numpy_pickle.py,sha256=BEaSv8dm3a48mvxl_Kf5ASwkkbGhwRplzNotTP4RCC0,3460
+joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np16.gz,sha256=QYRH6Q2DSGVorjCSqWCxjTWCMOJKyew4Nl2qmfQVvQ8,769
+joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np17.gz,sha256=ofTozM_KlPJa50TR8FCwc09mMmO6OO0GQhgUBLNIsXs,757
+joblib/test/data/joblib_0.10.0_compressed_pickle_py33_np18.gz,sha256=2eIVeA-XjOaT5IEQ6tI2UuHG3hwhiRciMmkBmPcIh4g,792
+joblib/test/data/joblib_0.10.0_compressed_pickle_py34_np19.gz,sha256=Gr2z_1tVWDH1H3_wCVHmakknf8KqeHKT8Yz4d1vmUCM,794
+joblib/test/data/joblib_0.10.0_compressed_pickle_py35_np19.gz,sha256=pWw_xuDbOkECqu1KGf1OFU7s2VbzC2v5F5iXhE7TwB4,790
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl,sha256=icRQjj374B-AHk5znxre0T9oWUHokoHIBQ8MqKo8l-U,986
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.bz2,sha256=oYQVIyMiUxyRgWSuBBSOvCWKzToA-kUpcoQWdV4UoV4,997
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.gzip,sha256=Jpv3iGcDgKTv-O4nZsUreIbUK7qnt2cugZ-VMgNeEDQ,798
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.lzma,sha256=c0wu0x8pPv4BcStj7pE61rZpf68FLG_pNzQZ4e82zH8,660
+joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.xz,sha256=77FG1FDG0GHQav-1bxc4Tn9ky6ubUW_MbE0_iGmz5wc,712
+joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl,sha256=4GTC7s_cWNVShERn2nvVbspZYJgyK_0man4TEqvdVzU,1068
+joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.bz2,sha256=6G1vbs_iYmz2kYJ6w4qB1k7D67UnxUMus0S4SWeBtFo,1000
+joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.gzip,sha256=tlRUWeJS1BXmcwtLNSNK9L0hDHekFl07CqWxTShinmY,831
+joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.lzma,sha256=CorPwnfv3rR5hjNtJI01-sEBMOnkSxNlRVaWTszMopA,694
+joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.xz,sha256=Dppj3MffOKsKETeptEtDaxPOv6MA6xnbpK5LzlDQ-oE,752
+joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl,sha256=HL5Fb1uR9aPLjjhoOPJ2wwM1Qyo1FCZoYYd2HVw0Fos,1068
+joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2,sha256=Pyr2fqZnwfUxXdyrBr-kRwBYY8HA_Yi7fgSguKy5pUs,1021
+joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.gzip,sha256=os8NJjQI9FhnlZM-Ay9dX_Uo35gZnoJCgQSIVvcBPfE,831
+joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.lzma,sha256=Q_0y43qU7_GqAabJ8y3PWVhOisurnCAq3GzuCu04V58,697
+joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz,sha256=BNfmiQfpeLVpdfkwlJK4hJ5Cpgl0vreVyekyc5d_PNM,752
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl,sha256=l7nvLolhBDIdPFznOz3lBHiMOPBPCMi1bXop1tFSCpY,1068
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.bz2,sha256=pqGpuIS-ZU4uP8mkglHs8MaSDiVcPy7l3XHYJSppRgY,1005
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.gzip,sha256=YRFXE6LEb6qK72yPqnXdqQVY8Ts8xKUS9PWQKhLxWvk,833
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.lzma,sha256=Bf7gCUeTuTjCkbcIdyZYz69irblX4SAVQEzxCnMQhNU,701
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz,sha256=As8w2LGWwwNmKy3QNdKljK63Yq46gjRf_RJ0lh5_WqA,752
+joblib/test/data/joblib_0.11.0_compressed_pickle_py36_np111.gz,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800
+joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl,sha256=XmsOFxeC1f1aYdGETclG6yfF9rLoB11DayOAhDMULrw,1068
+joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.bz2,sha256=vI2yWb50LKL_NgZyd_XkoD5teIg93uI42mWnx9ee-AQ,991
+joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.gzip,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800
+joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.lzma,sha256=IWA0JlZG2ur53HgTUDl1m7q79dcVq6b0VOq33gKoJU0,715
+joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.xz,sha256=3Xh_NbMZdBjYx7ynfJ3Fyke28izSRSSzzNB0z5D4k9Y,752
+joblib/test/data/joblib_0.8.4_compressed_pickle_py27_np17.gz,sha256=Sp-ZT7i6pj5on2gbptszu7RarzJpOmHJ67UKOmCPQMg,659
+joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np16.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658
+joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np17.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658
+joblib/test/data/joblib_0.9.2_compressed_pickle_py34_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673
+joblib/test/data/joblib_0.9.2_compressed_pickle_py35_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl,sha256=naijdk2xIeKdIa3mfJw0JlmOdtiN6uRM1yOJg6-M73M,670
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236
+joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104
+joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl,sha256=LynX8dLOygfxDfFywOgm7wgWOhSxLG7z-oDsU6X83Dw,670
+joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120
+joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120
+joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236
+joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104
+joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl,sha256=w9TLxpDTzp5TI6cU6lRvMsAasXEChcQgGE9s30sm_CU,691
+joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120
+joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120
+joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307
+joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl,sha256=ilOBAOaulLFvKrD32S1NfnpiK-LfzA9rC3O2I7xROuI,691
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104
+joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl,sha256=WfDVIqKcMzzh1gSAshIfzBoIpdLdZQuG79yYf5kfpOo,691
+joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120
+joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120
+joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307
+joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104
+joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz,sha256=8jYfWJsx0oY2J-3LlmEigK5cClnJSW2J2rfeSTZw-Ts,802
+joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_01.npy.z,sha256=YT9VvT3sEl2uWlOyvH2CkyE9Sok4od9O3kWtgeuUUqE,43
+joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_02.npy.z,sha256=txA5RDI0PRuiU_UNKY8pGp-zQgQQ9vaVvMi60hOPaVs,43
+joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_03.npy.z,sha256=d3AwICvU2MpSNjh2aPIsdJeGZLlDjANAF1Soa6uM0Po,37
+joblib/test/test_backports.py,sha256=Y9bhGa6H-K_FgLkDyXaSHzpaWk148Rjn8R9IKCKdy-k,1175
+joblib/test/test_cloudpickle_wrapper.py,sha256=gc05MGe1LJfhCNTv02VdDXg8Z6FeJJ3rFTff1_WmMwc,751
+joblib/test/test_config.py,sha256=EvQKH7n4qXAcPd-BTnfu4duR6b5ag65xjaEjPMDM50w,5284
+joblib/test/test_dask.py,sha256=IIHdXXpH5NenurB6jem9rYisa_rP-usPyoPVaWzBEN0,18418
+joblib/test/test_disk.py,sha256=wJd1o9nLzqEjLqxxkgB9S7-UcKjHPQ8qK5l0czcNp0o,2205
+joblib/test/test_func_inspect.py,sha256=fyR9q1nGHwch5hD1hedut2YeN19hHDyUuDc8mXQuGeY,9488
+joblib/test/test_func_inspect_special_encoding.py,sha256=5xILDjSO-xtjQAMLvMeVD-L7IG4ZURb2gvBiShaDE78,145
+joblib/test/test_hashing.py,sha256=w8_WVgXL1TFueCSriX97GrCAS3mcmu5Ly062pOAOoRI,16054
+joblib/test/test_init.py,sha256=bgNF-9CIJl1MFNA75LBWOaiNtvduVfuvglz_u9Tt8Uc,422
+joblib/test/test_logger.py,sha256=CyA3E8Y74AHZfqJxetNrYfwXSOlLc1Pq1hqt7aJ6PwA,984
+joblib/test/test_memmapping.py,sha256=6kXT4ZMCtPykWhDf66QCcQvsdC9PaJumiCY0jHPwfio,43298
+joblib/test/test_memory.py,sha256=1dWfSPgMgdp2WdnY6lMILRdCbQ4v915DDOC_Q8d3WDE,49419
+joblib/test/test_memory_async.py,sha256=g-3Y09LqMccWR8Kw_yA0-yaduL-5zDBgaGUlgleOGDo,4807
+joblib/test/test_missing_multiprocessing.py,sha256=oeneMgi6iUVIbkfazGvXmOp6fqa7ok9uhj902Qjs_nk,1123
+joblib/test/test_module.py,sha256=HTHQSgnwa-1blkvhL_oVBO5yYdw8IrQTIJCK_QCsMtM,1936
+joblib/test/test_numpy_pickle.py,sha256=6rRMvj-9dgBVucPLY7J0qBIIcY2XOSQGh1owqOiGIrg,42485
+joblib/test/test_numpy_pickle_compat.py,sha256=o4mB8M6mVRX8Hxw-fxOmWPcbSAyswTdEHk64hVv_ZCk,609
+joblib/test/test_numpy_pickle_utils.py,sha256=-lQnhOAIGyd5oyWT2LGQPc3QO_Oxb3VhM_H3QjKm5e0,383
+joblib/test/test_parallel.py,sha256=fyvzDhsGADyjLoTy9Jycdketu1YZ1WyYtQCBIQ_YEso,73523
+joblib/test/test_store_backends.py,sha256=LCCUS9DN7feJniwnOD7x0pMKAQ-5rgCBBctwi5FhjWM,3121
+joblib/test/test_testing.py,sha256=1O5ZTJkdBjVkM-Ss4mComsEbJgrwROloVu8Mqv7Jv-w,2570
+joblib/test/test_utils.py,sha256=L6aBHiZrF6TUFbPGmQXGyNMnoAy5pEhOJoH5CAG_6CU,584
+joblib/test/testutils.py,sha256=6a7zVJm1kg6M-t4CH9tz8A6rMdC6ZY9sU6wBB8C7Zzo,251
+joblib/testing.py,sha256=6i2ctsokVXohLBhc1dXIZc2TiQDRiB-glGKFbfZDIqE,3093
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.38.4)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
env-llmeval/lib/python3.10/site-packages/joblib-1.4.0.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
+joblib
env-llmeval/lib/python3.10/site-packages/networkx/__init__.py
ADDED
@@ -0,0 +1,49 @@
+"""
+NetworkX
+========
+
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+See https://networkx.org for complete documentation.
+"""
+
+__version__ = "3.3"
+
+
+# These are imported in order as listed
+from networkx.lazy_imports import _lazy_import
+
+from networkx.exception import *
+
+from networkx import utils
+from networkx.utils import _clear_cache, _dispatchable, config
+
+from networkx import classes
+from networkx.classes import filters
+from networkx.classes import *
+
+from networkx import convert
+from networkx.convert import *
+
+from networkx import convert_matrix
+from networkx.convert_matrix import *
+
+from networkx import relabel
+from networkx.relabel import *
+
+from networkx import generators
+from networkx.generators import *
+
+from networkx import readwrite
+from networkx.readwrite import *
+
+# Need to test with SciPy, when available
+from networkx import algorithms
+from networkx.algorithms import *
+
+from networkx import linalg
+from networkx.linalg import *
+
+from networkx import drawing
+from networkx.drawing import *
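A minimal sketch (not part of the diff) of the kind of creation, manipulation, and study of networks the package docstring refers to, using only the top-level API re-exported by this `__init__.py`:

    import networkx as nx

    # Build a small undirected cycle and query it with top-level functions.
    G = nx.Graph()
    G.add_edges_from([("a", "b"), ("b", "c"), ("c", "d"), ("d", "a")])

    print(G.number_of_nodes(), G.number_of_edges())  # 4 4
    print(nx.shortest_path(G, "a", "c"))             # a two-edge path, e.g. ['a', 'b', 'c']
    print(nx.degree_centrality(G))                   # each node: 2 / (4 - 1) = 0.666...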
env-llmeval/lib/python3.10/site-packages/networkx/conftest.py
ADDED
@@ -0,0 +1,289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Testing
|
3 |
+
=======
|
4 |
+
|
5 |
+
General guidelines for writing good tests:
|
6 |
+
|
7 |
+
- doctests always assume ``import networkx as nx`` so don't add that
|
8 |
+
- prefer pytest fixtures over classes with setup methods.
|
9 |
+
- use the ``@pytest.mark.parametrize`` decorator
|
10 |
+
- use ``pytest.importorskip`` for numpy, scipy, pandas, and matplotlib b/c of PyPy.
|
11 |
+
and add the module to the relevant entries below.
|
12 |
+
|
13 |
+
"""
|
14 |
+
import os
|
15 |
+
import sys
|
16 |
+
import warnings
|
17 |
+
from importlib.metadata import entry_points
|
18 |
+
|
19 |
+
import pytest
|
20 |
+
|
21 |
+
import networkx
|
22 |
+
|
23 |
+
|
24 |
+
def pytest_addoption(parser):
|
25 |
+
parser.addoption(
|
26 |
+
"--runslow", action="store_true", default=False, help="run slow tests"
|
27 |
+
)
|
28 |
+
parser.addoption(
|
29 |
+
"--backend",
|
30 |
+
action="store",
|
31 |
+
default=None,
|
32 |
+
help="Run tests with a backend by auto-converting nx graphs to backend graphs",
|
33 |
+
)
|
34 |
+
parser.addoption(
|
35 |
+
"--fallback-to-nx",
|
36 |
+
action="store_true",
|
37 |
+
default=False,
|
38 |
+
help="Run nx function if a backend doesn't implement a dispatchable function"
|
39 |
+
" (use with --backend)",
|
40 |
+
)
|
41 |
+
|
42 |
+
|
43 |
+
def pytest_configure(config):
|
44 |
+
config.addinivalue_line("markers", "slow: mark test as slow to run")
|
45 |
+
backend = config.getoption("--backend")
|
46 |
+
if backend is None:
|
47 |
+
backend = os.environ.get("NETWORKX_TEST_BACKEND")
|
48 |
+
# nx-loopback backend is only available when testing
|
49 |
+
backends = entry_points(name="nx-loopback", group="networkx.backends")
|
50 |
+
if backends:
|
51 |
+
networkx.utils.backends.backends["nx-loopback"] = next(iter(backends))
|
52 |
+
else:
|
53 |
+
warnings.warn(
|
54 |
+
"\n\n WARNING: Mixed NetworkX configuration! \n\n"
|
55 |
+
" This environment has mixed configuration for networkx.\n"
|
56 |
+
" The test object nx-loopback is not configured correctly.\n"
|
57 |
+
" You should not be seeing this message.\n"
|
58 |
+
" Try `pip install -e .`, or change your PYTHONPATH\n"
|
59 |
+
" Make sure python finds the networkx repo you are testing\n\n"
|
60 |
+
)
|
61 |
+
if backend:
|
62 |
+
networkx.config["backend_priority"] = [backend]
|
63 |
+
fallback_to_nx = config.getoption("--fallback-to-nx")
|
64 |
+
if not fallback_to_nx:
|
65 |
+
fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX")
|
66 |
+
networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx)
|
67 |
+
|
68 |
+
|
69 |
+
def pytest_collection_modifyitems(config, items):
|
70 |
+
# Setting this to True here allows tests to be set up before dispatching
|
71 |
+
# any function call to a backend.
|
72 |
+
networkx.utils.backends._dispatchable._is_testing = True
|
73 |
+
if backend_priority := networkx.config["backend_priority"]:
|
74 |
+
# Allow pluggable backends to add markers to tests (such as skip or xfail)
|
75 |
+
# when running in auto-conversion test mode
|
76 |
+
backend = networkx.utils.backends.backends[backend_priority[0]].load()
|
77 |
+
if hasattr(backend, "on_start_tests"):
|
78 |
+
getattr(backend, "on_start_tests")(items)
|
79 |
+
|
80 |
+
if config.getoption("--runslow"):
|
81 |
+
# --runslow given in cli: do not skip slow tests
|
82 |
+
return
|
83 |
+
skip_slow = pytest.mark.skip(reason="need --runslow option to run")
|
84 |
+
for item in items:
|
85 |
+
if "slow" in item.keywords:
|
86 |
+
item.add_marker(skip_slow)
|
87 |
+
|
88 |
+
|
89 |
+
# TODO: The warnings below need to be dealt with, but for now we silence them.
|
90 |
+
@pytest.fixture(autouse=True)
|
91 |
+
def set_warnings():
|
92 |
+
warnings.filterwarnings(
|
93 |
+
"ignore",
|
94 |
+
category=FutureWarning,
|
95 |
+
message="\n\nsingle_target_shortest_path_length",
|
96 |
+
)
|
97 |
+
warnings.filterwarnings(
|
98 |
+
"ignore",
|
99 |
+
category=FutureWarning,
|
100 |
+
message="\n\nshortest_path",
|
101 |
+
)
|
102 |
+
warnings.filterwarnings(
|
103 |
+
"ignore", category=DeprecationWarning, message="\nforest_str is deprecated"
|
104 |
+
)
|
105 |
+
warnings.filterwarnings(
|
106 |
+
"ignore", category=DeprecationWarning, message="\n\nrandom_tree"
|
107 |
+
)
|
108 |
+
warnings.filterwarnings(
|
109 |
+
"ignore", category=DeprecationWarning, message="Edmonds has been deprecated"
|
110 |
+
)
|
111 |
+
warnings.filterwarnings(
|
112 |
+
"ignore",
|
113 |
+
category=DeprecationWarning,
|
114 |
+
message="MultiDiGraph_EdgeKey has been deprecated",
|
115 |
+
)
|
116 |
+
warnings.filterwarnings(
|
117 |
+
"ignore", category=DeprecationWarning, message="\n\nThe `normalized`"
|
118 |
+
)
|
119 |
+
warnings.filterwarnings(
|
120 |
+
"ignore",
|
121 |
+
category=DeprecationWarning,
|
122 |
+
message="The function `join` is deprecated",
|
123 |
+
)
|
124 |
+
warnings.filterwarnings(
|
125 |
+
"ignore",
|
126 |
+
category=DeprecationWarning,
|
127 |
+
message="\n\nstrongly_connected_components_recursive",
|
128 |
+
)
|
129 |
+
warnings.filterwarnings(
|
130 |
+
"ignore", category=DeprecationWarning, message="\n\nall_triplets"
|
131 |
+
)
|
132 |
+
warnings.filterwarnings(
|
133 |
+
"ignore", category=DeprecationWarning, message="\n\nrandom_triad"
|
134 |
+
)
|
135 |
+
warnings.filterwarnings(
|
136 |
+
"ignore", category=DeprecationWarning, message="minimal_d_separator"
|
137 |
+
)
|
138 |
+
warnings.filterwarnings(
|
139 |
+
"ignore", category=DeprecationWarning, message="d_separated"
|
140 |
+
)
|
141 |
+
warnings.filterwarnings("ignore", category=DeprecationWarning, message="\n\nk_core")
|
142 |
+
warnings.filterwarnings(
|
143 |
+
"ignore", category=DeprecationWarning, message="\n\nk_shell"
|
144 |
+
)
|
145 |
+
warnings.filterwarnings(
|
146 |
+
"ignore", category=DeprecationWarning, message="\n\nk_crust"
|
147 |
+
)
|
148 |
+
warnings.filterwarnings(
|
149 |
+
"ignore", category=DeprecationWarning, message="\n\nk_corona"
|
150 |
+
)
|
151 |
+
warnings.filterwarnings(
|
152 |
+
"ignore", category=DeprecationWarning, message="\n\ntotal_spanning_tree_weight"
|
153 |
+
)
|
154 |
+
warnings.filterwarnings(
|
155 |
+
"ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'"
|
156 |
+
)
|
157 |
+
|
158 |
+
|
159 |
+
@pytest.fixture(autouse=True)
|
160 |
+
def add_nx(doctest_namespace):
|
161 |
+
doctest_namespace["nx"] = networkx
|
162 |
+
|
163 |
+
|
164 |
+
# What dependencies are installed?
|
165 |
+
|
166 |
+
try:
|
167 |
+
import numpy
|
168 |
+
|
169 |
+
has_numpy = True
|
170 |
+
except ImportError:
|
171 |
+
has_numpy = False
|
172 |
+
|
173 |
+
try:
|
174 |
+
import scipy
|
175 |
+
|
176 |
+
has_scipy = True
|
177 |
+
except ImportError:
|
178 |
+
has_scipy = False
|
179 |
+
|
180 |
+
try:
|
181 |
+
import matplotlib
|
182 |
+
|
183 |
+
has_matplotlib = True
|
184 |
+
except ImportError:
|
185 |
+
has_matplotlib = False
|
186 |
+
|
187 |
+
try:
|
188 |
+
import pandas
|
189 |
+
|
190 |
+
has_pandas = True
|
191 |
+
except ImportError:
|
192 |
+
has_pandas = False
|
193 |
+
|
194 |
+
try:
|
195 |
+
import pygraphviz
|
196 |
+
|
197 |
+
has_pygraphviz = True
|
198 |
+
except ImportError:
|
199 |
+
has_pygraphviz = False
|
200 |
+
|
201 |
+
try:
|
202 |
+
import pydot
|
203 |
+
|
204 |
+
has_pydot = True
|
205 |
+
except ImportError:
|
206 |
+
has_pydot = False
|
207 |
+
|
208 |
+
try:
|
209 |
+
import sympy
|
210 |
+
|
211 |
+
has_sympy = True
|
212 |
+
except ImportError:
|
213 |
+
has_sympy = False
|
214 |
+
|
215 |
+
|
216 |
+
# List of files that pytest should ignore
|
217 |
+
|
218 |
+
collect_ignore = []
|
219 |
+
|
220 |
+
needs_numpy = [
|
221 |
+
"algorithms/approximation/traveling_salesman.py",
|
222 |
+
"algorithms/centrality/current_flow_closeness.py",
|
223 |
+
"algorithms/node_classification.py",
|
224 |
+
"algorithms/non_randomness.py",
|
225 |
+
"algorithms/shortest_paths/dense.py",
|
226 |
+
"algorithms/tree/mst.py",
|
227 |
+
"generators/expanders.py",
|
228 |
+
"linalg/bethehessianmatrix.py",
|
229 |
+
"linalg/laplacianmatrix.py",
|
230 |
+
"utils/misc.py",
|
231 |
+
"algorithms/centrality/laplacian.py",
|
232 |
+
]
|
233 |
+
needs_scipy = [
|
234 |
+
"algorithms/approximation/traveling_salesman.py",
|
235 |
+
"algorithms/assortativity/correlation.py",
|
236 |
+
"algorithms/assortativity/mixing.py",
|
237 |
+
"algorithms/assortativity/pairs.py",
|
238 |
+
"algorithms/bipartite/matrix.py",
|
239 |
+
"algorithms/bipartite/spectral.py",
|
240 |
+
"algorithms/centrality/current_flow_betweenness.py",
|
241 |
+
"algorithms/centrality/current_flow_betweenness_subset.py",
|
242 |
+
"algorithms/centrality/eigenvector.py",
|
243 |
+
"algorithms/centrality/katz.py",
|
244 |
+
"algorithms/centrality/laplacian.py",
|
245 |
+
"algorithms/centrality/second_order.py",
|
246 |
+
"algorithms/centrality/subgraph_alg.py",
|
247 |
+
"algorithms/communicability_alg.py",
|
248 |
+
"algorithms/community/divisive.py",
|
249 |
+
"algorithms/distance_measures.py",
|
250 |
+
"algorithms/link_analysis/hits_alg.py",
|
251 |
+
"algorithms/link_analysis/pagerank_alg.py",
|
252 |
+
"algorithms/node_classification.py",
|
253 |
+
"algorithms/similarity.py",
|
254 |
+
"algorithms/tree/mst.py",
|
255 |
+
"algorithms/walks.py",
|
256 |
+
"convert_matrix.py",
|
257 |
+
"drawing/layout.py",
|
258 |
+
"drawing/nx_pylab.py",
|
259 |
+
"generators/spectral_graph_forge.py",
|
260 |
+
"generators/expanders.py",
|
261 |
+
"linalg/algebraicconnectivity.py",
|
262 |
+
"linalg/attrmatrix.py",
|
263 |
+
"linalg/bethehessianmatrix.py",
|
264 |
+
"linalg/graphmatrix.py",
|
265 |
+
"linalg/laplacianmatrix.py",
|
266 |
+
"linalg/modularitymatrix.py",
|
267 |
+
"linalg/spectrum.py",
|
268 |
+
"utils/rcm.py",
|
269 |
+
]
|
270 |
+
needs_matplotlib = ["drawing/nx_pylab.py"]
|
271 |
+
needs_pandas = ["convert_matrix.py"]
|
272 |
+
needs_pygraphviz = ["drawing/nx_agraph.py"]
|
273 |
+
needs_pydot = ["drawing/nx_pydot.py"]
|
274 |
+
needs_sympy = ["algorithms/polynomials.py"]
|
275 |
+
|
276 |
+
if not has_numpy:
|
277 |
+
collect_ignore += needs_numpy
|
278 |
+
if not has_scipy:
|
279 |
+
collect_ignore += needs_scipy
|
280 |
+
if not has_matplotlib:
|
281 |
+
collect_ignore += needs_matplotlib
|
282 |
+
if not has_pandas:
|
283 |
+
collect_ignore += needs_pandas
|
284 |
+
if not has_pygraphviz:
|
285 |
+
collect_ignore += needs_pygraphviz
|
286 |
+
if not has_pydot:
|
287 |
+
collect_ignore += needs_pydot
|
288 |
+
if not has_sympy:
|
289 |
+
collect_ignore += needs_sympy
|
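The conftest above follows one pattern throughout: probe each optional dependency with a bare import, record a has_* flag, and extend collect_ignore so pytest skips doctest files that would fail without that dependency. A minimal sketch of the same idiom for a hypothetical extra dependency (the package and file names below are illustrative and not taken from the file):

try:
    import some_optional_package  # hypothetical dependency, for illustration only

    has_some_optional = True
except ImportError:
    has_some_optional = False

needs_some_optional = ["drawing/nx_some_optional.py"]  # illustrative path

if not has_some_optional:
    collect_ignore += needs_some_optional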
env-llmeval/lib/python3.10/site-packages/networkx/convert.py
ADDED
@@ -0,0 +1,494 @@
"""Functions to convert NetworkX graphs to and from other formats.

The preferred way of converting data to a NetworkX graph is through the
graph constructor. The constructor calls the to_networkx_graph() function
which attempts to guess the input type and convert it automatically.

Examples
--------
Create a graph with a single edge from a dictionary of dictionaries

>>> d = {0: {1: 1}}  # dict-of-dicts single edge (0,1)
>>> G = nx.Graph(d)

See Also
--------
nx_agraph, nx_pydot
"""
import warnings
from collections.abc import Collection, Generator, Iterator

import networkx as nx

__all__ = [
    "to_networkx_graph",
    "from_dict_of_dicts",
    "to_dict_of_dicts",
    "from_dict_of_lists",
    "to_dict_of_lists",
    "from_edgelist",
    "to_edgelist",
]


def to_networkx_graph(data, create_using=None, multigraph_input=False):
    """Make a NetworkX graph from a known data structure.

    The preferred way to call this is automatically
    from the class constructor

    >>> d = {0: {1: {"weight": 1}}}  # dict-of-dicts single edge (0,1)
    >>> G = nx.Graph(d)

    instead of the equivalent

    >>> G = nx.from_dict_of_dicts(d)

    Parameters
    ----------
    data : object to be converted

        Current known types are:
         any NetworkX graph
         dict-of-dicts
         dict-of-lists
         container (e.g. set, list, tuple) of edges
         iterator (e.g. itertools.chain) that produces edges
         generator of edges
         Pandas DataFrame (row per edge)
         2D numpy array
         scipy sparse array
         pygraphviz agraph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    multigraph_input : bool (default False)
        If True and data is a dict_of_dicts,
        try to create a multigraph assuming dict_of_dict_of_lists.
        If data and create_using are both multigraphs then create
        a multigraph from a multigraph.

    """
    # NX graph
    if hasattr(data, "adj"):
        try:
            result = from_dict_of_dicts(
                data.adj,
                create_using=create_using,
                multigraph_input=data.is_multigraph(),
            )
            # data.graph should be dict-like
            result.graph.update(data.graph)
            # data.nodes should be dict-like
            # result.add_node_from(data.nodes.items()) possible but
            # for custom node_attr_dict_factory which may be hashable
            # will be unexpected behavior
            for n, dd in data.nodes.items():
                result._node[n].update(dd)
            return result
        except Exception as err:
            raise nx.NetworkXError("Input is not a correct NetworkX graph.") from err

    # pygraphviz agraph
    if hasattr(data, "is_strict"):
        try:
            return nx.nx_agraph.from_agraph(data, create_using=create_using)
        except Exception as err:
            raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from err

    # dict of dicts/lists
    if isinstance(data, dict):
        try:
            return from_dict_of_dicts(
                data, create_using=create_using, multigraph_input=multigraph_input
            )
        except Exception as err1:
            if multigraph_input is True:
                raise nx.NetworkXError(
                    f"converting multigraph_input raised:\n{type(err1)}: {err1}"
                )
            try:
                return from_dict_of_lists(data, create_using=create_using)
            except Exception as err2:
                raise TypeError("Input is not known type.") from err2

    # Pandas DataFrame
    try:
        import pandas as pd

        if isinstance(data, pd.DataFrame):
            if data.shape[0] == data.shape[1]:
                try:
                    return nx.from_pandas_adjacency(data, create_using=create_using)
                except Exception as err:
                    msg = "Input is not a correct Pandas DataFrame adjacency matrix."
                    raise nx.NetworkXError(msg) from err
            else:
                try:
                    return nx.from_pandas_edgelist(
                        data, edge_attr=True, create_using=create_using
                    )
                except Exception as err:
                    msg = "Input is not a correct Pandas DataFrame edge-list."
                    raise nx.NetworkXError(msg) from err
    except ImportError:
        warnings.warn("pandas not found, skipping conversion test.", ImportWarning)

    # numpy array
    try:
        import numpy as np

        if isinstance(data, np.ndarray):
            try:
                return nx.from_numpy_array(data, create_using=create_using)
            except Exception as err:
                raise nx.NetworkXError(
                    f"Failed to interpret array as an adjacency matrix."
                ) from err
    except ImportError:
        warnings.warn("numpy not found, skipping conversion test.", ImportWarning)

    # scipy sparse array - any format
    try:
        import scipy

        if hasattr(data, "format"):
            try:
                return nx.from_scipy_sparse_array(data, create_using=create_using)
            except Exception as err:
                raise nx.NetworkXError(
                    "Input is not a correct scipy sparse array type."
                ) from err
    except ImportError:
        warnings.warn("scipy not found, skipping conversion test.", ImportWarning)

    # Note: most general check - should remain last in order of execution
    # Includes containers (e.g. list, set, dict, etc.), generators, and
    # iterators (e.g. itertools.chain) of edges

    if isinstance(data, Collection | Generator | Iterator):
        try:
            return from_edgelist(data, create_using=create_using)
        except Exception as err:
            raise nx.NetworkXError("Input is not a valid edge list") from err

    raise nx.NetworkXError("Input is not a known data type for conversion.")


@nx._dispatchable
def to_dict_of_lists(G, nodelist=None):
    """Returns adjacency representation of graph as a dictionary of lists.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    Notes
    -----
    Completely ignores edge data for MultiGraph and MultiDiGraph.

    """
    if nodelist is None:
        nodelist = G

    d = {}
    for n in nodelist:
        d[n] = [nbr for nbr in G.neighbors(n) if nbr in nodelist]
    return d


@nx._dispatchable(graphs=None, returns_graph=True)
def from_dict_of_lists(d, create_using=None):
    """Returns a graph from a dictionary of lists.

    Parameters
    ----------
    d : dictionary of lists
        A dictionary of lists adjacency representation.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Examples
    --------
    >>> dol = {0: [1]}  # single edge (0,1)
    >>> G = nx.from_dict_of_lists(dol)

    or

    >>> G = nx.Graph(dol)  # use Graph constructor

    """
    G = nx.empty_graph(0, create_using)
    G.add_nodes_from(d)
    if G.is_multigraph() and not G.is_directed():
        # a dict_of_lists can't show multiedges.  BUT for undirected graphs,
        # each edge shows up twice in the dict_of_lists.
        # So we need to treat this case separately.
        seen = {}
        for node, nbrlist in d.items():
            for nbr in nbrlist:
                if nbr not in seen:
                    G.add_edge(node, nbr)
            seen[node] = 1  # don't allow reverse edge to show up
    else:
        G.add_edges_from(
            ((node, nbr) for node, nbrlist in d.items() for nbr in nbrlist)
        )
    return G


def to_dict_of_dicts(G, nodelist=None, edge_data=None):
    """Returns adjacency representation of graph as a dictionary of dictionaries.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    edge_data : scalar, optional
        If provided, the value of the dictionary will be set to `edge_data` for
        all edges. Usual values could be `1` or `True`. If `edge_data` is
        `None` (the default), the edgedata in `G` is used, resulting in a
        dict-of-dict-of-dicts. If `G` is a MultiGraph, the result will be a
        dict-of-dict-of-dict-of-dicts. See Notes for an approach to customize
        handling edge data. `edge_data` should *not* be a container.

    Returns
    -------
    dod : dict
        A nested dictionary representation of `G`. Note that the level of
        nesting depends on the type of `G` and the value of `edge_data`
        (see Examples).

    See Also
    --------
    from_dict_of_dicts, to_dict_of_lists

    Notes
    -----
    For a more custom approach to handling edge data, try::

        dod = {
            n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()}
            for n, nbrdict in G.adj.items()
        }

    where `custom` returns the desired edge data for each edge between `n` and
    `nbr`, given existing edge data `dd`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> nx.to_dict_of_dicts(G)
    {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}}

    Edge data is preserved by default (``edge_data=None``), resulting
    in dict-of-dict-of-dicts where the innermost dictionary contains the
    edge data:

    >>> G = nx.Graph()
    >>> G.add_edges_from(
    ...     [
    ...         (0, 1, {"weight": 1.0}),
    ...         (1, 2, {"weight": 2.0}),
    ...         (2, 0, {"weight": 1.0}),
    ...     ]
    ... )
    >>> d = nx.to_dict_of_dicts(G)
    >>> d  # doctest: +SKIP
    {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}},
     1: {0: {'weight': 1.0}, 2: {'weight': 2.0}},
     2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}}
    >>> d[1][2]["weight"]
    2.0

    If `edge_data` is not `None`, edge data in the original graph (if any) is
    replaced:

    >>> d = nx.to_dict_of_dicts(G, edge_data=1)
    >>> d
    {0: {1: 1, 2: 1}, 1: {0: 1, 2: 1}, 2: {1: 1, 0: 1}}
    >>> d[1][2]
    1

    This also applies to MultiGraphs: edge data is preserved by default:

    >>> G = nx.MultiGraph()
    >>> G.add_edge(0, 1, key="a", weight=1.0)
    'a'
    >>> G.add_edge(0, 1, key="b", weight=5.0)
    'b'
    >>> d = nx.to_dict_of_dicts(G)
    >>> d  # doctest: +SKIP
    {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}},
     1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}}
    >>> d[0][1]["b"]["weight"]
    5.0

    But multi edge data is lost if `edge_data` is not `None`:

    >>> d = nx.to_dict_of_dicts(G, edge_data=10)
    >>> d
    {0: {1: 10}, 1: {0: 10}}
    """
    dod = {}
    if nodelist is None:
        if edge_data is None:
            for u, nbrdict in G.adjacency():
                dod[u] = nbrdict.copy()
        else:  # edge_data is not None
            for u, nbrdict in G.adjacency():
                dod[u] = dod.fromkeys(nbrdict, edge_data)
    else:  # nodelist is not None
        if edge_data is None:
            for u in nodelist:
                dod[u] = {}
                for v, data in ((v, data) for v, data in G[u].items() if v in nodelist):
                    dod[u][v] = data
        else:  # nodelist and edge_data are not None
            for u in nodelist:
                dod[u] = {}
                for v in (v for v in G[u] if v in nodelist):
                    dod[u][v] = edge_data
    return dod


@nx._dispatchable(graphs=None, returns_graph=True)
def from_dict_of_dicts(d, create_using=None, multigraph_input=False):
    """Returns a graph from a dictionary of dictionaries.

    Parameters
    ----------
    d : dictionary of dictionaries
        A dictionary of dictionaries adjacency representation.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    multigraph_input : bool (default False)
        When True, the dict `d` is assumed
        to be a dict-of-dict-of-dict-of-dict structure keyed by
        node to neighbor to edge keys to edge data for multi-edges.
        Otherwise this routine assumes dict-of-dict-of-dict keyed by
        node to neighbor to edge data.

    Examples
    --------
    >>> dod = {0: {1: {"weight": 1}}}  # single edge (0,1)
    >>> G = nx.from_dict_of_dicts(dod)

    or

    >>> G = nx.Graph(dod)  # use Graph constructor

    """
    G = nx.empty_graph(0, create_using)
    G.add_nodes_from(d)
    # does dict d represent a MultiGraph or MultiDiGraph?
    if multigraph_input:
        if G.is_directed():
            if G.is_multigraph():
                G.add_edges_from(
                    (u, v, key, data)
                    for u, nbrs in d.items()
                    for v, datadict in nbrs.items()
                    for key, data in datadict.items()
                )
            else:
                G.add_edges_from(
                    (u, v, data)
                    for u, nbrs in d.items()
                    for v, datadict in nbrs.items()
                    for key, data in datadict.items()
                )
        else:  # Undirected
            if G.is_multigraph():
                seen = set()  # don't add both directions of undirected graph
                for u, nbrs in d.items():
                    for v, datadict in nbrs.items():
                        if (u, v) not in seen:
                            G.add_edges_from(
                                (u, v, key, data) for key, data in datadict.items()
                            )
                            seen.add((v, u))
            else:
                seen = set()  # don't add both directions of undirected graph
                for u, nbrs in d.items():
                    for v, datadict in nbrs.items():
                        if (u, v) not in seen:
                            G.add_edges_from(
                                (u, v, data) for key, data in datadict.items()
                            )
                            seen.add((v, u))

    else:  # not a multigraph to multigraph transfer
        if G.is_multigraph() and not G.is_directed():
            # d can have both representations u-v, v-u in dict.  Only add one.
            # We don't need this check for digraphs since we add both directions,
            # or for Graph() since it is done implicitly (parallel edges not allowed)
            seen = set()
            for u, nbrs in d.items():
                for v, data in nbrs.items():
                    if (u, v) not in seen:
                        G.add_edge(u, v, key=0)
                        G[u][v][0].update(data)
                    seen.add((v, u))
        else:
            G.add_edges_from(
                ((u, v, data) for u, nbrs in d.items() for v, data in nbrs.items())
            )
    return G

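As a quick illustration (not part of convert.py itself), the two dict-of-dicts helpers above round-trip a multigraph when `multigraph_input=True`; only functions defined in this file are used:

import networkx as nx

G = nx.MultiGraph()
G.add_edge(1, 2, key="a", weight=3)
G.add_edge(1, 2, key="b", weight=5)

# For multigraphs this is a dict-of-dict-of-dict-of-dicts: node -> nbr -> key -> data.
dod = nx.to_dict_of_dicts(G)
H = nx.from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=True)

assert set(H.edges(keys=True)) == {(1, 2, "a"), (1, 2, "b")}
assert H[1][2]["b"]["weight"] == 5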
@nx._dispatchable(preserve_edge_attrs=True)
def to_edgelist(G, nodelist=None):
    """Returns a list of edges in the graph.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    """
    if nodelist is None:
        return G.edges(data=True)
    return G.edges(nodelist, data=True)


@nx._dispatchable(graphs=None, returns_graph=True)
def from_edgelist(edgelist, create_using=None):
    """Returns a graph from a list of edges.

    Parameters
    ----------
    edgelist : list or iterator
        Edge tuples

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Examples
    --------
    >>> edgelist = [(0, 1)]  # single edge (0,1)
    >>> G = nx.from_edgelist(edgelist)

    or

    >>> G = nx.Graph(edgelist)  # use Graph constructor

    """
    G = nx.empty_graph(0, create_using)
    G.add_edges_from(edgelist)
    return G
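A short usage sketch (again, not part of the file) of how the graph constructor dispatches through to_networkx_graph() for the input types listed in its docstring, plus a to_edgelist/from_edgelist round trip:

import networkx as nx

G1 = nx.Graph({0: [1, 2], 1: [2]})       # dict-of-lists input
G2 = nx.Graph([(0, 1), (1, 2), (2, 0)])  # container of edges
assert sorted(G1.edges()) == sorted(G2.edges())

G1.add_edge(0, 1, weight=2)
H = nx.from_edgelist(nx.to_edgelist(G1))  # edge data is preserved
assert H[0][1]["weight"] == 2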
env-llmeval/lib/python3.10/site-packages/networkx/convert_matrix.py
ADDED
@@ -0,0 +1,1202 @@
"""Functions to convert NetworkX graphs to and from common data containers
like numpy arrays, scipy sparse arrays, and pandas DataFrames.

The preferred way of converting data to a NetworkX graph is through the
graph constructor. The constructor calls the `~networkx.convert.to_networkx_graph`
function which attempts to guess the input type and convert it automatically.

Examples
--------
Create a 10 node random graph from a numpy array

>>> import numpy as np
>>> rng = np.random.default_rng()
>>> a = rng.integers(low=0, high=2, size=(10, 10))
>>> DG = nx.from_numpy_array(a, create_using=nx.DiGraph)

or equivalently:

>>> DG = nx.DiGraph(a)

which calls `from_numpy_array` internally based on the type of ``a``.

See Also
--------
nx_agraph, nx_pydot
"""

import itertools
from collections import defaultdict

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = [
    "from_pandas_adjacency",
    "to_pandas_adjacency",
    "from_pandas_edgelist",
    "to_pandas_edgelist",
    "from_scipy_sparse_array",
    "to_scipy_sparse_array",
    "from_numpy_array",
    "to_numpy_array",
]


@nx._dispatchable(edge_attrs="weight")
def to_pandas_adjacency(
    G,
    nodelist=None,
    dtype=None,
    order=None,
    multigraph_weight=sum,
    weight="weight",
    nonedge=0.0,
):
    """Returns the graph adjacency matrix as a Pandas DataFrame.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the Pandas DataFrame.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in `nodelist`.
        If `nodelist` is None, then the ordering is produced by G.nodes().

    multigraph_weight : {sum, min, max}, optional
        An operator that determines how weights in multigraphs are handled.
        The default is to sum the weights of the multiple edges.

    weight : string or None, optional
        The edge attribute that holds the numerical value used for
        the edge weight. If an edge does not have that attribute, then the
        value 1 is used instead.

    nonedge : float, optional
        The matrix values corresponding to nonedges are typically set to zero.
        However, this could be undesirable if there are matrix values
        corresponding to actual edges that also have the value zero. If so,
        one might prefer nonedges to have some other value, such as nan.

    Returns
    -------
    df : Pandas DataFrame
        Graph adjacency matrix

    Notes
    -----
    For directed graphs, entry i,j corresponds to an edge from i to j.

    The DataFrame entries are assigned to the weight edge attribute. When
    an edge does not have a weight attribute, the value of the entry is set to
    the number 1. For multiple (parallel) edges, the values of the entries
    are determined by the 'multigraph_weight' parameter. The default is to
    sum the weight attributes for each of the parallel edges.

    When `nodelist` does not contain every node in `G`, the matrix is built
    from the subgraph of `G` that is induced by the nodes in `nodelist`.

    The convention used for self-loop edges in graphs is to assign the
    diagonal matrix entry value to the weight attribute of the edge
    (or the number 1 if the edge has no weight attribute). If the
    alternate convention of doubling the edge weight is desired the
    resulting Pandas DataFrame can be modified as follows::

        >>> import pandas as pd
        >>> G = nx.Graph([(1, 1), (2, 2)])
        >>> df = nx.to_pandas_adjacency(G)
        >>> df
             1    2
        1  1.0  0.0
        2  0.0  1.0
        >>> diag_idx = list(range(len(df)))
        >>> df.iloc[diag_idx, diag_idx] *= 2
        >>> df
             1    2
        1  2.0  0.0
        2  0.0  2.0

    Examples
    --------
    >>> G = nx.MultiDiGraph()
    >>> G.add_edge(0, 1, weight=2)
    0
    >>> G.add_edge(1, 0)
    0
    >>> G.add_edge(2, 2, weight=3)
    0
    >>> G.add_edge(2, 2)
    1
    >>> nx.to_pandas_adjacency(G, nodelist=[0, 1, 2], dtype=int)
       0  1  2
    0  0  2  0
    1  1  0  0
    2  0  0  4

    """
    import pandas as pd

    M = to_numpy_array(
        G,
        nodelist=nodelist,
        dtype=dtype,
        order=order,
        multigraph_weight=multigraph_weight,
        weight=weight,
        nonedge=nonedge,
    )
    if nodelist is None:
        nodelist = list(G)
    return pd.DataFrame(data=M, index=nodelist, columns=nodelist)


@nx._dispatchable(graphs=None, returns_graph=True)
def from_pandas_adjacency(df, create_using=None):
    r"""Returns a graph from Pandas DataFrame.

    The Pandas DataFrame is interpreted as an adjacency matrix for the graph.

    Parameters
    ----------
    df : Pandas DataFrame
        An adjacency matrix representation of a graph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Notes
    -----
    For directed graphs, explicitly mention create_using=nx.DiGraph,
    and entry i,j of df corresponds to an edge from i to j.

    If `df` has a single data type for each entry it will be converted to an
    appropriate Python data type.

    If you have node attributes stored in a separate dataframe `df_nodes`,
    you can load those attributes to the graph `G` using the following code:

    ```
    df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
    G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())
    ```

    If `df` has a user-specified compound data type the names
    of the data fields will be used as attribute keys in the resulting
    NetworkX graph.

    See Also
    --------
    to_pandas_adjacency

    Examples
    --------
    Simple integer weights on edges:

    >>> import pandas as pd
    >>> pd.options.display.max_columns = 20
    >>> df = pd.DataFrame([[1, 1], [2, 1]])
    >>> df
       0  1
    0  1  1
    1  2  1
    >>> G = nx.from_pandas_adjacency(df)
    >>> G.name = "Graph from pandas adjacency matrix"
    >>> print(G)
    Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges
    """

    try:
        df = df[df.index]
    except Exception as err:
        missing = list(set(df.index).difference(set(df.columns)))
        msg = f"{missing} not in columns"
        raise nx.NetworkXError("Columns must match Indices.", msg) from err

    A = df.values
    G = from_numpy_array(A, create_using=create_using)

    nx.relabel.relabel_nodes(G, dict(enumerate(df.columns)), copy=False)
    return G


@nx._dispatchable(preserve_edge_attrs=True)
def to_pandas_edgelist(
    G,
    source="source",
    target="target",
    nodelist=None,
    dtype=None,
    edge_key=None,
):
    """Returns the graph edge list as a Pandas DataFrame.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the Pandas DataFrame.

    source : str or int, optional
        A valid column name (string or integer) for the source nodes (for the
        directed case).

    target : str or int, optional
        A valid column name (string or integer) for the target nodes (for the
        directed case).

    nodelist : list, optional
        Use only nodes specified in nodelist

    dtype : dtype, default None
        Use to create the DataFrame. Data type to force.
        Only a single dtype is allowed. If None, infer.

    edge_key : str or int or None, optional (default=None)
        A valid column name (string or integer) for the edge keys (for the
        multigraph case). If None, edge keys are not stored in the DataFrame.

    Returns
    -------
    df : Pandas DataFrame
        Graph edge list

    Examples
    --------
    >>> G = nx.Graph(
    ...     [
    ...         ("A", "B", {"cost": 1, "weight": 7}),
    ...         ("C", "E", {"cost": 9, "weight": 10}),
    ...     ]
    ... )
    >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"])
    >>> df[["source", "target", "cost", "weight"]]
      source target  cost  weight
    0      A      B     1       7
    1      C      E     9      10

    >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})])
    >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey")
    >>> df[["source", "target", "cost", "ekey"]]
      source target  cost  ekey
    0      A      B     1     0
    1      A      B     9     1

    """
    import pandas as pd

    if nodelist is None:
        edgelist = G.edges(data=True)
    else:
        edgelist = G.edges(nodelist, data=True)
    source_nodes = [s for s, _, _ in edgelist]
    target_nodes = [t for _, t, _ in edgelist]

    all_attrs = set().union(*(d.keys() for _, _, d in edgelist))
    if source in all_attrs:
        raise nx.NetworkXError(f"Source name {source!r} is an edge attr name")
    if target in all_attrs:
        raise nx.NetworkXError(f"Target name {target!r} is an edge attr name")

    nan = float("nan")
    edge_attr = {k: [d.get(k, nan) for _, _, d in edgelist] for k in all_attrs}

    if G.is_multigraph() and edge_key is not None:
        if edge_key in all_attrs:
            raise nx.NetworkXError(f"Edge key name {edge_key!r} is an edge attr name")
        edge_keys = [k for _, _, k in G.edges(keys=True)]
        edgelistdict = {source: source_nodes, target: target_nodes, edge_key: edge_keys}
    else:
        edgelistdict = {source: source_nodes, target: target_nodes}

    edgelistdict.update(edge_attr)
    return pd.DataFrame(edgelistdict, dtype=dtype)


@nx._dispatchable(graphs=None, returns_graph=True)
def from_pandas_edgelist(
    df,
    source="source",
    target="target",
    edge_attr=None,
    create_using=None,
    edge_key=None,
):
    """Returns a graph from Pandas DataFrame containing an edge list.

    The Pandas DataFrame should contain at least two columns of node names and
    zero or more columns of edge attributes. Each row will be processed as one
    edge instance.

    Note: This function iterates over DataFrame.values, which is not
    guaranteed to retain the data type across columns in the row. This is only
    a problem if your row is entirely numeric and a mix of ints and floats. In
    that case, all values will be returned as floats. See the
    DataFrame.iterrows documentation for an example.

    Parameters
    ----------
    df : Pandas DataFrame
        An edge list representation of a graph

    source : str or int
        A valid column name (string or integer) for the source nodes (for the
        directed case).

    target : str or int
        A valid column name (string or integer) for the target nodes (for the
        directed case).

    edge_attr : str or int, iterable, True, or None
        A valid column name (str or int) or iterable of column names that are
        used to retrieve items and add them to the graph as edge attributes.
        If `True`, all of the remaining columns will be added.
        If `None`, no edge attributes are added to the graph.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    edge_key : str or None, optional (default=None)
        A valid column name for the edge keys (for a MultiGraph). The values in
        this column are used for the edge keys when adding edges if create_using
        is a multigraph.

    If you have node attributes stored in a separate dataframe `df_nodes`,
    you can load those attributes to the graph `G` using the following code:

    ```
    df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
    G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())
    ```

    See Also
    --------
    to_pandas_edgelist

    Examples
    --------
    Simple integer weights on edges:

    >>> import pandas as pd
    >>> pd.options.display.max_columns = 20
    >>> import numpy as np
    >>> rng = np.random.RandomState(seed=5)
    >>> ints = rng.randint(1, 11, size=(3, 2))
    >>> a = ["A", "B", "C"]
    >>> b = ["D", "A", "E"]
    >>> df = pd.DataFrame(ints, columns=["weight", "cost"])
    >>> df[0] = a
    >>> df["b"] = b
    >>> df[["weight", "cost", 0, "b"]]
       weight  cost  0  b
    0       4     7  A  D
    1       7     1  B  A
    2      10     9  C  E
    >>> G = nx.from_pandas_edgelist(df, 0, "b", ["weight", "cost"])
    >>> G["E"]["C"]["weight"]
    10
    >>> G["E"]["C"]["cost"]
    9
    >>> edges = pd.DataFrame(
    ...     {
    ...         "source": [0, 1, 2],
    ...         "target": [2, 2, 3],
    ...         "weight": [3, 4, 5],
    ...         "color": ["red", "blue", "blue"],
    ...     }
    ... )
    >>> G = nx.from_pandas_edgelist(edges, edge_attr=True)
    >>> G[0][2]["color"]
    'red'

    Build multigraph with custom keys:

    >>> edges = pd.DataFrame(
    ...     {
    ...         "source": [0, 1, 2, 0],
    ...         "target": [2, 2, 3, 2],
    ...         "my_edge_key": ["A", "B", "C", "D"],
    ...         "weight": [3, 4, 5, 6],
    ...         "color": ["red", "blue", "blue", "blue"],
    ...     }
    ... )
    >>> G = nx.from_pandas_edgelist(
    ...     edges,
    ...     edge_key="my_edge_key",
    ...     edge_attr=["weight", "color"],
    ...     create_using=nx.MultiGraph(),
    ... )
    >>> G[0][2]
    AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}})


    """
    g = nx.empty_graph(0, create_using)

    if edge_attr is None:
        g.add_edges_from(zip(df[source], df[target]))
        return g

    reserved_columns = [source, target]

    # Additional columns requested
    attr_col_headings = []
    attribute_data = []
    if edge_attr is True:
        attr_col_headings = [c for c in df.columns if c not in reserved_columns]
    elif isinstance(edge_attr, list | tuple):
        attr_col_headings = edge_attr
    else:
        attr_col_headings = [edge_attr]
    if len(attr_col_headings) == 0:
        raise nx.NetworkXError(
            f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}"
        )

    try:
        attribute_data = zip(*[df[col] for col in attr_col_headings])
    except (KeyError, TypeError) as err:
        msg = f"Invalid edge_attr argument: {edge_attr}"
        raise nx.NetworkXError(msg) from err

    if g.is_multigraph():
        # => append the edge keys from the df to the bundled data
        if edge_key is not None:
            try:
                multigraph_edge_keys = df[edge_key]
                attribute_data = zip(attribute_data, multigraph_edge_keys)
            except (KeyError, TypeError) as err:
                msg = f"Invalid edge_key argument: {edge_key}"
                raise nx.NetworkXError(msg) from err

        for s, t, attrs in zip(df[source], df[target], attribute_data):
            if edge_key is not None:
                attrs, multigraph_edge_key = attrs
                key = g.add_edge(s, t, key=multigraph_edge_key)
            else:
                key = g.add_edge(s, t)

            g[s][t][key].update(zip(attr_col_headings, attrs))
    else:
        for s, t, attrs in zip(df[source], df[target], attribute_data):
            g.add_edge(s, t)
            g[s][t].update(zip(attr_col_headings, attrs))

    return g


@nx._dispatchable(edge_attrs="weight")
def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"):
    """Returns the graph adjacency matrix as a SciPy sparse array.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the sparse matrix.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in `nodelist`.
        If `nodelist` is None, then the ordering is produced by G.nodes().

    dtype : NumPy data-type, optional
        A valid NumPy dtype used to initialize the array. If None, then the
        NumPy default is used.

    weight : string or None optional (default='weight')
        The edge attribute that holds the numerical value used for
        the edge weight. If None then all edge weights are 1.

    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
        The type of the matrix to be returned (default 'csr'). For
        some algorithms different implementations of sparse matrices
        can perform better. See [1]_ for details.

    Returns
    -------
    A : SciPy sparse array
        Graph adjacency matrix.

    Notes
    -----
    For directed graphs, matrix entry i,j corresponds to an edge from i to j.

    The matrix entries are populated using the edge attribute held in
    parameter weight. When an edge does not have that attribute, the
    value of the entry is 1.

    For multiple edges the matrix values are the sums of the edge weights.

    When `nodelist` does not contain every node in `G`, the adjacency matrix
    is built from the subgraph of `G` that is induced by the nodes in
    `nodelist`.

    The convention used for self-loop edges in graphs is to assign the
    diagonal matrix entry value to the weight attribute of the edge
    (or the number 1 if the edge has no weight attribute). If the
    alternate convention of doubling the edge weight is desired the
    resulting SciPy sparse array can be modified as follows:

    >>> G = nx.Graph([(1, 1)])
    >>> A = nx.to_scipy_sparse_array(G)
    >>> print(A.todense())
    [[1]]
    >>> A.setdiag(A.diagonal() * 2)
    >>> print(A.toarray())
    [[2]]

    Examples
    --------
    >>> G = nx.MultiDiGraph()
    >>> G.add_edge(0, 1, weight=2)
    0
    >>> G.add_edge(1, 0)
    0
    >>> G.add_edge(2, 2, weight=3)
    0
    >>> G.add_edge(2, 2)
    1
    >>> S = nx.to_scipy_sparse_array(G, nodelist=[0, 1, 2])
    >>> print(S.toarray())
    [[0 2 0]
     [1 0 0]
     [0 0 4]]

    References
    ----------
    .. [1] Scipy Dev. References, "Sparse Matrices",
       https://docs.scipy.org/doc/scipy/reference/sparse.html
    """
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXError("Graph has no nodes or edges")

    if nodelist is None:
        nodelist = list(G)
        nlen = len(G)
    else:
        nlen = len(nodelist)
        if nlen == 0:
            raise nx.NetworkXError("nodelist has no nodes")
        nodeset = set(G.nbunch_iter(nodelist))
        if nlen != len(nodeset):
            for n in nodelist:
                if n not in G:
                    raise nx.NetworkXError(f"Node {n} in nodelist is not in G")
            raise nx.NetworkXError("nodelist contains duplicates.")
        if nlen < len(G):
            G = G.subgraph(nodelist)

    index = dict(zip(nodelist, range(nlen)))
    coefficients = zip(
        *((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1))
    )
    try:
        row, col, data = coefficients
    except ValueError:
        # there is no edge in the subgraph
        row, col, data = [], [], []

    if G.is_directed():
        A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype)
    else:
        # symmetrize matrix
        d = data + data
        r = row + col
        c = col + row
        # selfloop entries get double counted when symmetrizing
        # so we subtract the data on the diagonal
        selfloops = list(nx.selfloop_edges(G, data=weight, default=1))
        if selfloops:
            diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops))
            d += diag_data
            r += diag_index
            c += diag_index
        A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype)
    try:
        return A.asformat(format)
    except ValueError as err:
        raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err


def _csr_gen_triples(A):
    """Converts a SciPy sparse array in **Compressed Sparse Row** format to
    an iterable of weighted edge triples.

    """
    nrows = A.shape[0]
    indptr, dst_indices, data = A.indptr, A.indices, A.data
    import numpy as np

    src_indices = np.repeat(np.arange(nrows), np.diff(indptr))
    return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())


def _csc_gen_triples(A):
    """Converts a SciPy sparse array in **Compressed Sparse Column** format to
    an iterable of weighted edge triples.

    """
    ncols = A.shape[1]
    indptr, src_indices, data = A.indptr, A.indices, A.data
    import numpy as np

    dst_indices = np.repeat(np.arange(ncols), np.diff(indptr))
    return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())


def _coo_gen_triples(A):
    """Converts a SciPy sparse array in **Coordinate** format to an iterable
    of weighted edge triples.

    """
    return zip(A.row.tolist(), A.col.tolist(), A.data.tolist())


def _dok_gen_triples(A):
    """Converts a SciPy sparse array in **Dictionary of Keys** format to an
    iterable of weighted edge triples.

    """
    for (r, c), v in A.items():
        # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar
        yield int(r), int(c), v.item()


def _generate_weighted_edges(A):
    """Returns an iterable over (u, v, w) triples, where u and v are adjacent
    vertices and w is the weight of the edge joining u and v.

    `A` is a SciPy sparse array (in any format).

    """
    if A.format == "csr":
        return _csr_gen_triples(A)
    if A.format == "csc":
        return _csc_gen_triples(A)
    if A.format == "dok":
        return _dok_gen_triples(A)
    # If A is in any other format (including COO), convert it to COO format.
    return _coo_gen_triples(A.tocoo())


@nx._dispatchable(graphs=None, returns_graph=True)
def from_scipy_sparse_array(
    A, parallel_edges=False, create_using=None, edge_attribute="weight"
):
    """Creates a new graph from an adjacency matrix given as a SciPy sparse
    array.

    Parameters
    ----------
    A: scipy.sparse array
        An adjacency matrix representation of a graph

    parallel_edges : Boolean
        If this is True, `create_using` is a multigraph, and `A` is an
        integer matrix, then entry *(i, j)* in the matrix is interpreted as the
        number of parallel edges joining vertices *i* and *j* in the graph.
        If it is False, then the entries in the matrix are interpreted as
        the weight of a single edge joining the vertices.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    edge_attribute: string
        Name of edge attribute to store matrix numeric value. The data will
        have the same type as the matrix entry (int, float, (real,imag)).

    Notes
    -----
    For directed graphs, explicitly mention create_using=nx.DiGraph,
    and entry i,j of A corresponds to an edge from i to j.

    If `create_using` is :class:`networkx.MultiGraph` or
    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
    entries of `A` are of type :class:`int`, then this function returns a
    multigraph (constructed from `create_using`) with parallel edges.
    In this case, `edge_attribute` will be ignored.

    If `create_using` indicates an undirected multigraph, then only the edges
    indicated by the upper triangle of the matrix `A` will be added to the
    graph.

    Examples
    --------
    >>> import scipy as sp
    >>> A = sp.sparse.eye(2, 2, 1)
    >>> G = nx.from_scipy_sparse_array(A)

    If `create_using` indicates a multigraph and the matrix has only integer
    entries and `parallel_edges` is False, then the entries will be treated
    as weights for edges joining the nodes (without creating parallel edges):

    >>> A = sp.sparse.csr_array([[1, 1], [1, 2]])
    >>> G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph)
    >>> G[1][1]
    AtlasView({0: {'weight': 2}})

    If `create_using` indicates a multigraph and the matrix has only integer
    entries and `parallel_edges` is True, then the entries will be treated
    as the number of parallel edges joining those two vertices:

    >>> A = sp.sparse.csr_array([[1, 1], [1, 2]])
    >>> G = nx.from_scipy_sparse_array(A, parallel_edges=True, create_using=nx.MultiGraph)
    >>> G[1][1]
    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})

    """
    G = nx.empty_graph(0, create_using)
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")
    # Make sure we get even the isolated nodes of the graph.
    G.add_nodes_from(range(n))
    # Create an iterable over (u, v, w) triples and for each triple, add an
    # edge from u to v with weight w.
    triples = _generate_weighted_edges(A)
    # If the entries in the adjacency matrix are integers, the graph is a
    # multigraph, and parallel_edges is True, then create parallel edges, each
    # with weight 1, for each entry in the adjacency matrix. Otherwise, create
    # one edge for each positive entry in the adjacency matrix and set the
    # weight of that edge to be the entry in the matrix.
    if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges:
        chain = itertools.chain.from_iterable
        # The following line is equivalent to:
        #
        #     for (u, v) in edges:
        #         for d in range(A[u, v]):
        #             G.add_edge(u, v, weight=1)
        #
        triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
    # If we are creating an undirected multigraph, only add the edges from the
    # upper triangle of the matrix. Otherwise, add all the edges. This relies
    # on the fact that the vertices created in the
    # `_generated_weighted_edges()` function are actually the row/column
    # indices for the matrix `A`.
    #
    # Without this check, we run into a problem where each edge is added twice
    # when `G.add_weighted_edges_from()` is invoked below.
    if G.is_multigraph() and not G.is_directed():
        triples = ((u, v, d) for u, v, d in triples if u <= v)
    G.add_weighted_edges_from(triples, weight=edge_attribute)
    return G


@nx._dispatchable(edge_attrs="weight")  # edge attrs may also be obtained from `dtype`
def to_numpy_array(
    G,
    nodelist=None,
    dtype=None,
    order=None,
    multigraph_weight=sum,
    weight="weight",
    nonedge=0.0,
):
    """Returns the graph adjacency matrix as a NumPy array.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the NumPy array.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in `nodelist`.
        If `nodelist` is ``None``, then the ordering is produced by ``G.nodes()``.

    dtype : NumPy data type, optional
        A NumPy data type used to initialize the array. If None, then the NumPy
        default is used. The dtype can be structured if `weight=None`, in which
        case the dtype field names are used to look up edge attributes. The
        result is a structured array where each named field in the dtype
        corresponds to the adjacency for that edge attribute. See examples for
        details.

    order : {'C', 'F'}, optional
        Whether to store multidimensional data in C- or Fortran-contiguous
        (row- or column-wise) order in memory. If None, then the NumPy default
        is used.

    multigraph_weight : callable, optional
        An function that determines how weights in multigraphs are handled.
        The function should accept a sequence of weights and return a single
        value. The default is to sum the weights of the multiple edges.

    weight : string or None optional (default = 'weight')
        The edge attribute that holds the numerical value used for
        the edge weight. If an edge does not have that attribute, then the
        value 1 is used instead. `weight` must be ``None`` if a structured
        dtype is used.

    nonedge : array_like (default = 0.0)
        The value used to represent non-edges in the adjacency matrix.
        The array values corresponding to nonedges are typically set to zero.
        However, this could be undesirable if there are array values
        corresponding to actual edges that also have the value zero. If so,
        one might prefer nonedges to have some other value, such as ``nan``.

    Returns
    -------
    A : NumPy ndarray
        Graph adjacency matrix

    Raises
    ------
    NetworkXError
        If `dtype` is a structured dtype and `G` is a multigraph
    ValueError
        If `dtype` is a structured dtype and `weight` is not `None`

    See Also
    --------
    from_numpy_array

    Notes
    -----
    For directed graphs, entry ``i, j`` corresponds to an edge from ``i`` to ``j``.

    Entries in the adjacency matrix are given by the `weight` edge attribute.
    When an edge does not have a weight attribute, the value of the entry is
    set to the number 1. For multiple (parallel) edges, the values of the
    entries are determined by the `multigraph_weight` parameter. The default is
    to sum the weight attributes for each of the parallel edges.

    When `nodelist` does not contain every node in `G`, the adjacency matrix is
864 |
+
built from the subgraph of `G` that is induced by the nodes in `nodelist`.
|
865 |
+
|
866 |
+
The convention used for self-loop edges in graphs is to assign the
|
867 |
+
diagonal array entry value to the weight attribute of the edge
|
868 |
+
(or the number 1 if the edge has no weight attribute). If the
|
869 |
+
alternate convention of doubling the edge weight is desired the
|
870 |
+
resulting NumPy array can be modified as follows:
|
871 |
+
|
872 |
+
>>> import numpy as np
|
873 |
+
>>> G = nx.Graph([(1, 1)])
|
874 |
+
>>> A = nx.to_numpy_array(G)
|
875 |
+
>>> A
|
876 |
+
array([[1.]])
|
877 |
+
>>> A[np.diag_indices_from(A)] *= 2
|
878 |
+
>>> A
|
879 |
+
array([[2.]])
|
880 |
+
|
881 |
+
Examples
|
882 |
+
--------
|
883 |
+
>>> G = nx.MultiDiGraph()
|
884 |
+
>>> G.add_edge(0, 1, weight=2)
|
885 |
+
0
|
886 |
+
>>> G.add_edge(1, 0)
|
887 |
+
0
|
888 |
+
>>> G.add_edge(2, 2, weight=3)
|
889 |
+
0
|
890 |
+
>>> G.add_edge(2, 2)
|
891 |
+
1
|
892 |
+
>>> nx.to_numpy_array(G, nodelist=[0, 1, 2])
|
893 |
+
array([[0., 2., 0.],
|
894 |
+
[1., 0., 0.],
|
895 |
+
[0., 0., 4.]])
|
896 |
+
|
897 |
+
When `nodelist` argument is used, nodes of `G` which do not appear in the `nodelist`
|
898 |
+
and their edges are not included in the adjacency matrix. Here is an example:
|
899 |
+
|
900 |
+
>>> G = nx.Graph()
|
901 |
+
>>> G.add_edge(3, 1)
|
902 |
+
>>> G.add_edge(2, 0)
|
903 |
+
>>> G.add_edge(2, 1)
|
904 |
+
>>> G.add_edge(3, 0)
|
905 |
+
>>> nx.to_numpy_array(G, nodelist=[1, 2, 3])
|
906 |
+
array([[0., 1., 1.],
|
907 |
+
[1., 0., 0.],
|
908 |
+
[1., 0., 0.]])
|
909 |
+
|
910 |
+
This function can also be used to create adjacency matrices for multiple
|
911 |
+
edge attributes with structured dtypes:
|
912 |
+
|
913 |
+
>>> G = nx.Graph()
|
914 |
+
>>> G.add_edge(0, 1, weight=10)
|
915 |
+
>>> G.add_edge(1, 2, cost=5)
|
916 |
+
>>> G.add_edge(2, 3, weight=3, cost=-4.0)
|
917 |
+
>>> dtype = np.dtype([("weight", int), ("cost", float)])
|
918 |
+
>>> A = nx.to_numpy_array(G, dtype=dtype, weight=None)
|
919 |
+
>>> A["weight"]
|
920 |
+
array([[ 0, 10, 0, 0],
|
921 |
+
[10, 0, 1, 0],
|
922 |
+
[ 0, 1, 0, 3],
|
923 |
+
[ 0, 0, 3, 0]])
|
924 |
+
>>> A["cost"]
|
925 |
+
array([[ 0., 1., 0., 0.],
|
926 |
+
[ 1., 0., 5., 0.],
|
927 |
+
[ 0., 5., 0., -4.],
|
928 |
+
[ 0., 0., -4., 0.]])
|
929 |
+
|
930 |
+
As stated above, the argument "nonedge" is useful especially when there are
|
931 |
+
actually edges with weight 0 in the graph. Setting a nonedge value different than 0,
|
932 |
+
makes it much clearer to differentiate such 0-weighted edges and actual nonedge values.
|
933 |
+
|
934 |
+
>>> G = nx.Graph()
|
935 |
+
>>> G.add_edge(3, 1, weight=2)
|
936 |
+
>>> G.add_edge(2, 0, weight=0)
|
937 |
+
>>> G.add_edge(2, 1, weight=0)
|
938 |
+
>>> G.add_edge(3, 0, weight=1)
|
939 |
+
>>> nx.to_numpy_array(G, nonedge=-1.0)
|
940 |
+
array([[-1., 2., -1., 1.],
|
941 |
+
[ 2., -1., 0., -1.],
|
942 |
+
[-1., 0., -1., 0.],
|
943 |
+
[ 1., -1., 0., -1.]])
|
944 |
+
"""
|
945 |
+
import numpy as np
|
946 |
+
|
947 |
+
if nodelist is None:
|
948 |
+
nodelist = list(G)
|
949 |
+
nlen = len(nodelist)
|
950 |
+
|
951 |
+
# Input validation
|
952 |
+
nodeset = set(nodelist)
|
953 |
+
if nodeset - set(G):
|
954 |
+
raise nx.NetworkXError(f"Nodes {nodeset - set(G)} in nodelist is not in G")
|
955 |
+
if len(nodeset) < nlen:
|
956 |
+
raise nx.NetworkXError("nodelist contains duplicates.")
|
957 |
+
|
958 |
+
A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order)
|
959 |
+
|
960 |
+
# Corner cases: empty nodelist or graph without any edges
|
961 |
+
if nlen == 0 or G.number_of_edges() == 0:
|
962 |
+
return A
|
963 |
+
|
964 |
+
# If dtype is structured and weight is None, use dtype field names as
|
965 |
+
# edge attributes
|
966 |
+
edge_attrs = None # Only single edge attribute by default
|
967 |
+
if A.dtype.names:
|
968 |
+
if weight is None:
|
969 |
+
edge_attrs = dtype.names
|
970 |
+
else:
|
971 |
+
raise ValueError(
|
972 |
+
"Specifying `weight` not supported for structured dtypes\n."
|
973 |
+
"To create adjacency matrices from structured dtypes, use `weight=None`."
|
974 |
+
)
|
975 |
+
|
976 |
+
# Map nodes to row/col in matrix
|
977 |
+
idx = dict(zip(nodelist, range(nlen)))
|
978 |
+
if len(nodelist) < len(G):
|
979 |
+
G = G.subgraph(nodelist).copy()
|
980 |
+
|
981 |
+
# Collect all edge weights and reduce with `multigraph_weights`
|
982 |
+
if G.is_multigraph():
|
983 |
+
if edge_attrs:
|
984 |
+
raise nx.NetworkXError(
|
985 |
+
"Structured arrays are not supported for MultiGraphs"
|
986 |
+
)
|
987 |
+
d = defaultdict(list)
|
988 |
+
for u, v, wt in G.edges(data=weight, default=1.0):
|
989 |
+
d[(idx[u], idx[v])].append(wt)
|
990 |
+
i, j = np.array(list(d.keys())).T # indices
|
991 |
+
wts = [multigraph_weight(ws) for ws in d.values()] # reduced weights
|
992 |
+
else:
|
993 |
+
i, j, wts = [], [], []
|
994 |
+
|
995 |
+
# Special branch: multi-attr adjacency from structured dtypes
|
996 |
+
if edge_attrs:
|
997 |
+
# Extract edges with all data
|
998 |
+
for u, v, data in G.edges(data=True):
|
999 |
+
i.append(idx[u])
|
1000 |
+
j.append(idx[v])
|
1001 |
+
wts.append(data)
|
1002 |
+
# Map each attribute to the appropriate named field in the
|
1003 |
+
# structured dtype
|
1004 |
+
for attr in edge_attrs:
|
1005 |
+
attr_data = [wt.get(attr, 1.0) for wt in wts]
|
1006 |
+
A[attr][i, j] = attr_data
|
1007 |
+
if not G.is_directed():
|
1008 |
+
A[attr][j, i] = attr_data
|
1009 |
+
return A
|
1010 |
+
|
1011 |
+
for u, v, wt in G.edges(data=weight, default=1.0):
|
1012 |
+
i.append(idx[u])
|
1013 |
+
j.append(idx[v])
|
1014 |
+
wts.append(wt)
|
1015 |
+
|
1016 |
+
# Set array values with advanced indexing
|
1017 |
+
A[i, j] = wts
|
1018 |
+
if not G.is_directed():
|
1019 |
+
A[j, i] = wts
|
1020 |
+
|
1021 |
+
return A
|
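# ---------------------------------------------------------------------------
# Editor's sketch -- NOT part of the vendored convert_matrix.py. It illustrates
# the structured-dtype path of to_numpy_array() documented above; `weight=None`
# is required whenever the dtype has named fields. Kept commented out so it is
# not executed at import time if this listing is copied verbatim.
#
#   import numpy as np
#   import networkx as nx
#
#   G = nx.Graph()
#   G.add_edge(0, 1, weight=10, cost=2.5)
#   dt = np.dtype([("weight", int), ("cost", float)])
#   A = nx.to_numpy_array(G, dtype=dt, weight=None)
#   print(A["weight"][0, 1], A["cost"][0, 1])  # -> 10 2.5
# ---------------------------------------------------------------------------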


@nx._dispatchable(graphs=None, returns_graph=True)
def from_numpy_array(A, parallel_edges=False, create_using=None, edge_attr="weight"):
    """Returns a graph from a 2D NumPy array.

    The 2D NumPy array is interpreted as an adjacency matrix for the graph.

    Parameters
    ----------
    A : a 2D numpy.ndarray
        An adjacency matrix representation of a graph

    parallel_edges : Boolean
        If this is True, `create_using` is a multigraph, and `A` is an
        integer array, then entry *(i, j)* in the array is interpreted as the
        number of parallel edges joining vertices *i* and *j* in the graph.
        If it is False, then the entries in the array are interpreted as
        the weight of a single edge joining the vertices.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    edge_attr : String, optional (default="weight")
        The attribute to which the array values are assigned on each edge. If
        it is None, edge attributes will not be assigned.

    Notes
    -----
    For directed graphs, explicitly mention create_using=nx.DiGraph,
    and entry i,j of A corresponds to an edge from i to j.

    If `create_using` is :class:`networkx.MultiGraph` or
    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
    entries of `A` are of type :class:`int`, then this function returns a
    multigraph (of the same type as `create_using`) with parallel edges.

    If `create_using` indicates an undirected multigraph, then only the edges
    indicated by the upper triangle of the array `A` will be added to the
    graph.

    If `edge_attr` is Falsy (False or None), edge attributes will not be
    assigned, and the array data will be treated like a binary mask of
    edge presence or absence. Otherwise, the attributes will be assigned
    as follows:

    If the NumPy array has a single data type for each array entry it
    will be converted to an appropriate Python data type.

    If the NumPy array has a user-specified compound data type the names
    of the data fields will be used as attribute keys in the resulting
    NetworkX graph.

    See Also
    --------
    to_numpy_array

    Examples
    --------
    Simple integer weights on edges:

    >>> import numpy as np
    >>> A = np.array([[1, 1], [2, 1]])
    >>> G = nx.from_numpy_array(A)
    >>> G.edges(data=True)
    EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), (1, 1, {'weight': 1})])

    If `create_using` indicates a multigraph and the array has only integer
    entries and `parallel_edges` is False, then the entries will be treated
    as weights for edges joining the nodes (without creating parallel edges):

    >>> A = np.array([[1, 1], [1, 2]])
    >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph)
    >>> G[1][1]
    AtlasView({0: {'weight': 2}})

    If `create_using` indicates a multigraph and the array has only integer
    entries and `parallel_edges` is True, then the entries will be treated
    as the number of parallel edges joining those two vertices:

    >>> A = np.array([[1, 1], [1, 2]])
    >>> temp = nx.MultiGraph()
    >>> G = nx.from_numpy_array(A, parallel_edges=True, create_using=temp)
    >>> G[1][1]
    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})

    User defined compound data type on edges:

    >>> dt = [("weight", float), ("cost", int)]
    >>> A = np.array([[(1.0, 2)]], dtype=dt)
    >>> G = nx.from_numpy_array(A)
    >>> G.edges()
    EdgeView([(0, 0)])
    >>> G[0][0]["cost"]
    2
    >>> G[0][0]["weight"]
    1.0

    """
    kind_to_python_type = {
        "f": float,
        "i": int,
        "u": int,
        "b": bool,
        "c": complex,
        "S": str,
        "U": str,
        "V": "void",
    }
    G = nx.empty_graph(0, create_using)
    if A.ndim != 2:
        raise nx.NetworkXError(f"Input array must be 2D, not {A.ndim}")
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")
    dt = A.dtype
    try:
        python_type = kind_to_python_type[dt.kind]
    except Exception as err:
        raise TypeError(f"Unknown numpy data type: {dt}") from err

    # Make sure we get even the isolated nodes of the graph.
    G.add_nodes_from(range(n))
    # Get a list of all the entries in the array with nonzero entries. These
    # coordinates become edges in the graph. (convert to int from np.int64)
    edges = ((int(e[0]), int(e[1])) for e in zip(*A.nonzero()))
    # handle numpy constructed data type
    if python_type == "void":
        # Sort the fields by their offset, then by dtype, then by name.
        fields = sorted(
            (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items()
        )
        triples = (
            (
                u,
                v,
                {}
                if edge_attr in [False, None]
                else {
                    name: kind_to_python_type[dtype.kind](val)
                    for (_, dtype, name), val in zip(fields, A[u, v])
                },
            )
            for u, v in edges
        )
    # If the entries in the adjacency matrix are integers, the graph is a
    # multigraph, and parallel_edges is True, then create parallel edges, each
    # with weight 1, for each entry in the adjacency matrix. Otherwise, create
    # one edge for each positive entry in the adjacency matrix and set the
    # weight of that edge to be the entry in the matrix.
    elif python_type is int and G.is_multigraph() and parallel_edges:
        chain = itertools.chain.from_iterable
        # The following line is equivalent to:
        #
        #     for (u, v) in edges:
        #         for d in range(A[u, v]):
        #             G.add_edge(u, v, weight=1)
        #
        if edge_attr in [False, None]:
            triples = chain(((u, v, {}) for d in range(A[u, v])) for (u, v) in edges)
        else:
            triples = chain(
                ((u, v, {edge_attr: 1}) for d in range(A[u, v])) for (u, v) in edges
            )
    else:  # basic data type
        if edge_attr in [False, None]:
            triples = ((u, v, {}) for u, v in edges)
        else:
            triples = ((u, v, {edge_attr: python_type(A[u, v])}) for u, v in edges)
    # If we are creating an undirected multigraph, only add the edges from the
    # upper triangle of the matrix. Otherwise, add all the edges. This relies
    # on the fact that the vertices created in the
    # `_generated_weighted_edges()` function are actually the row/column
    # indices for the matrix `A`.
    #
    # Without this check, we run into a problem where each edge is added twice
    # when `G.add_edges_from()` is invoked below.
    if G.is_multigraph() and not G.is_directed():
        triples = ((u, v, d) for u, v, d in triples if u <= v)
    G.add_edges_from(triples)
    return G
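A quick way to sanity-check the two array converters above is a round trip. The following is an editor's sketch against the public API shown in this file, not part of the vendored sources; it only assumes `numpy` and `networkx` are importable in this environment.

import numpy as np
import networkx as nx

# Adjacency matrix -> graph -> adjacency matrix round trip.
A = np.array([[0, 2], [2, 0]])
G = nx.from_numpy_array(A)              # one edge (0, 1) with weight 2
B = nx.to_numpy_array(G, nonedge=0.0)   # back to a dense (float) array
assert np.array_equal(A, B)
print(G.edges(data=True))               # EdgeDataView([(0, 1, {'weight': 2})])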
env-llmeval/lib/python3.10/site-packages/networkx/drawing/__init__.py
ADDED
@@ -0,0 +1,7 @@
# graph drawing and interface to graphviz

from .layout import *
from .nx_latex import *
from .nx_pylab import *
from . import nx_agraph
from . import nx_pydot
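The `__init__.py` above re-exports the layout and Matplotlib drawing helpers into the top-level `networkx` namespace, so they are reachable without importing the submodules directly. A minimal sketch from the editor (assumes Matplotlib is installed; not part of the vendored package):

import networkx as nx

G = nx.petersen_graph()
pos = nx.spring_layout(G, seed=42)   # re-exported from networkx.drawing.layout
nx.draw(G, pos, node_size=100)       # re-exported from networkx.drawing.nx_pylab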
env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_agraph.py
ADDED
@@ -0,0 +1,465 @@
"""
***************
Graphviz AGraph
***************

Interface to pygraphviz AGraph class.

Examples
--------
>>> G = nx.complete_graph(5)
>>> A = nx.nx_agraph.to_agraph(G)
>>> H = nx.nx_agraph.from_agraph(A)

See Also
--------
 - Pygraphviz: http://pygraphviz.github.io/
 - Graphviz: https://www.graphviz.org
 - DOT Language: http://www.graphviz.org/doc/info/lang.html
"""
import os
import tempfile

import networkx as nx

__all__ = [
    "from_agraph",
    "to_agraph",
    "write_dot",
    "read_dot",
    "graphviz_layout",
    "pygraphviz_layout",
    "view_pygraphviz",
]


@nx._dispatchable(graphs=None, returns_graph=True)
def from_agraph(A, create_using=None):
    """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph.

    Parameters
    ----------
    A : PyGraphviz AGraph
        A graph created with PyGraphviz

    create_using : NetworkX graph constructor, optional (default=None)
        Graph type to create. If graph instance, then cleared before populated.
        If `None`, then the appropriate Graph type is inferred from `A`.

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> A = nx.nx_agraph.to_agraph(K5)
    >>> G = nx.nx_agraph.from_agraph(A)

    Notes
    -----
    The Graph G will have a dictionary G.graph_attr containing
    the default graphviz attributes for graphs, nodes and edges.

    Default node attributes will be in the dictionary G.node_attr
    which is keyed by node.

    Edge attributes will be returned as edge data in G. With
    edge_attr=False the edge data will be the Graphviz edge weight
    attribute or the value 1 if no edge weight attribute is found.

    """
    if create_using is None:
        if A.is_directed():
            if A.is_strict():
                create_using = nx.DiGraph
            else:
                create_using = nx.MultiDiGraph
        else:
            if A.is_strict():
                create_using = nx.Graph
            else:
                create_using = nx.MultiGraph

    # assign defaults
    N = nx.empty_graph(0, create_using)
    if A.name is not None:
        N.name = A.name

    # add graph attributes
    N.graph.update(A.graph_attr)

    # add nodes, attributes to N.node_attr
    for n in A.nodes():
        str_attr = {str(k): v for k, v in n.attr.items()}
        N.add_node(str(n), **str_attr)

    # add edges, assign edge data as dictionary of attributes
    for e in A.edges():
        u, v = str(e[0]), str(e[1])
        attr = dict(e.attr)
        str_attr = {str(k): v for k, v in attr.items()}
        if not N.is_multigraph():
            if e.name is not None:
                str_attr["key"] = e.name
            N.add_edge(u, v, **str_attr)
        else:
            N.add_edge(u, v, key=e.name, **str_attr)

    # add default attributes for graph, nodes, and edges
    # hang them on N.graph_attr
    N.graph["graph"] = dict(A.graph_attr)
    N.graph["node"] = dict(A.node_attr)
    N.graph["edge"] = dict(A.edge_attr)
    return N


def to_agraph(N):
    """Returns a pygraphviz graph from a NetworkX graph N.

    Parameters
    ----------
    N : NetworkX graph
        A graph created with NetworkX

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> A = nx.nx_agraph.to_agraph(K5)

    Notes
    -----
    If N has a dict N.graph_attr an attempt will be made first
    to copy properties attached to the graph (see from_agraph)
    and then updated with the calling arguments if any.

    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err
    directed = N.is_directed()
    strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph()

    for node in N:
        if "pos" in N.nodes[node]:
            N.nodes[node]["pos"] = "{},{}!".format(
                N.nodes[node]["pos"][0], N.nodes[node]["pos"][1]
            )

    A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed)

    # default graph attributes
    A.graph_attr.update(N.graph.get("graph", {}))
    A.node_attr.update(N.graph.get("node", {}))
    A.edge_attr.update(N.graph.get("edge", {}))

    A.graph_attr.update(
        (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge")
    )

    # add nodes
    for n, nodedata in N.nodes(data=True):
        A.add_node(n)
        # Add node data
        a = A.get_node(n)
        a.attr.update({k: str(v) for k, v in nodedata.items()})

    # loop over edges
    if N.is_multigraph():
        for u, v, key, edgedata in N.edges(data=True, keys=True):
            str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"}
            A.add_edge(u, v, key=str(key))
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)

    else:
        for u, v, edgedata in N.edges(data=True):
            str_edgedata = {k: str(v) for k, v in edgedata.items()}
            A.add_edge(u, v)
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)

    return A


def write_dot(G, path):
    """Write NetworkX graph G to Graphviz dot format on path.

    Parameters
    ----------
    G : graph
        A networkx graph
    path : filename
        Filename or file handle to write

    Notes
    -----
    To use a specific graph layout, call ``A.layout`` prior to `write_dot`.
    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    A = to_agraph(G)
    A.write(path)
    A.clear()
    return


@nx._dispatchable(name="agraph_read_dot", graphs=None, returns_graph=True)
def read_dot(path):
    """Returns a NetworkX graph from a dot file on path.

    Parameters
    ----------
    path : file or string
        File name or file handle to read.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError(
            "read_dot() requires pygraphviz http://pygraphviz.github.io/"
        ) from err
    A = pygraphviz.AGraph(file=path)
    gr = from_agraph(A)
    A.clear()
    return gr


def graphviz_layout(G, prog="neato", root=None, args=""):
    """Create node positions for G using Graphviz.

    Parameters
    ----------
    G : NetworkX graph
        A graph created with NetworkX
    prog : string
        Name of Graphviz layout program
    root : string, optional
        Root node for twopi layout
    args : string, optional
        Extra arguments to Graphviz layout program

    Returns
    -------
        Dictionary of x, y, positions keyed by node.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> pos = nx.nx_agraph.graphviz_layout(G)
    >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot")

    Notes
    -----
    This is a wrapper for pygraphviz_layout.

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    return pygraphviz_layout(G, prog=prog, root=root, args=args)


def pygraphviz_layout(G, prog="neato", root=None, args=""):
    """Create node positions for G using Graphviz.

    Parameters
    ----------
    G : NetworkX graph
        A graph created with NetworkX
    prog : string
        Name of Graphviz layout program
    root : string, optional
        Root node for twopi layout
    args : string, optional
        Extra arguments to Graphviz layout program

    Returns
    -------
    node_pos : dict
        Dictionary of x, y, positions keyed by node.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> pos = nx.nx_agraph.graphviz_layout(G)
    >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot")

    Notes
    -----
    If you use complex node objects, they may have the same string
    representation and GraphViz could treat them as the same node.
    The layout may assign both nodes a single location. See Issue #1568
    If this occurs in your case, consider relabeling the nodes just
    for the layout computation using something similar to::

        >>> H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
        >>> H_layout = nx.nx_agraph.pygraphviz_layout(G, prog="dot")
        >>> G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err
    if root is not None:
        args += f"-Groot={root}"
    A = to_agraph(G)
    A.layout(prog=prog, args=args)
    node_pos = {}
    for n in G:
        node = pygraphviz.Node(A, n)
        try:
            xs = node.attr["pos"].split(",")
            node_pos[n] = tuple(float(x) for x in xs)
        except:
            print("no position for node", n)
            node_pos[n] = (0.0, 0.0)
    return node_pos


@nx.utils.open_file(5, "w+b")
def view_pygraphviz(
    G, edgelabel=None, prog="dot", args="", suffix="", path=None, show=True
):
    """Views the graph G using the specified layout algorithm.

    Parameters
    ----------
    G : NetworkX graph
        The graph to draw.
    edgelabel : str, callable, None
        If a string, then it specifies the edge attribute to be displayed
        on the edge labels. If a callable, then it is called for each
        edge and it should return the string to be displayed on the edges.
        The function signature of `edgelabel` should be edgelabel(data),
        where `data` is the edge attribute dictionary.
    prog : string
        Name of Graphviz layout program.
    args : str
        Additional arguments to pass to the Graphviz layout program.
    suffix : str
        If `filename` is None, we save to a temporary file. The value of
        `suffix` will appear at the tail end of the temporary filename.
    path : str, None
        The filename used to save the image. If None, save to a temporary
        file. File formats are the same as those from pygraphviz.agraph.draw.
    show : bool, default = True
        Whether to display the graph with :mod:`PIL.Image.show`,
        default is `True`. If `False`, the rendered graph is still available
        at `path`.

    Returns
    -------
    path : str
        The filename of the generated image.
    A : PyGraphviz graph
        The PyGraphviz graph instance used to generate the image.

    Notes
    -----
    If this function is called in succession too quickly, sometimes the
    image is not displayed. So you might consider time.sleep(.5) between
    calls if you experience problems.

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.

    """
    if not len(G):
        raise nx.NetworkXException("An empty graph cannot be drawn.")

    # If we are providing default values for graphviz, these must be set
    # before any nodes or edges are added to the PyGraphviz graph object.
    # The reason for this is that default values only affect incoming objects.
    # If you change the default values after the objects have been added,
    # then they inherit no value and are set only if explicitly set.

    # to_agraph() uses these values.
    attrs = ["edge", "node", "graph"]
    for attr in attrs:
        if attr not in G.graph:
            G.graph[attr] = {}

    # These are the default values.
    edge_attrs = {"fontsize": "10"}
    node_attrs = {
        "style": "filled",
        "fillcolor": "#0000FF40",
        "height": "0.75",
        "width": "0.75",
        "shape": "circle",
    }
    graph_attrs = {}

    def update_attrs(which, attrs):
        # Update graph attributes. Return list of those which were added.
        added = []
        for k, v in attrs.items():
            if k not in G.graph[which]:
                G.graph[which][k] = v
                added.append(k)

    def clean_attrs(which, added):
        # Remove added attributes
        for attr in added:
            del G.graph[which][attr]
        if not G.graph[which]:
            del G.graph[which]

    # Update all default values
    update_attrs("edge", edge_attrs)
    update_attrs("node", node_attrs)
    update_attrs("graph", graph_attrs)

    # Convert to agraph, so we inherit default values
    A = to_agraph(G)

    # Remove the default values we added to the original graph.
    clean_attrs("edge", edge_attrs)
    clean_attrs("node", node_attrs)
    clean_attrs("graph", graph_attrs)

    # If the user passed in an edgelabel, we update the labels for all edges.
    if edgelabel is not None:
        if not callable(edgelabel):

            def func(data):
                return "".join(["  ", str(data[edgelabel]), "  "])

        else:
            func = edgelabel

        # update all the edge labels
        if G.is_multigraph():
            for u, v, key, data in G.edges(keys=True, data=True):
                # PyGraphviz doesn't convert the key to a string. See #339
                edge = A.get_edge(u, v, str(key))
                edge.attr["label"] = str(func(data))
        else:
            for u, v, data in G.edges(data=True):
                edge = A.get_edge(u, v)
                edge.attr["label"] = str(func(data))

    if path is None:
        ext = "png"
        if suffix:
            suffix = f"_{suffix}.{ext}"
        else:
            suffix = f".{ext}"
        path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    else:
        # Assume the decorator worked and it is a file-object.
        pass

    # Write graph to file
    A.draw(path=path, format=None, prog=prog, args=args)
    path.close()

    # Show graph in a new window (depends on platform configuration)
    if show:
        from PIL import Image

        Image.open(path.name).show()

    return path.name, A
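To exercise the AGraph helpers above end to end, a DOT round trip is the shortest route. This is an editor's sketch, not part of the vendored module, and it assumes pygraphviz plus a Graphviz installation are available (neither is vendored here):

import networkx as nx

G = nx.cycle_graph(4)
nx.nx_agraph.write_dot(G, "cycle.dot")             # serializes via to_agraph()
H = nx.nx_agraph.read_dot("cycle.dot")             # node labels come back as strings
pos = nx.nx_agraph.graphviz_layout(G, prog="dot")  # positions keyed by node
print(sorted(H.nodes()), pos[0])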
env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_latex.py
ADDED
@@ -0,0 +1,571 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
r"""
|
2 |
+
*****
|
3 |
+
LaTeX
|
4 |
+
*****
|
5 |
+
|
6 |
+
Export NetworkX graphs in LaTeX format using the TikZ library within TeX/LaTeX.
|
7 |
+
Usually, you will want the drawing to appear in a figure environment so
|
8 |
+
you use ``to_latex(G, caption="A caption")``. If you want the raw
|
9 |
+
drawing commands without a figure environment use :func:`to_latex_raw`.
|
10 |
+
And if you want to write to a file instead of just returning the latex
|
11 |
+
code as a string, use ``write_latex(G, "filename.tex", caption="A caption")``.
|
12 |
+
|
13 |
+
To construct a figure with subfigures for each graph to be shown, provide
|
14 |
+
``to_latex`` or ``write_latex`` a list of graphs, a list of subcaptions,
|
15 |
+
and a number of rows of subfigures inside the figure.
|
16 |
+
|
17 |
+
To be able to refer to the figures or subfigures in latex using ``\\ref``,
|
18 |
+
the keyword ``latex_label`` is available for figures and `sub_labels` for
|
19 |
+
a list of labels, one for each subfigure.
|
20 |
+
|
21 |
+
We intend to eventually provide an interface to the TikZ Graph
|
22 |
+
features which include e.g. layout algorithms.
|
23 |
+
|
24 |
+
Let us know via github what you'd like to see available, or better yet
|
25 |
+
give us some code to do it, or even better make a github pull request
|
26 |
+
to add the feature.
|
27 |
+
|
28 |
+
The TikZ approach
|
29 |
+
=================
|
30 |
+
Drawing options can be stored on the graph as node/edge attributes, or
|
31 |
+
can be provided as dicts keyed by node/edge to a string of the options
|
32 |
+
for that node/edge. Similarly a label can be shown for each node/edge
|
33 |
+
by specifying the labels as graph node/edge attributes or by providing
|
34 |
+
a dict keyed by node/edge to the text to be written for that node/edge.
|
35 |
+
|
36 |
+
Options for the tikzpicture environment (e.g. "[scale=2]") can be provided
|
37 |
+
via a keyword argument. Similarly default node and edge options can be
|
38 |
+
provided through keywords arguments. The default node options are applied
|
39 |
+
to the single TikZ "path" that draws all nodes (and no edges). The default edge
|
40 |
+
options are applied to a TikZ "scope" which contains a path for each edge.
|
41 |
+
|
42 |
+
Examples
|
43 |
+
========
|
44 |
+
>>> G = nx.path_graph(3)
|
45 |
+
>>> nx.write_latex(G, "just_my_figure.tex", as_document=True)
|
46 |
+
>>> nx.write_latex(G, "my_figure.tex", caption="A path graph", latex_label="fig1")
|
47 |
+
>>> latex_code = nx.to_latex(G) # a string rather than a file
|
48 |
+
|
49 |
+
You can change many features of the nodes and edges.
|
50 |
+
|
51 |
+
>>> G = nx.path_graph(4, create_using=nx.DiGraph)
|
52 |
+
>>> pos = {n: (n, n) for n in G} # nodes set on a line
|
53 |
+
|
54 |
+
>>> G.nodes[0]["style"] = "blue"
|
55 |
+
>>> G.nodes[2]["style"] = "line width=3,draw"
|
56 |
+
>>> G.nodes[3]["label"] = "Stop"
|
57 |
+
>>> G.edges[(0, 1)]["label"] = "1st Step"
|
58 |
+
>>> G.edges[(0, 1)]["label_opts"] = "near start"
|
59 |
+
>>> G.edges[(1, 2)]["style"] = "line width=3"
|
60 |
+
>>> G.edges[(1, 2)]["label"] = "2nd Step"
|
61 |
+
>>> G.edges[(2, 3)]["style"] = "green"
|
62 |
+
>>> G.edges[(2, 3)]["label"] = "3rd Step"
|
63 |
+
>>> G.edges[(2, 3)]["label_opts"] = "near end"
|
64 |
+
|
65 |
+
>>> nx.write_latex(G, "latex_graph.tex", pos=pos, as_document=True)
|
66 |
+
|
67 |
+
Then compile the LaTeX using something like ``pdflatex latex_graph.tex``
|
68 |
+
and view the pdf file created: ``latex_graph.pdf``.
|
69 |
+
|
70 |
+
If you want **subfigures** each containing one graph, you can input a list of graphs.
|
71 |
+
|
72 |
+
>>> H1 = nx.path_graph(4)
|
73 |
+
>>> H2 = nx.complete_graph(4)
|
74 |
+
>>> H3 = nx.path_graph(8)
|
75 |
+
>>> H4 = nx.complete_graph(8)
|
76 |
+
>>> graphs = [H1, H2, H3, H4]
|
77 |
+
>>> caps = ["Path 4", "Complete graph 4", "Path 8", "Complete graph 8"]
|
78 |
+
>>> lbls = ["fig2a", "fig2b", "fig2c", "fig2d"]
|
79 |
+
>>> nx.write_latex(graphs, "subfigs.tex", n_rows=2, sub_captions=caps, sub_labels=lbls)
|
80 |
+
>>> latex_code = nx.to_latex(graphs, n_rows=2, sub_captions=caps, sub_labels=lbls)
|
81 |
+
|
82 |
+
>>> node_color = {0: "red", 1: "orange", 2: "blue", 3: "gray!90"}
|
83 |
+
>>> edge_width = {e: "line width=1.5" for e in H3.edges}
|
84 |
+
>>> pos = nx.circular_layout(H3)
|
85 |
+
>>> latex_code = nx.to_latex(H3, pos, node_options=node_color, edge_options=edge_width)
|
86 |
+
>>> print(latex_code)
|
87 |
+
\documentclass{report}
|
88 |
+
\usepackage{tikz}
|
89 |
+
\usepackage{subcaption}
|
90 |
+
<BLANKLINE>
|
91 |
+
\begin{document}
|
92 |
+
\begin{figure}
|
93 |
+
\begin{tikzpicture}
|
94 |
+
\draw
|
95 |
+
(1.0, 0.0) node[red] (0){0}
|
96 |
+
(0.707, 0.707) node[orange] (1){1}
|
97 |
+
(-0.0, 1.0) node[blue] (2){2}
|
98 |
+
(-0.707, 0.707) node[gray!90] (3){3}
|
99 |
+
(-1.0, -0.0) node (4){4}
|
100 |
+
(-0.707, -0.707) node (5){5}
|
101 |
+
(0.0, -1.0) node (6){6}
|
102 |
+
(0.707, -0.707) node (7){7};
|
103 |
+
\begin{scope}[-]
|
104 |
+
\draw[line width=1.5] (0) to (1);
|
105 |
+
\draw[line width=1.5] (1) to (2);
|
106 |
+
\draw[line width=1.5] (2) to (3);
|
107 |
+
\draw[line width=1.5] (3) to (4);
|
108 |
+
\draw[line width=1.5] (4) to (5);
|
109 |
+
\draw[line width=1.5] (5) to (6);
|
110 |
+
\draw[line width=1.5] (6) to (7);
|
111 |
+
\end{scope}
|
112 |
+
\end{tikzpicture}
|
113 |
+
\end{figure}
|
114 |
+
\end{document}
|
115 |
+
|
116 |
+
Notes
|
117 |
+
-----
|
118 |
+
If you want to change the preamble/postamble of the figure/document/subfigure
|
119 |
+
environment, use the keyword arguments: `figure_wrapper`, `document_wrapper`,
|
120 |
+
`subfigure_wrapper`. The default values are stored in private variables
|
121 |
+
e.g. ``nx.nx_layout._DOCUMENT_WRAPPER``
|
122 |
+
|
123 |
+
References
|
124 |
+
----------
|
125 |
+
TikZ: https://tikz.dev/
|
126 |
+
|
127 |
+
TikZ options details: https://tikz.dev/tikz-actions
|
128 |
+
"""
|
129 |
+
import numbers
|
130 |
+
import os
|
131 |
+
|
132 |
+
import networkx as nx
|
133 |
+
|
134 |
+
__all__ = [
|
135 |
+
"to_latex_raw",
|
136 |
+
"to_latex",
|
137 |
+
"write_latex",
|
138 |
+
]
|
139 |
+
|
140 |
+
|
141 |
+
@nx.utils.not_implemented_for("multigraph")
|
142 |
+
def to_latex_raw(
|
143 |
+
G,
|
144 |
+
pos="pos",
|
145 |
+
tikz_options="",
|
146 |
+
default_node_options="",
|
147 |
+
node_options="node_options",
|
148 |
+
node_label="label",
|
149 |
+
default_edge_options="",
|
150 |
+
edge_options="edge_options",
|
151 |
+
edge_label="label",
|
152 |
+
edge_label_options="edge_label_options",
|
153 |
+
):
|
154 |
+
"""Return a string of the LaTeX/TikZ code to draw `G`
|
155 |
+
|
156 |
+
This function produces just the code for the tikzpicture
|
157 |
+
without any enclosing environment.
|
158 |
+
|
159 |
+
Parameters
|
160 |
+
==========
|
161 |
+
G : NetworkX graph
|
162 |
+
The NetworkX graph to be drawn
|
163 |
+
pos : string or dict (default "pos")
|
164 |
+
The name of the node attribute on `G` that holds the position of each node.
|
165 |
+
Positions can be sequences of length 2 with numbers for (x,y) coordinates.
|
166 |
+
They can also be strings to denote positions in TikZ style, such as (x, y)
|
167 |
+
or (angle:radius).
|
168 |
+
If a dict, it should be keyed by node to a position.
|
169 |
+
If an empty dict, a circular layout is computed by TikZ.
|
170 |
+
tikz_options : string
|
171 |
+
The tikzpicture options description defining the options for the picture.
|
172 |
+
Often large scale options like `[scale=2]`.
|
173 |
+
default_node_options : string
|
174 |
+
The draw options for a path of nodes. Individual node options override these.
|
175 |
+
node_options : string or dict
|
176 |
+
The name of the node attribute on `G` that holds the options for each node.
|
177 |
+
Or a dict keyed by node to a string holding the options for that node.
|
178 |
+
node_label : string or dict
|
179 |
+
The name of the node attribute on `G` that holds the node label (text)
|
180 |
+
displayed for each node. If the attribute is "" or not present, the node
|
181 |
+
itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
|
182 |
+
Or a dict keyed by node to a string holding the label for that node.
|
183 |
+
default_edge_options : string
|
184 |
+
The options for the scope drawing all edges. The default is "[-]" for
|
185 |
+
undirected graphs and "[->]" for directed graphs.
|
186 |
+
edge_options : string or dict
|
187 |
+
The name of the edge attribute on `G` that holds the options for each edge.
|
188 |
+
If the edge is a self-loop and ``"loop" not in edge_options`` the option
|
189 |
+
"loop," is added to the options for the self-loop edge. Hence you can
|
190 |
+
use "[loop above]" explicitly, but the default is "[loop]".
|
191 |
+
Or a dict keyed by edge to a string holding the options for that edge.
|
192 |
+
edge_label : string or dict
|
193 |
+
The name of the edge attribute on `G` that holds the edge label (text)
|
194 |
+
displayed for each edge. If the attribute is "" or not present, no edge
|
195 |
+
label is drawn.
|
196 |
+
Or a dict keyed by edge to a string holding the label for that edge.
|
197 |
+
edge_label_options : string or dict
|
198 |
+
The name of the edge attribute on `G` that holds the label options for
|
199 |
+
each edge. For example, "[sloped,above,blue]". The default is no options.
|
200 |
+
Or a dict keyed by edge to a string holding the label options for that edge.
|
201 |
+
|
202 |
+
Returns
|
203 |
+
=======
|
204 |
+
latex_code : string
|
205 |
+
The text string which draws the desired graph(s) when compiled by LaTeX.
|
206 |
+
|
207 |
+
See Also
|
208 |
+
========
|
209 |
+
to_latex
|
210 |
+
write_latex
|
211 |
+
"""
|
212 |
+
i4 = "\n "
|
213 |
+
i8 = "\n "
|
214 |
+
|
215 |
+
# set up position dict
|
216 |
+
# TODO allow pos to be None and use a nice TikZ default
|
217 |
+
if not isinstance(pos, dict):
|
218 |
+
pos = nx.get_node_attributes(G, pos)
|
219 |
+
if not pos:
|
220 |
+
# circular layout with radius 2
|
221 |
+
pos = {n: f"({round(360.0 * i / len(G), 3)}:2)" for i, n in enumerate(G)}
|
222 |
+
for node in G:
|
223 |
+
if node not in pos:
|
224 |
+
raise nx.NetworkXError(f"node {node} has no specified pos {pos}")
|
225 |
+
posnode = pos[node]
|
226 |
+
if not isinstance(posnode, str):
|
227 |
+
try:
|
228 |
+
posx, posy = posnode
|
229 |
+
pos[node] = f"({round(posx, 3)}, {round(posy, 3)})"
|
230 |
+
except (TypeError, ValueError):
|
231 |
+
msg = f"position pos[{node}] is not 2-tuple or a string: {posnode}"
|
232 |
+
raise nx.NetworkXError(msg)
|
233 |
+
|
234 |
+
# set up all the dicts
|
235 |
+
if not isinstance(node_options, dict):
|
236 |
+
node_options = nx.get_node_attributes(G, node_options)
|
237 |
+
if not isinstance(node_label, dict):
|
238 |
+
node_label = nx.get_node_attributes(G, node_label)
|
239 |
+
if not isinstance(edge_options, dict):
|
240 |
+
edge_options = nx.get_edge_attributes(G, edge_options)
|
241 |
+
if not isinstance(edge_label, dict):
|
242 |
+
edge_label = nx.get_edge_attributes(G, edge_label)
|
243 |
+
if not isinstance(edge_label_options, dict):
|
244 |
+
edge_label_options = nx.get_edge_attributes(G, edge_label_options)
|
245 |
+
|
246 |
+
# process default options (add brackets or not)
|
247 |
+
topts = "" if tikz_options == "" else f"[{tikz_options.strip('[]')}]"
|
248 |
+
defn = "" if default_node_options == "" else f"[{default_node_options.strip('[]')}]"
|
249 |
+
linestyle = f"{'->' if G.is_directed() else '-'}"
|
250 |
+
if default_edge_options == "":
|
251 |
+
defe = "[" + linestyle + "]"
|
252 |
+
elif "-" in default_edge_options:
|
253 |
+
defe = default_edge_options
|
254 |
+
else:
|
255 |
+
defe = f"[{linestyle},{default_edge_options.strip('[]')}]"
|
256 |
+
|
257 |
+
# Construct the string line by line
|
258 |
+
result = " \\begin{tikzpicture}" + topts
|
259 |
+
result += i4 + " \\draw" + defn
|
260 |
+
# load the nodes
|
261 |
+
for n in G:
|
262 |
+
# node options goes inside square brackets
|
263 |
+
nopts = f"[{node_options[n].strip('[]')}]" if n in node_options else ""
|
264 |
+
# node text goes inside curly brackets {}
|
265 |
+
ntext = f"{{{node_label[n]}}}" if n in node_label else f"{{{n}}}"
|
266 |
+
|
267 |
+
result += i8 + f"{pos[n]} node{nopts} ({n}){ntext}"
|
268 |
+
result += ";\n"
|
269 |
+
|
270 |
+
# load the edges
|
271 |
+
result += " \\begin{scope}" + defe
|
272 |
+
for edge in G.edges:
|
273 |
+
u, v = edge[:2]
|
274 |
+
e_opts = f"{edge_options[edge]}".strip("[]") if edge in edge_options else ""
|
275 |
+
# add loop options for selfloops if not present
|
276 |
+
if u == v and "loop" not in e_opts:
|
277 |
+
e_opts = "loop," + e_opts
|
278 |
+
e_opts = f"[{e_opts}]" if e_opts != "" else ""
|
279 |
+
# TODO -- handle bending of multiedges
|
280 |
+
|
281 |
+
els = edge_label_options[edge] if edge in edge_label_options else ""
|
282 |
+
# edge label options goes inside square brackets []
|
283 |
+
els = f"[{els.strip('[]')}]"
|
284 |
+
# edge text is drawn using the TikZ node command inside curly brackets {}
|
285 |
+
e_label = f" node{els} {{{edge_label[edge]}}}" if edge in edge_label else ""
|
286 |
+
|
287 |
+
result += i8 + f"\\draw{e_opts} ({u}) to{e_label} ({v});"
|
288 |
+
|
289 |
+
result += "\n \\end{scope}\n \\end{tikzpicture}\n"
|
290 |
+
return result
|
291 |
+
|
292 |
+
|
293 |
+
_DOC_WRAPPER_TIKZ = r"""\documentclass{{report}}
|
294 |
+
\usepackage{{tikz}}
|
295 |
+
\usepackage{{subcaption}}
|
296 |
+
|
297 |
+
\begin{{document}}
|
298 |
+
{content}
|
299 |
+
\end{{document}}"""
|
300 |
+
|
301 |
+
|
302 |
+
_FIG_WRAPPER = r"""\begin{{figure}}
|
303 |
+
{content}{caption}{label}
|
304 |
+
\end{{figure}}"""
|
305 |
+
|
306 |
+
|
307 |
+
_SUBFIG_WRAPPER = r""" \begin{{subfigure}}{{{size}\textwidth}}
|
308 |
+
{content}{caption}{label}
|
309 |
+
\end{{subfigure}}"""
|
310 |
+
|
311 |
+
|
312 |
+
def to_latex(
|
313 |
+
Gbunch,
|
314 |
+
pos="pos",
|
315 |
+
tikz_options="",
|
316 |
+
default_node_options="",
|
317 |
+
node_options="node_options",
|
318 |
+
node_label="node_label",
|
319 |
+
default_edge_options="",
|
320 |
+
edge_options="edge_options",
|
321 |
+
edge_label="edge_label",
|
322 |
+
edge_label_options="edge_label_options",
|
323 |
+
caption="",
|
324 |
+
latex_label="",
|
325 |
+
sub_captions=None,
|
326 |
+
sub_labels=None,
|
327 |
+
n_rows=1,
|
328 |
+
as_document=True,
|
329 |
+
document_wrapper=_DOC_WRAPPER_TIKZ,
|
330 |
+
figure_wrapper=_FIG_WRAPPER,
|
331 |
+
subfigure_wrapper=_SUBFIG_WRAPPER,
|
332 |
+
):
|
333 |
+
"""Return latex code to draw the graph(s) in `Gbunch`
|
334 |
+
|
335 |
+
The TikZ drawing utility in LaTeX is used to draw the graph(s).
|
336 |
+
If `Gbunch` is a graph, it is drawn in a figure environment.
|
337 |
+
If `Gbunch` is an iterable of graphs, each is drawn in a subfigure environment
|
338 |
+
within a single figure environment.
|
339 |
+
|
340 |
+
If `as_document` is True, the figure is wrapped inside a document environment
|
341 |
+
so that the resulting string is ready to be compiled by LaTeX. Otherwise,
|
342 |
+
    the string is ready for inclusion in a larger tex document using ``\\include``
    or ``\\input`` statements.

    Parameters
    ==========
    Gbunch : NetworkX graph or iterable of NetworkX graphs
        The NetworkX graph to be drawn or an iterable of graphs
        to be drawn inside subfigures of a single figure.
    pos : string or dict or list
        The name of the node attribute on `G` that holds the position of each node.
        Positions can be sequences of length 2 with numbers for (x,y) coordinates.
        They can also be strings to denote positions in TikZ style, such as (x, y)
        or (angle:radius).
        If a dict, it should be keyed by node to a position.
        If an empty dict, a circular layout is computed by TikZ.
        If you are drawing many graphs in subfigures, use a list of position dicts.
    tikz_options : string
        The tikzpicture options description defining the options for the picture.
        Often large scale options like `[scale=2]`.
    default_node_options : string
        The draw options for a path of nodes. Individual node options override these.
    node_options : string or dict
        The name of the node attribute on `G` that holds the options for each node.
        Or a dict keyed by node to a string holding the options for that node.
    node_label : string or dict
        The name of the node attribute on `G` that holds the node label (text)
        displayed for each node. If the attribute is "" or not present, the node
        itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
        Or a dict keyed by node to a string holding the label for that node.
    default_edge_options : string
        The options for the scope drawing all edges. The default is "[-]" for
        undirected graphs and "[->]" for directed graphs.
    edge_options : string or dict
        The name of the edge attribute on `G` that holds the options for each edge.
        If the edge is a self-loop and ``"loop" not in edge_options`` the option
        "loop," is added to the options for the self-loop edge. Hence you can
        use "[loop above]" explicitly, but the default is "[loop]".
        Or a dict keyed by edge to a string holding the options for that edge.
    edge_label : string or dict
        The name of the edge attribute on `G` that holds the edge label (text)
        displayed for each edge. If the attribute is "" or not present, no edge
        label is drawn.
        Or a dict keyed by edge to a string holding the label for that edge.
    edge_label_options : string or dict
        The name of the edge attribute on `G` that holds the label options for
        each edge. For example, "[sloped,above,blue]". The default is no options.
        Or a dict keyed by edge to a string holding the label options for that edge.
    caption : string
        The caption string for the figure environment
    latex_label : string
        The latex label used for the figure for easy referral from the main text
    sub_captions : list of strings
        The sub_caption string for each subfigure in the figure
    sub_latex_labels : list of strings
        The latex label for each subfigure in the figure
    n_rows : int
        The number of rows of subfigures to arrange for multiple graphs
    as_document : bool
        Whether to wrap the latex code in a document environment for compiling
    document_wrapper : formatted text string with variable ``content``.
        This text is called to evaluate the content embedded in a document
        environment with a preamble setting up TikZ.
    figure_wrapper : formatted text string
        This text is evaluated with variables ``content``, ``caption`` and ``label``.
        It wraps the content and if a caption is provided, adds the latex code for
        that caption, and if a label is provided, adds the latex code for a label.
    subfigure_wrapper : formatted text string
        This text is evaluated with variables ``size``, ``content``, ``caption`` and ``label``.
        It wraps the content and if a caption is provided, adds the latex code for
        that caption, and if a label is provided, adds the latex code for a label.
        The size is the vertical size of each row of subfigures as a fraction.

    Returns
    =======
    latex_code : string
        The text string which draws the desired graph(s) when compiled by LaTeX.

    See Also
    ========
    write_latex
    to_latex_raw
    """
    if hasattr(Gbunch, "adj"):
        raw = to_latex_raw(
            Gbunch,
            pos,
            tikz_options,
            default_node_options,
            node_options,
            node_label,
            default_edge_options,
            edge_options,
            edge_label,
            edge_label_options,
        )
    else:  # iterator of graphs
        sbf = subfigure_wrapper
        size = 1 / n_rows

        N = len(Gbunch)
        if isinstance(pos, str | dict):
            pos = [pos] * N
        if sub_captions is None:
            sub_captions = [""] * N
        if sub_labels is None:
            sub_labels = [""] * N
        if not (len(Gbunch) == len(pos) == len(sub_captions) == len(sub_labels)):
            raise nx.NetworkXError(
                "length of Gbunch, sub_captions and sub_figures must agree"
            )

        raw = ""
        for G, pos, subcap, sublbl in zip(Gbunch, pos, sub_captions, sub_labels):
            subraw = to_latex_raw(
                G,
                pos,
                tikz_options,
                default_node_options,
                node_options,
                node_label,
                default_edge_options,
                edge_options,
                edge_label,
                edge_label_options,
            )
            cap = f" \\caption{{{subcap}}}" if subcap else ""
            lbl = f"\\label{{{sublbl}}}" if sublbl else ""
            raw += sbf.format(size=size, content=subraw, caption=cap, label=lbl)
            raw += "\n"

    # put raw latex code into a figure environment and optionally into a document
    raw = raw[:-1]
    cap = f"\n \\caption{{{caption}}}" if caption else ""
    lbl = f"\\label{{{latex_label}}}" if latex_label else ""
    fig = figure_wrapper.format(content=raw, caption=cap, label=lbl)
    if as_document:
        return document_wrapper.format(content=fig)
    return fig


@nx.utils.open_file(1, mode="w")
def write_latex(Gbunch, path, **options):
    """Write the latex code to draw the graph(s) onto `path`.

    This convenience function creates the latex drawing code as a string
    and writes that to a file ready to be compiled when `as_document` is True
    or ready to be ``import`` ed or ``include`` ed into your main LaTeX document.

    The `path` argument can be a string filename or a file handle to write to.

    Parameters
    ----------
    Gbunch : NetworkX graph or iterable of NetworkX graphs
        If Gbunch is a graph, it is drawn in a figure environment.
        If Gbunch is an iterable of graphs, each is drawn in a subfigure
        environment within a single figure environment.
    path : filename
        Filename or file handle to write to
    options : dict
        By default, TikZ is used with options: (others are ignored)::

            pos : string or dict or list
                The name of the node attribute on `G` that holds the position of each node.
                Positions can be sequences of length 2 with numbers for (x,y) coordinates.
                They can also be strings to denote positions in TikZ style, such as (x, y)
                or (angle:radius).
                If a dict, it should be keyed by node to a position.
                If an empty dict, a circular layout is computed by TikZ.
                If you are drawing many graphs in subfigures, use a list of position dicts.
            tikz_options : string
                The tikzpicture options description defining the options for the picture.
                Often large scale options like `[scale=2]`.
            default_node_options : string
                The draw options for a path of nodes. Individual node options override these.
            node_options : string or dict
                The name of the node attribute on `G` that holds the options for each node.
                Or a dict keyed by node to a string holding the options for that node.
            node_label : string or dict
                The name of the node attribute on `G` that holds the node label (text)
                displayed for each node. If the attribute is "" or not present, the node
                itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
                Or a dict keyed by node to a string holding the label for that node.
            default_edge_options : string
                The options for the scope drawing all edges. The default is "[-]" for
                undirected graphs and "[->]" for directed graphs.
            edge_options : string or dict
                The name of the edge attribute on `G` that holds the options for each edge.
                If the edge is a self-loop and ``"loop" not in edge_options`` the option
                "loop," is added to the options for the self-loop edge. Hence you can
                use "[loop above]" explicitly, but the default is "[loop]".
                Or a dict keyed by edge to a string holding the options for that edge.
            edge_label : string or dict
                The name of the edge attribute on `G` that holds the edge label (text)
                displayed for each edge. If the attribute is "" or not present, no edge
                label is drawn.
                Or a dict keyed by edge to a string holding the label for that edge.
            edge_label_options : string or dict
                The name of the edge attribute on `G` that holds the label options for
                each edge. For example, "[sloped,above,blue]". The default is no options.
                Or a dict keyed by edge to a string holding the label options for that edge.
            caption : string
                The caption string for the figure environment
            latex_label : string
                The latex label used for the figure for easy referral from the main text
            sub_captions : list of strings
                The sub_caption string for each subfigure in the figure
            sub_latex_labels : list of strings
                The latex label for each subfigure in the figure
            n_rows : int
                The number of rows of subfigures to arrange for multiple graphs
            as_document : bool
                Whether to wrap the latex code in a document environment for compiling
            document_wrapper : formatted text string with variable ``content``.
                This text is called to evaluate the content embedded in a document
                environment with a preamble setting up the TikZ syntax.
            figure_wrapper : formatted text string
                This text is evaluated with variables ``content``, ``caption`` and ``label``.
                It wraps the content and if a caption is provided, adds the latex code for
                that caption, and if a label is provided, adds the latex code for a label.
            subfigure_wrapper : formatted text string
                This text is evaluated with variables ``size``, ``content``, ``caption`` and ``label``.
                It wraps the content and if a caption is provided, adds the latex code for
                that caption, and if a label is provided, adds the latex code for a label.
                The size is the vertical size of each row of subfigures as a fraction.

    See Also
    ========
    to_latex
    """
    path.write(to_latex(Gbunch, **options))
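
A short usage sketch for the two functions above (illustrative only: the graph, positions, caption, and output filename are made up, and compiling the result assumes a TikZ-capable LaTeX toolchain):

import networkx as nx
from networkx.drawing.nx_latex import to_latex, write_latex

G = nx.path_graph(4)
pos = {n: (n, 0) for n in G}  # explicit (x, y) positions keyed by node

# Full standalone document (as_document=True is the default)
latex_code = to_latex(G, pos=pos, caption="A path graph", latex_label="fig:path")

# Figure-only fragment, ready for \input or \include in a larger document
write_latex(G, "path_graph.tex", pos=pos, as_document=False)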
env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pydot.py
ADDED
@@ -0,0 +1,411 @@
"""
*****
Pydot
*****

Import and export NetworkX graphs in Graphviz dot format using pydot.

Either this module or nx_agraph can be used to interface with graphviz.

Examples
--------
>>> G = nx.complete_graph(5)
>>> PG = nx.nx_pydot.to_pydot(G)
>>> H = nx.nx_pydot.from_pydot(PG)

See Also
--------
 - pydot: https://github.com/erocarrera/pydot
 - Graphviz: https://www.graphviz.org
 - DOT Language: http://www.graphviz.org/doc/info/lang.html
"""
from locale import getpreferredencoding

import networkx as nx
from networkx.utils import open_file

__all__ = [
    "write_dot",
    "read_dot",
    "graphviz_layout",
    "pydot_layout",
    "to_pydot",
    "from_pydot",
]


@open_file(1, mode="w")
def write_dot(G, path):
    """Write NetworkX graph G to Graphviz dot format on path.

    Path can be a string or a file handle.
    """
    P = to_pydot(G)
    path.write(P.to_string())
    return


@open_file(0, mode="r")
@nx._dispatchable(name="pydot_read_dot", graphs=None, returns_graph=True)
def read_dot(path):
    """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the
    dot file with the passed path.

    If this file contains multiple graphs, only the first such graph is
    returned. All graphs _except_ the first are silently ignored.

    Parameters
    ----------
    path : str or file
        Filename or file handle.

    Returns
    -------
    G : MultiGraph or MultiDiGraph
        A :class:`MultiGraph` or :class:`MultiDiGraph`.

    Notes
    -----
    Use `G = nx.Graph(nx.nx_pydot.read_dot(path))` to return a :class:`Graph` instead of a
    :class:`MultiGraph`.
    """
    import pydot

    data = path.read()

    # List of one or more "pydot.Dot" instances deserialized from this file.
    P_list = pydot.graph_from_dot_data(data)

    # Convert only the first such instance into a NetworkX graph.
    return from_pydot(P_list[0])


@nx._dispatchable(graphs=None, returns_graph=True)
def from_pydot(P):
    """Returns a NetworkX graph from a Pydot graph.

    Parameters
    ----------
    P : Pydot graph
        A graph created with Pydot

    Returns
    -------
    G : NetworkX multigraph
        A MultiGraph or MultiDiGraph.

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> A = nx.nx_pydot.to_pydot(K5)
    >>> G = nx.nx_pydot.from_pydot(A)  # return MultiGraph

    # make a Graph instead of MultiGraph
    >>> G = nx.Graph(nx.nx_pydot.from_pydot(A))

    """

    if P.get_strict(None):  # pydot bug: get_strict() shouldn't take argument
        multiedges = False
    else:
        multiedges = True

    if P.get_type() == "graph":  # undirected
        if multiedges:
            N = nx.MultiGraph()
        else:
            N = nx.Graph()
    else:
        if multiedges:
            N = nx.MultiDiGraph()
        else:
            N = nx.DiGraph()

    # assign defaults
    name = P.get_name().strip('"')
    if name != "":
        N.name = name

    # add nodes, attributes to N.node_attr
    for p in P.get_node_list():
        n = p.get_name().strip('"')
        if n in ("node", "graph", "edge"):
            continue
        N.add_node(n, **p.get_attributes())

    # add edges
    for e in P.get_edge_list():
        u = e.get_source()
        v = e.get_destination()
        attr = e.get_attributes()
        s = []
        d = []

        if isinstance(u, str):
            s.append(u.strip('"'))
        else:
            for unodes in u["nodes"]:
                s.append(unodes.strip('"'))

        if isinstance(v, str):
            d.append(v.strip('"'))
        else:
            for vnodes in v["nodes"]:
                d.append(vnodes.strip('"'))

        for source_node in s:
            for destination_node in d:
                N.add_edge(source_node, destination_node, **attr)

    # add default attributes for graph, nodes, edges
    pattr = P.get_attributes()
    if pattr:
        N.graph["graph"] = pattr
    try:
        N.graph["node"] = P.get_node_defaults()[0]
    except (IndexError, TypeError):
        pass  # N.graph['node']={}
    try:
        N.graph["edge"] = P.get_edge_defaults()[0]
    except (IndexError, TypeError):
        pass  # N.graph['edge']={}
    return N

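# A minimal round-trip sketch, assuming the optional pydot dependency is installed.
# The graph, node names, and attribute values below are illustrative only; pydot
# stores every attribute as text, so values come back as strings.
def _pydot_round_trip_sketch():
    G = nx.Graph(name="triangle")
    G.add_edge("a", "b", weight="2")
    G.add_edge("b", "c", weight="3")
    P = to_pydot(G)  # defined later in this module
    # The pydot graph is strict (simple, no self-loops), so from_pydot
    # returns an nx.Graph rather than a MultiGraph here.
    return from_pydot(P)
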
def _check_colon_quotes(s):
    # A quick helper function to check if a string has a colon in it
    # and if it is quoted properly with double quotes.
    # refer https://github.com/pydot/pydot/issues/258
    return ":" in s and (s[0] != '"' or s[-1] != '"')


def to_pydot(N):
    """Returns a pydot graph from a NetworkX graph N.

    Parameters
    ----------
    N : NetworkX graph
        A graph created with NetworkX

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> P = nx.nx_pydot.to_pydot(K5)

    Notes
    -----

    """
    import pydot

    # set Graphviz graph type
    if N.is_directed():
        graph_type = "digraph"
    else:
        graph_type = "graph"
    strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph()

    name = N.name
    graph_defaults = N.graph.get("graph", {})
    if name == "":
        P = pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults)
    else:
        P = pydot.Dot(
            f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults
        )
    try:
        P.set_node_defaults(**N.graph["node"])
    except KeyError:
        pass
    try:
        P.set_edge_defaults(**N.graph["edge"])
    except KeyError:
        pass

    for n, nodedata in N.nodes(data=True):
        str_nodedata = {str(k): str(v) for k, v in nodedata.items()}
        # Explicitly catch nodes with ":" in node names or nodedata.
        n = str(n)
        raise_error = _check_colon_quotes(n) or (
            any(
                (_check_colon_quotes(k) or _check_colon_quotes(v))
                for k, v in str_nodedata.items()
            )
        )
        if raise_error:
            raise ValueError(
                f'Node names and attributes should not contain ":" unless they are quoted with "".\
                For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
                Please refer https://github.com/pydot/pydot/issues/258'
            )
        p = pydot.Node(n, **str_nodedata)
        P.add_node(p)

    if N.is_multigraph():
        for u, v, key, edgedata in N.edges(data=True, keys=True):
            str_edgedata = {str(k): str(v) for k, v in edgedata.items() if k != "key"}
            u, v = str(u), str(v)
            raise_error = (
                _check_colon_quotes(u)
                or _check_colon_quotes(v)
                or (
                    any(
                        (_check_colon_quotes(k) or _check_colon_quotes(val))
                        for k, val in str_edgedata.items()
                    )
                )
            )
            if raise_error:
                raise ValueError(
                    f'Node names and attributes should not contain ":" unless they are quoted with "".\
                    For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
                    Please refer https://github.com/pydot/pydot/issues/258'
                )
            edge = pydot.Edge(u, v, key=str(key), **str_edgedata)
            P.add_edge(edge)

    else:
        for u, v, edgedata in N.edges(data=True):
            str_edgedata = {str(k): str(v) for k, v in edgedata.items()}
            u, v = str(u), str(v)
            raise_error = (
                _check_colon_quotes(u)
                or _check_colon_quotes(v)
                or (
                    any(
                        (_check_colon_quotes(k) or _check_colon_quotes(val))
                        for k, val in str_edgedata.items()
                    )
                )
            )
            if raise_error:
                raise ValueError(
                    f'Node names and attributes should not contain ":" unless they are quoted with "".\
                    For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
                    Please refer https://github.com/pydot/pydot/issues/258'
                )
            edge = pydot.Edge(u, v, **str_edgedata)
            P.add_edge(edge)
    return P

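# A small sketch of the colon-quoting rule enforced above, assuming pydot is
# installed: names or attribute values containing ":" must already be wrapped
# in double quotes, otherwise to_pydot raises ValueError (pydot issue #258).
def _colon_quoting_sketch():
    ok = nx.Graph()
    ok.add_edge('"host:8080"', "client")  # pre-quoted, accepted
    P = to_pydot(ok)

    bad = nx.Graph()
    bad.add_edge("host:8080", "client")  # unquoted colon
    try:
        to_pydot(bad)
    except ValueError:
        pass  # raised as documented above
    return P
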
def graphviz_layout(G, prog="neato", root=None):
    """Create node positions using Pydot and Graphviz.

    Returns a dictionary of positions keyed by node.

    Parameters
    ----------
    G : NetworkX Graph
        The graph for which the layout is computed.
    prog : string (default: 'neato')
        The name of the GraphViz program to use for layout.
        Options depend on GraphViz version but may include:
        'dot', 'twopi', 'fdp', 'sfdp', 'circo'
    root : Node from G or None (default: None)
        The node of G from which to start some layout algorithms.

    Returns
    -------
    Dictionary of (x, y) positions keyed by node.

    Examples
    --------
    >>> G = nx.complete_graph(4)
    >>> pos = nx.nx_pydot.graphviz_layout(G)
    >>> pos = nx.nx_pydot.graphviz_layout(G, prog="dot")

    Notes
    -----
    This is a wrapper for pydot_layout.
    """
    return pydot_layout(G=G, prog=prog, root=root)


def pydot_layout(G, prog="neato", root=None):
    """Create node positions using :mod:`pydot` and Graphviz.

    Parameters
    ----------
    G : Graph
        NetworkX graph to be laid out.
    prog : string (default: 'neato')
        Name of the GraphViz command to use for layout.
        Options depend on GraphViz version but may include:
        'dot', 'twopi', 'fdp', 'sfdp', 'circo'
    root : Node from G or None (default: None)
        The node of G from which to start some layout algorithms.

    Returns
    -------
    dict
        Dictionary of positions keyed by node.

    Examples
    --------
    >>> G = nx.complete_graph(4)
    >>> pos = nx.nx_pydot.pydot_layout(G)
    >>> pos = nx.nx_pydot.pydot_layout(G, prog="dot")

    Notes
    -----
    If you use complex node objects, they may have the same string
    representation and GraphViz could treat them as the same node.
    The layout may assign both nodes a single location. See Issue #1568
    If this occurs in your case, consider relabeling the nodes just
    for the layout computation using something similar to::

        H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
        H_layout = nx.nx_pydot.pydot_layout(H, prog="dot")
        G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}

    """
    import pydot

    P = to_pydot(G)
    if root is not None:
        P.set("root", str(root))

    # List of low-level bytes comprising a string in the dot language converted
    # from the passed graph with the passed external GraphViz command.
    D_bytes = P.create_dot(prog=prog)

    # Unique string decoded from these bytes with the preferred locale encoding
    D = str(D_bytes, encoding=getpreferredencoding())

    if D == "":  # no data returned
        print(f"Graphviz layout with {prog} failed")
        print()
        print("To debug what happened try:")
        print("P = nx.nx_pydot.to_pydot(G)")
        print('P.write_dot("file.dot")')
        print(f"And then run {prog} on file.dot")
        return

    # List of one or more "pydot.Dot" instances deserialized from this string.
    Q_list = pydot.graph_from_dot_data(D)
    assert len(Q_list) == 1

    # The first and only such instance, as guaranteed by the above assertion.
    Q = Q_list[0]

    node_pos = {}
    for n in G.nodes():
        str_n = str(n)
        # Explicitly catch nodes with ":" in node names or nodedata.
        if _check_colon_quotes(str_n):
            raise ValueError(
                f'Node names and node attributes should not contain ":" unless they are quoted with "".\
                For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
                Please refer https://github.com/pydot/pydot/issues/258'
            )
        pydot_node = pydot.Node(str_n).get_name()
        node = Q.get_node(pydot_node)

        if isinstance(node, list):
            node = node[0]
        pos = node.get_pos()[1:-1]  # strip leading and trailing double quotes
        if pos is not None:
            xx, yy = pos.split(",")
            node_pos[n] = (float(xx), float(yy))
    return node_pos
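
A short usage sketch tying the functions above together (illustrative: the graph and the "cycle.dot" filename are made up, and it assumes pydot plus a Graphviz installation providing the `dot` program):

import networkx as nx
from networkx.drawing.nx_pydot import pydot_layout, read_dot, write_dot

G = nx.cycle_graph(4)
write_dot(G, "cycle.dot")          # serialize to the DOT language
H = read_dot("cycle.dot")          # parse it back (node names become strings)
pos = pydot_layout(G, prog="dot")  # {node: (x, y)} computed by Graphviz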
env-llmeval/lib/python3.10/site-packages/networkx/drawing/nx_pylab.py
ADDED
@@ -0,0 +1,1871 @@
"""
**********
Matplotlib
**********

Draw networks with matplotlib.

Examples
--------
>>> G = nx.complete_graph(5)
>>> nx.draw(G)

See Also
--------
 - :doc:`matplotlib <matplotlib:index>`
 - :func:`matplotlib.pyplot.scatter`
 - :obj:`matplotlib.patches.FancyArrowPatch`
"""
import collections
import itertools
from numbers import Number

import networkx as nx
from networkx.drawing.layout import (
    circular_layout,
    kamada_kawai_layout,
    planar_layout,
    random_layout,
    shell_layout,
    spectral_layout,
    spring_layout,
)

__all__ = [
    "draw",
    "draw_networkx",
    "draw_networkx_nodes",
    "draw_networkx_edges",
    "draw_networkx_labels",
    "draw_networkx_edge_labels",
    "draw_circular",
    "draw_kamada_kawai",
    "draw_random",
    "draw_spectral",
    "draw_spring",
    "draw_planar",
    "draw_shell",
]


def draw(G, pos=None, ax=None, **kwds):
    """Draw the graph G with Matplotlib.

    Draw the graph as a simple representation with no node
    labels or edge labels and using the full Matplotlib figure area
    and no axis labels by default. See draw_networkx() for more
    full-featured drawing that allows title, axis labels etc.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary, optional
        A dictionary with nodes as keys and positions as values.
        If not specified a spring layout positioning will be computed.
        See :py:mod:`networkx.drawing.layout` for functions that
        compute node positions.

    ax : Matplotlib Axes object, optional
        Draw the graph in specified Matplotlib axes.

    kwds : optional keywords
        See networkx.draw_networkx() for a description of optional keywords.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)
    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout

    See Also
    --------
    draw_networkx
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels

    Notes
    -----
    This function has the same name as pylab.draw and pyplot.draw
    so beware when using `from networkx import *`

    since you might overwrite the pylab.draw function.

    With pyplot use

    >>> import matplotlib.pyplot as plt
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)  # networkx draw()
    >>> plt.draw()  # pyplot draw()

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html
    """
    import matplotlib.pyplot as plt

    if ax is None:
        cf = plt.gcf()
    else:
        cf = ax.get_figure()
    cf.set_facecolor("w")
    if ax is None:
        if cf.axes:
            ax = cf.gca()
        else:
            ax = cf.add_axes((0, 0, 1, 1))

    if "with_labels" not in kwds:
        kwds["with_labels"] = "labels" in kwds

    draw_networkx(G, pos=pos, ax=ax, **kwds)
    ax.set_axis_off()
    plt.draw_if_interactive()
    return

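# A minimal usage sketch for draw(), assuming a working matplotlib backend; the
# graph choice and output filename are illustrative only.
def _draw_sketch():
    import matplotlib.pyplot as plt

    G = nx.petersen_graph()
    fig, ax = plt.subplots()
    # Drawing into an explicit Axes avoids touching the "current" figure state.
    draw(G, pos=nx.circular_layout(G), ax=ax, node_color="tab:blue")
    fig.savefig("petersen.png")
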
def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds):
    r"""Draw the graph G using Matplotlib.

    Draw the graph with Matplotlib with options for node positions,
    labeling, titles, and many other drawing features.
    See draw() for simple drawing without labels or axes.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary, optional
        A dictionary with nodes as keys and positions as values.
        If not specified a spring layout positioning will be computed.
        See :py:mod:`networkx.drawing.layout` for functions that
        compute node positions.

    arrows : bool or None, optional (default=None)
        If `None`, directed graphs draw arrowheads with
        `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw edges
        via `~matplotlib.collections.LineCollection` for speed.
        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
        If `False`, draw edges using LineCollection (linear and fast).
        For directed graphs, if True draw arrowheads.
        Note: Arrows will be the same color as edges.

    arrowstyle : str (default='-\|>' for directed graphs)
        For directed graphs, choose the style of the arrowheads.
        For undirected graphs default to '-'

        See `matplotlib.patches.ArrowStyle` for more options.

    arrowsize : int or list (default=10)
        For directed graphs, choose the size of the arrow head's length and
        width. A list of values can be passed in to assign a different size for arrow head's length and width.
        See `matplotlib.patches.FancyArrowPatch` for attribute `mutation_scale`
        for more info.

    with_labels : bool (default=True)
        Set to True to draw labels on the nodes.

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    nodelist : list (default=list(G))
        Draw only specified nodes

    edgelist : list (default=list(G.edges()))
        Draw only specified edges

    node_size : scalar or array (default=300)
        Size of nodes. If an array is specified it must be the
        same length as nodelist.

    node_color : color or array of colors (default='#1f78b4')
        Node color. Can be a single color or a sequence of colors with the same
        length as nodelist. Color can be string or rgb (or rgba) tuple of
        floats from 0-1. If numeric values are specified they will be
        mapped to colors using the cmap and vmin,vmax parameters. See
        matplotlib.scatter for more details.

    node_shape : string (default='o')
        The shape of the node. Specification is as matplotlib.scatter
        marker, one of 'so^>v<dph8'.

    alpha : float or None (default=None)
        The node and edge transparency

    cmap : Matplotlib colormap, optional
        Colormap for mapping intensities of nodes

    vmin,vmax : float, optional
        Minimum and maximum for node colormap scaling

    linewidths : scalar or sequence (default=1.0)
        Line width of symbol border

    width : float or array of floats (default=1.0)
        Line width of edges

    edge_color : color or array of colors (default='k')
        Edge color. Can be a single color or a sequence of colors with the same
        length as edgelist. Color can be string or rgb (or rgba) tuple of
        floats from 0-1. If numeric values are specified they will be
        mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.

    edge_cmap : Matplotlib colormap, optional
        Colormap for mapping intensities of edges

    edge_vmin,edge_vmax : floats, optional
        Minimum and maximum for edge colormap scaling

    style : string (default=solid line)
        Edge line style e.g.: '-', '--', '-.', ':'
        or words like 'solid' or 'dashed'.
        (See `matplotlib.patches.FancyArrowPatch`: `linestyle`)

    labels : dictionary (default=None)
        Node labels in a dictionary of text labels keyed by node

    font_size : int (default=12 for nodes, 10 for edges)
        Font size for text labels

    font_color : color (default='k' black)
        Font color string. Color can be string or rgb (or rgba) tuple of
        floats from 0-1.

    font_weight : string (default='normal')
        Font weight

    font_family : string (default='sans-serif')
        Font family

    label : string, optional
        Label for graph legend

    hide_ticks : bool, optional
        Hide ticks of axes. When `True` (the default), ticks and ticklabels
        are removed from the axes. To set ticks and tick labels to the pyplot default,
        use ``hide_ticks=False``.

    kwds : optional keywords
        See networkx.draw_networkx_nodes(), networkx.draw_networkx_edges(), and
        networkx.draw_networkx_labels() for a description of optional keywords.

    Notes
    -----
    For directed graphs, arrows are drawn at the head end. Arrows can be
    turned off with keyword arrows=False.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)
    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout

    >>> import matplotlib.pyplot as plt
    >>> limits = plt.axis("off")  # turn off axis

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels
    """
    from inspect import signature

    import matplotlib.pyplot as plt

    # Get all valid keywords by inspecting the signatures of draw_networkx_nodes,
    # draw_networkx_edges, draw_networkx_labels

    valid_node_kwds = signature(draw_networkx_nodes).parameters.keys()
    valid_edge_kwds = signature(draw_networkx_edges).parameters.keys()
    valid_label_kwds = signature(draw_networkx_labels).parameters.keys()

    # Create a set with all valid keywords across the three functions and
    # remove the arguments of this function (draw_networkx)
    valid_kwds = (valid_node_kwds | valid_edge_kwds | valid_label_kwds) - {
        "G",
        "pos",
        "arrows",
        "with_labels",
    }

    if any(k not in valid_kwds for k in kwds):
        invalid_args = ", ".join([k for k in kwds if k not in valid_kwds])
        raise ValueError(f"Received invalid argument(s): {invalid_args}")

    node_kwds = {k: v for k, v in kwds.items() if k in valid_node_kwds}
    edge_kwds = {k: v for k, v in kwds.items() if k in valid_edge_kwds}
    label_kwds = {k: v for k, v in kwds.items() if k in valid_label_kwds}

    if pos is None:
        pos = nx.drawing.spring_layout(G)  # default to spring layout

    draw_networkx_nodes(G, pos, **node_kwds)
    draw_networkx_edges(G, pos, arrows=arrows, **edge_kwds)
    if with_labels:
        draw_networkx_labels(G, pos, **label_kwds)
    plt.draw_if_interactive()

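# A small sketch of the keyword routing implemented above (illustrative graph and
# values): node-related keywords reach draw_networkx_nodes(), edge-related ones
# reach draw_networkx_edges(), and label-related ones reach draw_networkx_labels().
def _draw_networkx_sketch():
    G = nx.karate_club_graph()
    pos = spring_layout(G, seed=42)
    draw_networkx(
        G,
        pos=pos,
        node_size=150,      # forwarded to draw_networkx_nodes
        edge_color="gray",  # forwarded to draw_networkx_edges
        font_size=8,        # forwarded to draw_networkx_labels
    )
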
def draw_networkx_nodes(
    G,
    pos,
    nodelist=None,
    node_size=300,
    node_color="#1f78b4",
    node_shape="o",
    alpha=None,
    cmap=None,
    vmin=None,
    vmax=None,
    ax=None,
    linewidths=None,
    edgecolors=None,
    label=None,
    margins=None,
    hide_ticks=True,
):
    """Draw the nodes of the graph G.

    This draws only the nodes of the graph G.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary
        A dictionary with nodes as keys and positions as values.
        Positions should be sequences of length 2.

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    nodelist : list (default list(G))
        Draw only specified nodes

    node_size : scalar or array (default=300)
        Size of nodes. If an array it must be the same length as nodelist.

    node_color : color or array of colors (default='#1f78b4')
        Node color. Can be a single color or a sequence of colors with the same
        length as nodelist. Color can be string or rgb (or rgba) tuple of
        floats from 0-1. If numeric values are specified they will be
        mapped to colors using the cmap and vmin,vmax parameters. See
        matplotlib.scatter for more details.

    node_shape : string (default='o')
        The shape of the node. Specification is as matplotlib.scatter
        marker, one of 'so^>v<dph8'.

    alpha : float or array of floats (default=None)
        The node transparency. This can be a single alpha value,
        in which case it will be applied to all the nodes of color. Otherwise,
        if it is an array, the elements of alpha will be applied to the colors
        in order (cycling through alpha multiple times if necessary).

    cmap : Matplotlib colormap (default=None)
        Colormap for mapping intensities of nodes

    vmin,vmax : floats or None (default=None)
        Minimum and maximum for node colormap scaling

    linewidths : [None | scalar | sequence] (default=1.0)
        Line width of symbol border

    edgecolors : [None | scalar | sequence] (default = node_color)
        Colors of node borders. Can be a single color or a sequence of colors with the
        same length as nodelist. Color can be string or rgb (or rgba) tuple of floats
        from 0-1. If numeric values are specified they will be mapped to colors
        using the cmap and vmin,vmax parameters. See `~matplotlib.pyplot.scatter` for more details.

    label : [None | string]
        Label for legend

    margins : float or 2-tuple, optional
        Sets the padding for axis autoscaling. Increase margin to prevent
        clipping for nodes that are near the edges of an image. Values should
        be in the range ``[0, 1]``. See :meth:`matplotlib.axes.Axes.margins`
        for details. The default is `None`, which uses the Matplotlib default.

    hide_ticks : bool, optional
        Hide ticks of axes. When `True` (the default), ticks and ticklabels
        are removed from the axes. To set ticks and tick labels to the pyplot default,
        use ``hide_ticks=False``.

    Returns
    -------
    matplotlib.collections.PathCollection
        `PathCollection` of the nodes.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nodes = nx.draw_networkx_nodes(G, pos=nx.spring_layout(G))

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels
    """
    from collections.abc import Iterable

    import matplotlib as mpl
    import matplotlib.collections  # call as mpl.collections
    import matplotlib.pyplot as plt
    import numpy as np

    if ax is None:
        ax = plt.gca()

    if nodelist is None:
        nodelist = list(G)

    if len(nodelist) == 0:  # empty nodelist, no drawing
        return mpl.collections.PathCollection(None)

    try:
        xy = np.asarray([pos[v] for v in nodelist])
    except KeyError as err:
        raise nx.NetworkXError(f"Node {err} has no position.") from err

    if isinstance(alpha, Iterable):
        node_color = apply_alpha(node_color, alpha, nodelist, cmap, vmin, vmax)
        alpha = None

    node_collection = ax.scatter(
        xy[:, 0],
        xy[:, 1],
        s=node_size,
        c=node_color,
        marker=node_shape,
        cmap=cmap,
        vmin=vmin,
        vmax=vmax,
        alpha=alpha,
        linewidths=linewidths,
        edgecolors=edgecolors,
        label=label,
    )
    if hide_ticks:
        ax.tick_params(
            axis="both",
            which="both",
            bottom=False,
            left=False,
            labelbottom=False,
            labelleft=False,
        )

    if margins is not None:
        if isinstance(margins, Iterable):
            ax.margins(*margins)
        else:
            ax.margins(margins)

    node_collection.set_zorder(2)
    return node_collection

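# A usage sketch for draw_networkx_nodes() on its own, with made-up positions;
# per-node sizes must match the node list, and a margins value keeps large
# markers from being clipped at the Axes boundary.
def _draw_nodes_sketch():
    G = nx.path_graph(5)
    pos = {n: (n, 0) for n in G}
    sizes = [100 * (G.degree(n) + 1) for n in G]  # one size per node
    return draw_networkx_nodes(
        G, pos, node_size=sizes, node_color="orange", margins=0.2
    )
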
class FancyArrowFactory:
    """Draw arrows with `matplotlib.patches.FancyArrowPatch`"""

    class ConnectionStyleFactory:
        def __init__(self, connectionstyles, selfloop_height, ax=None):
            import matplotlib as mpl
            import matplotlib.path  # call as mpl.path
            import numpy as np

            self.ax = ax
            self.mpl = mpl
            self.np = np
            self.base_connection_styles = [
                mpl.patches.ConnectionStyle(cs) for cs in connectionstyles
            ]
            self.n = len(self.base_connection_styles)
            self.selfloop_height = selfloop_height

        def curved(self, edge_index):
            return self.base_connection_styles[edge_index % self.n]

        def self_loop(self, edge_index):
            def self_loop_connection(posA, posB, *args, **kwargs):
                if not self.np.all(posA == posB):
                    raise nx.NetworkXError(
                        "`self_loop` connection style method "
                        "is only to be used for self-loops"
                    )
                # this is called with _screen space_ values
                # so convert back to data space
                data_loc = self.ax.transData.inverted().transform(posA)
                v_shift = 0.1 * self.selfloop_height
                h_shift = v_shift * 0.5
                # put the top of the loop first so arrow is not hidden by node
                path = self.np.asarray(
                    [
                        # 1
                        [0, v_shift],
                        # 4 4 4
                        [h_shift, v_shift],
                        [h_shift, 0],
                        [0, 0],
                        # 4 4 4
                        [-h_shift, 0],
                        [-h_shift, v_shift],
                        [0, v_shift],
                    ]
                )
                # Rotate self loop 90 deg. if more than 1
                # This will allow for maximum of 4 visible self loops
                if edge_index % 4:
                    x, y = path.T
                    for _ in range(edge_index % 4):
                        x, y = y, -x
                    path = self.np.array([x, y]).T
                return self.mpl.path.Path(
                    self.ax.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4]
                )

            return self_loop_connection

    def __init__(
        self,
        edge_pos,
        edgelist,
        nodelist,
        edge_indices,
        node_size,
        selfloop_height,
        connectionstyle="arc3",
        node_shape="o",
        arrowstyle="-",
        arrowsize=10,
        edge_color="k",
        alpha=None,
        linewidth=1.0,
        style="solid",
        min_source_margin=0,
        min_target_margin=0,
        ax=None,
    ):
        import matplotlib as mpl
        import matplotlib.patches  # call as mpl.patches
        import matplotlib.pyplot as plt
        import numpy as np

        if isinstance(connectionstyle, str):
            connectionstyle = [connectionstyle]
        elif np.iterable(connectionstyle):
            connectionstyle = list(connectionstyle)
        else:
            msg = "ConnectionStyleFactory arg `connectionstyle` must be str or iterable"
            raise nx.NetworkXError(msg)
        self.ax = ax
        self.mpl = mpl
        self.np = np
        self.edge_pos = edge_pos
        self.edgelist = edgelist
        self.nodelist = nodelist
        self.node_shape = node_shape
        self.min_source_margin = min_source_margin
        self.min_target_margin = min_target_margin
        self.edge_indices = edge_indices
        self.node_size = node_size
        self.connectionstyle_factory = self.ConnectionStyleFactory(
            connectionstyle, selfloop_height, ax
        )
        self.arrowstyle = arrowstyle
        self.arrowsize = arrowsize
        self.arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha)
        self.linewidth = linewidth
        self.style = style
        if isinstance(arrowsize, list) and len(arrowsize) != len(edge_pos):
            raise ValueError("arrowsize should have the same length as edgelist")

    def __call__(self, i):
        (x1, y1), (x2, y2) = self.edge_pos[i]
        shrink_source = 0  # space from source to tail
        shrink_target = 0  # space from head to target
        if self.np.iterable(self.node_size):  # many node sizes
            source, target = self.edgelist[i][:2]
            source_node_size = self.node_size[self.nodelist.index(source)]
            target_node_size = self.node_size[self.nodelist.index(target)]
            shrink_source = self.to_marker_edge(source_node_size, self.node_shape)
            shrink_target = self.to_marker_edge(target_node_size, self.node_shape)
        else:
            shrink_source = self.to_marker_edge(self.node_size, self.node_shape)
            shrink_target = shrink_source
        shrink_source = max(shrink_source, self.min_source_margin)
        shrink_target = max(shrink_target, self.min_target_margin)

        # scale factor of arrow head
        if isinstance(self.arrowsize, list):
            mutation_scale = self.arrowsize[i]
        else:
            mutation_scale = self.arrowsize

        if len(self.arrow_colors) > i:
            arrow_color = self.arrow_colors[i]
        elif len(self.arrow_colors) == 1:
            arrow_color = self.arrow_colors[0]
        else:  # Cycle through colors
            arrow_color = self.arrow_colors[i % len(self.arrow_colors)]

        if self.np.iterable(self.linewidth):
            if len(self.linewidth) > i:
                linewidth = self.linewidth[i]
            else:
                linewidth = self.linewidth[i % len(self.linewidth)]
        else:
            linewidth = self.linewidth

        if (
            self.np.iterable(self.style)
            and not isinstance(self.style, str)
            and not isinstance(self.style, tuple)
        ):
            if len(self.style) > i:
                linestyle = self.style[i]
            else:  # Cycle through styles
                linestyle = self.style[i % len(self.style)]
        else:
            linestyle = self.style

        if x1 == x2 and y1 == y2:
            connectionstyle = self.connectionstyle_factory.self_loop(
                self.edge_indices[i]
            )
        else:
            connectionstyle = self.connectionstyle_factory.curved(self.edge_indices[i])
        return self.mpl.patches.FancyArrowPatch(
            (x1, y1),
            (x2, y2),
            arrowstyle=self.arrowstyle,
            shrinkA=shrink_source,
            shrinkB=shrink_target,
            mutation_scale=mutation_scale,
            color=arrow_color,
            linewidth=linewidth,
            connectionstyle=connectionstyle,
            linestyle=linestyle,
            zorder=1,  # arrows go behind nodes
        )

    def to_marker_edge(self, marker_size, marker):
        if marker in "s^>v<d":  # `large` markers need extra space
            return self.np.sqrt(2 * marker_size) / 2
        else:
            return self.np.sqrt(marker_size) / 2

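# A sketch of drawing parallel edges as separate arcs (illustrative graph and
# radii), assuming the connectionstyle handling below: a list of
# "arc3,rad=..." styles is cycled over the parallel edges between two nodes.
def _curved_parallel_edges_sketch():
    G = nx.MultiDiGraph()
    G.add_edge("a", "b")
    G.add_edge("a", "b")  # second, parallel edge
    pos = {"a": (0, 0), "b": (1, 0)}
    return draw_networkx_edges(
        G, pos, connectionstyle=["arc3,rad=0.15", "arc3,rad=-0.15"]
    )
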
def draw_networkx_edges(
    G,
    pos,
    edgelist=None,
    width=1.0,
    edge_color="k",
    style="solid",
    alpha=None,
    arrowstyle=None,
    arrowsize=10,
    edge_cmap=None,
    edge_vmin=None,
    edge_vmax=None,
    ax=None,
    arrows=None,
    label=None,
    node_size=300,
    nodelist=None,
    node_shape="o",
    connectionstyle="arc3",
    min_source_margin=0,
    min_target_margin=0,
    hide_ticks=True,
):
    r"""Draw the edges of the graph G.

    This draws only the edges of the graph G.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary
        A dictionary with nodes as keys and positions as values.
        Positions should be sequences of length 2.

    edgelist : collection of edge tuples (default=G.edges())
        Draw only specified edges

    width : float or array of floats (default=1.0)
        Line width of edges

    edge_color : color or array of colors (default='k')
        Edge color. Can be a single color or a sequence of colors with the same
        length as edgelist. Color can be string or rgb (or rgba) tuple of
        floats from 0-1. If numeric values are specified they will be
        mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.

    style : string or array of strings (default='solid')
        Edge line style e.g.: '-', '--', '-.', ':'
        or words like 'solid' or 'dashed'.
        Can be a single style or a sequence of styles with the same
        length as the edge list.
        If fewer styles than edges are given the styles will cycle.
        If more styles than edges are given the styles will be used sequentially
        and not be exhausted.
        Also, `(offset, onoffseq)` tuples can be used as style instead of strings.
        (See `matplotlib.patches.FancyArrowPatch`: `linestyle`)

    alpha : float or array of floats (default=None)
        The edge transparency. This can be a single alpha value,
        in which case it will be applied to all specified edges. Otherwise,
        if it is an array, the elements of alpha will be applied to the colors
        in order (cycling through alpha multiple times if necessary).

    edge_cmap : Matplotlib colormap, optional
        Colormap for mapping intensities of edges

    edge_vmin,edge_vmax : floats, optional
        Minimum and maximum for edge colormap scaling

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    arrows : bool or None, optional (default=None)
        If `None`, directed graphs draw arrowheads with
        `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw edges
        via `~matplotlib.collections.LineCollection` for speed.
        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
        If `False`, draw edges using LineCollection (linear and fast).

        Note: Arrowheads will be the same color as edges.

    arrowstyle : str (default='-\|>' for directed graphs)
        For directed graphs and `arrows==True` defaults to '-\|>',
        For undirected graphs default to '-'.

        See `matplotlib.patches.ArrowStyle` for more options.

    arrowsize : int (default=10)
        For directed graphs, choose the size of the arrow head's length and
        width. See `matplotlib.patches.FancyArrowPatch` for attribute
        `mutation_scale` for more info.

    connectionstyle : string or iterable of strings (default="arc3")
        Pass the connectionstyle parameter to create curved arc of rounding
        radius rad. For example, connectionstyle='arc3,rad=0.2'.
        See `matplotlib.patches.ConnectionStyle` and
        `matplotlib.patches.FancyArrowPatch` for more info.
|
775 |
+
If Iterable, index indicates i'th edge key of MultiGraph
|
776 |
+
|
777 |
+
node_size : scalar or array (default=300)
|
778 |
+
Size of nodes. Though the nodes are not drawn with this function, the
|
779 |
+
node size is used in determining edge positioning.
|
780 |
+
|
781 |
+
nodelist : list, optional (default=G.nodes())
|
782 |
+
This provides the node order for the `node_size` array (if it is an array).
|
783 |
+
|
784 |
+
node_shape : string (default='o')
|
785 |
+
The marker used for nodes, used in determining edge positioning.
|
786 |
+
Specification is as a `matplotlib.markers` marker, e.g. one of 'so^>v<dph8'.
|
787 |
+
|
788 |
+
label : None or string
|
789 |
+
Label for legend
|
790 |
+
|
791 |
+
min_source_margin : int (default=0)
|
792 |
+
The minimum margin (gap) at the beginning of the edge at the source.
|
793 |
+
|
794 |
+
min_target_margin : int (default=0)
|
795 |
+
The minimum margin (gap) at the end of the edge at the target.
|
796 |
+
|
797 |
+
hide_ticks : bool, optional
|
798 |
+
Hide ticks of axes. When `True` (the default), ticks and ticklabels
|
799 |
+
are removed from the axes. To set ticks and tick labels to the pyplot default,
|
800 |
+
use ``hide_ticks=False``.
|
801 |
+
|
802 |
+
Returns
|
803 |
+
-------
|
804 |
+
matplotlib.collections.LineCollection or a list of matplotlib.patches.FancyArrowPatch
|
805 |
+
If ``arrows=True``, a list of FancyArrowPatches is returned.
|
806 |
+
If ``arrows=False``, a LineCollection is returned.
|
807 |
+
If ``arrows=None`` (the default), then a LineCollection is returned if
|
808 |
+
`G` is undirected, otherwise returns a list of FancyArrowPatches.
|
809 |
+
|
810 |
+
Notes
|
811 |
+
-----
|
812 |
+
For directed graphs, arrows are drawn at the head end. Arrows can be
|
813 |
+
turned off with keyword arrows=False or by passing an arrowstyle without
|
814 |
+
an arrow on the end.
|
815 |
+
|
816 |
+
Be sure to include `node_size` as a keyword argument; arrows are
|
817 |
+
drawn considering the size of nodes.
|
818 |
+
|
819 |
+
Self-loops are always drawn with `~matplotlib.patches.FancyArrowPatch`
|
820 |
+
regardless of the value of `arrows` or whether `G` is directed.
|
821 |
+
When ``arrows=False`` or ``arrows=None`` and `G` is undirected, the
|
822 |
+
FancyArrowPatches corresponding to the self-loops are not explicitly
|
823 |
+
returned. They should instead be accessed via the ``Axes.patches``
|
824 |
+
attribute (see examples).
|
825 |
+
|
826 |
+
Examples
|
827 |
+
--------
|
828 |
+
>>> G = nx.dodecahedral_graph()
|
829 |
+
>>> edges = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
|
830 |
+
|
831 |
+
>>> G = nx.DiGraph()
|
832 |
+
>>> G.add_edges_from([(1, 2), (1, 3), (2, 3)])
|
833 |
+
>>> arcs = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
|
834 |
+
>>> alphas = [0.3, 0.4, 0.5]
|
835 |
+
>>> for i, arc in enumerate(arcs): # change alpha values of arcs
|
836 |
+
... arc.set_alpha(alphas[i])
|
837 |
+
|
838 |
+
The FancyArrowPatches corresponding to self-loops are not always
|
839 |
+
returned, but can always be accessed via the ``patches`` attribute of the
|
840 |
+
`matplotlib.Axes` object.
|
841 |
+
|
842 |
+
>>> import matplotlib.pyplot as plt
|
843 |
+
>>> fig, ax = plt.subplots()
|
844 |
+
>>> G = nx.Graph([(0, 1), (0, 0)]) # Self-loop at node 0
|
845 |
+
>>> edge_collection = nx.draw_networkx_edges(G, pos=nx.circular_layout(G), ax=ax)
|
846 |
+
>>> self_loop_fap = ax.patches[0]
|
847 |
+
|
848 |
+
Also see the NetworkX drawing examples at
|
849 |
+
https://networkx.org/documentation/latest/auto_examples/index.html
|
850 |
+
|
851 |
+
See Also
|
852 |
+
--------
|
853 |
+
draw
|
854 |
+
draw_networkx
|
855 |
+
draw_networkx_nodes
|
856 |
+
draw_networkx_labels
|
857 |
+
draw_networkx_edge_labels
|
858 |
+
|
859 |
+
"""
|
860 |
+
import warnings
|
861 |
+
|
862 |
+
import matplotlib as mpl
|
863 |
+
import matplotlib.collections # call as mpl.collections
|
864 |
+
import matplotlib.colors # call as mpl.colors
|
865 |
+
import matplotlib.pyplot as plt
|
866 |
+
import numpy as np
|
867 |
+
|
868 |
+
# The default behavior is to use LineCollection to draw edges for
|
869 |
+
# undirected graphs (for performance reasons) and use FancyArrowPatches
|
870 |
+
# for directed graphs.
|
871 |
+
# The `arrows` keyword can be used to override the default behavior
|
872 |
+
if arrows is None:
|
873 |
+
use_linecollection = not (G.is_directed() or G.is_multigraph())
|
874 |
+
else:
|
875 |
+
if not isinstance(arrows, bool):
|
876 |
+
raise TypeError("Argument `arrows` must be of type bool or None")
|
877 |
+
use_linecollection = not arrows
|
878 |
+
|
879 |
+
if isinstance(connectionstyle, str):
|
880 |
+
connectionstyle = [connectionstyle]
|
881 |
+
elif np.iterable(connectionstyle):
|
882 |
+
connectionstyle = list(connectionstyle)
|
883 |
+
else:
|
884 |
+
msg = "draw_networkx_edges arg `connectionstyle` must be str or iterable"
|
885 |
+
raise nx.NetworkXError(msg)
|
886 |
+
|
887 |
+
# Some kwargs only apply to FancyArrowPatches. Warn users when they use
|
888 |
+
# non-default values for these kwargs when LineCollection is being used
|
889 |
+
# instead of silently ignoring the specified option
|
890 |
+
if use_linecollection:
|
891 |
+
msg = (
|
892 |
+
"\n\nThe {0} keyword argument is not applicable when drawing edges\n"
|
893 |
+
"with LineCollection.\n\n"
|
894 |
+
"To make this warning go away, either specify `arrows=True` to\n"
|
895 |
+
"force FancyArrowPatches or use the default values.\n"
|
896 |
+
"Note that using FancyArrowPatches may be slow for large graphs.\n"
|
897 |
+
)
|
898 |
+
if arrowstyle is not None:
|
899 |
+
warnings.warn(msg.format("arrowstyle"), category=UserWarning, stacklevel=2)
|
900 |
+
if arrowsize != 10:
|
901 |
+
warnings.warn(msg.format("arrowsize"), category=UserWarning, stacklevel=2)
|
902 |
+
if min_source_margin != 0:
|
903 |
+
warnings.warn(
|
904 |
+
msg.format("min_source_margin"), category=UserWarning, stacklevel=2
|
905 |
+
)
|
906 |
+
if min_target_margin != 0:
|
907 |
+
warnings.warn(
|
908 |
+
msg.format("min_target_margin"), category=UserWarning, stacklevel=2
|
909 |
+
)
|
910 |
+
if any(cs != "arc3" for cs in connectionstyle):
|
911 |
+
warnings.warn(
|
912 |
+
msg.format("connectionstyle"), category=UserWarning, stacklevel=2
|
913 |
+
)
|
914 |
+
|
915 |
+
# NOTE: Arrowstyle modification must occur after the warnings section
|
916 |
+
if arrowstyle is None:
|
917 |
+
arrowstyle = "-|>" if G.is_directed() else "-"
|
918 |
+
|
919 |
+
if ax is None:
|
920 |
+
ax = plt.gca()
|
921 |
+
|
922 |
+
if edgelist is None:
|
923 |
+
edgelist = list(G.edges) # (u, v, k) for multigraph (u, v) otherwise
|
924 |
+
|
925 |
+
if len(edgelist):
|
926 |
+
if G.is_multigraph():
|
927 |
+
key_count = collections.defaultdict(lambda: itertools.count(0))
|
928 |
+
edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist]
|
929 |
+
else:
|
930 |
+
edge_indices = [0] * len(edgelist)
|
931 |
+
else: # no edges!
|
932 |
+
return []
|
933 |
+
|
934 |
+
if nodelist is None:
|
935 |
+
nodelist = list(G.nodes())
|
936 |
+
|
937 |
+
# FancyArrowPatch handles color=None different from LineCollection
|
938 |
+
if edge_color is None:
|
939 |
+
edge_color = "k"
|
940 |
+
|
941 |
+
# set edge positions
|
942 |
+
edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist])
|
943 |
+
|
944 |
+
# Check if edge_color is an array of floats and map to edge_cmap.
|
945 |
+
# This is the only case handled differently from matplotlib
|
946 |
+
if (
|
947 |
+
np.iterable(edge_color)
|
948 |
+
and (len(edge_color) == len(edge_pos))
|
949 |
+
and np.all([isinstance(c, Number) for c in edge_color])
|
950 |
+
):
|
951 |
+
if edge_cmap is not None:
|
952 |
+
assert isinstance(edge_cmap, mpl.colors.Colormap)
|
953 |
+
else:
|
954 |
+
edge_cmap = plt.get_cmap()
|
955 |
+
if edge_vmin is None:
|
956 |
+
edge_vmin = min(edge_color)
|
957 |
+
if edge_vmax is None:
|
958 |
+
edge_vmax = max(edge_color)
|
959 |
+
color_normal = mpl.colors.Normalize(vmin=edge_vmin, vmax=edge_vmax)
|
960 |
+
edge_color = [edge_cmap(color_normal(e)) for e in edge_color]
|
961 |
+
|
962 |
+
# compute initial view
|
963 |
+
minx = np.amin(np.ravel(edge_pos[:, :, 0]))
|
964 |
+
maxx = np.amax(np.ravel(edge_pos[:, :, 0]))
|
965 |
+
miny = np.amin(np.ravel(edge_pos[:, :, 1]))
|
966 |
+
maxy = np.amax(np.ravel(edge_pos[:, :, 1]))
|
967 |
+
w = maxx - minx
|
968 |
+
h = maxy - miny
|
969 |
+
|
970 |
+
# Self-loops are scaled by view extent, except in cases the extent
|
971 |
+
# is 0, e.g. for a single node. In this case, fall back to scaling
|
972 |
+
# by the maximum node size
|
973 |
+
selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max()
|
974 |
+
fancy_arrow_factory = FancyArrowFactory(
|
975 |
+
edge_pos,
|
976 |
+
edgelist,
|
977 |
+
nodelist,
|
978 |
+
edge_indices,
|
979 |
+
node_size,
|
980 |
+
selfloop_height,
|
981 |
+
connectionstyle,
|
982 |
+
node_shape,
|
983 |
+
arrowstyle,
|
984 |
+
arrowsize,
|
985 |
+
edge_color,
|
986 |
+
alpha,
|
987 |
+
width,
|
988 |
+
style,
|
989 |
+
min_source_margin,
|
990 |
+
min_target_margin,
|
991 |
+
ax=ax,
|
992 |
+
)
|
993 |
+
|
994 |
+
# Draw the edges
|
995 |
+
if use_linecollection:
|
996 |
+
edge_collection = mpl.collections.LineCollection(
|
997 |
+
edge_pos,
|
998 |
+
colors=edge_color,
|
999 |
+
linewidths=width,
|
1000 |
+
antialiaseds=(1,),
|
1001 |
+
linestyle=style,
|
1002 |
+
alpha=alpha,
|
1003 |
+
)
|
1004 |
+
edge_collection.set_cmap(edge_cmap)
|
1005 |
+
edge_collection.set_clim(edge_vmin, edge_vmax)
|
1006 |
+
edge_collection.set_zorder(1) # edges go behind nodes
|
1007 |
+
edge_collection.set_label(label)
|
1008 |
+
ax.add_collection(edge_collection)
|
1009 |
+
edge_viz_obj = edge_collection
|
1010 |
+
|
1011 |
+
# Make sure selfloop edges are also drawn
|
1012 |
+
# ---------------------------------------
|
1013 |
+
selfloops_to_draw = [loop for loop in nx.selfloop_edges(G) if loop in edgelist]
|
1014 |
+
if selfloops_to_draw:
|
1015 |
+
edgelist_tuple = list(map(tuple, edgelist))
|
1016 |
+
arrow_collection = []
|
1017 |
+
for loop in selfloops_to_draw:
|
1018 |
+
i = edgelist_tuple.index(loop)
|
1019 |
+
arrow = fancy_arrow_factory(i)
|
1020 |
+
arrow_collection.append(arrow)
|
1021 |
+
ax.add_patch(arrow)
|
1022 |
+
else:
|
1023 |
+
edge_viz_obj = []
|
1024 |
+
for i in range(len(edgelist)):
|
1025 |
+
arrow = fancy_arrow_factory(i)
|
1026 |
+
ax.add_patch(arrow)
|
1027 |
+
edge_viz_obj.append(arrow)
|
1028 |
+
|
1029 |
+
# update view after drawing
|
1030 |
+
padx, pady = 0.05 * w, 0.05 * h
|
1031 |
+
corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady)
|
1032 |
+
ax.update_datalim(corners)
|
1033 |
+
ax.autoscale_view()
|
1034 |
+
|
1035 |
+
if hide_ticks:
|
1036 |
+
ax.tick_params(
|
1037 |
+
axis="both",
|
1038 |
+
which="both",
|
1039 |
+
bottom=False,
|
1040 |
+
left=False,
|
1041 |
+
labelbottom=False,
|
1042 |
+
labelleft=False,
|
1043 |
+
)
|
1044 |
+
|
1045 |
+
return edge_viz_obj
|
1046 |
+
|
1047 |
+
|
1048 |
+
def draw_networkx_labels(
|
1049 |
+
G,
|
1050 |
+
pos,
|
1051 |
+
labels=None,
|
1052 |
+
font_size=12,
|
1053 |
+
font_color="k",
|
1054 |
+
font_family="sans-serif",
|
1055 |
+
font_weight="normal",
|
1056 |
+
alpha=None,
|
1057 |
+
bbox=None,
|
1058 |
+
horizontalalignment="center",
|
1059 |
+
verticalalignment="center",
|
1060 |
+
ax=None,
|
1061 |
+
clip_on=True,
|
1062 |
+
hide_ticks=True,
|
1063 |
+
):
|
1064 |
+
"""Draw node labels on the graph G.
|
1065 |
+
|
1066 |
+
Parameters
|
1067 |
+
----------
|
1068 |
+
G : graph
|
1069 |
+
A networkx graph
|
1070 |
+
|
1071 |
+
pos : dictionary
|
1072 |
+
A dictionary with nodes as keys and positions as values.
|
1073 |
+
Positions should be sequences of length 2.
|
1074 |
+
|
1075 |
+
labels : dictionary (default={n: n for n in G})
|
1076 |
+
Node labels in a dictionary of text labels keyed by node.
|
1077 |
+
Node-keys in labels should appear as keys in `pos`.
|
1078 |
+
If needed use: `{n:lab for n,lab in labels.items() if n in pos}`
|
1079 |
+
|
1080 |
+
font_size : int (default=12)
|
1081 |
+
Font size for text labels
|
1082 |
+
|
1083 |
+
font_color : color (default='k' black)
|
1084 |
+
Font color string. Color can be string or rgb (or rgba) tuple of
|
1085 |
+
floats from 0-1.
|
1086 |
+
|
1087 |
+
font_weight : string (default='normal')
|
1088 |
+
Font weight
|
1089 |
+
|
1090 |
+
font_family : string (default='sans-serif')
|
1091 |
+
Font family
|
1092 |
+
|
1093 |
+
alpha : float or None (default=None)
|
1094 |
+
The text transparency
|
1095 |
+
|
1096 |
+
bbox : Matplotlib bbox, (default is Matplotlib's ax.text default)
|
1097 |
+
Specify text box properties (e.g. shape, color etc.) for node labels.
|
1098 |
+
|
1099 |
+
horizontalalignment : string (default='center')
|
1100 |
+
Horizontal alignment {'center', 'right', 'left'}
|
1101 |
+
|
1102 |
+
verticalalignment : string (default='center')
|
1103 |
+
Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'}
|
1104 |
+
|
1105 |
+
ax : Matplotlib Axes object, optional
|
1106 |
+
Draw the graph in the specified Matplotlib axes.
|
1107 |
+
|
1108 |
+
clip_on : bool (default=True)
|
1109 |
+
Turn on clipping of node labels at axis boundaries
|
1110 |
+
|
1111 |
+
hide_ticks : bool, optional
|
1112 |
+
Hide ticks of axes. When `True` (the default), ticks and ticklabels
|
1113 |
+
are removed from the axes. To set ticks and tick labels to the pyplot default,
|
1114 |
+
use ``hide_ticks=False``.
|
1115 |
+
|
1116 |
+
Returns
|
1117 |
+
-------
|
1118 |
+
dict
|
1119 |
+
`dict` of labels keyed on the nodes
|
1120 |
+
|
1121 |
+
Examples
|
1122 |
+
--------
|
1123 |
+
>>> G = nx.dodecahedral_graph()
|
1124 |
+
>>> labels = nx.draw_networkx_labels(G, pos=nx.spring_layout(G))
|
1125 |
+
|
1126 |
+
Also see the NetworkX drawing examples at
|
1127 |
+
https://networkx.org/documentation/latest/auto_examples/index.html
|
1128 |
+
|
1129 |
+
See Also
|
1130 |
+
--------
|
1131 |
+
draw
|
1132 |
+
draw_networkx
|
1133 |
+
draw_networkx_nodes
|
1134 |
+
draw_networkx_edges
|
1135 |
+
draw_networkx_edge_labels
|
1136 |
+
"""
|
1137 |
+
import matplotlib.pyplot as plt
|
1138 |
+
|
1139 |
+
if ax is None:
|
1140 |
+
ax = plt.gca()
|
1141 |
+
|
1142 |
+
if labels is None:
|
1143 |
+
labels = {n: n for n in G.nodes()}
|
1144 |
+
|
1145 |
+
text_items = {} # there is no text collection so we'll fake one
|
1146 |
+
for n, label in labels.items():
|
1147 |
+
(x, y) = pos[n]
|
1148 |
+
if not isinstance(label, str):
|
1149 |
+
label = str(label) # this makes "1" and 1 labeled the same
|
1150 |
+
t = ax.text(
|
1151 |
+
x,
|
1152 |
+
y,
|
1153 |
+
label,
|
1154 |
+
size=font_size,
|
1155 |
+
color=font_color,
|
1156 |
+
family=font_family,
|
1157 |
+
weight=font_weight,
|
1158 |
+
alpha=alpha,
|
1159 |
+
horizontalalignment=horizontalalignment,
|
1160 |
+
verticalalignment=verticalalignment,
|
1161 |
+
transform=ax.transData,
|
1162 |
+
bbox=bbox,
|
1163 |
+
clip_on=clip_on,
|
1164 |
+
)
|
1165 |
+
text_items[n] = t
|
1166 |
+
|
1167 |
+
if hide_ticks:
|
1168 |
+
ax.tick_params(
|
1169 |
+
axis="both",
|
1170 |
+
which="both",
|
1171 |
+
bottom=False,
|
1172 |
+
left=False,
|
1173 |
+
labelbottom=False,
|
1174 |
+
labelleft=False,
|
1175 |
+
)
|
1176 |
+
|
1177 |
+
return text_items
|
1178 |
+
|
1179 |
+
|
1180 |
+
def draw_networkx_edge_labels(
|
1181 |
+
G,
|
1182 |
+
pos,
|
1183 |
+
edge_labels=None,
|
1184 |
+
label_pos=0.5,
|
1185 |
+
font_size=10,
|
1186 |
+
font_color="k",
|
1187 |
+
font_family="sans-serif",
|
1188 |
+
font_weight="normal",
|
1189 |
+
alpha=None,
|
1190 |
+
bbox=None,
|
1191 |
+
horizontalalignment="center",
|
1192 |
+
verticalalignment="center",
|
1193 |
+
ax=None,
|
1194 |
+
rotate=True,
|
1195 |
+
clip_on=True,
|
1196 |
+
node_size=300,
|
1197 |
+
nodelist=None,
|
1198 |
+
connectionstyle="arc3",
|
1199 |
+
hide_ticks=True,
|
1200 |
+
):
|
1201 |
+
"""Draw edge labels.
|
1202 |
+
|
1203 |
+
Parameters
|
1204 |
+
----------
|
1205 |
+
G : graph
|
1206 |
+
A networkx graph
|
1207 |
+
|
1208 |
+
pos : dictionary
|
1209 |
+
A dictionary with nodes as keys and positions as values.
|
1210 |
+
Positions should be sequences of length 2.
|
1211 |
+
|
1212 |
+
edge_labels : dictionary (default=None)
|
1213 |
+
Edge labels in a dictionary of labels keyed by edge two-tuple.
|
1214 |
+
Only labels for the keys in the dictionary are drawn.
|
1215 |
+
|
1216 |
+
label_pos : float (default=0.5)
|
1217 |
+
Position of edge label along edge (0=head, 0.5=center, 1=tail)
|
1218 |
+
|
1219 |
+
font_size : int (default=10)
|
1220 |
+
Font size for text labels
|
1221 |
+
|
1222 |
+
font_color : color (default='k' black)
|
1223 |
+
Font color string. Color can be string or rgb (or rgba) tuple of
|
1224 |
+
floats from 0-1.
|
1225 |
+
|
1226 |
+
font_weight : string (default='normal')
|
1227 |
+
Font weight
|
1228 |
+
|
1229 |
+
font_family : string (default='sans-serif')
|
1230 |
+
Font family
|
1231 |
+
|
1232 |
+
alpha : float or None (default=None)
|
1233 |
+
The text transparency
|
1234 |
+
|
1235 |
+
bbox : Matplotlib bbox, optional
|
1236 |
+
Specify text box properties (e.g. shape, color etc.) for edge labels.
|
1237 |
+
Default is {boxstyle='round', ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0)}.
|
1238 |
+
|
1239 |
+
horizontalalignment : string (default='center')
|
1240 |
+
Horizontal alignment {'center', 'right', 'left'}
|
1241 |
+
|
1242 |
+
verticalalignment : string (default='center')
|
1243 |
+
Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'}
|
1244 |
+
|
1245 |
+
ax : Matplotlib Axes object, optional
|
1246 |
+
Draw the graph in the specified Matplotlib axes.
|
1247 |
+
|
1248 |
+
rotate : bool (default=True)
|
1249 |
+
Rotate edge labels to lie parallel to edges
|
1250 |
+
|
1251 |
+
clip_on : bool (default=True)
|
1252 |
+
Turn on clipping of edge labels at axis boundaries
|
1253 |
+
|
1254 |
+
node_size : scalar or array (default=300)
|
1255 |
+
Size of nodes. If an array it must be the same length as nodelist.
|
1256 |
+
|
1257 |
+
nodelist : list, optional (default=G.nodes())
|
1258 |
+
This provides the node order for the `node_size` array (if it is an array).
|
1259 |
+
|
1260 |
+
connectionstyle : string or iterable of strings (default="arc3")
|
1261 |
+
Pass the connectionstyle parameter to create curved arc of rounding
|
1262 |
+
radius rad. For example, connectionstyle='arc3,rad=0.2'.
|
1263 |
+
See `matplotlib.patches.ConnectionStyle` and
|
1264 |
+
`matplotlib.patches.FancyArrowPatch` for more info.
|
1265 |
+
If Iterable, index indicates i'th edge key of MultiGraph
|
1266 |
+
|
1267 |
+
hide_ticks : bool, optional
|
1268 |
+
Hide ticks of axes. When `True` (the default), ticks and ticklabels
|
1269 |
+
are removed from the axes. To set ticks and tick labels to the pyplot default,
|
1270 |
+
use ``hide_ticks=False``.
|
1271 |
+
|
1272 |
+
Returns
|
1273 |
+
-------
|
1274 |
+
dict
|
1275 |
+
`dict` of labels keyed by edge
|
1276 |
+
|
1277 |
+
Examples
|
1278 |
+
--------
|
1279 |
+
>>> G = nx.dodecahedral_graph()
|
1280 |
+
>>> edge_labels = nx.draw_networkx_edge_labels(G, pos=nx.spring_layout(G))
|
1281 |
+
|
1282 |
+
Also see the NetworkX drawing examples at
|
1283 |
+
https://networkx.org/documentation/latest/auto_examples/index.html
|
1284 |
+
|
1285 |
+
See Also
|
1286 |
+
--------
|
1287 |
+
draw
|
1288 |
+
draw_networkx
|
1289 |
+
draw_networkx_nodes
|
1290 |
+
draw_networkx_edges
|
1291 |
+
draw_networkx_labels
|
1292 |
+
"""
|
1293 |
+
import matplotlib as mpl
|
1294 |
+
import matplotlib.pyplot as plt
|
1295 |
+
import numpy as np
|
1296 |
+
|
1297 |
+
class CurvedArrowText(mpl.text.Text):
|
1298 |
+
def __init__(
|
1299 |
+
self,
|
1300 |
+
arrow,
|
1301 |
+
*args,
|
1302 |
+
label_pos=0.5,
|
1303 |
+
labels_horizontal=False,
|
1304 |
+
ax=None,
|
1305 |
+
**kwargs,
|
1306 |
+
):
|
1307 |
+
# Bind to FancyArrowPatch
|
1308 |
+
self.arrow = arrow
|
1309 |
+
# how far along the text should be on the curve,
|
1310 |
+
# 0 is at start, 1 is at end etc.
|
1311 |
+
self.label_pos = label_pos
|
1312 |
+
self.labels_horizontal = labels_horizontal
|
1313 |
+
if ax is None:
|
1314 |
+
ax = plt.gca()
|
1315 |
+
self.ax = ax
|
1316 |
+
self.x, self.y, self.angle = self._update_text_pos_angle(arrow)
|
1317 |
+
|
1318 |
+
# Create text object
|
1319 |
+
super().__init__(self.x, self.y, *args, rotation=self.angle, **kwargs)
|
1320 |
+
# Bind to axis
|
1321 |
+
self.ax.add_artist(self)
|
1322 |
+
|
1323 |
+
def _get_arrow_path_disp(self, arrow):
|
1324 |
+
"""
|
1325 |
+
This is part of FancyArrowPatch._get_path_in_displaycoord
|
1326 |
+
It omits the second part of the method where path is converted
|
1327 |
+
to polygon based on width
|
1328 |
+
The transform is taken from ax, not the object, as the object
|
1329 |
+
has not been added yet, and doesn't have transform
|
1330 |
+
"""
|
1331 |
+
dpi_cor = arrow._dpi_cor
|
1332 |
+
# trans_data = arrow.get_transform()
|
1333 |
+
trans_data = self.ax.transData
|
1334 |
+
if arrow._posA_posB is not None:
|
1335 |
+
posA = arrow._convert_xy_units(arrow._posA_posB[0])
|
1336 |
+
posB = arrow._convert_xy_units(arrow._posA_posB[1])
|
1337 |
+
(posA, posB) = trans_data.transform((posA, posB))
|
1338 |
+
_path = arrow.get_connectionstyle()(
|
1339 |
+
posA,
|
1340 |
+
posB,
|
1341 |
+
patchA=arrow.patchA,
|
1342 |
+
patchB=arrow.patchB,
|
1343 |
+
shrinkA=arrow.shrinkA * dpi_cor,
|
1344 |
+
shrinkB=arrow.shrinkB * dpi_cor,
|
1345 |
+
)
|
1346 |
+
else:
|
1347 |
+
_path = trans_data.transform_path(arrow._path_original)
|
1348 |
+
# Return is in display coordinates
|
1349 |
+
return _path
|
1350 |
+
|
1351 |
+
def _update_text_pos_angle(self, arrow):
|
1352 |
+
# Fractional label position
|
1353 |
+
path_disp = self._get_arrow_path_disp(arrow)
|
1354 |
+
(x1, y1), (cx, cy), (x2, y2) = path_disp.vertices
|
1355 |
+
# Text position at a proportion t along the line in display coords
|
1356 |
+
# default is 0.5 so text appears at the halfway point
|
1357 |
+
t = self.label_pos
|
1358 |
+
tt = 1 - t
|
1359 |
+
x = tt**2 * x1 + 2 * t * tt * cx + t**2 * x2
|
1360 |
+
y = tt**2 * y1 + 2 * t * tt * cy + t**2 * y2
|
1361 |
+
if self.labels_horizontal:
|
1362 |
+
# Horizontal text labels
|
1363 |
+
angle = 0
|
1364 |
+
else:
|
1365 |
+
# Labels parallel to curve
|
1366 |
+
change_x = 2 * tt * (cx - x1) + 2 * t * (x2 - cx)
|
1367 |
+
change_y = 2 * tt * (cy - y1) + 2 * t * (y2 - cy)
|
1368 |
+
angle = (np.arctan2(change_y, change_x) / (2 * np.pi)) * 360
|
1369 |
+
# Text is "right way up"
|
1370 |
+
if angle > 90:
|
1371 |
+
angle -= 180
|
1372 |
+
if angle < -90:
|
1373 |
+
angle += 180
|
1374 |
+
(x, y) = self.ax.transData.inverted().transform((x, y))
|
1375 |
+
return x, y, angle
|
1376 |
+
|
1377 |
+
def draw(self, renderer):
|
1378 |
+
# recalculate the text position and angle
|
1379 |
+
self.x, self.y, self.angle = self._update_text_pos_angle(self.arrow)
|
1380 |
+
self.set_position((self.x, self.y))
|
1381 |
+
self.set_rotation(self.angle)
|
1382 |
+
# redraw text
|
1383 |
+
super().draw(renderer)
|
1384 |
+
|
1385 |
+
# use default box of white with white border
|
1386 |
+
if bbox is None:
|
1387 |
+
bbox = {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}
|
1388 |
+
|
1389 |
+
if isinstance(connectionstyle, str):
|
1390 |
+
connectionstyle = [connectionstyle]
|
1391 |
+
elif np.iterable(connectionstyle):
|
1392 |
+
connectionstyle = list(connectionstyle)
|
1393 |
+
else:
|
1394 |
+
raise nx.NetworkXError(
|
1395 |
+
"draw_networkx_edges arg `connectionstyle` must be"
|
1396 |
+
"string or iterable of strings"
|
1397 |
+
)
|
1398 |
+
|
1399 |
+
if ax is None:
|
1400 |
+
ax = plt.gca()
|
1401 |
+
|
1402 |
+
if edge_labels is None:
|
1403 |
+
kwds = {"keys": True} if G.is_multigraph() else {}
|
1404 |
+
edge_labels = {tuple(edge): d for *edge, d in G.edges(data=True, **kwds)}
|
1405 |
+
# NOTHING TO PLOT
|
1406 |
+
if not edge_labels:
|
1407 |
+
return {}
|
1408 |
+
edgelist, labels = zip(*edge_labels.items())
|
1409 |
+
|
1410 |
+
if nodelist is None:
|
1411 |
+
nodelist = list(G.nodes())
|
1412 |
+
|
1413 |
+
# set edge positions
|
1414 |
+
edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist])
|
1415 |
+
|
1416 |
+
if G.is_multigraph():
|
1417 |
+
key_count = collections.defaultdict(lambda: itertools.count(0))
|
1418 |
+
edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist]
|
1419 |
+
else:
|
1420 |
+
edge_indices = [0] * len(edgelist)
|
1421 |
+
|
1422 |
+
# Used to determine self loop mid-point
|
1423 |
+
# Note, that this will not be accurate,
|
1424 |
+
# if not drawing edge_labels for all edges drawn
|
1425 |
+
h = 0
|
1426 |
+
if edge_labels:
|
1427 |
+
miny = np.amin(np.ravel(edge_pos[:, :, 1]))
|
1428 |
+
maxy = np.amax(np.ravel(edge_pos[:, :, 1]))
|
1429 |
+
h = maxy - miny
|
1430 |
+
selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max()
|
1431 |
+
fancy_arrow_factory = FancyArrowFactory(
|
1432 |
+
edge_pos,
|
1433 |
+
edgelist,
|
1434 |
+
nodelist,
|
1435 |
+
edge_indices,
|
1436 |
+
node_size,
|
1437 |
+
selfloop_height,
|
1438 |
+
connectionstyle,
|
1439 |
+
ax=ax,
|
1440 |
+
)
|
1441 |
+
|
1442 |
+
text_items = {}
|
1443 |
+
for i, (edge, label) in enumerate(zip(edgelist, labels)):
|
1444 |
+
if not isinstance(label, str):
|
1445 |
+
label = str(label) # this makes "1" and 1 labeled the same
|
1446 |
+
|
1447 |
+
n1, n2 = edge[:2]
|
1448 |
+
arrow = fancy_arrow_factory(i)
|
1449 |
+
if n1 == n2:
|
1450 |
+
connectionstyle_obj = arrow.get_connectionstyle()
|
1451 |
+
posA = ax.transData.transform(pos[n1])
|
1452 |
+
path_disp = connectionstyle_obj(posA, posA)
|
1453 |
+
path_data = ax.transData.inverted().transform_path(path_disp)
|
1454 |
+
x, y = path_data.vertices[0]
|
1455 |
+
text_items[edge] = ax.text(
|
1456 |
+
x,
|
1457 |
+
y,
|
1458 |
+
label,
|
1459 |
+
size=font_size,
|
1460 |
+
color=font_color,
|
1461 |
+
family=font_family,
|
1462 |
+
weight=font_weight,
|
1463 |
+
alpha=alpha,
|
1464 |
+
horizontalalignment=horizontalalignment,
|
1465 |
+
verticalalignment=verticalalignment,
|
1466 |
+
rotation=0,
|
1467 |
+
transform=ax.transData,
|
1468 |
+
bbox=bbox,
|
1469 |
+
zorder=1,
|
1470 |
+
clip_on=clip_on,
|
1471 |
+
)
|
1472 |
+
else:
|
1473 |
+
text_items[edge] = CurvedArrowText(
|
1474 |
+
arrow,
|
1475 |
+
label,
|
1476 |
+
size=font_size,
|
1477 |
+
color=font_color,
|
1478 |
+
family=font_family,
|
1479 |
+
weight=font_weight,
|
1480 |
+
alpha=alpha,
|
1481 |
+
horizontalalignment=horizontalalignment,
|
1482 |
+
verticalalignment=verticalalignment,
|
1483 |
+
transform=ax.transData,
|
1484 |
+
bbox=bbox,
|
1485 |
+
zorder=1,
|
1486 |
+
clip_on=clip_on,
|
1487 |
+
label_pos=label_pos,
|
1488 |
+
labels_horizontal=not rotate,
|
1489 |
+
ax=ax,
|
1490 |
+
)
|
1491 |
+
|
1492 |
+
if hide_ticks:
|
1493 |
+
ax.tick_params(
|
1494 |
+
axis="both",
|
1495 |
+
which="both",
|
1496 |
+
bottom=False,
|
1497 |
+
left=False,
|
1498 |
+
labelbottom=False,
|
1499 |
+
labelleft=False,
|
1500 |
+
)
|
1501 |
+
|
1502 |
+
return text_items
|
1503 |
+
|
1504 |
+
|
1505 |
+
def draw_circular(G, **kwargs):
|
1506 |
+
"""Draw the graph `G` with a circular layout.
|
1507 |
+
|
1508 |
+
This is a convenience function equivalent to::
|
1509 |
+
|
1510 |
+
nx.draw(G, pos=nx.circular_layout(G), **kwargs)
|
1511 |
+
|
1512 |
+
Parameters
|
1513 |
+
----------
|
1514 |
+
G : graph
|
1515 |
+
A networkx graph
|
1516 |
+
|
1517 |
+
kwargs : optional keywords
|
1518 |
+
See `draw_networkx` for a description of optional keywords.
|
1519 |
+
|
1520 |
+
Notes
|
1521 |
+
-----
|
1522 |
+
The layout is computed each time this function is called. For
|
1523 |
+
repeated drawing it is much more efficient to call
|
1524 |
+
`~networkx.drawing.layout.circular_layout` directly and reuse the result::
|
1525 |
+
|
1526 |
+
>>> G = nx.complete_graph(5)
|
1527 |
+
>>> pos = nx.circular_layout(G)
|
1528 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1529 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1530 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1531 |
+
|
1532 |
+
Examples
|
1533 |
+
--------
|
1534 |
+
>>> G = nx.path_graph(5)
|
1535 |
+
>>> nx.draw_circular(G)
|
1536 |
+
|
1537 |
+
See Also
|
1538 |
+
--------
|
1539 |
+
:func:`~networkx.drawing.layout.circular_layout`
|
1540 |
+
"""
|
1541 |
+
draw(G, circular_layout(G), **kwargs)
|
1542 |
+
|
1543 |
+
|
1544 |
+
def draw_kamada_kawai(G, **kwargs):
|
1545 |
+
"""Draw the graph `G` with a Kamada-Kawai force-directed layout.
|
1546 |
+
|
1547 |
+
This is a convenience function equivalent to::
|
1548 |
+
|
1549 |
+
nx.draw(G, pos=nx.kamada_kawai_layout(G), **kwargs)
|
1550 |
+
|
1551 |
+
Parameters
|
1552 |
+
----------
|
1553 |
+
G : graph
|
1554 |
+
A networkx graph
|
1555 |
+
|
1556 |
+
kwargs : optional keywords
|
1557 |
+
See `draw_networkx` for a description of optional keywords.
|
1558 |
+
|
1559 |
+
Notes
|
1560 |
+
-----
|
1561 |
+
The layout is computed each time this function is called.
|
1562 |
+
For repeated drawing it is much more efficient to call
|
1563 |
+
`~networkx.drawing.layout.kamada_kawai_layout` directly and reuse the
|
1564 |
+
result::
|
1565 |
+
|
1566 |
+
>>> G = nx.complete_graph(5)
|
1567 |
+
>>> pos = nx.kamada_kawai_layout(G)
|
1568 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1569 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1570 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1571 |
+
|
1572 |
+
Examples
|
1573 |
+
--------
|
1574 |
+
>>> G = nx.path_graph(5)
|
1575 |
+
>>> nx.draw_kamada_kawai(G)
|
1576 |
+
|
1577 |
+
See Also
|
1578 |
+
--------
|
1579 |
+
:func:`~networkx.drawing.layout.kamada_kawai_layout`
|
1580 |
+
"""
|
1581 |
+
draw(G, kamada_kawai_layout(G), **kwargs)
|
1582 |
+
|
1583 |
+
|
1584 |
+
def draw_random(G, **kwargs):
|
1585 |
+
"""Draw the graph `G` with a random layout.
|
1586 |
+
|
1587 |
+
This is a convenience function equivalent to::
|
1588 |
+
|
1589 |
+
nx.draw(G, pos=nx.random_layout(G), **kwargs)
|
1590 |
+
|
1591 |
+
Parameters
|
1592 |
+
----------
|
1593 |
+
G : graph
|
1594 |
+
A networkx graph
|
1595 |
+
|
1596 |
+
kwargs : optional keywords
|
1597 |
+
See `draw_networkx` for a description of optional keywords.
|
1598 |
+
|
1599 |
+
Notes
|
1600 |
+
-----
|
1601 |
+
The layout is computed each time this function is called.
|
1602 |
+
For repeated drawing it is much more efficient to call
|
1603 |
+
`~networkx.drawing.layout.random_layout` directly and reuse the result::
|
1604 |
+
|
1605 |
+
>>> G = nx.complete_graph(5)
|
1606 |
+
>>> pos = nx.random_layout(G)
|
1607 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1608 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1609 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1610 |
+
|
1611 |
+
Examples
|
1612 |
+
--------
|
1613 |
+
>>> G = nx.lollipop_graph(4, 3)
|
1614 |
+
>>> nx.draw_random(G)
|
1615 |
+
|
1616 |
+
See Also
|
1617 |
+
--------
|
1618 |
+
:func:`~networkx.drawing.layout.random_layout`
|
1619 |
+
"""
|
1620 |
+
draw(G, random_layout(G), **kwargs)
|
1621 |
+
|
1622 |
+
|
1623 |
+
def draw_spectral(G, **kwargs):
|
1624 |
+
"""Draw the graph `G` with a spectral 2D layout.
|
1625 |
+
|
1626 |
+
This is a convenience function equivalent to::
|
1627 |
+
|
1628 |
+
nx.draw(G, pos=nx.spectral_layout(G), **kwargs)
|
1629 |
+
|
1630 |
+
For more information about how node positions are determined, see
|
1631 |
+
`~networkx.drawing.layout.spectral_layout`.
|
1632 |
+
|
1633 |
+
Parameters
|
1634 |
+
----------
|
1635 |
+
G : graph
|
1636 |
+
A networkx graph
|
1637 |
+
|
1638 |
+
kwargs : optional keywords
|
1639 |
+
See `draw_networkx` for a description of optional keywords.
|
1640 |
+
|
1641 |
+
Notes
|
1642 |
+
-----
|
1643 |
+
The layout is computed each time this function is called.
|
1644 |
+
For repeated drawing it is much more efficient to call
|
1645 |
+
`~networkx.drawing.layout.spectral_layout` directly and reuse the result::
|
1646 |
+
|
1647 |
+
>>> G = nx.complete_graph(5)
|
1648 |
+
>>> pos = nx.spectral_layout(G)
|
1649 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1650 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1651 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1652 |
+
|
1653 |
+
Examples
|
1654 |
+
--------
|
1655 |
+
>>> G = nx.path_graph(5)
|
1656 |
+
>>> nx.draw_spectral(G)
|
1657 |
+
|
1658 |
+
See Also
|
1659 |
+
--------
|
1660 |
+
:func:`~networkx.drawing.layout.spectral_layout`
|
1661 |
+
"""
|
1662 |
+
draw(G, spectral_layout(G), **kwargs)
|
1663 |
+
|
1664 |
+
|
1665 |
+
def draw_spring(G, **kwargs):
|
1666 |
+
"""Draw the graph `G` with a spring layout.
|
1667 |
+
|
1668 |
+
This is a convenience function equivalent to::
|
1669 |
+
|
1670 |
+
nx.draw(G, pos=nx.spring_layout(G), **kwargs)
|
1671 |
+
|
1672 |
+
Parameters
|
1673 |
+
----------
|
1674 |
+
G : graph
|
1675 |
+
A networkx graph
|
1676 |
+
|
1677 |
+
kwargs : optional keywords
|
1678 |
+
See `draw_networkx` for a description of optional keywords.
|
1679 |
+
|
1680 |
+
Notes
|
1681 |
+
-----
|
1682 |
+
`~networkx.drawing.layout.spring_layout` is also the default layout for
|
1683 |
+
`draw`, so this function is equivalent to `draw`.
|
1684 |
+
|
1685 |
+
The layout is computed each time this function is called.
|
1686 |
+
For repeated drawing it is much more efficient to call
|
1687 |
+
`~networkx.drawing.layout.spring_layout` directly and reuse the result::
|
1688 |
+
|
1689 |
+
>>> G = nx.complete_graph(5)
|
1690 |
+
>>> pos = nx.spring_layout(G)
|
1691 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1692 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1693 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1694 |
+
|
1695 |
+
Examples
|
1696 |
+
--------
|
1697 |
+
>>> G = nx.path_graph(20)
|
1698 |
+
>>> nx.draw_spring(G)
|
1699 |
+
|
1700 |
+
See Also
|
1701 |
+
--------
|
1702 |
+
draw
|
1703 |
+
:func:`~networkx.drawing.layout.spring_layout`
|
1704 |
+
"""
|
1705 |
+
draw(G, spring_layout(G), **kwargs)
|
1706 |
+
|
1707 |
+
|
1708 |
+
def draw_shell(G, nlist=None, **kwargs):
|
1709 |
+
"""Draw networkx graph `G` with shell layout.
|
1710 |
+
|
1711 |
+
This is a convenience function equivalent to::
|
1712 |
+
|
1713 |
+
nx.draw(G, pos=nx.shell_layout(G, nlist=nlist), **kwargs)
|
1714 |
+
|
1715 |
+
Parameters
|
1716 |
+
----------
|
1717 |
+
G : graph
|
1718 |
+
A networkx graph
|
1719 |
+
|
1720 |
+
nlist : list of list of nodes, optional
|
1721 |
+
A list containing lists of nodes representing the shells.
|
1722 |
+
Default is `None`, meaning all nodes are in a single shell.
|
1723 |
+
See `~networkx.drawing.layout.shell_layout` for details.
|
1724 |
+
|
1725 |
+
kwargs : optional keywords
|
1726 |
+
See `draw_networkx` for a description of optional keywords.
|
1727 |
+
|
1728 |
+
Notes
|
1729 |
+
-----
|
1730 |
+
The layout is computed each time this function is called.
|
1731 |
+
For repeated drawing it is much more efficient to call
|
1732 |
+
`~networkx.drawing.layout.shell_layout` directly and reuse the result::
|
1733 |
+
|
1734 |
+
>>> G = nx.complete_graph(5)
|
1735 |
+
>>> pos = nx.shell_layout(G)
|
1736 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1737 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1738 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1739 |
+
|
1740 |
+
Examples
|
1741 |
+
--------
|
1742 |
+
>>> G = nx.path_graph(4)
|
1743 |
+
>>> shells = [[0], [1, 2, 3]]
|
1744 |
+
>>> nx.draw_shell(G, nlist=shells)
|
1745 |
+
|
1746 |
+
See Also
|
1747 |
+
--------
|
1748 |
+
:func:`~networkx.drawing.layout.shell_layout`
|
1749 |
+
"""
|
1750 |
+
draw(G, shell_layout(G, nlist=nlist), **kwargs)
|
1751 |
+
|
1752 |
+
|
1753 |
+
def draw_planar(G, **kwargs):
|
1754 |
+
"""Draw a planar networkx graph `G` with planar layout.
|
1755 |
+
|
1756 |
+
This is a convenience function equivalent to::
|
1757 |
+
|
1758 |
+
nx.draw(G, pos=nx.planar_layout(G), **kwargs)
|
1759 |
+
|
1760 |
+
Parameters
|
1761 |
+
----------
|
1762 |
+
G : graph
|
1763 |
+
A planar networkx graph
|
1764 |
+
|
1765 |
+
kwargs : optional keywords
|
1766 |
+
See `draw_networkx` for a description of optional keywords.
|
1767 |
+
|
1768 |
+
Raises
|
1769 |
+
------
|
1770 |
+
NetworkXException
|
1771 |
+
When `G` is not planar
|
1772 |
+
|
1773 |
+
Notes
|
1774 |
+
-----
|
1775 |
+
The layout is computed each time this function is called.
|
1776 |
+
For repeated drawing it is much more efficient to call
|
1777 |
+
`~networkx.drawing.layout.planar_layout` directly and reuse the result::
|
1778 |
+
|
1779 |
+
>>> G = nx.path_graph(5)
|
1780 |
+
>>> pos = nx.planar_layout(G)
|
1781 |
+
>>> nx.draw(G, pos=pos) # Draw the original graph
|
1782 |
+
>>> # Draw a subgraph, reusing the same node positions
|
1783 |
+
>>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")
|
1784 |
+
|
1785 |
+
Examples
|
1786 |
+
--------
|
1787 |
+
>>> G = nx.path_graph(4)
|
1788 |
+
>>> nx.draw_planar(G)
|
1789 |
+
|
1790 |
+
See Also
|
1791 |
+
--------
|
1792 |
+
:func:`~networkx.drawing.layout.planar_layout`
|
1793 |
+
"""
|
1794 |
+
draw(G, planar_layout(G), **kwargs)
|
1795 |
+
|
1796 |
+
|
1797 |
+
def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None):
    """Apply an alpha (or list of alphas) to the colors provided.

    Parameters
    ----------

    colors : color string or array of floats (default='r')
        Color of element. Can be a single color format string,
        or a sequence of colors with the same length as nodelist.
        If numeric values are specified they will be mapped to
        colors using the cmap and vmin,vmax parameters. See
        matplotlib.scatter for more details.

    alpha : float or array of floats
        Alpha values for elements. This can be a single alpha value, in
        which case it will be applied to all the elements of color. Otherwise,
        if it is an array, the elements of alpha will be applied to the colors
        in order (cycling through alpha multiple times if necessary).

    elem_list : array of networkx objects
        The list of elements which are being colored. These could be nodes,
        edges or labels.

    cmap : matplotlib colormap
        Color map for use if colors is a list of floats corresponding to points
        on a color mapping.

    vmin, vmax : float
        Minimum and maximum values for normalizing colors if a colormap is used

    Returns
    -------

    rgba_colors : numpy ndarray
        Array containing RGBA format values for each of the node colours.

    """
    from itertools import cycle, islice

    import matplotlib as mpl
    import matplotlib.cm  # call as mpl.cm
    import matplotlib.colors  # call as mpl.colors
    import numpy as np

    # If we have been provided with a list of numbers as long as elem_list,
    # apply the color mapping.
    if len(colors) == len(elem_list) and isinstance(colors[0], Number):
        mapper = mpl.cm.ScalarMappable(cmap=cmap)
        mapper.set_clim(vmin, vmax)
        rgba_colors = mapper.to_rgba(colors)
    # Otherwise, convert colors to matplotlib's RGB using the colorConverter
    # object. These are converted to numpy ndarrays to be consistent with the
    # to_rgba method of ScalarMappable.
    else:
        try:
            rgba_colors = np.array([mpl.colors.colorConverter.to_rgba(colors)])
        except ValueError:
            rgba_colors = np.array(
                [mpl.colors.colorConverter.to_rgba(color) for color in colors]
            )
    # Set the final column of the rgba_colors to have the relevant alpha values
    try:
        # If alpha is longer than the number of colors, resize to the number of
        # elements. Also, if rgba_colors.size (the number of elements of
        # rgba_colors) is the same as the number of elements, resize the array,
        # to avoid it being interpreted as a colormap by scatter()
        if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list):
            rgba_colors = np.resize(rgba_colors, (len(elem_list), 4))
            rgba_colors[1:, 0] = rgba_colors[0, 0]
            rgba_colors[1:, 1] = rgba_colors[0, 1]
            rgba_colors[1:, 2] = rgba_colors[0, 2]
        rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors)))
    except TypeError:
        rgba_colors[:, -1] = alpha
    return rgba_colors
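For reference, below is a minimal usage sketch of the drawing helpers defined in this file (draw_networkx_nodes / edges / labels / edge_labels). It is not part of the uploaded file; the example graph, layout seed, and edge labels are made up for illustration.

# Illustrative sketch only -- not part of the vendored nx_pylab.py.
# The graph, seed, and labels below are invented for demonstration.
import matplotlib.pyplot as plt
import networkx as nx

G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (1, 1)])  # includes a self-loop at node 1
pos = nx.spring_layout(G, seed=42)

fig, ax = plt.subplots()
nx.draw_networkx_nodes(G, pos, ax=ax, node_size=500)
# Directed graph: edges are drawn as FancyArrowPatches; self-loops always are.
nx.draw_networkx_edges(G, pos, ax=ax, connectionstyle="arc3,rad=0.2", arrowsize=15)
nx.draw_networkx_labels(G, pos, ax=ax)
nx.draw_networkx_edge_labels(G, pos, edge_labels={(1, 2): "a", (2, 3): "b"}, ax=ax)
plt.show()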
env-llmeval/lib/python3.10/site-packages/networkx/exception.py
ADDED
@@ -0,0 +1,125 @@
"""
**********
Exceptions
**********

Base exceptions and errors for NetworkX.
"""

__all__ = [
    "HasACycle",
    "NodeNotFound",
    "PowerIterationFailedConvergence",
    "ExceededMaxIterations",
    "AmbiguousSolution",
    "NetworkXAlgorithmError",
    "NetworkXException",
    "NetworkXError",
    "NetworkXNoCycle",
    "NetworkXNoPath",
    "NetworkXNotImplemented",
    "NetworkXPointlessConcept",
    "NetworkXUnbounded",
    "NetworkXUnfeasible",
]


class NetworkXException(Exception):
    """Base class for exceptions in NetworkX."""


class NetworkXError(NetworkXException):
    """Exception for a serious error in NetworkX"""


class NetworkXPointlessConcept(NetworkXException):
    """Raised when a null graph is provided as input to an algorithm
    that cannot use it.

    The null graph is sometimes considered a pointless concept [1]_,
    thus the name of the exception.

    References
    ----------
    .. [1] Harary, F. and Read, R. "Is the Null Graph a Pointless
       Concept?" In Graphs and Combinatorics Conference, George
       Washington University. New York: Springer-Verlag, 1973.

    """


class NetworkXAlgorithmError(NetworkXException):
    """Exception for unexpected termination of algorithms."""


class NetworkXUnfeasible(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a problem
    instance that has no feasible solution."""


class NetworkXNoPath(NetworkXUnfeasible):
    """Exception for algorithms that should return a path when running
    on graphs where such a path does not exist."""


class NetworkXNoCycle(NetworkXUnfeasible):
    """Exception for algorithms that should return a cycle when running
    on graphs where such a cycle does not exist."""


class HasACycle(NetworkXException):
    """Raised if a graph has a cycle when an algorithm expects that it
    will have no cycles.

    """


class NetworkXUnbounded(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a maximization
    or a minimization problem instance that is unbounded."""


class NetworkXNotImplemented(NetworkXException):
    """Exception raised by algorithms not implemented for a type of graph."""


class NodeNotFound(NetworkXException):
    """Exception raised if requested node is not present in the graph"""


class AmbiguousSolution(NetworkXException):
    """Raised if more than one valid solution exists for an intermediary step
    of an algorithm.

    In the face of ambiguity, refuse the temptation to guess.
    This may occur, for example, when trying to determine the
    bipartite node sets in a disconnected bipartite graph when
    computing bipartite matchings.

    """


class ExceededMaxIterations(NetworkXException):
    """Raised if a loop iterates too many times without breaking.

    This may occur, for example, in an algorithm that computes
    progressively better approximations to a value but exceeds an
    iteration bound specified by the user.

    """


class PowerIterationFailedConvergence(ExceededMaxIterations):
    """Raised when the power iteration method fails to converge within a
    specified iteration limit.

    `num_iterations` is the number of iterations that have been
    completed when this exception was raised.

    """

    def __init__(self, num_iterations, *args, **kw):
        msg = f"power iteration failed to converge within {num_iterations} iterations"
        exception_message = msg
        superinit = super().__init__
        superinit(self, exception_message, *args, **kw)
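For reference, a short sketch (not part of the uploaded file) of how these exception classes are typically caught by calling code. The example graph is made up; the calls assume the standard networkx API (shortest_path, find_cycle).

# Illustrative sketch only -- not part of the vendored exception.py.
import networkx as nx

G = nx.Graph([(0, 1), (2, 3)])  # two disconnected components, no cycle

try:
    nx.shortest_path(G, 0, 3)
except nx.NetworkXNoPath:
    print("no path between 0 and 3")

try:
    nx.find_cycle(G)
except nx.NetworkXNoCycle:
    print("graph has no cycle")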
env-llmeval/lib/python3.10/site-packages/networkx/lazy_imports.py
ADDED
@@ -0,0 +1,188 @@
1 |
+
import importlib
|
2 |
+
import importlib.util
|
3 |
+
import inspect
|
4 |
+
import os
|
5 |
+
import sys
|
6 |
+
import types
|
7 |
+
|
8 |
+
__all__ = ["attach", "_lazy_import"]
|
9 |
+
|
10 |
+
|
11 |
+
def attach(module_name, submodules=None, submod_attrs=None):
|
12 |
+
"""Attach lazily loaded submodules, and functions or other attributes.
|
13 |
+
|
14 |
+
Typically, modules import submodules and attributes as follows::
|
15 |
+
|
16 |
+
import mysubmodule
|
17 |
+
import anothersubmodule
|
18 |
+
|
19 |
+
from .foo import someattr
|
20 |
+
|
21 |
+
The idea of this function is to replace the `__init__.py`
|
22 |
+
module's `__getattr__`, `__dir__`, and `__all__` attributes such that
|
23 |
+
all imports work exactly the way they normally would, except that the
|
24 |
+
actual import is delayed until the resulting module object is first used.
|
25 |
+
|
26 |
+
The typical way to call this function, replacing the above imports, is::
|
27 |
+
|
28 |
+
__getattr__, __lazy_dir__, __all__ = lazy.attach(
|
29 |
+
__name__, ["mysubmodule", "anothersubmodule"], {"foo": "someattr"}
|
30 |
+
)
|
31 |
+
|
32 |
+
This functionality requires Python 3.7 or higher.
|
33 |
+
|
34 |
+
Parameters
|
35 |
+
----------
|
36 |
+
module_name : str
|
37 |
+
Typically use __name__.
|
38 |
+
submodules : set
|
39 |
+
List of submodules to lazily import.
|
40 |
+
submod_attrs : dict
|
41 |
+
Dictionary of submodule -> list of attributes / functions.
|
42 |
+
These attributes are imported as they are used.
|
43 |
+
|
44 |
+
Returns
|
45 |
+
-------
|
46 |
+
__getattr__, __dir__, __all__
|
47 |
+
|
48 |
+
"""
|
49 |
+
if submod_attrs is None:
|
50 |
+
submod_attrs = {}
|
51 |
+
|
52 |
+
if submodules is None:
|
53 |
+
submodules = set()
|
54 |
+
else:
|
55 |
+
submodules = set(submodules)
|
56 |
+
|
57 |
+
attr_to_modules = {
|
58 |
+
attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
|
59 |
+
}
|
60 |
+
|
61 |
+
__all__ = list(submodules | attr_to_modules.keys())
|
62 |
+
|
63 |
+
def __getattr__(name):
|
64 |
+
if name in submodules:
|
65 |
+
return importlib.import_module(f"{module_name}.{name}")
|
66 |
+
elif name in attr_to_modules:
|
67 |
+
submod = importlib.import_module(f"{module_name}.{attr_to_modules[name]}")
|
68 |
+
return getattr(submod, name)
|
69 |
+
else:
|
70 |
+
raise AttributeError(f"No {module_name} attribute {name}")
|
71 |
+
|
72 |
+
def __dir__():
|
73 |
+
return __all__
|
74 |
+
|
75 |
+
if os.environ.get("EAGER_IMPORT", ""):
|
76 |
+
for attr in set(attr_to_modules.keys()) | submodules:
|
77 |
+
__getattr__(attr)
|
78 |
+
|
79 |
+
return __getattr__, __dir__, list(__all__)
|
80 |
+
|
81 |
+
|
82 |
+
class DelayedImportErrorModule(types.ModuleType):
|
83 |
+
def __init__(self, frame_data, *args, **kwargs):
|
84 |
+
self.__frame_data = frame_data
|
85 |
+
super().__init__(*args, **kwargs)
|
86 |
+
|
87 |
+
def __getattr__(self, x):
|
88 |
+
if x in ("__class__", "__file__", "__frame_data"):
|
89 |
+
super().__getattr__(x)
|
90 |
+
else:
|
91 |
+
fd = self.__frame_data
|
92 |
+
raise ModuleNotFoundError(
|
93 |
+
f"No module named '{fd['spec']}'\n\n"
|
94 |
+
"This error is lazily reported, having originally occurred in\n"
|
95 |
+
f' File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n'
|
96 |
+
f'----> {"".join(fd["code_context"] or "").strip()}'
|
97 |
+
)
|
98 |
+
|
99 |
+
|
100 |
+
def _lazy_import(fullname):
|
101 |
+
"""Return a lazily imported proxy for a module or library.
|
102 |
+
|
103 |
+
Warning
|
104 |
+
-------
|
105 |
+
Importing using this function can currently cause trouble
|
106 |
+
when the user tries to import from a subpackage of a module before
|
107 |
+
the package is fully imported. In particular, this idiom may not work:
|
108 |
+
|
109 |
+
np = lazy_import("numpy")
|
110 |
+
from numpy.lib import recfunctions
|
111 |
+
|
112 |
+
This is due to a difference in the way Python's LazyLoader handles
|
113 |
+
subpackage imports compared to the normal import process. Hopefully
|
114 |
+
we will get Python's LazyLoader to fix this, or find a workaround.
|
115 |
+
In the meantime, this is a potential problem.
|
116 |
+
|
117 |
+
The workaround is to import numpy before importing from the subpackage.
|
118 |
+
|
119 |
+
Notes
|
120 |
+
-----
|
121 |
+
We often see the following pattern::
|
122 |
+
|
123 |
+
def myfunc():
|
124 |
+
import scipy as sp
|
125 |
+
sp.argmin(...)
|
126 |
+
....
|
127 |
+
|
128 |
+
This is to prevent a library, in this case `scipy`, from being
|
129 |
+
imported at function definition time, since that can be slow.
|
130 |
+
|
131 |
+
This function provides a proxy module that, upon access, imports
|
132 |
+
the actual module. So the idiom equivalent to the above example is::
|
133 |
+
|
134 |
+
sp = lazy.load("scipy")
|
135 |
+
|
136 |
+
def myfunc():
|
137 |
+
sp.argmin(...)
|
138 |
+
....
|
139 |
+
|
140 |
+
The initial import time is fast because the actual import is delayed
|
141 |
+
until the first attribute is requested. The overall import time may
|
142 |
+
decrease as well for users that don't make use of large portions
|
143 |
+
of the library.
|
144 |
+
|
145 |
+
Parameters
|
146 |
+
----------
|
147 |
+
fullname : str
|
148 |
+
The full name of the package or subpackage to import. For example::
|
149 |
+
|
150 |
+
sp = lazy.load("scipy") # import scipy as sp
|
151 |
+
spla = lazy.load("scipy.linalg") # import scipy.linalg as spla
|
152 |
+
|
153 |
+
Returns
|
154 |
+
-------
|
155 |
+
pm : importlib.util._LazyModule
|
156 |
+
Proxy module. Can be used like any regularly imported module.
|
157 |
+
Actual loading of the module occurs upon first attribute request.
|
158 |
+
|
159 |
+
"""
|
160 |
+
try:
|
161 |
+
return sys.modules[fullname]
|
162 |
+
except:
|
163 |
+
pass
|
164 |
+
|
165 |
+
# Not previously loaded -- look it up
|
166 |
+
spec = importlib.util.find_spec(fullname)
|
167 |
+
|
168 |
+
if spec is None:
|
169 |
+
try:
|
170 |
+
parent = inspect.stack()[1]
|
171 |
+
frame_data = {
|
172 |
+
"spec": fullname,
|
173 |
+
"filename": parent.filename,
|
174 |
+
"lineno": parent.lineno,
|
175 |
+
"function": parent.function,
|
176 |
+
"code_context": parent.code_context,
|
177 |
+
}
|
178 |
+
return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule")
|
179 |
+
finally:
|
180 |
+
del parent
|
181 |
+
|
182 |
+
module = importlib.util.module_from_spec(spec)
|
183 |
+
sys.modules[fullname] = module
|
184 |
+
|
185 |
+
loader = importlib.util.LazyLoader(spec.loader)
|
186 |
+
loader.exec_module(module)
|
187 |
+
|
188 |
+
return module
|
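For reference, a short sketch (not part of the uploaded file) of the lazy-import pattern this module provides, following the `attach` docstring above. The package name `mypkg`, its submodules `plotting`/`io`/`utils`, and `helper_func` are hypothetical names.

# Illustrative sketch only -- hypothetical mypkg/__init__.py using attach().
from networkx import lazy_imports as lazy

__getattr__, __dir__, __all__ = lazy.attach(
    __name__,
    submodules=["plotting", "io"],          # made-up submodule names
    submod_attrs={"utils": ["helper_func"]},  # made-up attribute
)

# _lazy_import returns a proxy module; the real import happens on first use.
np = lazy._lazy_import("numpy")
print(np.arange(3))  # numpy is actually imported here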
env-llmeval/lib/python3.10/site-packages/networkx/relabel.py
ADDED
@@ -0,0 +1,285 @@
import networkx as nx

__all__ = ["convert_node_labels_to_integers", "relabel_nodes"]


@nx._dispatchable(
    preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True
)
def relabel_nodes(G, mapping, copy=True):
    """Relabel the nodes of the graph G according to a given mapping.

    The original node ordering may not be preserved if `copy` is `False` and the
    mapping includes overlap between old and new labels.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    mapping : dictionary
       A dictionary with the old labels as keys and new labels as values.
       A partial mapping is allowed. Mapping 2 nodes to a single node is allowed.
       Any non-node keys in the mapping are ignored.

    copy : bool (optional, default=True)
       If True return a copy, or if False relabel the nodes in place.

    Examples
    --------
    To create a new graph with nodes relabeled according to a given
    dictionary:

    >>> G = nx.path_graph(3)
    >>> sorted(G)
    [0, 1, 2]
    >>> mapping = {0: "a", 1: "b", 2: "c"}
    >>> H = nx.relabel_nodes(G, mapping)
    >>> sorted(H)
    ['a', 'b', 'c']

    Nodes can be relabeled with any hashable object, including numbers
    and strings:

    >>> import string
    >>> G = nx.path_graph(26)  # nodes are integers 0 through 25
    >>> sorted(G)[:3]
    [0, 1, 2]
    >>> mapping = dict(zip(G, string.ascii_lowercase))
    >>> G = nx.relabel_nodes(G, mapping)  # nodes are characters a through z
    >>> sorted(G)[:3]
    ['a', 'b', 'c']
    >>> mapping = dict(zip(G, range(1, 27)))
    >>> G = nx.relabel_nodes(G, mapping)  # nodes are integers 1 through 26
    >>> sorted(G)[:3]
    [1, 2, 3]

    To perform a partial in-place relabeling, provide a dictionary
    mapping only a subset of the nodes, and set the `copy` keyword
    argument to False:

    >>> G = nx.path_graph(3)  # nodes 0-1-2
    >>> mapping = {0: "a", 1: "b"}  # 0->'a' and 1->'b'
    >>> G = nx.relabel_nodes(G, mapping, copy=False)
    >>> sorted(G, key=str)
    [2, 'a', 'b']

    A mapping can also be given as a function:

    >>> G = nx.path_graph(3)
    >>> H = nx.relabel_nodes(G, lambda x: x**2)
    >>> list(H)
    [0, 1, 4]

    In a multigraph, relabeling two or more nodes to the same new node
    will retain all edges, but may change the edge keys in the process:

    >>> G = nx.MultiGraph()
    >>> G.add_edge(0, 1, value="a")  # returns the key for this edge
    0
    >>> G.add_edge(0, 2, value="b")
    0
    >>> G.add_edge(0, 3, value="c")
    0
    >>> mapping = {1: 4, 2: 4, 3: 4}
    >>> H = nx.relabel_nodes(G, mapping, copy=True)
    >>> print(H[0])
    {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}}

    This works for in-place relabeling too:

    >>> G = nx.relabel_nodes(G, mapping, copy=False)
    >>> print(G[0])
    {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}}

    Notes
    -----
    Only the nodes specified in the mapping will be relabeled.
    Any non-node keys in the mapping are ignored.

    The keyword setting copy=False modifies the graph in place.
    Relabel_nodes avoids naming collisions by building a
    directed graph from ``mapping`` which specifies the order of
    relabelings. Naming collisions, such as a->b, b->c, are ordered
    such that "b" gets renamed to "c" before "a" gets renamed "b".
    In cases of circular mappings (e.g. a->b, b->a), modifying the
    graph is not possible in-place and an exception is raised.
    In that case, use copy=True.

    If a relabel operation on a multigraph would cause two or more
    edges to have the same source, target and key, the second edge must
    be assigned a new key to retain all edges. The new key is set
    to the lowest non-negative integer not already used as a key
    for edges between these two nodes. Note that this means non-numeric
    keys may be replaced by numeric keys.

    See Also
    --------
    convert_node_labels_to_integers
    """
    # you can pass any callable e.g. f(old_label) -> new_label or
    # e.g. str(old_label) -> new_label, but we'll just make a dictionary here regardless
    m = {n: mapping(n) for n in G} if callable(mapping) else mapping

    if copy:
        return _relabel_copy(G, m)
    else:
        return _relabel_inplace(G, m)


def _relabel_inplace(G, mapping):
    if len(mapping.keys() & mapping.values()) > 0:
        # labels sets overlap
        # can we topological sort and still do the relabeling?
        D = nx.DiGraph(list(mapping.items()))
        D.remove_edges_from(nx.selfloop_edges(D))
        try:
            nodes = reversed(list(nx.topological_sort(D)))
        except nx.NetworkXUnfeasible as err:
            raise nx.NetworkXUnfeasible(
                "The node label sets are overlapping and no ordering can "
                "resolve the mapping. Use copy=True."
            ) from err
    else:
        # non-overlapping label sets, sort them in the order of G nodes
        nodes = [n for n in G if n in mapping]

    multigraph = G.is_multigraph()
    directed = G.is_directed()

    for old in nodes:
        # Test that old is in both mapping and G, otherwise ignore.
        try:
            new = mapping[old]
            G.add_node(new, **G.nodes[old])
        except KeyError:
            continue
        if new == old:
            continue
        if multigraph:
            new_edges = [
                (new, new if old == target else target, key, data)
                for (_, target, key, data) in G.edges(old, data=True, keys=True)
            ]
            if directed:
                new_edges += [
                    (new if old == source else source, new, key, data)
                    for (source, _, key, data) in G.in_edges(old, data=True, keys=True)
                ]
            # Ensure new edges won't overwrite existing ones
            seen = set()
            for i, (source, target, key, data) in enumerate(new_edges):
                if target in G[source] and key in G[source][target]:
                    new_key = 0 if not isinstance(key, int | float) else key
                    while new_key in G[source][target] or (target, new_key) in seen:
                        new_key += 1
                    new_edges[i] = (source, target, new_key, data)
                    seen.add((target, new_key))
        else:
            new_edges = [
                (new, new if old == target else target, data)
                for (_, target, data) in G.edges(old, data=True)
            ]
            if directed:
                new_edges += [
                    (new if old == source else source, new, data)
                    for (source, _, data) in G.in_edges(old, data=True)
                ]
        G.remove_node(old)
        G.add_edges_from(new_edges)
    return G


def _relabel_copy(G, mapping):
    H = G.__class__()
    H.add_nodes_from(mapping.get(n, n) for n in G)
    H._node.update((mapping.get(n, n), d.copy()) for n, d in G.nodes.items())
    if G.is_multigraph():
        new_edges = [
            (mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy())
            for (n1, n2, k, d) in G.edges(keys=True, data=True)
        ]

        # check for conflicting edge-keys
        undirected = not G.is_directed()
        seen_edges = set()
        for i, (source, target, key, data) in enumerate(new_edges):
            while (source, target, key) in seen_edges:
                if not isinstance(key, int | float):
                    key = 0
                key += 1
            seen_edges.add((source, target, key))
            if undirected:
                seen_edges.add((target, source, key))
            new_edges[i] = (source, target, key, data)

        H.add_edges_from(new_edges)
    else:
        H.add_edges_from(
            (mapping.get(n1, n1), mapping.get(n2, n2), d.copy())
            for (n1, n2, d) in G.edges(data=True)
        )
    H.graph.update(G.graph)
    return H


@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def convert_node_labels_to_integers(
    G, first_label=0, ordering="default", label_attribute=None
):
    """Returns a copy of the graph G with the nodes relabeled using
    consecutive integers.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    first_label : int, optional (default=0)
       An integer specifying the starting offset in numbering nodes.
       The new integer labels are numbered first_label, ..., n-1+first_label.

    ordering : string
       "default" : inherit node ordering from G.nodes()
       "sorted"  : inherit node ordering from sorted(G.nodes())
       "increasing degree" : nodes are sorted by increasing degree
       "decreasing degree" : nodes are sorted by decreasing degree

    label_attribute : string, optional (default=None)
       Name of node attribute to store old label. If None no attribute
       is created.

    Notes
    -----
    Node and edge attribute data are copied to the new (relabeled) graph.

    There is no guarantee that the relabeling of nodes to integers will
    give the same two integers for two (even identical graphs).
    Use the `ordering` argument to try to preserve the order.

    See Also
    --------
    relabel_nodes
    """
    N = G.number_of_nodes() + first_label
    if ordering == "default":
        mapping = dict(zip(G.nodes(), range(first_label, N)))
    elif ordering == "sorted":
        nlist = sorted(G.nodes())
        mapping = dict(zip(nlist, range(first_label, N)))
    elif ordering == "increasing degree":
        dv_pairs = [(d, n) for (n, d) in G.degree()]
        dv_pairs.sort()  # in-place sort from lowest to highest degree
        mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
    elif ordering == "decreasing degree":
        dv_pairs = [(d, n) for (n, d) in G.degree()]
        dv_pairs.sort()  # in-place sort from lowest to highest degree
        dv_pairs.reverse()
        mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
    else:
        raise nx.NetworkXError(f"Unknown node ordering: {ordering}")
    H = relabel_nodes(G, mapping)
    # create node attribute with the old label
    if label_attribute is not None:
        nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, label_attribute)
    return H
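Editor's note: a short, hedged usage sketch (not part of the uploaded file, assuming a standard networkx install) of the collision handling that the Notes section of `relabel_nodes` above describes: chained renames are ordered automatically for in-place relabeling, while circular renames require `copy=True`.

import networkx as nx

# Chained renames: "b" -> "c" is applied before "a" -> "b".
G = nx.path_graph(["a", "b"])
nx.relabel_nodes(G, {"a": "b", "b": "c"}, copy=False)
print(sorted(G))  # ['b', 'c']

# Circular renames cannot be resolved in place; copy=True is required.
H = nx.path_graph(2)  # nodes 0 and 1
try:
    nx.relabel_nodes(H, {0: 1, 1: 0}, copy=False)
except nx.NetworkXUnfeasible as err:
    print("in-place swap refused:", err)
print(sorted(nx.relabel_nodes(H, {0: 1, 1: 0}, copy=True)))  # [0, 1]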
env-llmeval/lib/python3.10/site-packages/networkx/utils/__init__.py
ADDED
@@ -0,0 +1,8 @@
from networkx.utils.misc import *
from networkx.utils.decorators import *
from networkx.utils.random_sequence import *
from networkx.utils.union_find import *
from networkx.utils.rcm import *
from networkx.utils.heaps import *
from networkx.utils.backends import *
from networkx.utils.configs import *
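Editor's note: because of the star imports above, helpers from these submodules are reachable directly under `networkx.utils`. A tiny hedged sketch (not part of the uploaded file, assuming a standard networkx install):

import networkx as nx

# UnionFind is re-exported from networkx.utils.union_find via the star import.
uf = nx.utils.UnionFind()
uf.union(1, 2)
uf.union(2, 3)
print(uf[1] == uf[3])  # True: 1 and 3 now share a root

# pairwise is re-exported from networkx.utils.misc.
print(list(nx.utils.pairwise([1, 2, 3])))  # [(1, 2), (2, 3)]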
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (462 Bytes).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/backends.cpython-310.pyc
ADDED
Binary file (41.1 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/decorators.cpython-310.pyc
ADDED
Binary file (41.4 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/misc.cpython-310.pyc
ADDED
Binary file (17.5 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/random_sequence.cpython-310.pyc
ADDED
Binary file (4.45 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/rcm.cpython-310.pyc
ADDED
Binary file (5 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/__pycache__/union_find.cpython-310.pyc
ADDED
Binary file (3.56 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/backends.py
ADDED
@@ -0,0 +1,1553 @@
"""
NetworkX utilizes a plugin-dispatch architecture, which means we can plug in and
out of backends with minimal code changes. A valid NetworkX backend specifies
`entry points <https://packaging.python.org/en/latest/specifications/entry-points>`_,
named ``networkx.backends`` and an optional ``networkx.backend_info`` when it is
installed (not imported). This allows NetworkX to dispatch (redirect) function calls
to the backend so the execution flows to the designated backend
implementation, similar to how plugging a charger into a socket redirects the
electricity to your phone. This design enhances flexibility and integration, making
NetworkX more adaptable and efficient.

There are three main ways to use a backend after the package is installed.
You can set environment variables and run the exact same code you run for
NetworkX. You can use a keyword argument ``backend=...`` with the NetworkX
function. Or, you can convert the NetworkX Graph to a backend graph type and
call a NetworkX function supported by that backend. Environment variables
and backend keywords automatically convert your NetworkX Graph to the
backend type. Manually converting it yourself allows you to use that same
backend graph for more than one function call, reducing conversion time.

For example, you can set an environment variable before starting python to request
all dispatchable functions automatically dispatch to the given backend::

    bash> NETWORKX_AUTOMATIC_BACKENDS=cugraph python my_networkx_script.py

or you can specify the backend as a kwarg::

    nx.betweenness_centrality(G, k=10, backend="parallel")

or you can convert the NetworkX Graph object ``G`` into a Graph-like
object specific to the backend and then pass that in the NetworkX function::

    H = nx_parallel.ParallelGraph(G)
    nx.betweenness_centrality(H, k=10)

How it works: You might have seen the ``@nx._dispatchable`` decorator on
many of the NetworkX functions in the codebase. It decorates the function
with code that redirects execution to the function's backend implementation.
The code also manages any ``backend_kwargs`` you provide to the backend
version of the function. The code looks for the environment variable or
a ``backend`` keyword argument and if found, converts the input NetworkX
graph to the backend format before calling the backend's version of the
function. If no environment variable or backend keyword are found, the
dispatching code checks the input graph object for an attribute
called ``__networkx_backend__`` which tells it which backend provides this
graph type. That backend's version of the function is then called.
The backend system relies on Python ``entry_point`` system to signal
NetworkX that a backend is installed (even if not imported yet). Thus no
code needs to be changed between running with NetworkX and running with
a backend to NetworkX. The attribute ``__networkx_backend__`` holds a
string with the name of the ``entry_point``. If none of these options
are being used, the decorator code simply calls the NetworkX function
on the NetworkX graph as usual.

The NetworkX library does not need to know that a backend exists for it
to work. So long as the backend package creates the entry_point, and
provides the correct interface, it will be called when the user requests
it using one of the three approaches described above. Some backends have
been working with the NetworkX developers to ensure smooth operation.
They are the following::

- `graphblas <https://github.com/python-graphblas/graphblas-algorithms>`_
- `cugraph <https://github.com/rapidsai/cugraph/tree/branch-24.04/python/nx-cugraph>`_
- `parallel <https://github.com/networkx/nx-parallel>`_
- ``loopback`` is for testing purposes only and is not a real backend.

Note that the ``backend_name`` is e.g. ``parallel``, the package installed
is ``nx-parallel``, and we use ``nx_parallel`` while importing the package.

Creating a Custom backend
-------------------------

1. To be a valid backend that is discoverable by NetworkX, your package must
   register an `entry-point <https://packaging.python.org/en/latest/specifications/entry-points/#entry-points>`_
   ``networkx.backends`` in the package's metadata, with a `key pointing to your
   dispatch object <https://packaging.python.org/en/latest/guides/creating-and-discovering-plugins/#using-package-metadata>`_ .
   For example, if you are using ``setuptools`` to manage your backend package,
   you can `add the following to your pyproject.toml file <https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_::

       [project.entry-points."networkx.backends"]
       backend_name = "your_dispatcher_class"

   You can also add the ``backend_info`` entry-point. It points towards the ``get_info``
   function that returns all the backend information, which is then used to build the
   "Additional Backend Implementation" box at the end of algorithm's documentation
   page (e.g. `nx-cugraph's get_info function <https://github.com/rapidsai/cugraph/blob/branch-24.04/python/nx-cugraph/_nx_cugraph/__init__.py>`_)::

       [project.entry-points."networkx.backend_info"]
       backend_name = "your_get_info_function"

   Note that this would only work if your backend is a trusted backend of NetworkX,
   and is present in the `.circleci/config.yml` and
   `.github/workflows/deploy-docs.yml` files in the NetworkX repository.

2. The backend must create an ``nx.Graph``-like object which contains an attribute
   ``__networkx_backend__`` with a value of the entry point name::

       class BackendGraph:
           __networkx_backend__ = "backend_name"
           ...


Testing the Custom backend
--------------------------

To test your custom backend, you can run the NetworkX test suite with your backend.
This also ensures that the custom backend is compatible with NetworkX's API.

Testing Environment Setup
~~~~~~~~~~~~~~~~~~~~~~~~~

To enable automatic testing with your custom backend, follow these steps:

1. Set Backend Environment Variables:
   - ``NETWORKX_TEST_BACKEND`` : Setting this to your registered backend key will let
     the NetworkX's dispatch machinery automatically convert a regular NetworkX
     ``Graph``, ``DiGraph``, ``MultiGraph``, etc. to their backend equivalents, using
     ``your_dispatcher_class.convert_from_nx(G, ...)`` function.
   - ``NETWORKX_FALLBACK_TO_NX`` (default=False) : Setting this variable to `True` will
     instruct tests to use a NetworkX ``Graph`` for algorithms not implemented by your
     custom backend. Setting this to `False` will only run the tests for algorithms
     implemented by your custom backend and tests for other algorithms will ``xfail``.

2. Defining ``convert_from_nx`` and ``convert_to_nx`` methods:
   The arguments to ``convert_from_nx`` are:

   - ``G`` : NetworkX Graph
   - ``edge_attrs`` : dict, optional
       Dictionary mapping edge attributes to default values if missing in ``G``.
       If None, then no edge attributes will be converted and default may be 1.
   - ``node_attrs``: dict, optional
       Dictionary mapping node attributes to default values if missing in ``G``.
       If None, then no node attributes will be converted.
   - ``preserve_edge_attrs`` : bool
       Whether to preserve all edge attributes.
   - ``preserve_node_attrs`` : bool
       Whether to preserve all node attributes.
   - ``preserve_graph_attrs`` : bool
       Whether to preserve all graph attributes.
   - ``preserve_all_attrs`` : bool
       Whether to preserve all graph, node, and edge attributes.
   - ``name`` : str
       The name of the algorithm.
   - ``graph_name`` : str
       The name of the graph argument being converted.

Running Tests
~~~~~~~~~~~~~

You can invoke NetworkX tests for your custom backend with the following commands::

    NETWORKX_TEST_BACKEND=<backend_name>
    NETWORKX_FALLBACK_TO_NX=True # or False
    pytest --pyargs networkx

Conversions while running tests:

- Convert NetworkX graphs using ``<your_dispatcher_class>.convert_from_nx(G, ...)`` into
  the backend graph.
- Pass the backend graph objects to the backend implementation of the algorithm.
- Convert the result back to a form expected by NetworkX tests using
  ``<your_dispatcher_class>.convert_to_nx(result, ...)``.

Notes
~~~~~

- Dispatchable algorithms that are not implemented by the backend
  will cause a ``pytest.xfail``, giving some indication that not all
  tests are running, while avoiding causing an explicit failure.

- If a backend only partially implements some algorithms, it can define
  a ``can_run(name, args, kwargs)`` function that returns True or False
  indicating whether it can run the algorithm with the given arguments.
  It may also return a string indicating why the algorithm can't be run;
  this string may be used in the future to give helpful info to the user.

- A backend may also define ``should_run(name, args, kwargs)`` that is similar
  to ``can_run``, but answers whether the backend *should* be run (converting
  if necessary). Like ``can_run``, it receives the original arguments so it
  can decide whether it should be run by inspecting the arguments. ``can_run``
  runs before ``should_run``, so ``should_run`` may assume ``can_run`` is True.
  If not implemented by the backend, ``can_run`` and ``should_run`` are
  assumed to always return True if the backend implements the algorithm.

- A special ``on_start_tests(items)`` function may be defined by the backend.
  It will be called with the list of NetworkX tests discovered. Each item
  is a test object that can be marked as xfail if the backend does not support
  the test using ``item.add_marker(pytest.mark.xfail(reason=...))``.

- A backend graph instance may have a ``G.__networkx_cache__`` dict to enable
  caching, and care should be taken to clear the cache when appropriate.
"""

import inspect
import itertools
import os
import warnings
from functools import partial
from importlib.metadata import entry_points

import networkx as nx

from .decorators import argmap

__all__ = ["_dispatchable"]


def _do_nothing():
    """This does nothing at all, yet it helps turn `_dispatchable` into functions."""


def _get_backends(group, *, load_and_call=False):
    """
    Retrieve NetworkX ``backends`` and ``backend_info`` from the entry points.

    Parameters
    -----------
    group : str
        The entry_point to be retrieved.
    load_and_call : bool, optional
        If True, load and call the backend. Defaults to False.

    Returns
    --------
    dict
        A dictionary mapping backend names to their respective backend objects.

    Notes
    ------
    If a backend is defined more than once, a warning is issued.
    The `nx-loopback` backend is removed if it exists, as it is only available during testing.
    A warning is displayed if an error occurs while loading a backend.
    """
    items = entry_points(group=group)
    rv = {}
    for ep in items:
        if ep.name in rv:
            warnings.warn(
                f"networkx backend defined more than once: {ep.name}",
                RuntimeWarning,
                stacklevel=2,
            )
        elif load_and_call:
            try:
                rv[ep.name] = ep.load()()
            except Exception as exc:
                warnings.warn(
                    f"Error encountered when loading info for backend {ep.name}: {exc}",
                    RuntimeWarning,
                    stacklevel=2,
                )
        else:
            rv[ep.name] = ep
    rv.pop("nx-loopback", None)
    return rv


backends = _get_backends("networkx.backends")
backend_info = _get_backends("networkx.backend_info", load_and_call=True)

# We must import from config after defining `backends` above
from .configs import Config, config

# Get default configuration from environment variables at import time
config.backend_priority = [
    x.strip()
    for x in os.environ.get(
        "NETWORKX_BACKEND_PRIORITY",
        os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", ""),
    ).split(",")
    if x.strip()
]
# Initialize default configuration for backends
config.backends = Config(
    **{
        backend: (
            cfg if isinstance(cfg := info["default_config"], Config) else Config(**cfg)
        )
        if "default_config" in info
        else Config()
        for backend, info in backend_info.items()
    }
)
type(config.backends).__doc__ = "All installed NetworkX backends and their configs."

# Load and cache backends on-demand
_loaded_backends = {}  # type: ignore[var-annotated]


def _always_run(name, args, kwargs):
    return True


def _load_backend(backend_name):
    if backend_name in _loaded_backends:
        return _loaded_backends[backend_name]
    rv = _loaded_backends[backend_name] = backends[backend_name].load()
    if not hasattr(rv, "can_run"):
        rv.can_run = _always_run
    if not hasattr(rv, "should_run"):
        rv.should_run = _always_run
    return rv


_registered_algorithms = {}


class _dispatchable:
    """Allow any of the following decorator forms:
    - @_dispatchable
    - @_dispatchable()
    - @_dispatchable(name="override_name")
    - @_dispatchable(graphs="graph")
    - @_dispatchable(edge_attrs="weight")
    - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"})

    These class attributes are currently used to allow backends to run networkx tests.
    For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx`
    Future work: add configuration to control these.
    """

    _is_testing = False
    _fallback_to_nx = (
        os.environ.get("NETWORKX_FALLBACK_TO_NX", "true").strip().lower() == "true"
    )

    def __new__(
        cls,
        func=None,
        *,
        name=None,
        graphs="G",
        edge_attrs=None,
        node_attrs=None,
        preserve_edge_attrs=False,
        preserve_node_attrs=False,
        preserve_graph_attrs=False,
        preserve_all_attrs=False,
        mutates_input=False,
        returns_graph=False,
    ):
        """A decorator that makes certain input graph types dispatch to ``func``'s
        backend implementation.

        Usage can be any of the following decorator forms:
        - @_dispatchable
        - @_dispatchable()
        - @_dispatchable(name="override_name")
        - @_dispatchable(graphs="graph_var_name")
        - @_dispatchable(edge_attrs="weight")
        - @_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"})
        with 0 and 1 giving the position in the signature function for graph objects.
        When edge_attrs is a dict, keys are keyword names and values are defaults.

        The class attributes are used to allow backends to run networkx tests.
        For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx`
        Future work: add configuration to control these.

        Parameters
        ----------
        func : callable, optional
            The function to be decorated. If ``func`` is not provided, returns a
            partial object that can be used to decorate a function later. If ``func``
            is provided, returns a new callable object that dispatches to a backend
            algorithm based on input graph types.

        name : str, optional
            The name of the algorithm to use for dispatching. If not provided,
            the name of ``func`` will be used. ``name`` is useful to avoid name
            conflicts, as all dispatched algorithms live in a single namespace.
            For example, ``tournament.is_strongly_connected`` had a name conflict
            with the standard ``nx.is_strongly_connected``, so we used
            ``@_dispatchable(name="tournament_is_strongly_connected")``.

        graphs : str or dict or None, default "G"
            If a string, the parameter name of the graph, which must be the first
            argument of the wrapped function. If more than one graph is required
            for the algorithm (or if the graph is not the first argument), provide
            a dict of parameter name to argument position for each graph argument.
            For example, ``@_dispatchable(graphs={"G": 0, "auxiliary?": 4})``
            indicates the 0th parameter ``G`` of the function is a required graph,
            and the 4th parameter ``auxiliary`` is an optional graph.
            To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``.
            Use ``graphs=None`` if *no* arguments are NetworkX graphs such as for
            graph generators, readers, and conversion functions.

        edge_attrs : str or dict, optional
            ``edge_attrs`` holds information about edge attribute arguments
            and default values for those edge attributes.
            If a string, ``edge_attrs`` holds the function argument name that
            indicates a single edge attribute to include in the converted graph.
            The default value for this attribute is 1. To indicate that an argument
            is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``.
            If a dict, ``edge_attrs`` holds a dict keyed by argument names, with
            values that are either the default value or, if a string, the argument
            name that indicates the default value.

        node_attrs : str or dict, optional
            Like ``edge_attrs``, but for node attributes.

        preserve_edge_attrs : bool or str or dict, optional
            For bool, whether to preserve all edge attributes.
            For str, the parameter name that may indicate (with ``True`` or a
            callable argument) whether all edge attributes should be preserved
            when converting.
            For dict of ``{graph_name: {attr: default}}``, indicate pre-determined
            edge attributes (and defaults) to preserve for input graphs.

        preserve_node_attrs : bool or str or dict, optional
            Like ``preserve_edge_attrs``, but for node attributes.

        preserve_graph_attrs : bool or set
            For bool, whether to preserve all graph attributes.
            For set, which input graph arguments to preserve graph attributes.

        preserve_all_attrs : bool
            Whether to preserve all edge, node and graph attributes.
            This overrides all the other preserve_*_attrs.

        mutates_input : bool or dict, default False
            For bool, whether the functions mutates an input graph argument.
            For dict of ``{arg_name: arg_pos}``, arguments that indicates whether an
            input graph will be mutated, and ``arg_name`` may begin with ``"not "``
            to negate the logic (for example, this is used by ``copy=`` arguments).
            By default, dispatching doesn't convert input graphs to a different
            backend for functions that mutate input graphs.

        returns_graph : bool, default False
            Whether the function can return or yield a graph object. By default,
            dispatching doesn't convert input graphs to a different backend for
            functions that return graphs.
        """
        if func is None:
            return partial(
                _dispatchable,
                name=name,
                graphs=graphs,
                edge_attrs=edge_attrs,
                node_attrs=node_attrs,
                preserve_edge_attrs=preserve_edge_attrs,
                preserve_node_attrs=preserve_node_attrs,
                preserve_graph_attrs=preserve_graph_attrs,
                preserve_all_attrs=preserve_all_attrs,
                mutates_input=mutates_input,
                returns_graph=returns_graph,
            )
        if isinstance(func, str):
            raise TypeError("'name' and 'graphs' must be passed by keyword") from None
        # If name not provided, use the name of the function
        if name is None:
            name = func.__name__

        self = object.__new__(cls)

        # standard function-wrapping stuff
        # __annotations__ not used
        self.__name__ = func.__name__
        # self.__doc__ = func.__doc__  # __doc__ handled as cached property
        self.__defaults__ = func.__defaults__
        # We "magically" add `backend=` keyword argument to allow backend to be specified
        if func.__kwdefaults__:
            self.__kwdefaults__ = {**func.__kwdefaults__, "backend": None}
        else:
            self.__kwdefaults__ = {"backend": None}
        self.__module__ = func.__module__
        self.__qualname__ = func.__qualname__
        self.__dict__.update(func.__dict__)
        self.__wrapped__ = func

        # Supplement docstring with backend info; compute and cache when needed
        self._orig_doc = func.__doc__
        self._cached_doc = None

        self.orig_func = func
        self.name = name
        self.edge_attrs = edge_attrs
        self.node_attrs = node_attrs
        self.preserve_edge_attrs = preserve_edge_attrs or preserve_all_attrs
        self.preserve_node_attrs = preserve_node_attrs or preserve_all_attrs
        self.preserve_graph_attrs = preserve_graph_attrs or preserve_all_attrs
        self.mutates_input = mutates_input
        # Keep `returns_graph` private for now, b/c we may extend info on return types
        self._returns_graph = returns_graph

        if edge_attrs is not None and not isinstance(edge_attrs, str | dict):
            raise TypeError(
                f"Bad type for edge_attrs: {type(edge_attrs)}. Expected str or dict."
            ) from None
        if node_attrs is not None and not isinstance(node_attrs, str | dict):
            raise TypeError(
                f"Bad type for node_attrs: {type(node_attrs)}. Expected str or dict."
            ) from None
        if not isinstance(self.preserve_edge_attrs, bool | str | dict):
            raise TypeError(
                f"Bad type for preserve_edge_attrs: {type(self.preserve_edge_attrs)}."
                " Expected bool, str, or dict."
            ) from None
        if not isinstance(self.preserve_node_attrs, bool | str | dict):
            raise TypeError(
                f"Bad type for preserve_node_attrs: {type(self.preserve_node_attrs)}."
                " Expected bool, str, or dict."
            ) from None
        if not isinstance(self.preserve_graph_attrs, bool | set):
            raise TypeError(
                f"Bad type for preserve_graph_attrs: {type(self.preserve_graph_attrs)}."
                " Expected bool or set."
            ) from None
        if not isinstance(self.mutates_input, bool | dict):
            raise TypeError(
                f"Bad type for mutates_input: {type(self.mutates_input)}."
                " Expected bool or dict."
            ) from None
        if not isinstance(self._returns_graph, bool):
            raise TypeError(
                f"Bad type for returns_graph: {type(self._returns_graph)}."
                " Expected bool."
            ) from None

        if isinstance(graphs, str):
            graphs = {graphs: 0}
        elif graphs is None:
            pass
        elif not isinstance(graphs, dict):
            raise TypeError(
                f"Bad type for graphs: {type(graphs)}. Expected str or dict."
            ) from None
        elif len(graphs) == 0:
            raise KeyError("'graphs' must contain at least one variable name") from None

        # This dict comprehension is complicated for better performance; equivalent shown below.
        self.optional_graphs = set()
        self.list_graphs = set()
        if graphs is None:
            self.graphs = {}
        else:
            self.graphs = {
                self.optional_graphs.add(val := k[:-1]) or val
                if (last := k[-1]) == "?"
                else self.list_graphs.add(val := k[1:-1]) or val
                if last == "]"
                else k: v
                for k, v in graphs.items()
            }
        # The above is equivalent to:
        # self.optional_graphs = {k[:-1] for k in graphs if k[-1] == "?"}
        # self.list_graphs = {k[1:-1] for k in graphs if k[-1] == "]"}
        # self.graphs = {k[:-1] if k[-1] == "?" else k: v for k, v in graphs.items()}

        # Compute and cache the signature on-demand
        self._sig = None

        # Which backends implement this function?
        self.backends = {
            backend
            for backend, info in backend_info.items()
            if "functions" in info and name in info["functions"]
        }

        if name in _registered_algorithms:
            raise KeyError(
                f"Algorithm already exists in dispatch registry: {name}"
            ) from None
        # Use the magic of `argmap` to turn `self` into a function. This does result
        # in small additional overhead compared to calling `_dispatchable` directly,
        # but `argmap` has the magical property that it can stack with other `argmap`
        # decorators "for free". Being a function is better for REPRs and type-checkers.
        self = argmap(_do_nothing)(self)
        _registered_algorithms[name] = self
        return self

    @property
    def __doc__(self):
        """If the cached documentation exists, it is returned.
        Otherwise, the documentation is generated using _make_doc() method,
        cached, and then returned."""

        if (rv := self._cached_doc) is not None:
            return rv
        rv = self._cached_doc = self._make_doc()
        return rv

    @__doc__.setter
    def __doc__(self, val):
        """Sets the original documentation to the given value and resets the
        cached documentation."""

        self._orig_doc = val
        self._cached_doc = None

    @property
    def __signature__(self):
        """Return the signature of the original function, with the addition of
        the `backend` and `backend_kwargs` parameters."""

        if self._sig is None:
            sig = inspect.signature(self.orig_func)
            # `backend` is now a reserved argument used by dispatching.
            # assert "backend" not in sig.parameters
            if not any(
                p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()
            ):
                sig = sig.replace(
                    parameters=[
                        *sig.parameters.values(),
                        inspect.Parameter(
                            "backend", inspect.Parameter.KEYWORD_ONLY, default=None
                        ),
                        inspect.Parameter(
                            "backend_kwargs", inspect.Parameter.VAR_KEYWORD
                        ),
                    ]
                )
            else:
                *parameters, var_keyword = sig.parameters.values()
                sig = sig.replace(
                    parameters=[
                        *parameters,
                        inspect.Parameter(
                            "backend", inspect.Parameter.KEYWORD_ONLY, default=None
                        ),
                        var_keyword,
                    ]
                )
            self._sig = sig
        return self._sig

    def __call__(self, /, *args, backend=None, **kwargs):
        """Returns the result of the original function, or the backend function if
        the backend is specified and that backend implements `func`."""

        if not backends:
            # Fast path if no backends are installed
            return self.orig_func(*args, **kwargs)

        # Use `backend_name` in this function instead of `backend`
        backend_name = backend
        if backend_name is not None and backend_name not in backends:
            raise ImportError(f"Unable to load backend: {backend_name}")

        graphs_resolved = {}
        for gname, pos in self.graphs.items():
            if pos < len(args):
                if gname in kwargs:
                    raise TypeError(f"{self.name}() got multiple values for {gname!r}")
                val = args[pos]
            elif gname in kwargs:
                val = kwargs[gname]
            elif gname not in self.optional_graphs:
                raise TypeError(
                    f"{self.name}() missing required graph argument: {gname}"
                )
            else:
                continue
            if val is None:
                if gname not in self.optional_graphs:
                    raise TypeError(
                        f"{self.name}() required graph argument {gname!r} is None; must be a graph"
                    )
            else:
                graphs_resolved[gname] = val

        # Alternative to the above that does not check duplicated args or missing required graphs.
        # graphs_resolved = {
        #     val
        #     for gname, pos in self.graphs.items()
        #     if (val := args[pos] if pos < len(args) else kwargs.get(gname)) is not None
        # }

        # Check if any graph comes from a backend
        if self.list_graphs:
            # Make sure we don't lose values by consuming an iterator
            args = list(args)
            for gname in self.list_graphs & graphs_resolved.keys():
                val = list(graphs_resolved[gname])
                graphs_resolved[gname] = val
                if gname in kwargs:
                    kwargs[gname] = val
                else:
                    args[self.graphs[gname]] = val

            has_backends = any(
                hasattr(g, "__networkx_backend__")
                if gname not in self.list_graphs
                else any(hasattr(g2, "__networkx_backend__") for g2 in g)
                for gname, g in graphs_resolved.items()
            )
            if has_backends:
                graph_backend_names = {
                    getattr(g, "__networkx_backend__", "networkx")
                    for gname, g in graphs_resolved.items()
                    if gname not in self.list_graphs
                }
                for gname in self.list_graphs & graphs_resolved.keys():
                    graph_backend_names.update(
                        getattr(g, "__networkx_backend__", "networkx")
                        for g in graphs_resolved[gname]
                    )
        else:
            has_backends = any(
                hasattr(g, "__networkx_backend__") for g in graphs_resolved.values()
            )
            if has_backends:
                graph_backend_names = {
                    getattr(g, "__networkx_backend__", "networkx")
                    for g in graphs_resolved.values()
                }

        backend_priority = config.backend_priority
        if self._is_testing and backend_priority and backend_name is None:
            # Special path if we are running networkx tests with a backend.
            # This even runs for (and handles) functions that mutate input graphs.
            return self._convert_and_call_for_tests(
                backend_priority[0],
                args,
                kwargs,
                fallback_to_nx=self._fallback_to_nx,
            )

        if has_backends:
            # Dispatchable graphs found! Dispatch to backend function.
            # We don't handle calls with different backend graphs yet,
            # but we may be able to convert additional networkx graphs.
            backend_names = graph_backend_names - {"networkx"}
            if len(backend_names) != 1:
                # Future work: convert between backends and run if multiple backends found
                raise TypeError(
                    f"{self.name}() graphs must all be from the same backend, found {backend_names}"
                )
            [graph_backend_name] = backend_names
            if backend_name is not None and backend_name != graph_backend_name:
                # Future work: convert between backends to `backend_name` backend
                raise TypeError(
                    f"{self.name}() is unable to convert graph from backend {graph_backend_name!r} "
                    f"to the specified backend {backend_name!r}."
                )
            if graph_backend_name not in backends:
                raise ImportError(f"Unable to load backend: {graph_backend_name}")
            if (
                "networkx" in graph_backend_names
                and graph_backend_name not in backend_priority
            ):
                # Not configured to convert networkx graphs to this backend
                raise TypeError(
                    f"Unable to convert inputs and run {self.name}. "
                    f"{self.name}() has networkx and {graph_backend_name} graphs, but NetworkX is not "
                    f"configured to automatically convert graphs from networkx to {graph_backend_name}."
                )
            backend = _load_backend(graph_backend_name)
            if hasattr(backend, self.name):
                if "networkx" in graph_backend_names:
                    # We need to convert networkx graphs to backend graphs.
                    # There is currently no need to check `self.mutates_input` here.
                    return self._convert_and_call(
                        graph_backend_name,
                        args,
                        kwargs,
                        fallback_to_nx=self._fallback_to_nx,
                    )
                # All graphs are backend graphs--no need to convert!
                return getattr(backend, self.name)(*args, **kwargs)
            # Future work: try to convert and run with other backends in backend_priority
            raise nx.NetworkXNotImplemented(
                f"'{self.name}' not implemented by {graph_backend_name}"
            )

        # If backend was explicitly given by the user, so we need to use it no matter what
        if backend_name is not None:
            return self._convert_and_call(
                backend_name, args, kwargs, fallback_to_nx=False
            )

        # Only networkx graphs; try to convert and run with a backend with automatic
        # conversion, but don't do this by default for graph generators or loaders,
        # or if the functions mutates an input graph or returns a graph.
        # Only convert and run if `backend.should_run(...)` returns True.
        if (
            not self._returns_graph
            and (
                not self.mutates_input
                or isinstance(self.mutates_input, dict)
                # If `mutates_input` begins with "not ", then assume the argument is boolean,
                # otherwise treat it as a node or edge attribute if it's not None.
                and any(
                    not (
                        args[arg_pos]
                        if len(args) > arg_pos
                        else kwargs.get(arg_name[4:], True)
                    )
                    if arg_name.startswith("not ")
                    else (
                        args[arg_pos] if len(args) > arg_pos else kwargs.get(arg_name)
                    )
                    is not None
                    for arg_name, arg_pos in self.mutates_input.items()
                )
            )
        ):
            # Should we warn or log if we don't convert b/c the input will be mutated?
            for backend_name in backend_priority:
                if self._should_backend_run(backend_name, *args, **kwargs):
                    return self._convert_and_call(
                        backend_name,
                        args,
                        kwargs,
                        fallback_to_nx=self._fallback_to_nx,
                    )
        # Default: run with networkx on networkx inputs
        return self.orig_func(*args, **kwargs)

    def _can_backend_run(self, backend_name, /, *args, **kwargs):
        """Can the specified backend run this algorithm with these arguments?"""
        backend = _load_backend(backend_name)
        # `backend.can_run` and `backend.should_run` may return strings that describe
        # why they can't or shouldn't be run. We plan to use the strings in the future.
        return (
            hasattr(backend, self.name)
            and (can_run := backend.can_run(self.name, args, kwargs))
            and not isinstance(can_run, str)
        )

    def _should_backend_run(self, backend_name, /, *args, **kwargs):
        """Can/should the specified backend run this algorithm with these arguments?"""
        backend = _load_backend(backend_name)
        # `backend.can_run` and `backend.should_run` may return strings that describe
        # why they can't or shouldn't be run. We plan to use the strings in the future.
        return (
            hasattr(backend, self.name)
            and (can_run := backend.can_run(self.name, args, kwargs))
            and not isinstance(can_run, str)
            and (should_run := backend.should_run(self.name, args, kwargs))
            and not isinstance(should_run, str)
        )

    def _convert_arguments(self, backend_name, args, kwargs, *, use_cache):
        """Convert graph arguments to the specified backend.

        Returns
        -------
        args tuple and kwargs dict
        """
        bound = self.__signature__.bind(*args, **kwargs)
        bound.apply_defaults()
        if not self.graphs:
            bound_kwargs = bound.kwargs
            del bound_kwargs["backend"]
            return bound.args, bound_kwargs
        # Convert graphs into backend graph-like object
        # Include the edge and/or node labels if provided to the algorithm
        preserve_edge_attrs = self.preserve_edge_attrs
        edge_attrs = self.edge_attrs
        if preserve_edge_attrs is False:
            # e.g. `preserve_edge_attrs=False`
            pass
        elif preserve_edge_attrs is True:
            # e.g. `preserve_edge_attrs=True`
            edge_attrs = None
        elif isinstance(preserve_edge_attrs, str):
            if bound.arguments[preserve_edge_attrs] is True or callable(
                bound.arguments[preserve_edge_attrs]
            ):
                # e.g. `preserve_edge_attrs="attr"` and `func(attr=True)`
                # e.g. `preserve_edge_attrs="attr"` and `func(attr=myfunc)`
                preserve_edge_attrs = True
                edge_attrs = None
            elif bound.arguments[preserve_edge_attrs] is False and (
                isinstance(edge_attrs, str)
                and edge_attrs == preserve_edge_attrs
                or isinstance(edge_attrs, dict)
                and preserve_edge_attrs in edge_attrs
            ):
                # e.g. `preserve_edge_attrs="attr"` and `func(attr=False)`
                # Treat `False` argument as meaning "preserve_edge_data=False"
                # and not `False` as the edge attribute to use.
                preserve_edge_attrs = False
                edge_attrs = None
            else:
                # e.g. `preserve_edge_attrs="attr"` and `func(attr="weight")`
preserve_edge_attrs = False
|
879 |
+
# Else: e.g. `preserve_edge_attrs={"G": {"weight": 1}}`
|
880 |
+
|
881 |
+
if edge_attrs is None:
|
882 |
+
# May have been set to None above b/c all attributes are preserved
|
883 |
+
pass
|
884 |
+
elif isinstance(edge_attrs, str):
|
885 |
+
if edge_attrs[0] == "[":
|
886 |
+
# e.g. `edge_attrs="[edge_attributes]"` (argument of list of attributes)
|
887 |
+
# e.g. `func(edge_attributes=["foo", "bar"])`
|
888 |
+
edge_attrs = {
|
889 |
+
edge_attr: 1 for edge_attr in bound.arguments[edge_attrs[1:-1]]
|
890 |
+
}
|
891 |
+
elif callable(bound.arguments[edge_attrs]):
|
892 |
+
# e.g. `edge_attrs="weight"` and `func(weight=myfunc)`
|
893 |
+
preserve_edge_attrs = True
|
894 |
+
edge_attrs = None
|
895 |
+
elif bound.arguments[edge_attrs] is not None:
|
896 |
+
# e.g. `edge_attrs="weight"` and `func(weight="foo")` (default of 1)
|
897 |
+
edge_attrs = {bound.arguments[edge_attrs]: 1}
|
898 |
+
elif self.name == "to_numpy_array" and hasattr(
|
899 |
+
bound.arguments["dtype"], "names"
|
900 |
+
):
|
901 |
+
# Custom handling: attributes may be obtained from `dtype`
|
902 |
+
edge_attrs = {
|
903 |
+
edge_attr: 1 for edge_attr in bound.arguments["dtype"].names
|
904 |
+
}
|
905 |
+
else:
|
906 |
+
# e.g. `edge_attrs="weight"` and `func(weight=None)`
|
907 |
+
edge_attrs = None
|
908 |
+
else:
|
909 |
+
# e.g. `edge_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
|
910 |
+
# e.g. `edge_attrs={"attr": 0}` and `func(attr="foo")`
|
911 |
+
edge_attrs = {
|
912 |
+
edge_attr: bound.arguments.get(val, 1) if isinstance(val, str) else val
|
913 |
+
for key, val in edge_attrs.items()
|
914 |
+
if (edge_attr := bound.arguments[key]) is not None
|
915 |
+
}
|
916 |
+
|
917 |
+
preserve_node_attrs = self.preserve_node_attrs
|
918 |
+
node_attrs = self.node_attrs
|
919 |
+
if preserve_node_attrs is False:
|
920 |
+
# e.g. `preserve_node_attrs=False`
|
921 |
+
pass
|
922 |
+
elif preserve_node_attrs is True:
|
923 |
+
# e.g. `preserve_node_attrs=True`
|
924 |
+
node_attrs = None
|
925 |
+
elif isinstance(preserve_node_attrs, str):
|
926 |
+
if bound.arguments[preserve_node_attrs] is True or callable(
|
927 |
+
bound.arguments[preserve_node_attrs]
|
928 |
+
):
|
929 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=True)`
|
930 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=myfunc)`
|
931 |
+
preserve_node_attrs = True
|
932 |
+
node_attrs = None
|
933 |
+
elif bound.arguments[preserve_node_attrs] is False and (
|
934 |
+
isinstance(node_attrs, str)
|
935 |
+
and node_attrs == preserve_node_attrs
|
936 |
+
or isinstance(node_attrs, dict)
|
937 |
+
and preserve_node_attrs in node_attrs
|
938 |
+
):
|
939 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=False)`
|
940 |
+
# Treat `False` argument as meaning "preserve_node_data=False"
|
941 |
+
# and not `False` as the node attribute to use. Is this used?
|
942 |
+
preserve_node_attrs = False
|
943 |
+
node_attrs = None
|
944 |
+
else:
|
945 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr="weight")`
|
946 |
+
preserve_node_attrs = False
|
947 |
+
# Else: e.g. `preserve_node_attrs={"G": {"pos": None}}`
|
948 |
+
|
949 |
+
if node_attrs is None:
|
950 |
+
# May have been set to None above b/c all attributes are preserved
|
951 |
+
pass
|
952 |
+
elif isinstance(node_attrs, str):
|
953 |
+
if node_attrs[0] == "[":
|
954 |
+
# e.g. `node_attrs="[node_attributes]"` (argument of list of attributes)
|
955 |
+
# e.g. `func(node_attributes=["foo", "bar"])`
|
956 |
+
node_attrs = {
|
957 |
+
node_attr: None for node_attr in bound.arguments[node_attrs[1:-1]]
|
958 |
+
}
|
959 |
+
elif callable(bound.arguments[node_attrs]):
|
960 |
+
# e.g. `node_attrs="weight"` and `func(weight=myfunc)`
|
961 |
+
preserve_node_attrs = True
|
962 |
+
node_attrs = None
|
963 |
+
elif bound.arguments[node_attrs] is not None:
|
964 |
+
# e.g. `node_attrs="weight"` and `func(weight="foo")`
|
965 |
+
node_attrs = {bound.arguments[node_attrs]: None}
|
966 |
+
else:
|
967 |
+
# e.g. `node_attrs="weight"` and `func(weight=None)`
|
968 |
+
node_attrs = None
|
969 |
+
else:
|
970 |
+
# e.g. `node_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
|
971 |
+
# e.g. `node_attrs={"attr": 0}` and `func(attr="foo")`
|
972 |
+
node_attrs = {
|
973 |
+
node_attr: bound.arguments.get(val) if isinstance(val, str) else val
|
974 |
+
for key, val in node_attrs.items()
|
975 |
+
if (node_attr := bound.arguments[key]) is not None
|
976 |
+
}
|
977 |
+
|
978 |
+
preserve_graph_attrs = self.preserve_graph_attrs
|
979 |
+
|
980 |
+
# It should be safe to assume that we either have networkx graphs or backend graphs.
|
981 |
+
# Future work: allow conversions between backends.
|
982 |
+
for gname in self.graphs:
|
983 |
+
if gname in self.list_graphs:
|
984 |
+
bound.arguments[gname] = [
|
985 |
+
self._convert_graph(
|
986 |
+
backend_name,
|
987 |
+
g,
|
988 |
+
edge_attrs=edge_attrs,
|
989 |
+
node_attrs=node_attrs,
|
990 |
+
preserve_edge_attrs=preserve_edge_attrs,
|
991 |
+
preserve_node_attrs=preserve_node_attrs,
|
992 |
+
preserve_graph_attrs=preserve_graph_attrs,
|
993 |
+
graph_name=gname,
|
994 |
+
use_cache=use_cache,
|
995 |
+
)
|
996 |
+
if getattr(g, "__networkx_backend__", "networkx") == "networkx"
|
997 |
+
else g
|
998 |
+
for g in bound.arguments[gname]
|
999 |
+
]
|
1000 |
+
else:
|
1001 |
+
graph = bound.arguments[gname]
|
1002 |
+
if graph is None:
|
1003 |
+
if gname in self.optional_graphs:
|
1004 |
+
continue
|
1005 |
+
raise TypeError(
|
1006 |
+
f"Missing required graph argument `{gname}` in {self.name} function"
|
1007 |
+
)
|
1008 |
+
if isinstance(preserve_edge_attrs, dict):
|
1009 |
+
preserve_edges = False
|
1010 |
+
edges = preserve_edge_attrs.get(gname, edge_attrs)
|
1011 |
+
else:
|
1012 |
+
preserve_edges = preserve_edge_attrs
|
1013 |
+
edges = edge_attrs
|
1014 |
+
if isinstance(preserve_node_attrs, dict):
|
1015 |
+
preserve_nodes = False
|
1016 |
+
nodes = preserve_node_attrs.get(gname, node_attrs)
|
1017 |
+
else:
|
1018 |
+
preserve_nodes = preserve_node_attrs
|
1019 |
+
nodes = node_attrs
|
1020 |
+
if isinstance(preserve_graph_attrs, set):
|
1021 |
+
preserve_graph = gname in preserve_graph_attrs
|
1022 |
+
else:
|
1023 |
+
preserve_graph = preserve_graph_attrs
|
1024 |
+
if getattr(graph, "__networkx_backend__", "networkx") == "networkx":
|
1025 |
+
bound.arguments[gname] = self._convert_graph(
|
1026 |
+
backend_name,
|
1027 |
+
graph,
|
1028 |
+
edge_attrs=edges,
|
1029 |
+
node_attrs=nodes,
|
1030 |
+
preserve_edge_attrs=preserve_edges,
|
1031 |
+
preserve_node_attrs=preserve_nodes,
|
1032 |
+
preserve_graph_attrs=preserve_graph,
|
1033 |
+
graph_name=gname,
|
1034 |
+
use_cache=use_cache,
|
1035 |
+
)
|
1036 |
+
bound_kwargs = bound.kwargs
|
1037 |
+
del bound_kwargs["backend"]
|
1038 |
+
return bound.args, bound_kwargs
|
1039 |
+
|
1040 |
+
def _convert_graph(
|
1041 |
+
self,
|
1042 |
+
backend_name,
|
1043 |
+
graph,
|
1044 |
+
*,
|
1045 |
+
edge_attrs,
|
1046 |
+
node_attrs,
|
1047 |
+
preserve_edge_attrs,
|
1048 |
+
preserve_node_attrs,
|
1049 |
+
preserve_graph_attrs,
|
1050 |
+
graph_name,
|
1051 |
+
use_cache,
|
1052 |
+
):
|
1053 |
+
if (
|
1054 |
+
use_cache
|
1055 |
+
and (nx_cache := getattr(graph, "__networkx_cache__", None)) is not None
|
1056 |
+
):
|
1057 |
+
cache = nx_cache.setdefault("backends", {}).setdefault(backend_name, {})
|
1058 |
+
# edge_attrs: dict | None
|
1059 |
+
# node_attrs: dict | None
|
1060 |
+
# preserve_edge_attrs: bool (False if edge_attrs is not None)
|
1061 |
+
# preserve_node_attrs: bool (False if node_attrs is not None)
|
1062 |
+
# preserve_graph_attrs: bool
|
1063 |
+
key = edge_key, node_key, graph_key = (
|
1064 |
+
frozenset(edge_attrs.items())
|
1065 |
+
if edge_attrs is not None
|
1066 |
+
else preserve_edge_attrs,
|
1067 |
+
frozenset(node_attrs.items())
|
1068 |
+
if node_attrs is not None
|
1069 |
+
else preserve_node_attrs,
|
1070 |
+
preserve_graph_attrs,
|
1071 |
+
)
|
1072 |
+
if cache:
|
1073 |
+
warning_message = (
|
1074 |
+
f"Using cached graph for {backend_name!r} backend in "
|
1075 |
+
f"call to {self.name}.\n\nFor the cache to be consistent "
|
1076 |
+
"(i.e., correct), the input graph must not have been "
|
1077 |
+
"manually mutated since the cached graph was created. "
|
1078 |
+
"Examples of manually mutating the graph data structures "
|
1079 |
+
"resulting in an inconsistent cache include:\n\n"
|
1080 |
+
" >>> G[u][v][key] = val\n\n"
|
1081 |
+
"and\n\n"
|
1082 |
+
" >>> for u, v, d in G.edges(data=True):\n"
|
1083 |
+
" ... d[key] = val\n\n"
|
1084 |
+
"Using methods such as `G.add_edge(u, v, weight=val)` "
|
1085 |
+
"will correctly clear the cache to keep it consistent. "
|
1086 |
+
"You may also use `G.__networkx_cache__.clear()` to "
|
1087 |
+
"manually clear the cache, or set `G.__networkx_cache__` "
|
1088 |
+
"to None to disable caching for G. Enable or disable "
|
1089 |
+
"caching via `nx.config.cache_converted_graphs` config."
|
1090 |
+
)
|
1091 |
+
# Do a simple search for a cached graph with compatible data.
|
1092 |
+
# For example, if we need a single attribute, then it's okay
|
1093 |
+
# to use a cached graph that preserved all attributes.
|
1094 |
+
# This looks for an exact match first.
|
1095 |
+
for compat_key in itertools.product(
|
1096 |
+
(edge_key, True) if edge_key is not True else (True,),
|
1097 |
+
(node_key, True) if node_key is not True else (True,),
|
1098 |
+
(graph_key, True) if graph_key is not True else (True,),
|
1099 |
+
):
|
1100 |
+
if (rv := cache.get(compat_key)) is not None:
|
1101 |
+
warnings.warn(warning_message)
|
1102 |
+
return rv
|
1103 |
+
if edge_key is not True and node_key is not True:
|
1104 |
+
# Iterate over the items in `cache` to see if any are compatible.
|
1105 |
+
# For example, if no edge attributes are needed, then a graph
|
1106 |
+
# with any edge attribute will suffice. We use the same logic
|
1107 |
+
# below (but switched) to clear unnecessary items from the cache.
|
1108 |
+
# Use `list(cache.items())` to be thread-safe.
|
1109 |
+
for (ekey, nkey, gkey), val in list(cache.items()):
|
1110 |
+
if edge_key is False or ekey is True:
|
1111 |
+
pass
|
1112 |
+
elif (
|
1113 |
+
edge_key is True
|
1114 |
+
or ekey is False
|
1115 |
+
or not edge_key.issubset(ekey)
|
1116 |
+
):
|
1117 |
+
continue
|
1118 |
+
if node_key is False or nkey is True:
|
1119 |
+
pass
|
1120 |
+
elif (
|
1121 |
+
node_key is True
|
1122 |
+
or nkey is False
|
1123 |
+
or not node_key.issubset(nkey)
|
1124 |
+
):
|
1125 |
+
continue
|
1126 |
+
if graph_key and not gkey:
|
1127 |
+
continue
|
1128 |
+
warnings.warn(warning_message)
|
1129 |
+
return val
|
1130 |
+
|
1131 |
+
backend = _load_backend(backend_name)
|
1132 |
+
rv = backend.convert_from_nx(
|
1133 |
+
graph,
|
1134 |
+
edge_attrs=edge_attrs,
|
1135 |
+
node_attrs=node_attrs,
|
1136 |
+
preserve_edge_attrs=preserve_edge_attrs,
|
1137 |
+
preserve_node_attrs=preserve_node_attrs,
|
1138 |
+
preserve_graph_attrs=preserve_graph_attrs,
|
1139 |
+
name=self.name,
|
1140 |
+
graph_name=graph_name,
|
1141 |
+
)
|
1142 |
+
if use_cache and nx_cache is not None:
|
1143 |
+
# Remove old cached items that are no longer necessary since they
|
1144 |
+
# are dominated/subsumed/outdated by what was just calculated.
|
1145 |
+
# This uses the same logic as above, but with keys switched.
|
1146 |
+
cache[key] = rv # Set at beginning to be thread-safe
|
1147 |
+
for cur_key in list(cache):
|
1148 |
+
if cur_key == key:
|
1149 |
+
continue
|
1150 |
+
ekey, nkey, gkey = cur_key
|
1151 |
+
if ekey is False or edge_key is True:
|
1152 |
+
pass
|
1153 |
+
elif ekey is True or edge_key is False or not ekey.issubset(edge_key):
|
1154 |
+
continue
|
1155 |
+
if nkey is False or node_key is True:
|
1156 |
+
pass
|
1157 |
+
elif nkey is True or node_key is False or not nkey.issubset(node_key):
|
1158 |
+
continue
|
1159 |
+
if gkey and not graph_key:
|
1160 |
+
continue
|
1161 |
+
cache.pop(cur_key, None) # Use pop instead of del to be thread-safe
|
1162 |
+
|
1163 |
+
return rv
|
1164 |
+
|
1165 |
+
def _convert_and_call(self, backend_name, args, kwargs, *, fallback_to_nx=False):
|
1166 |
+
"""Call this dispatchable function with a backend, converting graphs if necessary."""
|
1167 |
+
backend = _load_backend(backend_name)
|
1168 |
+
if not self._can_backend_run(backend_name, *args, **kwargs):
|
1169 |
+
if fallback_to_nx:
|
1170 |
+
return self.orig_func(*args, **kwargs)
|
1171 |
+
msg = f"'{self.name}' not implemented by {backend_name}"
|
1172 |
+
if hasattr(backend, self.name):
|
1173 |
+
msg += " with the given arguments"
|
1174 |
+
raise RuntimeError(msg)
|
1175 |
+
|
1176 |
+
try:
|
1177 |
+
converted_args, converted_kwargs = self._convert_arguments(
|
1178 |
+
backend_name, args, kwargs, use_cache=config.cache_converted_graphs
|
1179 |
+
)
|
1180 |
+
result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
|
1181 |
+
except (NotImplementedError, nx.NetworkXNotImplemented) as exc:
|
1182 |
+
if fallback_to_nx:
|
1183 |
+
return self.orig_func(*args, **kwargs)
|
1184 |
+
raise
|
1185 |
+
|
1186 |
+
return result
|
1187 |
+
|
1188 |
+
def _convert_and_call_for_tests(
|
1189 |
+
self, backend_name, args, kwargs, *, fallback_to_nx=False
|
1190 |
+
):
|
1191 |
+
"""Call this dispatchable function with a backend; for use with testing."""
|
1192 |
+
backend = _load_backend(backend_name)
|
1193 |
+
if not self._can_backend_run(backend_name, *args, **kwargs):
|
1194 |
+
if fallback_to_nx or not self.graphs:
|
1195 |
+
return self.orig_func(*args, **kwargs)
|
1196 |
+
|
1197 |
+
import pytest
|
1198 |
+
|
1199 |
+
msg = f"'{self.name}' not implemented by {backend_name}"
|
1200 |
+
if hasattr(backend, self.name):
|
1201 |
+
msg += " with the given arguments"
|
1202 |
+
pytest.xfail(msg)
|
1203 |
+
|
1204 |
+
from collections.abc import Iterable, Iterator, Mapping
|
1205 |
+
from copy import copy
|
1206 |
+
from io import BufferedReader, BytesIO, StringIO, TextIOWrapper
|
1207 |
+
from itertools import tee
|
1208 |
+
from random import Random
|
1209 |
+
|
1210 |
+
import numpy as np
|
1211 |
+
from numpy.random import Generator, RandomState
|
1212 |
+
from scipy.sparse import sparray
|
1213 |
+
|
1214 |
+
# We sometimes compare the backend result to the original result,
|
1215 |
+
# so we need two sets of arguments. We tee iterators and copy
|
1216 |
+
# random state so that they may be used twice.
|
1217 |
+
if not args:
|
1218 |
+
args1 = args2 = args
|
1219 |
+
else:
|
1220 |
+
args1, args2 = zip(
|
1221 |
+
*(
|
1222 |
+
(arg, copy(arg))
|
1223 |
+
if isinstance(
|
1224 |
+
arg, BytesIO | StringIO | Random | Generator | RandomState
|
1225 |
+
)
|
1226 |
+
else tee(arg)
|
1227 |
+
if isinstance(arg, Iterator)
|
1228 |
+
and not isinstance(arg, BufferedReader | TextIOWrapper)
|
1229 |
+
else (arg, arg)
|
1230 |
+
for arg in args
|
1231 |
+
)
|
1232 |
+
)
|
1233 |
+
if not kwargs:
|
1234 |
+
kwargs1 = kwargs2 = kwargs
|
1235 |
+
else:
|
1236 |
+
kwargs1, kwargs2 = zip(
|
1237 |
+
*(
|
1238 |
+
((k, v), (k, copy(v)))
|
1239 |
+
if isinstance(
|
1240 |
+
v, BytesIO | StringIO | Random | Generator | RandomState
|
1241 |
+
)
|
1242 |
+
else ((k, (teed := tee(v))[0]), (k, teed[1]))
|
1243 |
+
if isinstance(v, Iterator)
|
1244 |
+
and not isinstance(v, BufferedReader | TextIOWrapper)
|
1245 |
+
else ((k, v), (k, v))
|
1246 |
+
for k, v in kwargs.items()
|
1247 |
+
)
|
1248 |
+
)
|
1249 |
+
kwargs1 = dict(kwargs1)
|
1250 |
+
kwargs2 = dict(kwargs2)
|
1251 |
+
try:
|
1252 |
+
converted_args, converted_kwargs = self._convert_arguments(
|
1253 |
+
backend_name, args1, kwargs1, use_cache=False
|
1254 |
+
)
|
1255 |
+
result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
|
1256 |
+
except (NotImplementedError, nx.NetworkXNotImplemented) as exc:
|
1257 |
+
if fallback_to_nx:
|
1258 |
+
return self.orig_func(*args2, **kwargs2)
|
1259 |
+
import pytest
|
1260 |
+
|
1261 |
+
pytest.xfail(
|
1262 |
+
exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}"
|
1263 |
+
)
|
1264 |
+
# Verify that `self._returns_graph` is correct. This compares the return type
|
1265 |
+
# to the type expected from `self._returns_graph`. This handles tuple and list
|
1266 |
+
# return types, but *does not* catch functions that yield graphs.
|
1267 |
+
if (
|
1268 |
+
self._returns_graph
|
1269 |
+
!= (
|
1270 |
+
isinstance(result, nx.Graph)
|
1271 |
+
or hasattr(result, "__networkx_backend__")
|
1272 |
+
or isinstance(result, tuple | list)
|
1273 |
+
and any(
|
1274 |
+
isinstance(x, nx.Graph) or hasattr(x, "__networkx_backend__")
|
1275 |
+
for x in result
|
1276 |
+
)
|
1277 |
+
)
|
1278 |
+
and not (
|
1279 |
+
# May return Graph or None
|
1280 |
+
self.name in {"check_planarity", "check_planarity_recursive"}
|
1281 |
+
and any(x is None for x in result)
|
1282 |
+
)
|
1283 |
+
and not (
|
1284 |
+
# May return Graph or dict
|
1285 |
+
self.name in {"held_karp_ascent"}
|
1286 |
+
and any(isinstance(x, dict) for x in result)
|
1287 |
+
)
|
1288 |
+
and self.name
|
1289 |
+
not in {
|
1290 |
+
# yields graphs
|
1291 |
+
"all_triads",
|
1292 |
+
"general_k_edge_subgraphs",
|
1293 |
+
# yields graphs or arrays
|
1294 |
+
"nonisomorphic_trees",
|
1295 |
+
}
|
1296 |
+
):
|
1297 |
+
raise RuntimeError(f"`returns_graph` is incorrect for {self.name}")
|
1298 |
+
|
1299 |
+
def check_result(val, depth=0):
|
1300 |
+
if isinstance(val, np.number):
|
1301 |
+
raise RuntimeError(
|
1302 |
+
f"{self.name} returned a numpy scalar {val} ({type(val)}, depth={depth})"
|
1303 |
+
)
|
1304 |
+
if isinstance(val, np.ndarray | sparray):
|
1305 |
+
return
|
1306 |
+
if isinstance(val, nx.Graph):
|
1307 |
+
check_result(val._node, depth=depth + 1)
|
1308 |
+
check_result(val._adj, depth=depth + 1)
|
1309 |
+
return
|
1310 |
+
if isinstance(val, Iterator):
|
1311 |
+
raise NotImplementedError
|
1312 |
+
if isinstance(val, Iterable) and not isinstance(val, str):
|
1313 |
+
for x in val:
|
1314 |
+
check_result(x, depth=depth + 1)
|
1315 |
+
if isinstance(val, Mapping):
|
1316 |
+
for x in val.values():
|
1317 |
+
check_result(x, depth=depth + 1)
|
1318 |
+
|
1319 |
+
def check_iterator(it):
|
1320 |
+
for val in it:
|
1321 |
+
try:
|
1322 |
+
check_result(val)
|
1323 |
+
except RuntimeError as exc:
|
1324 |
+
raise RuntimeError(
|
1325 |
+
f"{self.name} returned a numpy scalar {val} ({type(val)})"
|
1326 |
+
) from exc
|
1327 |
+
yield val
|
1328 |
+
|
1329 |
+
if self.name in {"from_edgelist"}:
|
1330 |
+
# numpy scalars are explicitly given as values in some tests
|
1331 |
+
pass
|
1332 |
+
elif isinstance(result, Iterator):
|
1333 |
+
result = check_iterator(result)
|
1334 |
+
else:
|
1335 |
+
try:
|
1336 |
+
check_result(result)
|
1337 |
+
except RuntimeError as exc:
|
1338 |
+
raise RuntimeError(
|
1339 |
+
f"{self.name} returned a numpy scalar {result} ({type(result)})"
|
1340 |
+
) from exc
|
1341 |
+
check_result(result)
|
1342 |
+
|
1343 |
+
if self.name in {
|
1344 |
+
"edmonds_karp",
|
1345 |
+
"barycenter",
|
1346 |
+
"contracted_edge",
|
1347 |
+
"contracted_nodes",
|
1348 |
+
"stochastic_graph",
|
1349 |
+
"relabel_nodes",
|
1350 |
+
"maximum_branching",
|
1351 |
+
"incremental_closeness_centrality",
|
1352 |
+
"minimal_branching",
|
1353 |
+
"minimum_spanning_arborescence",
|
1354 |
+
"recursive_simple_cycles",
|
1355 |
+
"connected_double_edge_swap",
|
1356 |
+
}:
|
1357 |
+
# Special-case algorithms that mutate input graphs
|
1358 |
+
bound = self.__signature__.bind(*converted_args, **converted_kwargs)
|
1359 |
+
bound.apply_defaults()
|
1360 |
+
bound2 = self.__signature__.bind(*args2, **kwargs2)
|
1361 |
+
bound2.apply_defaults()
|
1362 |
+
if self.name in {
|
1363 |
+
"minimal_branching",
|
1364 |
+
"minimum_spanning_arborescence",
|
1365 |
+
"recursive_simple_cycles",
|
1366 |
+
"connected_double_edge_swap",
|
1367 |
+
}:
|
1368 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
1369 |
+
G2 = bound2.arguments["G"]
|
1370 |
+
G2._adj = G1._adj
|
1371 |
+
nx._clear_cache(G2)
|
1372 |
+
elif self.name == "edmonds_karp":
|
1373 |
+
R1 = backend.convert_to_nx(bound.arguments["residual"])
|
1374 |
+
R2 = bound2.arguments["residual"]
|
1375 |
+
if R1 is not None and R2 is not None:
|
1376 |
+
for k, v in R1.edges.items():
|
1377 |
+
R2.edges[k]["flow"] = v["flow"]
|
1378 |
+
R2.graph.update(R1.graph)
|
1379 |
+
nx._clear_cache(R2)
|
1380 |
+
elif self.name == "barycenter" and bound.arguments["attr"] is not None:
|
1381 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
1382 |
+
G2 = bound2.arguments["G"]
|
1383 |
+
attr = bound.arguments["attr"]
|
1384 |
+
for k, v in G1.nodes.items():
|
1385 |
+
G2.nodes[k][attr] = v[attr]
|
1386 |
+
nx._clear_cache(G2)
|
1387 |
+
elif (
|
1388 |
+
self.name in {"contracted_nodes", "contracted_edge"}
|
1389 |
+
and not bound.arguments["copy"]
|
1390 |
+
):
|
1391 |
+
# Edges and nodes changed; node "contraction" and edge "weight" attrs
|
1392 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
1393 |
+
G2 = bound2.arguments["G"]
|
1394 |
+
G2.__dict__.update(G1.__dict__)
|
1395 |
+
nx._clear_cache(G2)
|
1396 |
+
elif self.name == "stochastic_graph" and not bound.arguments["copy"]:
|
1397 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
1398 |
+
G2 = bound2.arguments["G"]
|
1399 |
+
for k, v in G1.edges.items():
|
1400 |
+
G2.edges[k]["weight"] = v["weight"]
|
1401 |
+
nx._clear_cache(G2)
|
1402 |
+
elif (
|
1403 |
+
self.name == "relabel_nodes"
|
1404 |
+
and not bound.arguments["copy"]
|
1405 |
+
or self.name in {"incremental_closeness_centrality"}
|
1406 |
+
):
|
1407 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
1408 |
+
G2 = bound2.arguments["G"]
|
1409 |
+
if G1 is G2:
|
1410 |
+
return G2
|
1411 |
+
G2._node.clear()
|
1412 |
+
G2._node.update(G1._node)
|
1413 |
+
G2._adj.clear()
|
1414 |
+
G2._adj.update(G1._adj)
|
1415 |
+
if hasattr(G1, "_pred") and hasattr(G2, "_pred"):
|
1416 |
+
G2._pred.clear()
|
1417 |
+
G2._pred.update(G1._pred)
|
1418 |
+
if hasattr(G1, "_succ") and hasattr(G2, "_succ"):
|
1419 |
+
G2._succ.clear()
|
1420 |
+
G2._succ.update(G1._succ)
|
1421 |
+
nx._clear_cache(G2)
|
1422 |
+
if self.name == "relabel_nodes":
|
1423 |
+
return G2
|
1424 |
+
return backend.convert_to_nx(result)
|
1425 |
+
|
1426 |
+
converted_result = backend.convert_to_nx(result)
|
1427 |
+
if isinstance(converted_result, nx.Graph) and self.name not in {
|
1428 |
+
"boykov_kolmogorov",
|
1429 |
+
"preflow_push",
|
1430 |
+
"quotient_graph",
|
1431 |
+
"shortest_augmenting_path",
|
1432 |
+
"spectral_graph_forge",
|
1433 |
+
# We don't handle tempfile.NamedTemporaryFile arguments
|
1434 |
+
"read_gml",
|
1435 |
+
"read_graph6",
|
1436 |
+
"read_sparse6",
|
1437 |
+
# We don't handle io.BufferedReader or io.TextIOWrapper arguments
|
1438 |
+
"bipartite_read_edgelist",
|
1439 |
+
"read_adjlist",
|
1440 |
+
"read_edgelist",
|
1441 |
+
"read_graphml",
|
1442 |
+
"read_multiline_adjlist",
|
1443 |
+
"read_pajek",
|
1444 |
+
"from_pydot",
|
1445 |
+
"pydot_read_dot",
|
1446 |
+
"agraph_read_dot",
|
1447 |
+
# graph comparison fails b/c of nan values
|
1448 |
+
"read_gexf",
|
1449 |
+
}:
|
1450 |
+
# For graph return types (e.g. generators), we compare that results are
|
1451 |
+
# the same between the backend and networkx, then return the original
|
1452 |
+
# networkx result so the iteration order will be consistent in tests.
|
1453 |
+
G = self.orig_func(*args2, **kwargs2)
|
1454 |
+
if not nx.utils.graphs_equal(G, converted_result):
|
1455 |
+
assert G.number_of_nodes() == converted_result.number_of_nodes()
|
1456 |
+
assert G.number_of_edges() == converted_result.number_of_edges()
|
1457 |
+
assert G.graph == converted_result.graph
|
1458 |
+
assert G.nodes == converted_result.nodes
|
1459 |
+
assert G.adj == converted_result.adj
|
1460 |
+
assert type(G) is type(converted_result)
|
1461 |
+
raise AssertionError("Graphs are not equal")
|
1462 |
+
return G
|
1463 |
+
return converted_result
|
1464 |
+
|
1465 |
+
def _make_doc(self):
|
1466 |
+
"""Generate the backends section at the end for functions having an alternate
|
1467 |
+
backend implementation(s) using the `backend_info` entry-point."""
|
1468 |
+
|
1469 |
+
if not self.backends:
|
1470 |
+
return self._orig_doc
|
1471 |
+
lines = [
|
1472 |
+
"Backends",
|
1473 |
+
"--------",
|
1474 |
+
]
|
1475 |
+
for backend in sorted(self.backends):
|
1476 |
+
info = backend_info[backend]
|
1477 |
+
if "short_summary" in info:
|
1478 |
+
lines.append(f"{backend} : {info['short_summary']}")
|
1479 |
+
else:
|
1480 |
+
lines.append(backend)
|
1481 |
+
if "functions" not in info or self.name not in info["functions"]:
|
1482 |
+
lines.append("")
|
1483 |
+
continue
|
1484 |
+
|
1485 |
+
func_info = info["functions"][self.name]
|
1486 |
+
|
1487 |
+
# Renaming extra_docstring to additional_docs
|
1488 |
+
if func_docs := (
|
1489 |
+
func_info.get("additional_docs") or func_info.get("extra_docstring")
|
1490 |
+
):
|
1491 |
+
lines.extend(
|
1492 |
+
f" {line}" if line else line for line in func_docs.split("\n")
|
1493 |
+
)
|
1494 |
+
add_gap = True
|
1495 |
+
else:
|
1496 |
+
add_gap = False
|
1497 |
+
|
1498 |
+
# Renaming extra_parameters to additional_parameters
|
1499 |
+
if extra_parameters := (
|
1500 |
+
func_info.get("extra_parameters")
|
1501 |
+
or func_info.get("additional_parameters")
|
1502 |
+
):
|
1503 |
+
if add_gap:
|
1504 |
+
lines.append("")
|
1505 |
+
lines.append(" Additional parameters:")
|
1506 |
+
for param in sorted(extra_parameters):
|
1507 |
+
lines.append(f" {param}")
|
1508 |
+
if desc := extra_parameters[param]:
|
1509 |
+
lines.append(f" {desc}")
|
1510 |
+
lines.append("")
|
1511 |
+
else:
|
1512 |
+
lines.append("")
|
1513 |
+
|
1514 |
+
if func_url := func_info.get("url"):
|
1515 |
+
lines.append(f"[`Source <{func_url}>`_]")
|
1516 |
+
lines.append("")
|
1517 |
+
|
1518 |
+
lines.pop() # Remove last empty line
|
1519 |
+
to_add = "\n ".join(lines)
|
1520 |
+
return f"{self._orig_doc.rstrip()}\n\n {to_add}"
|
1521 |
+
|
1522 |
+
def __reduce__(self):
|
1523 |
+
"""Allow this object to be serialized with pickle.
|
1524 |
+
|
1525 |
+
This uses the global registry `_registered_algorithms` to deserialize.
|
1526 |
+
"""
|
1527 |
+
return _restore_dispatchable, (self.name,)
|
1528 |
+
|
1529 |
+
|
1530 |
+
def _restore_dispatchable(name):
|
1531 |
+
return _registered_algorithms[name]
|
1532 |
+
|
1533 |
+
|
1534 |
+
if os.environ.get("_NETWORKX_BUILDING_DOCS_"):
|
1535 |
+
# When building docs with Sphinx, use the original function with the
|
1536 |
+
# dispatched __doc__, b/c Sphinx renders normal Python functions better.
|
1537 |
+
# This doesn't show e.g. `*, backend=None, **backend_kwargs` in the
|
1538 |
+
# signatures, which is probably okay. It does allow the docstring to be
|
1539 |
+
# updated based on the installed backends.
|
1540 |
+
_orig_dispatchable = _dispatchable
|
1541 |
+
|
1542 |
+
def _dispatchable(func=None, **kwargs): # type: ignore[no-redef]
|
1543 |
+
if func is None:
|
1544 |
+
return partial(_dispatchable, **kwargs)
|
1545 |
+
dispatched_func = _orig_dispatchable(func, **kwargs)
|
1546 |
+
func.__doc__ = dispatched_func.__doc__
|
1547 |
+
return func
|
1548 |
+
|
1549 |
+
_dispatchable.__doc__ = _orig_dispatchable.__new__.__doc__ # type: ignore[method-assign,assignment]
|
1550 |
+
_sig = inspect.signature(_orig_dispatchable.__new__)
|
1551 |
+
_dispatchable.__signature__ = _sig.replace( # type: ignore[method-assign,assignment]
|
1552 |
+
parameters=[v for k, v in _sig.parameters.items() if k != "cls"]
|
1553 |
+
)
|
env-llmeval/lib/python3.10/site-packages/networkx/utils/configs.py
ADDED
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import collections
|
2 |
+
import os
|
3 |
+
import typing
|
4 |
+
from dataclasses import dataclass
|
5 |
+
|
6 |
+
__all__ = ["Config", "config"]
|
7 |
+
|
8 |
+
|
9 |
+
@dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False)
|
10 |
+
class Config:
|
11 |
+
"""The base class for NetworkX configuration.
|
12 |
+
|
13 |
+
There are two ways to use this to create configurations. The first is to
|
14 |
+
simply pass the initial configuration as keyword arguments to ``Config``:
|
15 |
+
|
16 |
+
>>> cfg = Config(eggs=1, spam=5)
|
17 |
+
>>> cfg
|
18 |
+
Config(eggs=1, spam=5)
|
19 |
+
|
20 |
+
The second--and preferred--way is to subclass ``Config`` with docs and annotations.
|
21 |
+
|
22 |
+
>>> class MyConfig(Config):
|
23 |
+
... '''Breakfast!'''
|
24 |
+
...
|
25 |
+
... eggs: int
|
26 |
+
... spam: int
|
27 |
+
...
|
28 |
+
... def _check_config(self, key, value):
|
29 |
+
... assert isinstance(value, int) and value >= 0
|
30 |
+
>>> cfg = MyConfig(eggs=1, spam=5)
|
31 |
+
|
32 |
+
Once defined, config items may be modified, but can't be added or deleted by default.
|
33 |
+
``Config`` is a ``Mapping``, and can get and set configs via attributes or brackets:
|
34 |
+
|
35 |
+
>>> cfg.eggs = 2
|
36 |
+
>>> cfg.eggs
|
37 |
+
2
|
38 |
+
>>> cfg["spam"] = 42
|
39 |
+
>>> cfg["spam"]
|
40 |
+
42
|
41 |
+
|
42 |
+
Subclasses may also define ``_check_config`` (as done in the example above)
|
43 |
+
to ensure the value being assigned is valid:
|
44 |
+
|
45 |
+
>>> cfg.spam = -1
|
46 |
+
Traceback (most recent call last):
|
47 |
+
...
|
48 |
+
AssertionError
|
49 |
+
|
50 |
+
If a more flexible configuration object is needed that allows adding and deleting
|
51 |
+
configurations, then pass ``strict=False`` when defining the subclass:
|
52 |
+
|
53 |
+
>>> class FlexibleConfig(Config, strict=False):
|
54 |
+
... default_greeting: str = "Hello"
|
55 |
+
>>> flexcfg = FlexibleConfig()
|
56 |
+
>>> flexcfg.name = "Mr. Anderson"
|
57 |
+
>>> flexcfg
|
58 |
+
FlexibleConfig(default_greeting='Hello', name='Mr. Anderson')
|
59 |
+
"""
|
60 |
+
|
61 |
+
def __init_subclass__(cls, strict=True):
|
62 |
+
cls._strict = strict
|
63 |
+
|
64 |
+
def __new__(cls, **kwargs):
|
65 |
+
orig_class = cls
|
66 |
+
if cls is Config:
|
67 |
+
# Enable the "simple" case of accepting config definition as keywords
|
68 |
+
cls = type(
|
69 |
+
cls.__name__,
|
70 |
+
(cls,),
|
71 |
+
{"__annotations__": {key: typing.Any for key in kwargs}},
|
72 |
+
)
|
73 |
+
cls = dataclass(
|
74 |
+
eq=False,
|
75 |
+
repr=cls._strict,
|
76 |
+
slots=cls._strict,
|
77 |
+
kw_only=True,
|
78 |
+
match_args=False,
|
79 |
+
)(cls)
|
80 |
+
if not cls._strict:
|
81 |
+
cls.__repr__ = _flexible_repr
|
82 |
+
cls._orig_class = orig_class # Save original class so we can pickle
|
83 |
+
instance = object.__new__(cls)
|
84 |
+
instance.__init__(**kwargs)
|
85 |
+
return instance
|
86 |
+
|
87 |
+
def _check_config(self, key, value):
|
88 |
+
"""Check whether config value is valid. This is useful for subclasses."""
|
89 |
+
|
90 |
+
# Control behavior of attributes
|
91 |
+
def __dir__(self):
|
92 |
+
return self.__dataclass_fields__.keys()
|
93 |
+
|
94 |
+
def __setattr__(self, key, value):
|
95 |
+
if self._strict and key not in self.__dataclass_fields__:
|
96 |
+
raise AttributeError(f"Invalid config name: {key!r}")
|
97 |
+
self._check_config(key, value)
|
98 |
+
object.__setattr__(self, key, value)
|
99 |
+
|
100 |
+
def __delattr__(self, key):
|
101 |
+
if self._strict:
|
102 |
+
raise TypeError(
|
103 |
+
f"Configuration items can't be deleted (can't delete {key!r})."
|
104 |
+
)
|
105 |
+
object.__delattr__(self, key)
|
106 |
+
|
107 |
+
# Be a `collection.abc.Collection`
|
108 |
+
def __contains__(self, key):
|
109 |
+
return (
|
110 |
+
key in self.__dataclass_fields__ if self._strict else key in self.__dict__
|
111 |
+
)
|
112 |
+
|
113 |
+
def __iter__(self):
|
114 |
+
return iter(self.__dataclass_fields__ if self._strict else self.__dict__)
|
115 |
+
|
116 |
+
def __len__(self):
|
117 |
+
return len(self.__dataclass_fields__ if self._strict else self.__dict__)
|
118 |
+
|
119 |
+
def __reversed__(self):
|
120 |
+
return reversed(self.__dataclass_fields__ if self._strict else self.__dict__)
|
121 |
+
|
122 |
+
# Add dunder methods for `collections.abc.Mapping`
|
123 |
+
def __getitem__(self, key):
|
124 |
+
try:
|
125 |
+
return getattr(self, key)
|
126 |
+
except AttributeError as err:
|
127 |
+
raise KeyError(*err.args) from None
|
128 |
+
|
129 |
+
def __setitem__(self, key, value):
|
130 |
+
try:
|
131 |
+
self.__setattr__(key, value)
|
132 |
+
except AttributeError as err:
|
133 |
+
raise KeyError(*err.args) from None
|
134 |
+
|
135 |
+
def __delitem__(self, key):
|
136 |
+
try:
|
137 |
+
self.__delattr__(key)
|
138 |
+
except AttributeError as err:
|
139 |
+
raise KeyError(*err.args) from None
|
140 |
+
|
141 |
+
_ipython_key_completions_ = __dir__ # config["<TAB>
|
142 |
+
|
143 |
+
# Go ahead and make it a `collections.abc.Mapping`
|
144 |
+
def get(self, key, default=None):
|
145 |
+
return getattr(self, key, default)
|
146 |
+
|
147 |
+
def items(self):
|
148 |
+
return collections.abc.ItemsView(self)
|
149 |
+
|
150 |
+
def keys(self):
|
151 |
+
return collections.abc.KeysView(self)
|
152 |
+
|
153 |
+
def values(self):
|
154 |
+
return collections.abc.ValuesView(self)
|
155 |
+
|
156 |
+
# dataclass can define __eq__ for us, but do it here so it works after pickling
|
157 |
+
def __eq__(self, other):
|
158 |
+
if not isinstance(other, Config):
|
159 |
+
return NotImplemented
|
160 |
+
return self._orig_class == other._orig_class and self.items() == other.items()
|
161 |
+
|
162 |
+
# Make pickle work
|
163 |
+
def __reduce__(self):
|
164 |
+
return self._deserialize, (self._orig_class, dict(self))
|
165 |
+
|
166 |
+
@staticmethod
|
167 |
+
def _deserialize(cls, kwargs):
|
168 |
+
return cls(**kwargs)
|
169 |
+
|
170 |
+
|
171 |
+
def _flexible_repr(self):
|
172 |
+
return (
|
173 |
+
f"{self.__class__.__qualname__}("
|
174 |
+
+ ", ".join(f"{key}={val!r}" for key, val in self.__dict__.items())
|
175 |
+
+ ")"
|
176 |
+
)
|
177 |
+
|
178 |
+
|
179 |
+
# Register, b/c `Mapping.__subclasshook__` returns `NotImplemented`
|
180 |
+
collections.abc.Mapping.register(Config)
|
181 |
+
|
182 |
+
|
183 |
+
class NetworkXConfig(Config):
|
184 |
+
"""Configuration for NetworkX that controls behaviors such as how to use backends.
|
185 |
+
|
186 |
+
Attribute and bracket notation are supported for getting and setting configurations:
|
187 |
+
|
188 |
+
>>> nx.config.backend_priority == nx.config["backend_priority"]
|
189 |
+
True
|
190 |
+
|
191 |
+
Parameters
|
192 |
+
----------
|
193 |
+
backend_priority : list of backend names
|
194 |
+
Enable automatic conversion of graphs to backend graphs for algorithms
|
195 |
+
implemented by the backend. Priority is given to backends listed earlier.
|
196 |
+
Default is empty list.
|
197 |
+
|
198 |
+
backends : Config mapping of backend names to backend Config
|
199 |
+
The keys of the Config mapping are names of all installed NetworkX backends,
|
200 |
+
and the values are their configurations as Config mappings.
|
201 |
+
|
202 |
+
cache_converted_graphs : bool
|
203 |
+
If True, then save converted graphs to the cache of the input graph. Graph
|
204 |
+
conversion may occur when automatically using a backend from `backend_priority`
|
205 |
+
or when using the `backend=` keyword argument to a function call. Caching can
|
206 |
+
improve performance by avoiding repeated conversions, but it uses more memory.
|
207 |
+
Care should be taken to not manually mutate a graph that has cached graphs; for
|
208 |
+
example, ``G[u][v][k] = val`` changes the graph, but does not clear the cache.
|
209 |
+
Using methods such as ``G.add_edge(u, v, weight=val)`` will clear the cache to
|
210 |
+
keep it consistent. ``G.__networkx_cache__.clear()`` manually clears the cache.
|
211 |
+
Default is False.
|
212 |
+
|
213 |
+
Notes
|
214 |
+
-----
|
215 |
+
Environment variables may be used to control some default configurations:
|
216 |
+
|
217 |
+
- NETWORKX_BACKEND_PRIORITY: set `backend_priority` from comma-separated names.
|
218 |
+
- NETWORKX_CACHE_CONVERTED_GRAPHS: set `cache_converted_graphs` to True if nonempty.
|
219 |
+
|
220 |
+
This is a global configuration. Use with caution when using from multiple threads.
|
221 |
+
"""
|
222 |
+
|
223 |
+
backend_priority: list[str]
|
224 |
+
backends: Config
|
225 |
+
cache_converted_graphs: bool
|
226 |
+
|
227 |
+
def _check_config(self, key, value):
|
228 |
+
from .backends import backends
|
229 |
+
|
230 |
+
if key == "backend_priority":
|
231 |
+
if not (isinstance(value, list) and all(isinstance(x, str) for x in value)):
|
232 |
+
raise TypeError(
|
233 |
+
f"{key!r} config must be a list of backend names; got {value!r}"
|
234 |
+
)
|
235 |
+
if missing := {x for x in value if x not in backends}:
|
236 |
+
missing = ", ".join(map(repr, sorted(missing)))
|
237 |
+
raise ValueError(f"Unknown backend when setting {key!r}: {missing}")
|
238 |
+
elif key == "backends":
|
239 |
+
if not (
|
240 |
+
isinstance(value, Config)
|
241 |
+
and all(isinstance(key, str) for key in value)
|
242 |
+
and all(isinstance(val, Config) for val in value.values())
|
243 |
+
):
|
244 |
+
raise TypeError(
|
245 |
+
f"{key!r} config must be a Config of backend configs; got {value!r}"
|
246 |
+
)
|
247 |
+
if missing := {x for x in value if x not in backends}:
|
248 |
+
missing = ", ".join(map(repr, sorted(missing)))
|
249 |
+
raise ValueError(f"Unknown backend when setting {key!r}: {missing}")
|
250 |
+
elif key == "cache_converted_graphs":
|
251 |
+
if not isinstance(value, bool):
|
252 |
+
raise TypeError(f"{key!r} config must be True or False; got {value!r}")
|
253 |
+
|
254 |
+
|
255 |
+
# Backend configuration will be updated in backends.py
|
256 |
+
config = NetworkXConfig(
|
257 |
+
backend_priority=[],
|
258 |
+
backends=Config(),
|
259 |
+
cache_converted_graphs=bool(os.environ.get("NETWORKX_CACHE_CONVERTED_GRAPHS", "")),
|
260 |
+
)
|
env-llmeval/lib/python3.10/site-packages/networkx/utils/decorators.py
ADDED
@@ -0,0 +1,1295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import bz2
|
2 |
+
import collections
|
3 |
+
import gzip
|
4 |
+
import inspect
|
5 |
+
import itertools
|
6 |
+
import re
|
7 |
+
import warnings
|
8 |
+
from collections import defaultdict
|
9 |
+
from contextlib import contextmanager
|
10 |
+
from functools import wraps
|
11 |
+
from inspect import Parameter, signature
|
12 |
+
from os.path import splitext
|
13 |
+
from pathlib import Path
|
14 |
+
|
15 |
+
import networkx as nx
|
16 |
+
from networkx.utils import create_py_random_state, create_random_state
|
17 |
+
|
18 |
+
__all__ = [
|
19 |
+
"not_implemented_for",
|
20 |
+
"open_file",
|
21 |
+
"nodes_or_number",
|
22 |
+
"np_random_state",
|
23 |
+
"py_random_state",
|
24 |
+
"argmap",
|
25 |
+
"deprecate_positional_args",
|
26 |
+
]
|
27 |
+
|
28 |
+
|
29 |
+
def not_implemented_for(*graph_types):
|
30 |
+
"""Decorator to mark algorithms as not implemented
|
31 |
+
|
32 |
+
Parameters
|
33 |
+
----------
|
34 |
+
graph_types : container of strings
|
35 |
+
Entries must be one of "directed", "undirected", "multigraph", or "graph".
|
36 |
+
|
37 |
+
Returns
|
38 |
+
-------
|
39 |
+
_require : function
|
40 |
+
The decorated function.
|
41 |
+
|
42 |
+
Raises
|
43 |
+
------
|
44 |
+
NetworkXNotImplemented
|
45 |
+
If any of the packages cannot be imported
|
46 |
+
|
47 |
+
Notes
|
48 |
+
-----
|
49 |
+
Multiple types are joined logically with "and".
|
50 |
+
For "or" use multiple @not_implemented_for() lines.
|
51 |
+
|
52 |
+
Examples
|
53 |
+
--------
|
54 |
+
Decorate functions like this::
|
55 |
+
|
56 |
+
@not_implemented_for("directed")
|
57 |
+
def sp_function(G):
|
58 |
+
pass
|
59 |
+
|
60 |
+
|
61 |
+
# rule out MultiDiGraph
|
62 |
+
@not_implemented_for("directed", "multigraph")
|
63 |
+
def sp_np_function(G):
|
64 |
+
pass
|
65 |
+
|
66 |
+
|
67 |
+
# rule out all except DiGraph
|
68 |
+
@not_implemented_for("undirected")
|
69 |
+
@not_implemented_for("multigraph")
|
70 |
+
def sp_np_function(G):
|
71 |
+
pass
|
72 |
+
"""
|
73 |
+
if ("directed" in graph_types) and ("undirected" in graph_types):
|
74 |
+
raise ValueError("Function not implemented on directed AND undirected graphs?")
|
75 |
+
if ("multigraph" in graph_types) and ("graph" in graph_types):
|
76 |
+
raise ValueError("Function not implemented on graph AND multigraphs?")
|
77 |
+
if not set(graph_types) < {"directed", "undirected", "multigraph", "graph"}:
|
78 |
+
raise KeyError(
|
79 |
+
"use one or more of directed, undirected, multigraph, graph. "
|
80 |
+
f"You used {graph_types}"
|
81 |
+
)
|
82 |
+
|
83 |
+
# 3-way logic: True if "directed" input, False if "undirected" input, else None
|
84 |
+
dval = ("directed" in graph_types) or "undirected" not in graph_types and None
|
85 |
+
mval = ("multigraph" in graph_types) or "graph" not in graph_types and None
|
86 |
+
errmsg = f"not implemented for {' '.join(graph_types)} type"
|
87 |
+
|
88 |
+
def _not_implemented_for(g):
|
89 |
+
if (mval is None or mval == g.is_multigraph()) and (
|
90 |
+
dval is None or dval == g.is_directed()
|
91 |
+
):
|
92 |
+
raise nx.NetworkXNotImplemented(errmsg)
|
93 |
+
|
94 |
+
return g
|
95 |
+
|
96 |
+
return argmap(_not_implemented_for, 0)
|
97 |
+
|
98 |
+
|
99 |
+
# To handle new extensions, define a function accepting a `path` and `mode`.
|
100 |
+
# Then add the extension to _dispatch_dict.
|
101 |
+
fopeners = {
|
102 |
+
".gz": gzip.open,
|
103 |
+
".gzip": gzip.open,
|
104 |
+
".bz2": bz2.BZ2File,
|
105 |
+
}
|
106 |
+
_dispatch_dict = defaultdict(lambda: open, **fopeners)
|
107 |
+
|
108 |
+
|
109 |
+
def open_file(path_arg, mode="r"):
|
110 |
+
"""Decorator to ensure clean opening and closing of files.
|
111 |
+
|
112 |
+
Parameters
|
113 |
+
----------
|
114 |
+
path_arg : string or int
|
115 |
+
Name or index of the argument that is a path.
|
116 |
+
|
117 |
+
mode : str
|
118 |
+
String for opening mode.
|
119 |
+
|
120 |
+
Returns
|
121 |
+
-------
|
122 |
+
_open_file : function
|
123 |
+
Function which cleanly executes the io.
|
124 |
+
|
125 |
+
Examples
|
126 |
+
--------
|
127 |
+
Decorate functions like this::
|
128 |
+
|
129 |
+
@open_file(0, "r")
|
130 |
+
def read_function(pathname):
|
131 |
+
pass
|
132 |
+
|
133 |
+
|
134 |
+
@open_file(1, "w")
|
135 |
+
def write_function(G, pathname):
|
136 |
+
pass
|
137 |
+
|
138 |
+
|
139 |
+
@open_file(1, "w")
|
140 |
+
def write_function(G, pathname="graph.dot"):
|
141 |
+
pass
|
142 |
+
|
143 |
+
|
144 |
+
@open_file("pathname", "w")
|
145 |
+
def write_function(G, pathname="graph.dot"):
|
146 |
+
pass
|
147 |
+
|
148 |
+
|
149 |
+
@open_file("path", "w+")
|
150 |
+
def another_function(arg, **kwargs):
|
151 |
+
path = kwargs["path"]
|
152 |
+
pass
|
153 |
+
|
154 |
+
Notes
|
155 |
+
-----
|
156 |
+
Note that this decorator solves the problem when a path argument is
|
157 |
+
specified as a string, but it does not handle the situation when the
|
158 |
+
function wants to accept a default of None (and then handle it).
|
159 |
+
|
160 |
+
Here is an example of how to handle this case::
|
161 |
+
|
162 |
+
@open_file("path")
|
163 |
+
def some_function(arg1, arg2, path=None):
|
164 |
+
if path is None:
|
165 |
+
fobj = tempfile.NamedTemporaryFile(delete=False)
|
166 |
+
else:
|
167 |
+
# `path` could have been a string or file object or something
|
168 |
+
# similar. In any event, the decorator has given us a file object
|
169 |
+
# and it will close it for us, if it should.
|
170 |
+
fobj = path
|
171 |
+
|
172 |
+
try:
|
173 |
+
fobj.write("blah")
|
174 |
+
finally:
|
175 |
+
if path is None:
|
176 |
+
fobj.close()
|
177 |
+
|
178 |
+
Normally, we'd want to use "with" to ensure that fobj gets closed.
|
179 |
+
However, the decorator will make `path` a file object for us,
|
180 |
+
and using "with" would undesirably close that file object.
|
181 |
+
Instead, we use a try block, as shown above.
|
182 |
+
When we exit the function, fobj will be closed, if it should be, by the decorator.
|
183 |
+
"""
|
184 |
+
|
185 |
+
def _open_file(path):
|
186 |
+
# Now we have the path_arg. There are two types of input to consider:
|
187 |
+
# 1) string representing a path that should be opened
|
188 |
+
# 2) an already opened file object
|
189 |
+
if isinstance(path, str):
|
190 |
+
ext = splitext(path)[1]
|
191 |
+
elif isinstance(path, Path):
|
192 |
+
# path is a pathlib reference to a filename
|
193 |
+
ext = path.suffix
|
194 |
+
path = str(path)
|
195 |
+
else:
|
196 |
+
# could be None, or a file handle, in which case the algorithm will deal with it
|
197 |
+
return path, lambda: None
|
198 |
+
|
199 |
+
fobj = _dispatch_dict[ext](path, mode=mode)
|
200 |
+
return fobj, lambda: fobj.close()
|
201 |
+
|
202 |
+
return argmap(_open_file, path_arg, try_finally=True)
|
203 |
+
|
204 |
+
|
205 |
+
def nodes_or_number(which_args):
|
206 |
+
"""Decorator to allow number of nodes or container of nodes.
|
207 |
+
|
208 |
+
With this decorator, the specified argument can be either a number or a container
|
209 |
+
of nodes. If it is a number, the nodes used are `range(n)`.
|
210 |
+
This allows `nx.complete_graph(50)` in place of `nx.complete_graph(list(range(50)))`.
|
211 |
+
And it also allows `nx.complete_graph(any_list_of_nodes)`.
|
212 |
+
|
213 |
+
Parameters
|
214 |
+
----------
|
215 |
+
which_args : string or int or sequence of strings or ints
|
216 |
+
If string, the name of the argument to be treated.
|
217 |
+
If int, the index of the argument to be treated.
|
218 |
+
If more than one node argument is allowed, can be a list of locations.
|
219 |
+
|
220 |
+
Returns
|
221 |
+
-------
|
222 |
+
_nodes_or_numbers : function
|
223 |
+
Function which replaces int args with ranges.
|
224 |
+
|
225 |
+
Examples
|
226 |
+
--------
|
227 |
+
Decorate functions like this::
|
228 |
+
|
229 |
+
@nodes_or_number("nodes")
|
230 |
+
def empty_graph(nodes):
|
231 |
+
# nodes is converted to a list of nodes
|
232 |
+
|
233 |
+
@nodes_or_number(0)
|
234 |
+
def empty_graph(nodes):
|
235 |
+
# nodes is converted to a list of nodes
|
236 |
+
|
237 |
+
@nodes_or_number(["m1", "m2"])
|
238 |
+
def grid_2d_graph(m1, m2, periodic=False):
|
239 |
+
# m1 and m2 are each converted to a list of nodes
|
240 |
+
|
241 |
+
@nodes_or_number([0, 1])
|
242 |
+
def grid_2d_graph(m1, m2, periodic=False):
|
243 |
+
# m1 and m2 are each converted to a list of nodes
|
244 |
+
|
245 |
+
@nodes_or_number(1)
|
246 |
+
def full_rary_tree(r, n)
|
247 |
+
# presumably r is a number. It is not handled by this decorator.
|
248 |
+
# n is converted to a list of nodes
|
249 |
+
"""
|
250 |
+
|
251 |
+
def _nodes_or_number(n):
|
252 |
+
try:
|
253 |
+
nodes = list(range(n))
|
254 |
+
except TypeError:
|
255 |
+
nodes = tuple(n)
|
256 |
+
else:
|
257 |
+
if n < 0:
|
258 |
+
raise nx.NetworkXError(f"Negative number of nodes not valid: {n}")
|
259 |
+
return (n, nodes)
|
260 |
+
|
261 |
+
try:
|
262 |
+
iter_wa = iter(which_args)
|
263 |
+
except TypeError:
|
264 |
+
iter_wa = (which_args,)
|
265 |
+
|
266 |
+
return argmap(_nodes_or_number, *iter_wa)
|
267 |
+
|
268 |
+
|
269 |
+
def np_random_state(random_state_argument):
|
270 |
+
"""Decorator to generate a numpy RandomState or Generator instance.
|
271 |
+
|
272 |
+
The decorator processes the argument indicated by `random_state_argument`
|
273 |
+
using :func:`nx.utils.create_random_state`.
|
274 |
+
The argument value can be a seed (integer), or a `numpy.random.RandomState`
|
275 |
+
or `numpy.random.Generator` instance, or (`None` or `numpy.random`).
|
276 |
+
The latter two options use the global random number generator for `numpy.random`.
|
277 |
+
|
278 |
+
The returned instance is a `numpy.random.RandomState` or `numpy.random.Generator`.
|
279 |
+
|
280 |
+
Parameters
|
281 |
+
----------
|
282 |
+
random_state_argument : string or int
|
283 |
+
The name or index of the argument to be converted
|
284 |
+
to a `numpy.random.RandomState` instance.
|
285 |
+
|
286 |
+
Returns
|
287 |
+
-------
|
288 |
+
_random_state : function
|
289 |
+
Function whose random_state keyword argument is a RandomState instance.
|
290 |
+
|
291 |
+
Examples
|
292 |
+
--------
|
293 |
+
Decorate functions like this::
|
294 |
+
|
295 |
+
@np_random_state("seed")
|
296 |
+
def random_float(seed=None):
|
297 |
+
return seed.rand()
|
298 |
+
|
299 |
+
|
300 |
+
@np_random_state(0)
|
301 |
+
def random_float(rng=None):
|
302 |
+
return rng.rand()
|
303 |
+
|
304 |
+
|
305 |
+
@np_random_state(1)
|
306 |
+
def random_array(dims, random_state=1):
|
307 |
+
return random_state.rand(*dims)
|
308 |
+
|
309 |
+
See Also
|
310 |
+
--------
|
311 |
+
py_random_state
|
312 |
+
"""
|
313 |
+
return argmap(create_random_state, random_state_argument)
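
A quick call-level illustration of the seed handling described above; `three_uniforms` is a made-up name::

    from networkx.utils.decorators import np_random_state

    @np_random_state("seed")
    def three_uniforms(seed=None):
        return seed.rand(3)

    print(three_uniforms(12345))   # int seed -> a fresh, reproducible RandomState
    print(three_uniforms())        # None -> numpy's global RandomState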
|
314 |
+
|
315 |
+
|
316 |
+
def py_random_state(random_state_argument):
|
317 |
+
"""Decorator to generate a random.Random instance (or equiv).
|
318 |
+
|
319 |
+
This decorator processes `random_state_argument` using
|
320 |
+
:func:`nx.utils.create_py_random_state`.
|
321 |
+
The input value can be a seed (integer), or a random number generator::
|
322 |
+
|
323 |
+
If int, return a random.Random instance set with seed=int.
|
324 |
+
If random.Random instance, return it.
|
325 |
+
If None or the `random` package, return the global random number
|
326 |
+
generator used by `random`.
|
327 |
+
If np.random package, or the default numpy RandomState instance,
|
328 |
+
return the default numpy random number generator wrapped in a
|
329 |
+
`PythonRandomViaNumpyBits` class.
|
330 |
+
If np.random.Generator instance, return it wrapped in a
|
331 |
+
`PythonRandomViaNumpyBits` class.
|
332 |
+
|
333 |
+
# Legacy options
|
334 |
+
If np.random.RandomState instance, return it wrapped in a
|
335 |
+
`PythonRandomInterface` class.
|
336 |
+
If a `PythonRandomInterface` instance, return it
|
337 |
+
|
338 |
+
Parameters
|
339 |
+
----------
|
340 |
+
random_state_argument : string or int
|
341 |
+
The name of the argument or the index of the argument in args that is
|
342 |
+
to be converted to the random.Random instance or numpy.random.RandomState
|
343 |
+
instance that mimics basic methods of random.Random.
|
344 |
+
|
345 |
+
Returns
|
346 |
+
-------
|
347 |
+
_random_state : function
|
348 |
+
Function whose random_state_argument is converted to a Random instance.
|
349 |
+
|
350 |
+
Examples
|
351 |
+
--------
|
352 |
+
Decorate functions like this::
|
353 |
+
|
354 |
+
@py_random_state("random_state")
|
355 |
+
def random_float(random_state=None):
|
356 |
+
return random_state.rand()
|
357 |
+
|
358 |
+
|
359 |
+
@py_random_state(0)
|
360 |
+
def random_float(rng=None):
|
361 |
+
return rng.rand()
|
362 |
+
|
363 |
+
|
364 |
+
@py_random_state(1)
|
365 |
+
def random_array(dims, seed=12345):
|
366 |
+
return seed.rand(*dims)
|
367 |
+
|
368 |
+
See Also
|
369 |
+
--------
|
370 |
+
np_random_state
|
371 |
+
"""
|
372 |
+
|
373 |
+
return argmap(create_py_random_state, random_state_argument)
|
374 |
+
|
375 |
+
|
376 |
+
class argmap:
|
377 |
+
"""A decorator to apply a map to arguments before calling the function
|
378 |
+
|
379 |
+
This class provides a decorator that maps (transforms) arguments of the function
|
380 |
+
before the function is called. Thus for example, we have similar code
|
381 |
+
in many functions to determine whether an argument is the number of nodes
|
382 |
+
to be created, or a list of nodes to be handled. The decorator provides
|
383 |
+
the code to accept either -- transforming the indicated argument into a
|
384 |
+
list of nodes before the actual function is called.
|
385 |
+
|
386 |
+
This decorator class allows us to process single or multiple arguments.
|
387 |
+
The arguments to be processed can be specified by string, naming the argument,
|
388 |
+
or by index, specifying the item in the args list.
|
389 |
+
|
390 |
+
Parameters
|
391 |
+
----------
|
392 |
+
func : callable
|
393 |
+
The function to apply to arguments
|
394 |
+
|
395 |
+
*args : iterable of (int, str or tuple)
|
396 |
+
A list of parameters, specified either as strings (their names), ints
|
397 |
+
(numerical indices) or tuples, which may contain ints, strings, and
|
398 |
+
(recursively) tuples. Each indicates which parameters the decorator
|
399 |
+
should map. Tuples indicate that the map function takes (and returns)
|
400 |
+
multiple parameters in the same order and nested structure as indicated
|
401 |
+
here.
|
402 |
+
|
403 |
+
try_finally : bool (default: False)
|
404 |
+
When True, wrap the function call in a try-finally block with code
|
405 |
+
for the finally block created by `func`. This is used when the map
|
406 |
+
function constructs an object (like a file handle) that requires
|
407 |
+
post-processing (like closing).
|
408 |
+
|
409 |
+
Note: try_finally decorators cannot be used to decorate generator
|
410 |
+
functions.
|
411 |
+
|
412 |
+
Examples
|
413 |
+
--------
|
414 |
+
Most of these examples use `@argmap(...)` to apply the decorator to
|
415 |
+
the function defined on the next line.
|
416 |
+
In the NetworkX codebase however, `argmap` is used within a function to
|
417 |
+
construct a decorator. That is, the decorator defines a mapping function
|
418 |
+
and then uses `argmap` to build and return a decorated function.
|
419 |
+
A simple example is a decorator that specifies which currency to report money.
|
420 |
+
The decorator (named `convert_to`) would be used like::
|
421 |
+
|
422 |
+
@convert_to("US_Dollars", "income")
|
423 |
+
def show_me_the_money(name, income):
|
424 |
+
print(f"{name} : {income}")
|
425 |
+
|
426 |
+
And the code to create the decorator might be::
|
427 |
+
|
428 |
+
def convert_to(currency, which_arg):
|
429 |
+
def _convert(amount):
|
430 |
+
if amount.currency != currency:
|
431 |
+
amount = amount.to_currency(currency)
|
432 |
+
return amount
|
433 |
+
|
434 |
+
return argmap(_convert, which_arg)
|
435 |
+
|
436 |
+
Despite this common idiom for argmap, most of the following examples
|
437 |
+
use the `@argmap(...)` idiom to save space.
|
438 |
+
|
439 |
+
Here's an example use of argmap to sum the elements of two of the functions
|
440 |
+
arguments. The decorated function::
|
441 |
+
|
442 |
+
@argmap(sum, "xlist", "zlist")
|
443 |
+
def foo(xlist, y, zlist):
|
444 |
+
return xlist - y + zlist
|
445 |
+
|
446 |
+
is syntactic sugar for::
|
447 |
+
|
448 |
+
def foo(xlist, y, zlist):
|
449 |
+
x = sum(xlist)
|
450 |
+
z = sum(zlist)
|
451 |
+
return x - y + z
|
452 |
+
|
453 |
+
and is equivalent to (using argument indexes)::
|
454 |
+
|
455 |
+
@argmap(sum, "xlist", 2)
|
456 |
+
def foo(xlist, y, zlist):
|
457 |
+
return xlist - y + zlist
|
458 |
+
|
459 |
+
or::
|
460 |
+
|
461 |
+
@argmap(sum, "zlist", 0)
|
462 |
+
def foo(xlist, y, zlist):
|
463 |
+
return xlist - y + zlist
|
464 |
+
|
465 |
+
Transforming functions can be applied to multiple arguments, such as::
|
466 |
+
|
467 |
+
def swap(x, y):
|
468 |
+
return y, x
|
469 |
+
|
470 |
+
# the 2-tuple tells argmap that the map `swap` has 2 inputs/outputs.
|
471 |
+
@argmap(swap, ("a", "b"))
|
472 |
+
def foo(a, b, c):
|
473 |
+
return a / b * c
|
474 |
+
|
475 |
+
is equivalent to::
|
476 |
+
|
477 |
+
def foo(a, b, c):
|
478 |
+
a, b = swap(a, b)
|
479 |
+
return a / b * c
|
480 |
+
|
481 |
+
More generally, the applied arguments can be nested tuples of strings or ints.
|
482 |
+
The syntax `@argmap(some_func, ("a", ("b", "c")))` would expect `some_func` to
|
483 |
+
accept 2 inputs with the second expected to be a 2-tuple. It should then return
|
484 |
+
2 outputs with the second a 2-tuple. The return values would replace inputs "a",
|
485 |
+
"b" and "c" respectively. Similarly for `@argmap(some_func, (0, ("b", 2)))`.
|
486 |
+
|
487 |
+
Also, note that an index larger than the number of named parameters is allowed
|
488 |
+
for variadic functions. For example::
|
489 |
+
|
490 |
+
def double(a):
|
491 |
+
return 2 * a
|
492 |
+
|
493 |
+
|
494 |
+
@argmap(double, 3)
|
495 |
+
def overflow(a, *args):
|
496 |
+
return a, args
|
497 |
+
|
498 |
+
|
499 |
+
print(overflow(1, 2, 3, 4, 5, 6)) # output is 1, (2, 3, 8, 5, 6)
|
500 |
+
|
501 |
+
**Try Finally**
|
502 |
+
|
503 |
+
Additionally, this `argmap` class can be used to create a decorator that
|
504 |
+
initiates a try...finally block. The decorator must be written to return
|
505 |
+
both the transformed argument and a closing function.
|
506 |
+
This feature was included to enable the `open_file` decorator which might
|
507 |
+
need to close the file or not depending on whether it had to open that file.
|
508 |
+
This feature uses the keyword-only `try_finally` argument to `@argmap`.
|
509 |
+
|
510 |
+
For example this map opens a file and then makes sure it is closed::
|
511 |
+
|
512 |
+
def open_file(fn):
|
513 |
+
f = open(fn)
|
514 |
+
return f, lambda: f.close()
|
515 |
+
|
516 |
+
The decorator applies that to the function `foo`::
|
517 |
+
|
518 |
+
@argmap(open_file, "file", try_finally=True)
|
519 |
+
def foo(file):
|
520 |
+
print(file.read())
|
521 |
+
|
522 |
+
is syntactic sugar for::
|
523 |
+
|
524 |
+
def foo(file):
|
525 |
+
file, close_file = open_file(file)
|
526 |
+
try:
|
527 |
+
print(file.read())
|
528 |
+
finally:
|
529 |
+
close_file()
|
530 |
+
|
531 |
+
and is equivalent to (using indexes)::
|
532 |
+
|
533 |
+
@argmap(open_file, 0, try_finally=True)
|
534 |
+
def foo(file):
|
535 |
+
print(file.read())
|
536 |
+
|
537 |
+
Here's an example of the try_finally feature used to create a decorator::
|
538 |
+
|
539 |
+
def my_closing_decorator(which_arg):
|
540 |
+
def _opener(path):
|
541 |
+
if isinstance(path, str):
|
542 |
+
path = open(path)
|
543 |
+
fclose = path.close
|
544 |
+
else:
|
545 |
+
# assume `path` handles the closing
|
546 |
+
fclose = lambda: None
|
547 |
+
return path, fclose
|
548 |
+
|
549 |
+
return argmap(_opener, which_arg, try_finally=True)
|
550 |
+
|
551 |
+
which can then be used as::
|
552 |
+
|
553 |
+
@my_closing_decorator("file")
|
554 |
+
def fancy_reader(file=None):
|
555 |
+
# this code doesn't need to worry about closing the file
|
556 |
+
print(file.read())
|
557 |
+
|
558 |
+
Decorators with try_finally = True cannot be used with generator functions,
|
559 |
+
because the `finally` block is evaluated before the generator is exhausted::
|
560 |
+
|
561 |
+
@argmap(open_file, "file", try_finally=True)
|
562 |
+
def file_to_lines(file):
|
563 |
+
for line in file.readlines():
|
564 |
+
yield line
|
565 |
+
|
566 |
+
is equivalent to::
|
567 |
+
|
568 |
+
def file_to_lines_wrapped(file):
|
569 |
+
for line in file.readlines():
|
570 |
+
yield line
|
571 |
+
|
572 |
+
|
573 |
+
def file_to_lines_wrapper(file):
|
574 |
+
try:
|
575 |
+
file = open_file(file)
|
576 |
+
return file_to_lines_wrapped(file)
|
577 |
+
finally:
|
578 |
+
file.close()
|
579 |
+
|
580 |
+
which behaves similarly to::
|
581 |
+
|
582 |
+
def file_to_lines_whoops(file):
|
583 |
+
file = open_file(file)
|
584 |
+
file.close()
|
585 |
+
for line in file.readlines():
|
586 |
+
yield line
|
587 |
+
|
588 |
+
because the `finally` block of `file_to_lines_wrapper` is executed before
|
589 |
+
the caller has a chance to exhaust the iterator.
|
590 |
+
|
591 |
+
Notes
|
592 |
+
-----
|
593 |
+
An object of this class is callable and intended to be used when
|
594 |
+
defining a decorator. Generally, a decorator takes a function as input
|
595 |
+
and constructs a function as output. Specifically, an `argmap` object
|
596 |
+
returns the input function decorated/wrapped so that specified arguments
|
597 |
+
are mapped (transformed) to new values before the decorated function is called.
|
598 |
+
|
599 |
+
As an overview, the argmap object returns a new function with all the
|
600 |
+
dunder values of the original function (like `__doc__`, `__name__`, etc).
|
601 |
+
Code for this decorated function is built based on the original function's
|
602 |
+
signature. It starts by mapping the input arguments to potentially new
|
603 |
+
values. Then it calls the decorated function with these new values in place
|
604 |
+
of the indicated arguments that have been mapped. The return value of the
|
605 |
+
original function is then returned. This new function is the function that
|
606 |
+
is actually called by the user.
|
607 |
+
|
608 |
+
Three additional features are provided.
|
609 |
+
1) The code is lazily compiled. That is, the new function is returned
|
610 |
+
as an object without the code compiled, but with all information
|
611 |
+
needed so it can be compiled upon its first invocation. This saves
|
612 |
+
time on import at the cost of additional time on the first call of
|
613 |
+
the function. Subsequent calls are then just as fast as normal.
|
614 |
+
|
615 |
+
2) If the "try_finally" keyword-only argument is True, a try block
|
616 |
+
follows each mapped argument, matched on the other side of the wrapped
|
617 |
+
call, by a finally block closing that mapping. We expect func to return
|
618 |
+
a 2-tuple: the mapped value and a function to be called in the finally
|
619 |
+
clause. This feature was included so the `open_file` decorator could
|
620 |
+
provide a file handle to the decorated function and close the file handle
|
621 |
+
after the function call. It even keeps track of whether to close the file
|
622 |
+
handle or not based on whether it had to open the file or the input was
|
623 |
+
already open. So, the decorated function does not need to include any
|
624 |
+
code to open or close files.
|
625 |
+
|
626 |
+
3) The maps applied can process multiple arguments. For example,
|
627 |
+
you could swap two arguments using a mapping, or transform
|
628 |
+
them to their sum and their difference. This was included to allow
|
629 |
+
a decorator in the `quality.py` module that checks that an input
|
630 |
+
`partition` is a valid partition of the nodes of the input graph `G`.
|
631 |
+
In this example, the map has inputs `(G, partition)`. After checking
|
632 |
+
for a valid partition, the map either raises an exception or leaves
|
633 |
+
the inputs unchanged. Thus many functions that make this check can
|
634 |
+
use the decorator rather than copy the checking code into each function.
|
635 |
+
More complicated nested argument structures are described below.
|
636 |
+
|
637 |
+
The remaining notes describe the code structure and methods for this
|
638 |
+
class in broad terms to aid in understanding how to use it.
|
639 |
+
|
640 |
+
Instantiating an `argmap` object simply stores the mapping function and
|
641 |
+
the input identifiers of which arguments to map. The resulting decorator
|
642 |
+
is ready to use this map to decorate any function. Calling that object
|
643 |
+
(`argmap.__call__`, but usually done via `@my_decorator`) a lazily
|
644 |
+
compiled thin wrapper of the decorated function is constructed,
|
645 |
+
wrapped with the necessary function dunder attributes like `__doc__`
|
646 |
+
and `__name__`. That thinly wrapped function is returned as the
|
647 |
+
decorated function. When that decorated function is called, the thin
|
648 |
+
wrapper of code calls `argmap._lazy_compile` which compiles the decorated
|
649 |
+
function (using `argmap.compile`) and replaces the code of the thin
|
650 |
+
wrapper with the newly compiled code. This saves the compilation step
|
651 |
+
every import of networkx, at the cost of compiling upon the first call
|
652 |
+
to the decorated function.
|
653 |
+
|
654 |
+
When the decorated function is compiled, the code is recursively assembled
|
655 |
+
using the `argmap.assemble` method. The recursive nature is needed in
|
656 |
+
case of nested decorators. The result of the assembly is a number of
|
657 |
+
useful objects.
|
658 |
+
|
659 |
+
sig : the function signature of the original decorated function as
|
660 |
+
constructed by :func:`argmap.signature`. This is constructed
|
661 |
+
using `inspect.signature` but enhanced with attribute
|
662 |
+
strings `sig_def` and `sig_call`, and other information
|
663 |
+
specific to mapping arguments of this function.
|
664 |
+
This information is used to construct a string of code defining
|
665 |
+
the new decorated function.
|
666 |
+
|
667 |
+
wrapped_name : a unique internally used name constructed by argmap
|
668 |
+
for the decorated function.
|
669 |
+
|
670 |
+
functions : a dict of the functions used inside the code of this
|
671 |
+
decorated function, to be used as `globals` in `exec`.
|
672 |
+
This dict is recursively updated to allow for nested decorating.
|
673 |
+
|
674 |
+
mapblock : code (as a list of strings) to map the incoming argument
|
675 |
+
values to their mapped values.
|
676 |
+
|
677 |
+
finallys : code (as a list of strings) to provide the possibly nested
|
678 |
+
set of finally clauses if needed.
|
679 |
+
|
680 |
+
mutable_args : a bool indicating whether the `sig.args` tuple should be
|
681 |
+
converted to a list so mutation can occur.
|
682 |
+
|
683 |
+
After this recursive assembly process, the `argmap.compile` method
|
684 |
+
constructs code (as strings) to convert the tuple `sig.args` to a list
|
685 |
+
if needed. It joins the defining code with appropriate indents and
|
686 |
+
compiles the result. Finally, this code is evaluated and the original
|
687 |
+
wrapper's implementation is replaced with the compiled version (see
|
688 |
+
`argmap._lazy_compile` for more details).
|
689 |
+
|
690 |
+
Other `argmap` methods include `_name` and `_count` which allow internally
|
691 |
+
generated names to be unique within a python session.
|
692 |
+
The methods `_flatten` and `_indent` process the nested lists of strings
|
693 |
+
into properly indented python code ready to be compiled.
|
694 |
+
|
695 |
+
More complicated nested tuples of arguments are also allowed, though
|
696 |
+
usually not used. For the simple 2 argument case, the argmap
|
697 |
+
input ("a", "b") implies the mapping function will take 2 arguments
|
698 |
+
and return a 2-tuple of mapped values. A more complicated example
|
699 |
+
with argmap input `("a", ("b", "c"))` requires the mapping function
|
700 |
+
to take 2 inputs, with the second being a 2-tuple. It must then output
|
701 |
+
the 3 mapped values in the same nested structure `(newa, (newb, newc))`.
|
702 |
+
This level of generality is not often needed, but was convenient
|
703 |
+
to implement when handling the multiple arguments.
|
704 |
+
|
705 |
+
See Also
|
706 |
+
--------
|
707 |
+
not_implemented_for
|
708 |
+
open_file
|
709 |
+
nodes_or_number
|
710 |
+
py_random_state
|
711 |
+
networkx.algorithms.community.quality.require_partition
|
712 |
+
|
713 |
+
"""
|
714 |
+
|
715 |
+
def __init__(self, func, *args, try_finally=False):
|
716 |
+
self._func = func
|
717 |
+
self._args = args
|
718 |
+
self._finally = try_finally
|
719 |
+
|
720 |
+
@staticmethod
|
721 |
+
def _lazy_compile(func):
|
722 |
+
"""Compile the source of a wrapped function
|
723 |
+
|
724 |
+
Assemble and compile the decorated function, and intrusively replace its
|
725 |
+
code with the compiled version's. The thinly wrapped function becomes
|
726 |
+
the decorated function.
|
727 |
+
|
728 |
+
Parameters
|
729 |
+
----------
|
730 |
+
func : callable
|
731 |
+
A function returned by argmap.__call__ which is in the process
|
732 |
+
of being called for the first time.
|
733 |
+
|
734 |
+
Returns
|
735 |
+
-------
|
736 |
+
func : callable
|
737 |
+
The same function, with a new __code__ object.
|
738 |
+
|
739 |
+
Notes
|
740 |
+
-----
|
741 |
+
It was observed in NetworkX issue #4732 [1] that the import time of
|
742 |
+
NetworkX was significantly bloated by the use of decorators: over half
|
743 |
+
of the import time was being spent decorating functions. This was
|
744 |
+
somewhat improved by a change made to the `decorator` library, at the
|
745 |
+
cost of a relatively heavy-weight call to `inspect.Signature.bind`
|
746 |
+
for each call to the decorated function.
|
747 |
+
|
748 |
+
The workaround we arrived at is to do minimal work at the time of
|
749 |
+
decoration. When the decorated function is called for the first time,
|
750 |
+
we compile a function with the same function signature as the wrapped
|
751 |
+
function. The resulting decorated function is faster than one made by
|
752 |
+
the `decorator` library, so that the overhead of the first call is
|
753 |
+
'paid off' after a small number of calls.
|
754 |
+
|
755 |
+
References
|
756 |
+
----------
|
757 |
+
|
758 |
+
[1] https://github.com/networkx/networkx/issues/4732
|
759 |
+
|
760 |
+
"""
|
761 |
+
real_func = func.__argmap__.compile(func.__wrapped__)
|
762 |
+
func.__code__ = real_func.__code__
|
763 |
+
func.__globals__.update(real_func.__globals__)
|
764 |
+
func.__dict__.update(real_func.__dict__)
|
765 |
+
return func
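
The self-replacement trick used here can be illustrated with a tiny standalone toy (not NetworkX code): a stub whose `__code__` is swapped for the real implementation's on the first call::

    def fast_add(a, b):
        return a + b

    def lazy_add(a, b):
        print("one-time setup...")              # stands in for the expensive compile step
        lazy_add.__code__ = fast_add.__code__   # replace our own code object in place
        return fast_add(a, b)

    print(lazy_add(1, 2))   # prints the setup message, then 3
    print(lazy_add(3, 4))   # prints 7 only; later calls run fast_add's code directly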
|
766 |
+
|
767 |
+
def __call__(self, f):
|
768 |
+
"""Construct a lazily decorated wrapper of f.
|
769 |
+
|
770 |
+
The decorated function will be compiled when it is called for the first time,
|
771 |
+
and it will replace its own __code__ object so subsequent calls are fast.
|
772 |
+
|
773 |
+
Parameters
|
774 |
+
----------
|
775 |
+
f : callable
|
776 |
+
A function to be decorated.
|
777 |
+
|
778 |
+
Returns
|
779 |
+
-------
|
780 |
+
func : callable
|
781 |
+
The decorated function.
|
782 |
+
|
783 |
+
See Also
|
784 |
+
--------
|
785 |
+
argmap._lazy_compile
|
786 |
+
"""
|
787 |
+
|
788 |
+
def func(*args, __wrapper=None, **kwargs):
|
789 |
+
return argmap._lazy_compile(__wrapper)(*args, **kwargs)
|
790 |
+
|
791 |
+
# standard function-wrapping stuff
|
792 |
+
func.__name__ = f.__name__
|
793 |
+
func.__doc__ = f.__doc__
|
794 |
+
func.__defaults__ = f.__defaults__
|
795 |
+
func.__kwdefaults__.update(f.__kwdefaults__ or {})
|
796 |
+
func.__module__ = f.__module__
|
797 |
+
func.__qualname__ = f.__qualname__
|
798 |
+
func.__dict__.update(f.__dict__)
|
799 |
+
func.__wrapped__ = f
|
800 |
+
|
801 |
+
# now that we've wrapped f, we may have picked up some __dict__ or
|
802 |
+
# __kwdefaults__ items that were set by a previous argmap. Thus, we set
|
803 |
+
# these values after those update() calls.
|
804 |
+
|
805 |
+
# If we attempt to access func from within itself, that happens through
|
806 |
+
# a closure -- which trips an error when we replace func.__code__. The
|
807 |
+
# standard workaround for functions which can't see themselves is to use
|
808 |
+
# a Y-combinator, as we do here.
|
809 |
+
func.__kwdefaults__["_argmap__wrapper"] = func
|
810 |
+
|
811 |
+
# this self-reference is here because functools.wraps preserves
|
812 |
+
# everything in __dict__, and we don't want to mistake a non-argmap
|
813 |
+
# wrapper for an argmap wrapper
|
814 |
+
func.__self__ = func
|
815 |
+
|
816 |
+
# this is used to variously call self.assemble and self.compile
|
817 |
+
func.__argmap__ = self
|
818 |
+
|
819 |
+
if hasattr(f, "__argmap__"):
|
820 |
+
func.__is_generator = f.__is_generator
|
821 |
+
else:
|
822 |
+
func.__is_generator = inspect.isgeneratorfunction(f)
|
823 |
+
|
824 |
+
if self._finally and func.__is_generator:
|
825 |
+
raise nx.NetworkXError("argmap cannot decorate generators with try_finally")
|
826 |
+
|
827 |
+
return func
|
828 |
+
|
829 |
+
__count = 0
|
830 |
+
|
831 |
+
@classmethod
|
832 |
+
def _count(cls):
|
833 |
+
"""Maintain a globally-unique identifier for function names and "file" names
|
834 |
+
|
835 |
+
Note that this counter is a class method reporting a class variable
|
836 |
+
so the count is unique within a Python session. It could differ from
|
837 |
+
session to session for a specific decorator depending on the order
|
838 |
+
that the decorators are created. But that doesn't disrupt `argmap`.
|
839 |
+
|
840 |
+
This is used in two places: to construct unique variable names
|
841 |
+
in the `_name` method and to construct unique fictitious filenames
|
842 |
+
in the `_compile` method.
|
843 |
+
|
844 |
+
Returns
|
845 |
+
-------
|
846 |
+
count : int
|
847 |
+
An integer unique to this Python session (simply counts from zero)
|
848 |
+
"""
|
849 |
+
cls.__count += 1
|
850 |
+
return cls.__count
|
851 |
+
|
852 |
+
_bad_chars = re.compile("[^a-zA-Z0-9_]")
|
853 |
+
|
854 |
+
@classmethod
|
855 |
+
def _name(cls, f):
|
856 |
+
"""Mangle the name of a function to be unique but somewhat human-readable
|
857 |
+
|
858 |
+
The names are unique within a Python session and set using `_count`.
|
859 |
+
|
860 |
+
Parameters
|
861 |
+
----------
|
862 |
+
f : str or object
|
863 |
+
|
864 |
+
Returns
|
865 |
+
-------
|
866 |
+
name : str
|
867 |
+
The mangled version of `f.__name__` (if `f.__name__` exists) or `f`
|
868 |
+
|
869 |
+
"""
|
870 |
+
f = f.__name__ if hasattr(f, "__name__") else f
|
871 |
+
fname = re.sub(cls._bad_chars, "_", f)
|
872 |
+
return f"argmap_{fname}_{cls._count()}"
|
873 |
+
|
874 |
+
def compile(self, f):
|
875 |
+
"""Compile the decorated function.
|
876 |
+
|
877 |
+
Called once for a given decorated function -- collects the code from all
|
878 |
+
argmap decorators in the stack, and compiles the decorated function.
|
879 |
+
|
880 |
+
Much of the work done here uses the `assemble` method to allow recursive
|
881 |
+
treatment of multiple argmap decorators on a single decorated function.
|
882 |
+
That flattens the argmap decorators, collects the source code to construct
|
883 |
+
a single decorated function, then compiles/executes/returns that function.
|
884 |
+
|
885 |
+
The source code for the decorated function is stored as an attribute
|
886 |
+
`_code` on the function object itself.
|
887 |
+
|
888 |
+
Note that Python's `compile` function requires a filename, but this
|
889 |
+
code is constructed without a file, so a fictitious filename is used
|
890 |
+
to describe where the function comes from. The name is something like:
|
891 |
+
"argmap compilation 4".
|
892 |
+
|
893 |
+
Parameters
|
894 |
+
----------
|
895 |
+
f : callable
|
896 |
+
The function to be decorated
|
897 |
+
|
898 |
+
Returns
|
899 |
+
-------
|
900 |
+
func : callable
|
901 |
+
The decorated function
|
902 |
+
|
903 |
+
"""
|
904 |
+
sig, wrapped_name, functions, mapblock, finallys, mutable_args = self.assemble(
|
905 |
+
f
|
906 |
+
)
|
907 |
+
|
908 |
+
call = f"{sig.call_sig.format(wrapped_name)}#"
|
909 |
+
mut_args = f"{sig.args} = list({sig.args})" if mutable_args else ""
|
910 |
+
body = argmap._indent(sig.def_sig, mut_args, mapblock, call, finallys)
|
911 |
+
code = "\n".join(body)
|
912 |
+
|
913 |
+
locl = {}
|
914 |
+
globl = dict(functions.values())
|
915 |
+
filename = f"{self.__class__} compilation {self._count()}"
|
916 |
+
compiled = compile(code, filename, "exec")
|
917 |
+
exec(compiled, globl, locl)
|
918 |
+
func = locl[sig.name]
|
919 |
+
func._code = code
|
920 |
+
return func
|
921 |
+
|
922 |
+
def assemble(self, f):
|
923 |
+
"""Collects components of the source for the decorated function wrapping f.
|
924 |
+
|
925 |
+
If `f` has multiple argmap decorators, we recursively assemble the stack of
|
926 |
+
decorators into a single flattened function.
|
927 |
+
|
928 |
+
This method is part of the `compile` method's process yet separated
|
929 |
+
from that method to allow recursive processing. The outputs are
|
930 |
+
strings, dictionaries and lists that collect needed info to
|
931 |
+
flatten any nested argmap-decoration.
|
932 |
+
|
933 |
+
Parameters
|
934 |
+
----------
|
935 |
+
f : callable
|
936 |
+
The function to be decorated. If f is argmapped, we assemble it.
|
937 |
+
|
938 |
+
Returns
|
939 |
+
-------
|
940 |
+
sig : argmap.Signature
|
941 |
+
The function signature as an `argmap.Signature` object.
|
942 |
+
wrapped_name : str
|
943 |
+
The mangled name used to represent the wrapped function in the code
|
944 |
+
being assembled.
|
945 |
+
functions : dict
|
946 |
+
A dictionary mapping id(g) -> (mangled_name(g), g) for functions g
|
947 |
+
referred to in the code being assembled. These need to be present
|
948 |
+
in the ``globals`` scope of ``exec`` when defining the decorated
|
949 |
+
function.
|
950 |
+
mapblock : list of lists and/or strings
|
951 |
+
Code that implements mapping of parameters including any try blocks
|
952 |
+
if needed. This code will precede the decorated function call.
|
953 |
+
finallys : list of lists and/or strings
|
954 |
+
Code that implements the finally blocks to post-process the
|
955 |
+
arguments (usually close any files if needed) after the
|
956 |
+
decorated function is called.
|
957 |
+
mutable_args : bool
|
958 |
+
True if the decorator needs to modify positional arguments
|
959 |
+
via their indices. The compile method then turns the argument
|
960 |
+
tuple into a list so that the arguments can be modified.
|
961 |
+
"""
|
962 |
+
|
963 |
+
# first, we check if f is already argmapped -- if that's the case,
|
964 |
+
# build up the function recursively.
|
965 |
+
# > mapblock is generally a list of function calls of the sort
|
966 |
+
# arg = func(arg)
|
967 |
+
# in addition to some try-blocks if needed.
|
968 |
+
# > finallys is a recursive list of finally blocks of the sort
|
969 |
+
# finally:
|
970 |
+
# close_func_1()
|
971 |
+
# finally:
|
972 |
+
# close_func_2()
|
973 |
+
# > functions is a dict of functions used in the scope of our decorated
|
974 |
+
# function. It will be used to construct globals used in compilation.
|
975 |
+
# We make functions[id(f)] = name_of_f, f to ensure that a given
|
976 |
+
# function is stored and named exactly once even if called by
|
977 |
+
# nested decorators.
|
978 |
+
if hasattr(f, "__argmap__") and f.__self__ is f:
|
979 |
+
(
|
980 |
+
sig,
|
981 |
+
wrapped_name,
|
982 |
+
functions,
|
983 |
+
mapblock,
|
984 |
+
finallys,
|
985 |
+
mutable_args,
|
986 |
+
) = f.__argmap__.assemble(f.__wrapped__)
|
987 |
+
functions = dict(functions) # shallow-copy just in case
|
988 |
+
else:
|
989 |
+
sig = self.signature(f)
|
990 |
+
wrapped_name = self._name(f)
|
991 |
+
mapblock, finallys = [], []
|
992 |
+
functions = {id(f): (wrapped_name, f)}
|
993 |
+
mutable_args = False
|
994 |
+
|
995 |
+
if id(self._func) in functions:
|
996 |
+
fname, _ = functions[id(self._func)]
|
997 |
+
else:
|
998 |
+
fname, _ = functions[id(self._func)] = self._name(self._func), self._func
|
999 |
+
|
1000 |
+
# this is a bit complicated -- we can call functions with a variety of
|
1001 |
+
# nested arguments, so long as their input and output are tuples with
|
1002 |
+
# the same nested structure. e.g. ("a", "b") maps arguments a and b.
|
1003 |
+
# A more complicated nesting like (0, (3, 4)) maps arguments 0, 3, 4
|
1004 |
+
# expecting the mapping to output new values in the same nested shape.
|
1005 |
+
# The ability to argmap multiple arguments was necessary for
|
1006 |
+
# the decorator `nx.algorithms.community.quality.require_partition`, and
|
1007 |
+
# while we're not taking full advantage of the ability to handle
|
1008 |
+
# multiply-nested tuples, it was convenient to implement this in
|
1009 |
+
# generality because the recursive call to `get_name` is necessary in
|
1010 |
+
# any case.
|
1011 |
+
applied = set()
|
1012 |
+
|
1013 |
+
def get_name(arg, first=True):
|
1014 |
+
nonlocal mutable_args
|
1015 |
+
if isinstance(arg, tuple):
|
1016 |
+
name = ", ".join(get_name(x, False) for x in arg)
|
1017 |
+
return name if first else f"({name})"
|
1018 |
+
if arg in applied:
|
1019 |
+
raise nx.NetworkXError(f"argument {arg} is specified multiple times")
|
1020 |
+
applied.add(arg)
|
1021 |
+
if arg in sig.names:
|
1022 |
+
return sig.names[arg]
|
1023 |
+
elif isinstance(arg, str):
|
1024 |
+
if sig.kwargs is None:
|
1025 |
+
raise nx.NetworkXError(
|
1026 |
+
f"name {arg} is not a named parameter and this function doesn't have kwargs"
|
1027 |
+
)
|
1028 |
+
return f"{sig.kwargs}[{arg!r}]"
|
1029 |
+
else:
|
1030 |
+
if sig.args is None:
|
1031 |
+
raise nx.NetworkXError(
|
1032 |
+
f"index {arg} not a parameter index and this function doesn't have args"
|
1033 |
+
)
|
1034 |
+
mutable_args = True
|
1035 |
+
return f"{sig.args}[{arg - sig.n_positional}]"
|
1036 |
+
|
1037 |
+
if self._finally:
|
1038 |
+
# here's where we handle try_finally decorators. Such a decorator
|
1039 |
+
# returns a mapped argument and a function to be called in a
|
1040 |
+
# finally block. This feature was required by the open_file
|
1041 |
+
# decorator. The below generates the code
|
1042 |
+
#
|
1043 |
+
# name, final = func(name) #<--append to mapblock
|
1044 |
+
# try: #<--append to mapblock
|
1045 |
+
# ... more argmapping and try blocks
|
1046 |
+
# return WRAPPED_FUNCTION(...)
|
1047 |
+
# ... more finally blocks
|
1048 |
+
# finally: #<--prepend to finallys
|
1049 |
+
# final() #<--prepend to finallys
|
1050 |
+
#
|
1051 |
+
for a in self._args:
|
1052 |
+
name = get_name(a)
|
1053 |
+
final = self._name(name)
|
1054 |
+
mapblock.append(f"{name}, {final} = {fname}({name})")
|
1055 |
+
mapblock.append("try:")
|
1056 |
+
finallys = ["finally:", f"{final}()#", "#", finallys]
|
1057 |
+
else:
|
1058 |
+
mapblock.extend(
|
1059 |
+
f"{name} = {fname}({name})" for name in map(get_name, self._args)
|
1060 |
+
)
|
1061 |
+
|
1062 |
+
return sig, wrapped_name, functions, mapblock, finallys, mutable_args
|
1063 |
+
|
1064 |
+
@classmethod
|
1065 |
+
def signature(cls, f):
|
1066 |
+
r"""Construct a Signature object describing `f`
|
1067 |
+
|
1068 |
+
Compute a Signature so that we can write a function wrapping f with
|
1069 |
+
the same signature and call-type.
|
1070 |
+
|
1071 |
+
Parameters
|
1072 |
+
----------
|
1073 |
+
f : callable
|
1074 |
+
A function to be decorated
|
1075 |
+
|
1076 |
+
Returns
|
1077 |
+
-------
|
1078 |
+
sig : argmap.Signature
|
1079 |
+
The Signature of f
|
1080 |
+
|
1081 |
+
Notes
|
1082 |
+
-----
|
1083 |
+
The Signature is a namedtuple with names:
|
1084 |
+
|
1085 |
+
name : a unique version of the name of the decorated function
|
1086 |
+
signature : the inspect.signature of the decorated function
|
1087 |
+
def_sig : a string used as code to define the new function
|
1088 |
+
call_sig : a string used as code to call the decorated function
|
1089 |
+
names : a dict keyed by argument name and index to the argument's name
|
1090 |
+
n_positional : the number of positional arguments in the signature
|
1091 |
+
args : the name of the VAR_POSITIONAL argument if any, i.e. \*theseargs
|
1092 |
+
kwargs : the name of the VAR_KEYWORDS argument if any, i.e. \*\*kwargs
|
1093 |
+
|
1094 |
+
These named attributes of the signature are used in `assemble` and `compile`
|
1095 |
+
to construct a string of source code for the decorated function.
|
1096 |
+
|
1097 |
+
"""
|
1098 |
+
sig = inspect.signature(f, follow_wrapped=False)
|
1099 |
+
def_sig = []
|
1100 |
+
call_sig = []
|
1101 |
+
names = {}
|
1102 |
+
|
1103 |
+
kind = None
|
1104 |
+
args = None
|
1105 |
+
kwargs = None
|
1106 |
+
npos = 0
|
1107 |
+
for i, param in enumerate(sig.parameters.values()):
|
1108 |
+
# parameters can be position-only, keyword-or-position, keyword-only
|
1109 |
+
# in any combination, but only in the order as above. we do edge
|
1110 |
+
# detection to add the appropriate punctuation
|
1111 |
+
prev = kind
|
1112 |
+
kind = param.kind
|
1113 |
+
if prev == param.POSITIONAL_ONLY != kind:
|
1114 |
+
# the last token was position-only, but this one isn't
|
1115 |
+
def_sig.append("/")
|
1116 |
+
if (
|
1117 |
+
param.VAR_POSITIONAL
|
1118 |
+
!= prev
|
1119 |
+
!= param.KEYWORD_ONLY
|
1120 |
+
== kind
|
1121 |
+
!= param.VAR_POSITIONAL
|
1122 |
+
):
|
1123 |
+
# param is the first keyword-only arg and isn't starred
|
1124 |
+
def_sig.append("*")
|
1125 |
+
|
1126 |
+
# star arguments as appropriate
|
1127 |
+
if kind == param.VAR_POSITIONAL:
|
1128 |
+
name = "*" + param.name
|
1129 |
+
args = param.name
|
1130 |
+
count = 0
|
1131 |
+
elif kind == param.VAR_KEYWORD:
|
1132 |
+
name = "**" + param.name
|
1133 |
+
kwargs = param.name
|
1134 |
+
count = 0
|
1135 |
+
else:
|
1136 |
+
names[i] = names[param.name] = param.name
|
1137 |
+
name = param.name
|
1138 |
+
count = 1
|
1139 |
+
|
1140 |
+
# assign to keyword-only args in the function call
|
1141 |
+
if kind == param.KEYWORD_ONLY:
|
1142 |
+
call_sig.append(f"{name} = {name}")
|
1143 |
+
else:
|
1144 |
+
npos += count
|
1145 |
+
call_sig.append(name)
|
1146 |
+
|
1147 |
+
def_sig.append(name)
|
1148 |
+
|
1149 |
+
fname = cls._name(f)
|
1150 |
+
def_sig = f'def {fname}({", ".join(def_sig)}):'
|
1151 |
+
|
1152 |
+
call_sig = f"return {{}}({', '.join(call_sig)})"
|
1153 |
+
|
1154 |
+
return cls.Signature(fname, sig, def_sig, call_sig, names, npos, args, kwargs)
|
1155 |
+
|
1156 |
+
Signature = collections.namedtuple(
|
1157 |
+
"Signature",
|
1158 |
+
[
|
1159 |
+
"name",
|
1160 |
+
"signature",
|
1161 |
+
"def_sig",
|
1162 |
+
"call_sig",
|
1163 |
+
"names",
|
1164 |
+
"n_positional",
|
1165 |
+
"args",
|
1166 |
+
"kwargs",
|
1167 |
+
],
|
1168 |
+
)
|
1169 |
+
|
1170 |
+
@staticmethod
|
1171 |
+
def _flatten(nestlist, visited):
|
1172 |
+
"""flattens a recursive list of lists that doesn't have cyclic references
|
1173 |
+
|
1174 |
+
Parameters
|
1175 |
+
----------
|
1176 |
+
nestlist : iterable
|
1177 |
+
A recursive list of objects to be flattened into a single iterable
|
1178 |
+
|
1179 |
+
visited : set
|
1180 |
+
A set of object ids which have been walked -- initialize with an
|
1181 |
+
empty set
|
1182 |
+
|
1183 |
+
Yields
|
1184 |
+
------
|
1185 |
+
Non-list objects contained in nestlist
|
1186 |
+
|
1187 |
+
"""
|
1188 |
+
for thing in nestlist:
|
1189 |
+
if isinstance(thing, list):
|
1190 |
+
if id(thing) in visited:
|
1191 |
+
raise ValueError("A cycle was found in nestlist. Be a tree.")
|
1192 |
+
else:
|
1193 |
+
visited.add(id(thing))
|
1194 |
+
yield from argmap._flatten(thing, visited)
|
1195 |
+
else:
|
1196 |
+
yield thing
|
1197 |
+
|
1198 |
+
_tabs = " " * 64
|
1199 |
+
|
1200 |
+
@staticmethod
|
1201 |
+
def _indent(*lines):
|
1202 |
+
"""Indent list of code lines to make executable Python code
|
1203 |
+
|
1204 |
+
Indents a tree-recursive list of strings, following the rule that one
|
1205 |
+
space is added to the tab after a line that ends in a colon, and one is
|
1206 |
+
removed after a line that ends in a hashmark.
|
1207 |
+
|
1208 |
+
Parameters
|
1209 |
+
----------
|
1210 |
+
*lines : lists and/or strings
|
1211 |
+
A recursive list of strings to be assembled into properly indented
|
1212 |
+
code.
|
1213 |
+
|
1214 |
+
Returns
|
1215 |
+
-------
|
1216 |
+
code : str
|
1217 |
+
|
1218 |
+
Examples
|
1219 |
+
--------
|
1220 |
+
|
1221 |
+
argmap._indent(*["try:", "try:", "pass#", "finally:", "pass#", "#",
|
1222 |
+
"finally:", "pass#"])
|
1223 |
+
|
1224 |
+
renders to
|
1225 |
+
|
1226 |
+
'''try:
|
1227 |
+
try:
|
1228 |
+
pass#
|
1229 |
+
finally:
|
1230 |
+
pass#
|
1231 |
+
#
|
1232 |
+
finally:
|
1233 |
+
pass#'''
|
1234 |
+
"""
|
1235 |
+
depth = 0
|
1236 |
+
for line in argmap._flatten(lines, set()):
|
1237 |
+
yield f"{argmap._tabs[:depth]}{line}"
|
1238 |
+
depth += (line[-1:] == ":") - (line[-1:] == "#")
|
1239 |
+
|
1240 |
+
|
1241 |
+
# Vendored in from https://github.com/scikit-learn/scikit-learn/blob/8ed0270b99344cee9bb253cbfa1d986561ea6cd7/sklearn/utils/validation.py#L37C1-L90C44
|
1242 |
+
def deprecate_positional_args(func=None, *, version):
|
1243 |
+
"""Decorator for methods that issues warnings for positional arguments.
|
1244 |
+
|
1245 |
+
Using the keyword-only argument syntax in pep 3102, arguments after the
|
1246 |
+
* will issue a warning when passed as a positional argument.
|
1247 |
+
|
1248 |
+
Parameters
|
1249 |
+
----------
|
1250 |
+
func : callable, default=None
|
1251 |
+
Function to check arguments on.
|
1252 |
+
version : str
|
1253 |
+
The version when positional arguments will result in an error.
|
1254 |
+
"""
|
1255 |
+
|
1256 |
+
def _inner_deprecate_positional_args(f):
|
1257 |
+
sig = signature(f)
|
1258 |
+
kwonly_args = []
|
1259 |
+
all_args = []
|
1260 |
+
|
1261 |
+
for name, param in sig.parameters.items():
|
1262 |
+
if param.kind == Parameter.POSITIONAL_OR_KEYWORD:
|
1263 |
+
all_args.append(name)
|
1264 |
+
elif param.kind == Parameter.KEYWORD_ONLY:
|
1265 |
+
kwonly_args.append(name)
|
1266 |
+
|
1267 |
+
@wraps(f)
|
1268 |
+
def inner_f(*args, **kwargs):
|
1269 |
+
extra_args = len(args) - len(all_args)
|
1270 |
+
if extra_args <= 0:
|
1271 |
+
return f(*args, **kwargs)
|
1272 |
+
|
1273 |
+
# extra_args > 0
|
1274 |
+
args_msg = [
|
1275 |
+
f"{name}={arg}"
|
1276 |
+
for name, arg in zip(kwonly_args[:extra_args], args[-extra_args:])
|
1277 |
+
]
|
1278 |
+
args_msg = ", ".join(args_msg)
|
1279 |
+
warnings.warn(
|
1280 |
+
(
|
1281 |
+
f"Pass {args_msg} as keyword args. From NetworkX version "
|
1282 |
+
f"{version} passing these as positional arguments "
|
1283 |
+
"will result in an error"
|
1284 |
+
),
|
1285 |
+
FutureWarning,
|
1286 |
+
)
|
1287 |
+
kwargs.update(zip(sig.parameters, args))
|
1288 |
+
return f(**kwargs)
|
1289 |
+
|
1290 |
+
return inner_f
|
1291 |
+
|
1292 |
+
if func is not None:
|
1293 |
+
return _inner_deprecate_positional_args(func)
|
1294 |
+
|
1295 |
+
return _inner_deprecate_positional_args
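
A hedged usage sketch of the vendored decorator above; `make_grid` and the version string are illustrative assumptions::

    from networkx.utils.decorators import deprecate_positional_args

    @deprecate_positional_args(version="3.4")
    def make_grid(rows, cols, *, periodic=False):
        return rows, cols, periodic

    make_grid(2, 3, periodic=True)   # fine: keyword usage
    make_grid(2, 3, True)            # FutureWarning: pass periodic=True as a keyword argument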
|
env-llmeval/lib/python3.10/site-packages/networkx/utils/heaps.py
ADDED
@@ -0,0 +1,340 @@
1 |
+
"""
|
2 |
+
Min-heaps.
|
3 |
+
"""
|
4 |
+
|
5 |
+
from heapq import heappop, heappush
|
6 |
+
from itertools import count
|
7 |
+
|
8 |
+
import networkx as nx
|
9 |
+
|
10 |
+
__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
|
11 |
+
|
12 |
+
|
13 |
+
class MinHeap:
|
14 |
+
"""Base class for min-heaps.
|
15 |
+
|
16 |
+
A MinHeap stores a collection of key-value pairs ordered by their values.
|
17 |
+
It supports querying the minimum pair, inserting a new pair, decreasing the
|
18 |
+
value in an existing pair and deleting the minimum pair.
|
19 |
+
"""
|
20 |
+
|
21 |
+
class _Item:
|
22 |
+
"""Used by subclassess to represent a key-value pair."""
|
23 |
+
|
24 |
+
__slots__ = ("key", "value")
|
25 |
+
|
26 |
+
def __init__(self, key, value):
|
27 |
+
self.key = key
|
28 |
+
self.value = value
|
29 |
+
|
30 |
+
def __repr__(self):
|
31 |
+
return repr((self.key, self.value))
|
32 |
+
|
33 |
+
def __init__(self):
|
34 |
+
"""Initialize a new min-heap."""
|
35 |
+
self._dict = {}
|
36 |
+
|
37 |
+
def min(self):
|
38 |
+
"""Query the minimum key-value pair.
|
39 |
+
|
40 |
+
Returns
|
41 |
+
-------
|
42 |
+
key, value : tuple
|
43 |
+
The key-value pair with the minimum value in the heap.
|
44 |
+
|
45 |
+
Raises
|
46 |
+
------
|
47 |
+
NetworkXError
|
48 |
+
If the heap is empty.
|
49 |
+
"""
|
50 |
+
raise NotImplementedError
|
51 |
+
|
52 |
+
def pop(self):
|
53 |
+
"""Delete the minimum pair in the heap.
|
54 |
+
|
55 |
+
Returns
|
56 |
+
-------
|
57 |
+
key, value : tuple
|
58 |
+
The key-value pair with the minimum value in the heap.
|
59 |
+
|
60 |
+
Raises
|
61 |
+
------
|
62 |
+
NetworkXError
|
63 |
+
If the heap is empty.
|
64 |
+
"""
|
65 |
+
raise NotImplementedError
|
66 |
+
|
67 |
+
def get(self, key, default=None):
|
68 |
+
"""Returns the value associated with a key.
|
69 |
+
|
70 |
+
Parameters
|
71 |
+
----------
|
72 |
+
key : hashable object
|
73 |
+
The key to be looked up.
|
74 |
+
|
75 |
+
default : object
|
76 |
+
Default value to return if the key is not present in the heap.
|
77 |
+
Default value: None.
|
78 |
+
|
79 |
+
Returns
|
80 |
+
-------
|
81 |
+
value : object.
|
82 |
+
The value associated with the key.
|
83 |
+
"""
|
84 |
+
raise NotImplementedError
|
85 |
+
|
86 |
+
def insert(self, key, value, allow_increase=False):
|
87 |
+
"""Insert a new key-value pair or modify the value in an existing
|
88 |
+
pair.
|
89 |
+
|
90 |
+
Parameters
|
91 |
+
----------
|
92 |
+
key : hashable object
|
93 |
+
The key.
|
94 |
+
|
95 |
+
value : object comparable with existing values.
|
96 |
+
The value.
|
97 |
+
|
98 |
+
allow_increase : bool
|
99 |
+
Whether the value is allowed to increase. If False, attempts to
|
100 |
+
increase an existing value have no effect. Default value: False.
|
101 |
+
|
102 |
+
Returns
|
103 |
+
-------
|
104 |
+
decreased : bool
|
105 |
+
True if a pair is inserted or the existing value is decreased.
|
106 |
+
"""
|
107 |
+
raise NotImplementedError
|
108 |
+
|
109 |
+
def __nonzero__(self):
|
110 |
+
"""Returns whether the heap if empty."""
|
111 |
+
return bool(self._dict)
|
112 |
+
|
113 |
+
def __bool__(self):
|
114 |
+
"""Returns whether the heap if empty."""
|
115 |
+
return bool(self._dict)
|
116 |
+
|
117 |
+
def __len__(self):
|
118 |
+
"""Returns the number of key-value pairs in the heap."""
|
119 |
+
return len(self._dict)
|
120 |
+
|
121 |
+
def __contains__(self, key):
|
122 |
+
"""Returns whether a key exists in the heap.
|
123 |
+
|
124 |
+
Parameters
|
125 |
+
----------
|
126 |
+
key : any hashable object.
|
127 |
+
The key to be looked up.
|
128 |
+
"""
|
129 |
+
return key in self._dict
|
130 |
+
|
131 |
+
|
132 |
+
class PairingHeap(MinHeap):
|
133 |
+
"""A pairing heap."""
|
134 |
+
|
135 |
+
class _Node(MinHeap._Item):
|
136 |
+
"""A node in a pairing heap.
|
137 |
+
|
138 |
+
A tree in a pairing heap is stored using the left-child, right-sibling
|
139 |
+
representation.
|
140 |
+
"""
|
141 |
+
|
142 |
+
__slots__ = ("left", "next", "prev", "parent")
|
143 |
+
|
144 |
+
def __init__(self, key, value):
|
145 |
+
super().__init__(key, value)
|
146 |
+
# The leftmost child.
|
147 |
+
self.left = None
|
148 |
+
# The next sibling.
|
149 |
+
self.next = None
|
150 |
+
# The previous sibling.
|
151 |
+
self.prev = None
|
152 |
+
# The parent.
|
153 |
+
self.parent = None
|
154 |
+
|
155 |
+
def __init__(self):
|
156 |
+
"""Initialize a pairing heap."""
|
157 |
+
super().__init__()
|
158 |
+
self._root = None
|
159 |
+
|
160 |
+
def min(self):
|
161 |
+
if self._root is None:
|
162 |
+
raise nx.NetworkXError("heap is empty.")
|
163 |
+
return (self._root.key, self._root.value)
|
164 |
+
|
165 |
+
def pop(self):
|
166 |
+
if self._root is None:
|
167 |
+
raise nx.NetworkXError("heap is empty.")
|
168 |
+
min_node = self._root
|
169 |
+
self._root = self._merge_children(self._root)
|
170 |
+
del self._dict[min_node.key]
|
171 |
+
return (min_node.key, min_node.value)
|
172 |
+
|
173 |
+
def get(self, key, default=None):
|
174 |
+
node = self._dict.get(key)
|
175 |
+
return node.value if node is not None else default
|
176 |
+
|
177 |
+
def insert(self, key, value, allow_increase=False):
|
178 |
+
node = self._dict.get(key)
|
179 |
+
root = self._root
|
180 |
+
if node is not None:
|
181 |
+
if value < node.value:
|
182 |
+
node.value = value
|
183 |
+
if node is not root and value < node.parent.value:
|
184 |
+
self._cut(node)
|
185 |
+
self._root = self._link(root, node)
|
186 |
+
return True
|
187 |
+
elif allow_increase and value > node.value:
|
188 |
+
node.value = value
|
189 |
+
child = self._merge_children(node)
|
190 |
+
# Nonstandard step: Link the merged subtree with the root. See
|
191 |
+
# below for the standard step.
|
192 |
+
if child is not None:
|
193 |
+
self._root = self._link(self._root, child)
|
194 |
+
# Standard step: Perform a decrease followed by a pop as if the
|
195 |
+
# value were the smallest in the heap. Then insert the new
|
196 |
+
# value into the heap.
|
197 |
+
# if node is not root:
|
198 |
+
# self._cut(node)
|
199 |
+
# if child is not None:
|
200 |
+
# root = self._link(root, child)
|
201 |
+
# self._root = self._link(root, node)
|
202 |
+
# else:
|
203 |
+
# self._root = (self._link(node, child)
|
204 |
+
# if child is not None else node)
|
205 |
+
return False
|
206 |
+
else:
|
207 |
+
# Insert a new key.
|
208 |
+
node = self._Node(key, value)
|
209 |
+
self._dict[key] = node
|
210 |
+
self._root = self._link(root, node) if root is not None else node
|
211 |
+
return True
|
212 |
+
|
213 |
+
def _link(self, root, other):
|
214 |
+
"""Link two nodes, making the one with the smaller value the parent of
|
215 |
+
the other.
|
216 |
+
"""
|
217 |
+
if other.value < root.value:
|
218 |
+
root, other = other, root
|
219 |
+
next = root.left
|
220 |
+
other.next = next
|
221 |
+
if next is not None:
|
222 |
+
next.prev = other
|
223 |
+
other.prev = None
|
224 |
+
root.left = other
|
225 |
+
other.parent = root
|
226 |
+
return root
|
227 |
+
|
228 |
+
def _merge_children(self, root):
|
229 |
+
"""Merge the subtrees of the root using the standard two-pass method.
|
230 |
+
The resulting subtree is detached from the root.
|
231 |
+
"""
|
232 |
+
node = root.left
|
233 |
+
root.left = None
|
234 |
+
if node is not None:
|
235 |
+
link = self._link
|
236 |
+
# Pass 1: Merge pairs of consecutive subtrees from left to right.
|
237 |
+
# At the end of the pass, only the prev pointers of the resulting
|
238 |
+
# subtrees have meaningful values. The other pointers will be fixed
|
239 |
+
# in pass 2.
|
240 |
+
prev = None
|
241 |
+
while True:
|
242 |
+
next = node.next
|
243 |
+
if next is None:
|
244 |
+
node.prev = prev
|
245 |
+
break
|
246 |
+
next_next = next.next
|
247 |
+
node = link(node, next)
|
248 |
+
node.prev = prev
|
249 |
+
prev = node
|
250 |
+
if next_next is None:
|
251 |
+
break
|
252 |
+
node = next_next
|
253 |
+
# Pass 2: Successively merge the subtrees produced by pass 1 from
|
254 |
+
# right to left with the rightmost one.
|
255 |
+
prev = node.prev
|
256 |
+
while prev is not None:
|
257 |
+
prev_prev = prev.prev
|
258 |
+
node = link(prev, node)
|
259 |
+
prev = prev_prev
|
260 |
+
# Now node can become the new root. It has no parent or siblings.
|
261 |
+
node.prev = None
|
262 |
+
node.next = None
|
263 |
+
node.parent = None
|
264 |
+
return node
|
265 |
+
|
266 |
+
def _cut(self, node):
|
267 |
+
"""Cut a node from its parent."""
|
268 |
+
prev = node.prev
|
269 |
+
next = node.next
|
270 |
+
if prev is not None:
|
271 |
+
prev.next = next
|
272 |
+
else:
|
273 |
+
node.parent.left = next
|
274 |
+
node.prev = None
|
275 |
+
if next is not None:
|
276 |
+
next.prev = prev
|
277 |
+
node.next = None
|
278 |
+
node.parent = None
|
279 |
+
|
280 |
+
|
281 |
+
class BinaryHeap(MinHeap):
|
282 |
+
"""A binary heap."""
|
283 |
+
|
284 |
+
def __init__(self):
|
285 |
+
"""Initialize a binary heap."""
|
286 |
+
super().__init__()
|
287 |
+
self._heap = []
|
288 |
+
self._count = count()
|
289 |
+
|
290 |
+
def min(self):
|
291 |
+
dict = self._dict
|
292 |
+
if not dict:
|
293 |
+
raise nx.NetworkXError("heap is empty")
|
294 |
+
heap = self._heap
|
295 |
+
pop = heappop
|
296 |
+
# Repeatedly remove stale key-value pairs until an up-to-date one is
|
297 |
+
# found.
|
298 |
+
while True:
|
299 |
+
value, _, key = heap[0]
|
300 |
+
if key in dict and value == dict[key]:
|
301 |
+
break
|
302 |
+
pop(heap)
|
303 |
+
return (key, value)
|
304 |
+
|
305 |
+
def pop(self):
|
306 |
+
dict = self._dict
|
307 |
+
if not dict:
|
308 |
+
raise nx.NetworkXError("heap is empty")
|
309 |
+
heap = self._heap
|
310 |
+
pop = heappop
|
311 |
+
# Repeatedly remove stale key-value pairs until a up-to-date one is
|
312 |
+
# met.
|
313 |
+
while True:
|
314 |
+
value, _, key = heap[0]
|
315 |
+
pop(heap)
|
316 |
+
if key in dict and value == dict[key]:
|
317 |
+
break
|
318 |
+
del dict[key]
|
319 |
+
return (key, value)
|
320 |
+
|
321 |
+
def get(self, key, default=None):
|
322 |
+
return self._dict.get(key, default)
|
323 |
+
|
324 |
+
def insert(self, key, value, allow_increase=False):
|
325 |
+
dict = self._dict
|
326 |
+
if key in dict:
|
327 |
+
old_value = dict[key]
|
328 |
+
if value < old_value or (allow_increase and value > old_value):
|
329 |
+
# Since there is no way to efficiently obtain the location of a
|
330 |
+
# key-value pair in the heap, insert a new pair even if ones
|
331 |
+
# with the same key may already be present. Deem the old ones
|
332 |
+
# as stale and skip them when the minimum pair is queried.
|
333 |
+
dict[key] = value
|
334 |
+
heappush(self._heap, (value, next(self._count), key))
|
335 |
+
return value < old_value
|
336 |
+
return False
|
337 |
+
else:
|
338 |
+
dict[key] = value
|
339 |
+
heappush(self._heap, (value, next(self._count), key))
|
340 |
+
return True
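
A short usage sketch of the `BinaryHeap` API documented above; the keys and values are chosen arbitrarily::

    from networkx.utils.heaps import BinaryHeap

    heap = BinaryHeap()
    heap.insert("a", 5)
    heap.insert("b", 3)
    heap.insert("a", 7)                # ignored: increases need allow_increase=True
    print(heap.min())                  # ('b', 3)
    print(heap.pop())                  # ('b', 3) -- the minimum pair is removed
    print(heap.get("a"), len(heap))    # 5 1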
|
env-llmeval/lib/python3.10/site-packages/networkx/utils/mapped_queue.py
ADDED
@@ -0,0 +1,298 @@
"""Priority queue class with updatable priorities.
"""

import heapq

__all__ = ["MappedQueue"]


class _HeapElement:
    """This proxy class separates the heap element from its priority.

    The idea is that using a 2-tuple (priority, element) works
    for sorting, but not for dict lookup because priorities are
    often floating point values so round-off can mess up equality.

    So, we need inequalities to look at the priority (for sorting)
    and equality (and hash) to look at the element to enable
    updates to the priority.

    Unfortunately, this class can be tricky to work with if you forget that
    `__lt__` compares the priority while `__eq__` compares the element.
    In `greedy_modularity_communities()` the following code is
    used to check that two _HeapElements differ in either element or priority:

        if d_oldmax != row_max or d_oldmax.priority != row_max.priority:

    If the priorities are the same, this implementation uses the element
    as a tiebreaker. This provides compatibility with older systems that
    use tuples to combine priority and elements.
    """

    __slots__ = ["priority", "element", "_hash"]

    def __init__(self, priority, element):
        self.priority = priority
        self.element = element
        self._hash = hash(element)

    def __lt__(self, other):
        try:
            other_priority = other.priority
        except AttributeError:
            return self.priority < other
        # assume comparing to another _HeapElement
        if self.priority == other_priority:
            try:
                return self.element < other.element
            except TypeError as err:
                raise TypeError(
                    "Consider using a tuple, with a priority value that can be compared."
                )
        return self.priority < other_priority

    def __gt__(self, other):
        try:
            other_priority = other.priority
        except AttributeError:
            return self.priority > other
        # assume comparing to another _HeapElement
        if self.priority == other_priority:
            try:
                return self.element > other.element
            except TypeError as err:
                raise TypeError(
                    "Consider using a tuple, with a priority value that can be compared."
                )
        return self.priority > other_priority

    def __eq__(self, other):
        try:
            return self.element == other.element
        except AttributeError:
            return self.element == other

    def __hash__(self):
        return self._hash

    def __getitem__(self, indx):
        return self.priority if indx == 0 else self.element[indx - 1]

    def __iter__(self):
        yield self.priority
        try:
            yield from self.element
        except TypeError:
            yield self.element

    def __repr__(self):
        return f"_HeapElement({self.priority}, {self.element})"


class MappedQueue:
    """The MappedQueue class implements a min-heap with removal and update-priority.

    The min heap uses heapq as well as custom written _siftup and _siftdown
    methods to allow the heap positions to be tracked by an additional dict
    keyed by element to position. The smallest element can be popped in O(1) time,
    new elements can be pushed in O(log n) time, and any element can be removed
    or updated in O(log n) time. The queue cannot contain duplicate elements
    and an attempt to push an element already in the queue will have no effect.

    MappedQueue complements the heapq package from the python standard
    library. While MappedQueue is designed for maximum compatibility with
    heapq, it adds element removal, lookup, and priority update.

    Parameters
    ----------
    data : dict or iterable

    Examples
    --------

    A `MappedQueue` can be created empty, or optionally, given a dictionary
    of initial elements and priorities. The methods `push`, `pop`,
    `remove`, and `update` operate on the queue.

    >>> colors_nm = {"red": 665, "blue": 470, "green": 550}
    >>> q = MappedQueue(colors_nm)
    >>> q.remove("red")
    >>> q.update("green", "violet", 400)
    >>> q.push("indigo", 425)
    True
    >>> [q.pop().element for i in range(len(q.heap))]
    ['violet', 'indigo', 'blue']

    A `MappedQueue` can also be initialized with a list or other iterable. The priority is assumed
    to be the sort order of the items in the list.

    >>> q = MappedQueue([916, 50, 4609, 493, 237])
    >>> q.remove(493)
    >>> q.update(237, 1117)
    >>> [q.pop() for i in range(len(q.heap))]
    [50, 916, 1117, 4609]

    An exception is raised if the elements are not comparable.

    >>> q = MappedQueue([100, "a"])
    Traceback (most recent call last):
    ...
    TypeError: '<' not supported between instances of 'int' and 'str'

    To avoid the exception, use a dictionary to assign priorities to the elements.

    >>> q = MappedQueue({100: 0, "a": 1})

    References
    ----------
    .. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
       Introduction to algorithms second edition.
    .. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3).
       Pearson Education.
    """

    def __init__(self, data=None):
        """Priority queue class with updatable priorities."""
        if data is None:
            self.heap = []
        elif isinstance(data, dict):
            self.heap = [_HeapElement(v, k) for k, v in data.items()]
        else:
            self.heap = list(data)
        self.position = {}
        self._heapify()

    def _heapify(self):
        """Restore heap invariant and recalculate map."""
        heapq.heapify(self.heap)
        self.position = {elt: pos for pos, elt in enumerate(self.heap)}
        if len(self.heap) != len(self.position):
            raise AssertionError("Heap contains duplicate elements")

    def __len__(self):
        return len(self.heap)

    def push(self, elt, priority=None):
        """Add an element to the queue."""
        if priority is not None:
            elt = _HeapElement(priority, elt)
        # If element is already in queue, do nothing
        if elt in self.position:
            return False
        # Add element to heap and dict
        pos = len(self.heap)
        self.heap.append(elt)
        self.position[elt] = pos
        # Restore invariant by sifting down
        self._siftdown(0, pos)
        return True

    def pop(self):
        """Remove and return the smallest element in the queue."""
        # Remove smallest element
        elt = self.heap[0]
        del self.position[elt]
        # If elt is last item, remove and return
        if len(self.heap) == 1:
            self.heap.pop()
            return elt
        # Replace root with last element
        last = self.heap.pop()
        self.heap[0] = last
        self.position[last] = 0
        # Restore invariant by sifting up
        self._siftup(0)
        # Return smallest element
        return elt

    def update(self, elt, new, priority=None):
        """Replace an element in the queue with a new one."""
        if priority is not None:
            new = _HeapElement(priority, new)
        # Replace
        pos = self.position[elt]
        self.heap[pos] = new
        del self.position[elt]
        self.position[new] = pos
        # Restore invariant by sifting up
        self._siftup(pos)

    def remove(self, elt):
        """Remove an element from the queue."""
        # Find and remove element
        try:
            pos = self.position[elt]
            del self.position[elt]
        except KeyError:
            # Not in queue
            raise
        # If elt is last item, remove and return
        if pos == len(self.heap) - 1:
            self.heap.pop()
            return
        # Replace elt with last element
        last = self.heap.pop()
        self.heap[pos] = last
        self.position[last] = pos
        # Restore invariant by sifting up
        self._siftup(pos)

    def _siftup(self, pos):
        """Move smaller child up until hitting a leaf.

        Built to mimic code for heapq._siftup
        only updating position dict too.
        """
        heap, position = self.heap, self.position
        end_pos = len(heap)
        startpos = pos
        newitem = heap[pos]
        # Shift up the smaller child until hitting a leaf
        child_pos = (pos << 1) + 1  # start with leftmost child position
        while child_pos < end_pos:
            # Set child_pos to index of smaller child.
            child = heap[child_pos]
            right_pos = child_pos + 1
            if right_pos < end_pos:
                right = heap[right_pos]
                if not child < right:
                    child = right
                    child_pos = right_pos
            # Move the smaller child up.
            heap[pos] = child
            position[child] = pos
            pos = child_pos
            child_pos = (pos << 1) + 1
        # pos is a leaf position. Put newitem there, and bubble it up
        # to its final resting place (by sifting its parents down).
        while pos > 0:
            parent_pos = (pos - 1) >> 1
            parent = heap[parent_pos]
            if not newitem < parent:
                break
            heap[pos] = parent
            position[parent] = pos
            pos = parent_pos
        heap[pos] = newitem
        position[newitem] = pos

    def _siftdown(self, start_pos, pos):
        """Restore invariant. keep swapping with parent until smaller.

        Built to mimic code for heapq._siftdown
        only updating position dict too.
        """
        heap, position = self.heap, self.position
        newitem = heap[pos]
        # Follow the path to the root, moving parents down until finding a place
        # newitem fits.
        while pos > start_pos:
            parent_pos = (pos - 1) >> 1
            parent = heap[parent_pos]
            if not newitem < parent:
                break
            heap[pos] = parent
            position[parent] = pos
            pos = parent_pos
        heap[pos] = newitem
        position[newitem] = pos
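
A short usage sketch of the MappedQueue API above; the element and priority values are arbitrary illustrations:

# Illustrative use of MappedQueue (not part of the packaged file).
from networkx.utils.mapped_queue import MappedQueue

q = MappedQueue({"b": 2, "a": 1, "c": 3})    # element -> priority
q.push("d", priority=0)                       # returns True: "d" was not present
q.update("c", "c", priority=-1)               # re-prioritize an existing element
smallest = q.pop()                            # the _HeapElement with priority -1
print(smallest.priority, smallest.element)    # -1 c
print(len(q))                                 # 3 elements remain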
env-llmeval/lib/python3.10/site-packages/networkx/utils/misc.py
ADDED
@@ -0,0 +1,601 @@
"""
Miscellaneous Helpers for NetworkX.

These are not imported into the base networkx namespace but
can be accessed, for example, as

>>> import networkx
>>> networkx.utils.make_list_of_ints({1, 2, 3})
[1, 2, 3]
>>> networkx.utils.arbitrary_element({5, 1, 7})  # doctest: +SKIP
1
"""

import random
import sys
import uuid
import warnings
from collections import defaultdict, deque
from collections.abc import Iterable, Iterator, Sized
from itertools import chain, tee

import networkx as nx

__all__ = [
    "flatten",
    "make_list_of_ints",
    "dict_to_numpy_array",
    "arbitrary_element",
    "pairwise",
    "groups",
    "create_random_state",
    "create_py_random_state",
    "PythonRandomInterface",
    "PythonRandomViaNumpyBits",
    "nodes_equal",
    "edges_equal",
    "graphs_equal",
    "_clear_cache",
]


# some cookbook stuff
# used in deciding whether something is a bunch of nodes, edges, etc.
# see G.add_nodes and others in Graph Class in networkx/base.py


def flatten(obj, result=None):
    """Return flattened version of (possibly nested) iterable object."""
    if not isinstance(obj, Iterable | Sized) or isinstance(obj, str):
        return obj
    if result is None:
        result = []
    for item in obj:
        if not isinstance(item, Iterable | Sized) or isinstance(item, str):
            result.append(item)
        else:
            flatten(item, result)
    return tuple(result)


def make_list_of_ints(sequence):
    """Return list of ints from sequence of integral numbers.

    All elements of the sequence must satisfy int(element) == element
    or a ValueError is raised. Sequence is iterated through once.

    If sequence is a list, the non-int values are replaced with ints.
    So, no new list is created
    """
    if not isinstance(sequence, list):
        result = []
        for i in sequence:
            errmsg = f"sequence is not all integers: {i}"
            try:
                ii = int(i)
            except ValueError:
                raise nx.NetworkXError(errmsg) from None
            if ii != i:
                raise nx.NetworkXError(errmsg)
            result.append(ii)
        return result
    # original sequence is a list... in-place conversion to ints
    for indx, i in enumerate(sequence):
        errmsg = f"sequence is not all integers: {i}"
        if isinstance(i, int):
            continue
        try:
            ii = int(i)
        except ValueError:
            raise nx.NetworkXError(errmsg) from None
        if ii != i:
            raise nx.NetworkXError(errmsg)
        sequence[indx] = ii
    return sequence


def dict_to_numpy_array(d, mapping=None):
    """Convert a dictionary of dictionaries to a numpy array
    with optional mapping."""
    try:
        return _dict_to_numpy_array2(d, mapping)
    except (AttributeError, TypeError):
        # AttributeError is when no mapping was provided and v.keys() fails.
        # TypeError is when a mapping was provided and d[k1][k2] fails.
        return _dict_to_numpy_array1(d, mapping)


def _dict_to_numpy_array2(d, mapping=None):
    """Convert a dictionary of dictionaries to a 2d numpy array
    with optional mapping.

    """
    import numpy as np

    if mapping is None:
        s = set(d.keys())
        for k, v in d.items():
            s.update(v.keys())
        mapping = dict(zip(s, range(len(s))))
    n = len(mapping)
    a = np.zeros((n, n))
    for k1, i in mapping.items():
        for k2, j in mapping.items():
            try:
                a[i, j] = d[k1][k2]
            except KeyError:
                pass
    return a


def _dict_to_numpy_array1(d, mapping=None):
    """Convert a dictionary of numbers to a 1d numpy array with optional mapping."""
    import numpy as np

    if mapping is None:
        s = set(d.keys())
        mapping = dict(zip(s, range(len(s))))
    n = len(mapping)
    a = np.zeros(n)
    for k1, i in mapping.items():
        i = mapping[k1]
        a[i] = d[k1]
    return a


def arbitrary_element(iterable):
    """Returns an arbitrary element of `iterable` without removing it.

    This is most useful for "peeking" at an arbitrary element of a set,
    but can be used for any list, dictionary, etc., as well.

    Parameters
    ----------
    iterable : `abc.collections.Iterable` instance
        Any object that implements ``__iter__``, e.g. set, dict, list, tuple,
        etc.

    Returns
    -------
    The object that results from ``next(iter(iterable))``

    Raises
    ------
    ValueError
        If `iterable` is an iterator (because the current implementation of
        this function would consume an element from the iterator).

    Examples
    --------
    Arbitrary elements from common Iterable objects:

    >>> nx.utils.arbitrary_element([1, 2, 3])  # list
    1
    >>> nx.utils.arbitrary_element((1, 2, 3))  # tuple
    1
    >>> nx.utils.arbitrary_element({1, 2, 3})  # set
    1
    >>> d = {k: v for k, v in zip([1, 2, 3], [3, 2, 1])}
    >>> nx.utils.arbitrary_element(d)  # dict_keys
    1
    >>> nx.utils.arbitrary_element(d.values())  # dict values
    3

    `str` is also an Iterable:

    >>> nx.utils.arbitrary_element("hello")
    'h'

    :exc:`ValueError` is raised if `iterable` is an iterator:

    >>> iterator = iter([1, 2, 3])  # Iterator, *not* Iterable
    >>> nx.utils.arbitrary_element(iterator)
    Traceback (most recent call last):
        ...
    ValueError: cannot return an arbitrary item from an iterator

    Notes
    -----
    This function does not return a *random* element. If `iterable` is
    ordered, sequential calls will return the same value::

        >>> l = [1, 2, 3]
        >>> nx.utils.arbitrary_element(l)
        1
        >>> nx.utils.arbitrary_element(l)
        1

    """
    if isinstance(iterable, Iterator):
        raise ValueError("cannot return an arbitrary item from an iterator")
    # Another possible implementation is ``for x in iterable: return x``.
    return next(iter(iterable))


# Recipe from the itertools documentation.
def pairwise(iterable, cyclic=False):
    "s -> (s0, s1), (s1, s2), (s2, s3), ..."
    a, b = tee(iterable)
    first = next(b, None)
    if cyclic is True:
        return zip(a, chain(b, (first,)))
    return zip(a, b)


def groups(many_to_one):
    """Converts a many-to-one mapping into a one-to-many mapping.

    `many_to_one` must be a dictionary whose keys and values are all
    :term:`hashable`.

    The return value is a dictionary mapping values from `many_to_one`
    to sets of keys from `many_to_one` that have that value.

    Examples
    --------
    >>> from networkx.utils import groups
    >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3}
    >>> groups(many_to_one)  # doctest: +SKIP
    {1: {'a', 'b'}, 2: {'c'}, 3: {'e', 'd'}}
    """
    one_to_many = defaultdict(set)
    for v, k in many_to_one.items():
        one_to_many[k].add(v)
    return dict(one_to_many)


def create_random_state(random_state=None):
    """Returns a numpy.random.RandomState or numpy.random.Generator instance
    depending on input.

    Parameters
    ----------
    random_state : int or NumPy RandomState or Generator instance, optional (default=None)
        If int, return a numpy.random.RandomState instance set with seed=int.
        if `numpy.random.RandomState` instance, return it.
        if `numpy.random.Generator` instance, return it.
        if None or numpy.random, return the global random number generator used
        by numpy.random.
    """
    import numpy as np

    if random_state is None or random_state is np.random:
        return np.random.mtrand._rand
    if isinstance(random_state, np.random.RandomState):
        return random_state
    if isinstance(random_state, int):
        return np.random.RandomState(random_state)
    if isinstance(random_state, np.random.Generator):
        return random_state
    msg = (
        f"{random_state} cannot be used to create a numpy.random.RandomState or\n"
        "numpy.random.Generator instance"
    )
    raise ValueError(msg)


class PythonRandomViaNumpyBits(random.Random):
    """Provide the random.random algorithms using a numpy.random bit generator

    The intent is to allow people to contribute code that uses Python's random
    library, but still allow users to provide a single easily controlled random
    bit-stream for all work with NetworkX. This implementation is based on helpful
    comments and code from Robert Kern on NumPy's GitHub Issue #24458.

    This implementation supersedes that of `PythonRandomInterface`, which rewrote
    methods to account for subtle differences in API between `random` and
    `numpy.random`. Instead this subclasses `random.Random` and overwrites
    the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`.
    It makes them use the rng values from an input numpy `RandomState` or `Generator`.
    Those few methods allow the rest of the `random.Random` methods to provide
    the API interface of `random.random` while using randomness generated by
    a numpy generator.
    """

    def __init__(self, rng=None):
        try:
            import numpy as np
        except ImportError:
            msg = "numpy not found, only random.random available."
            warnings.warn(msg, ImportWarning)

        if rng is None:
            self._rng = np.random.mtrand._rand
        else:
            self._rng = rng

        # Not necessary, given our overriding of gauss() below, but it's
        # in the superclass and nominally public, so initialize it here.
        self.gauss_next = None

    def random(self):
        """Get the next random number in the range 0.0 <= X < 1.0."""
        return self._rng.random()

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates an int with k random bits."""
        if k < 0:
            raise ValueError("number of bits must be non-negative")
        numbytes = (k + 7) // 8  # bits / 8 and rounded up
        x = int.from_bytes(self._rng.bytes(numbytes), "big")
        return x >> (numbytes * 8 - k)  # trim excess bits

    def getstate(self):
        return self._rng.__getstate__()

    def setstate(self, state):
        self._rng.__setstate__(state)

    def seed(self, *args, **kwds):
        "Do nothing override method."
        raise NotImplementedError("seed() not implemented in PythonRandomViaNumpyBits")


##################################################################
class PythonRandomInterface:
    """PythonRandomInterface is included for backward compatibility
    New code should use PythonRandomViaNumpyBits instead.
    """

    def __init__(self, rng=None):
        try:
            import numpy as np
        except ImportError:
            msg = "numpy not found, only random.random available."
            warnings.warn(msg, ImportWarning)

        if rng is None:
            self._rng = np.random.mtrand._rand
        else:
            self._rng = rng

    def random(self):
        return self._rng.random()

    def uniform(self, a, b):
        return a + (b - a) * self._rng.random()

    def randrange(self, a, b=None):
        import numpy as np

        if b is None:
            a, b = 0, a
        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
            tmp_rng = PythonRandomViaNumpyBits(self._rng)
            return tmp_rng.randrange(a, b)

        if isinstance(self._rng, np.random.Generator):
            return self._rng.integers(a, b)
        return self._rng.randint(a, b)

    # NOTE: the numpy implementations of `choice` don't support strings, so
    # this cannot be replaced with self._rng.choice
    def choice(self, seq):
        import numpy as np

        if isinstance(self._rng, np.random.Generator):
            idx = self._rng.integers(0, len(seq))
        else:
            idx = self._rng.randint(0, len(seq))
        return seq[idx]

    def gauss(self, mu, sigma):
        return self._rng.normal(mu, sigma)

    def shuffle(self, seq):
        return self._rng.shuffle(seq)

    # Some methods don't match API for numpy RandomState.
    # Commented out versions are not used by NetworkX

    def sample(self, seq, k):
        return self._rng.choice(list(seq), size=(k,), replace=False)

    def randint(self, a, b):
        import numpy as np

        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
            tmp_rng = PythonRandomViaNumpyBits(self._rng)
            return tmp_rng.randint(a, b)

        if isinstance(self._rng, np.random.Generator):
            return self._rng.integers(a, b + 1)
        return self._rng.randint(a, b + 1)

    # exponential as expovariate with 1/argument,
    def expovariate(self, scale):
        return self._rng.exponential(1 / scale)

    # pareto as paretovariate with 1/argument,
    def paretovariate(self, shape):
        return self._rng.pareto(shape)


# weibull as weibullvariate multiplied by beta,
# def weibullvariate(self, alpha, beta):
#     return self._rng.weibull(alpha) * beta
#
# def triangular(self, low, high, mode):
#     return self._rng.triangular(low, mode, high)
#
# def choices(self, seq, weights=None, cum_weights=None, k=1):
#     return self._rng.choice(seq


def create_py_random_state(random_state=None):
    """Returns a random.Random instance depending on input.

    Parameters
    ----------
    random_state : int or random number generator or None (default=None)
        - If int, return a `random.Random` instance set with seed=int.
        - If `random.Random` instance, return it.
        - If None or the `np.random` package, return the global random number
          generator used by `np.random`.
        - If an `np.random.Generator` instance, or the `np.random` package, or
          the global numpy random number generator, then return it
          wrapped in a `PythonRandomViaNumpyBits` class.
        - If a `PythonRandomViaNumpyBits` instance, return it.
        - If a `PythonRandomInterface` instance, return it.
        - If a `np.random.RandomState` instance and not the global numpy default,
          return it wrapped in `PythonRandomInterface` for backward bit-stream
          matching with legacy code.

    Notes
    -----
    - A diagram intending to illustrate the relationships behind our support
      for numpy random numbers is called
      `NetworkX Numpy Random Numbers <https://excalidraw.com/#room=b5303f2b03d3af7ccc6a,e5ZDIWdWWCTTsg8OqoRvPA>`_.
    - More discussion about this support also appears in
      `gh-6869#comment <https://github.com/networkx/networkx/pull/6869#issuecomment-1944799534>`_.
    - Wrappers of numpy.random number generators allow them to mimic the Python random
      number generation algorithms. For example, Python can create arbitrarily large
      random ints, and the wrappers use Numpy bit-streams with CPython's random module
      to choose arbitrarily large random integers too.
    - We provide two wrapper classes:
      `PythonRandomViaNumpyBits` is usually what you want and is always used for
      `np.Generator` instances. But for users who need to recreate random numbers
      produced in NetworkX 3.2 or earlier, we maintain the `PythonRandomInterface`
      wrapper as well. We use it only if passed a (non-default) `np.RandomState`
      instance pre-initialized from a seed. Otherwise the newer wrapper is used.
    """
    if random_state is None or random_state is random:
        return random._inst
    if isinstance(random_state, random.Random):
        return random_state
    if isinstance(random_state, int):
        return random.Random(random_state)

    try:
        import numpy as np
    except ImportError:
        pass
    else:
        if isinstance(random_state, PythonRandomInterface | PythonRandomViaNumpyBits):
            return random_state
        if isinstance(random_state, np.random.Generator):
            return PythonRandomViaNumpyBits(random_state)
        if random_state is np.random:
            return PythonRandomViaNumpyBits(np.random.mtrand._rand)

        if isinstance(random_state, np.random.RandomState):
            if random_state is np.random.mtrand._rand:
                return PythonRandomViaNumpyBits(random_state)
            # Only need older interface if specially constructed RandomState used
            return PythonRandomInterface(random_state)

    msg = f"{random_state} cannot be used to generate a random.Random instance"
    raise ValueError(msg)


def nodes_equal(nodes1, nodes2):
    """Check if nodes are equal.

    Equality here means equal as Python objects.
    Node data must match if included.
    The order of nodes is not relevant.

    Parameters
    ----------
    nodes1, nodes2 : iterables of nodes, or (node, datadict) tuples

    Returns
    -------
    bool
        True if nodes are equal, False otherwise.
    """
    nlist1 = list(nodes1)
    nlist2 = list(nodes2)
    try:
        d1 = dict(nlist1)
        d2 = dict(nlist2)
    except (ValueError, TypeError):
        d1 = dict.fromkeys(nlist1)
        d2 = dict.fromkeys(nlist2)
    return d1 == d2


def edges_equal(edges1, edges2):
    """Check if edges are equal.

    Equality here means equal as Python objects.
    Edge data must match if included.
    The order of the edges is not relevant.

    Parameters
    ----------
    edges1, edges2 : iterables of with u, v nodes as
        edge tuples (u, v), or
        edge tuples with data dicts (u, v, d), or
        edge tuples with keys and data dicts (u, v, k, d)

    Returns
    -------
    bool
        True if edges are equal, False otherwise.
    """
    from collections import defaultdict

    d1 = defaultdict(dict)
    d2 = defaultdict(dict)
    c1 = 0
    for c1, e in enumerate(edges1):
        u, v = e[0], e[1]
        data = [e[2:]]
        if v in d1[u]:
            data = d1[u][v] + data
        d1[u][v] = data
        d1[v][u] = data
    c2 = 0
    for c2, e in enumerate(edges2):
        u, v = e[0], e[1]
        data = [e[2:]]
        if v in d2[u]:
            data = d2[u][v] + data
        d2[u][v] = data
        d2[v][u] = data
    if c1 != c2:
        return False
    # can check one direction because lengths are the same.
    for n, nbrdict in d1.items():
        for nbr, datalist in nbrdict.items():
            if n not in d2:
                return False
            if nbr not in d2[n]:
                return False
            d2datalist = d2[n][nbr]
            for data in datalist:
                if datalist.count(data) != d2datalist.count(data):
                    return False
    return True


def graphs_equal(graph1, graph2):
    """Check if graphs are equal.

    Equality here means equal as Python objects (not isomorphism).
    Node, edge and graph data must match.

    Parameters
    ----------
    graph1, graph2 : graph

    Returns
    -------
    bool
        True if graphs are equal, False otherwise.
    """
    return (
        graph1.adj == graph2.adj
        and graph1.nodes == graph2.nodes
        and graph1.graph == graph2.graph
    )


def _clear_cache(G):
    """Clear the cache of a graph (currently stores converted graphs).

    Caching is controlled via ``nx.config.cache_converted_graphs`` configuration.
    """
    if cache := getattr(G, "__networkx_cache__", None):
        cache.clear()
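
A brief sketch exercising a handful of the helpers above; the graphs and seed value are arbitrary illustrations:

# Illustrative use of selected helpers from networkx.utils.misc
# (not part of the packaged file).
import networkx as nx
from networkx.utils.misc import (
    create_py_random_state,
    edges_equal,
    flatten,
    groups,
    pairwise,
)

print(flatten([1, [2, [3, 4]], 5]))                 # (1, 2, 3, 4, 5)
print(list(pairwise([10, 20, 30], cyclic=True)))    # [(10, 20), (20, 30), (30, 10)]
print(groups({"a": 1, "b": 1, "c": 2}))             # {1: {'a', 'b'}, 2: {'c'}}

rng = create_py_random_state(42)                    # deterministic random.Random
print(rng.randint(0, 9))

G = nx.path_graph(3)
H = nx.Graph([(1, 0), (2, 1)])                      # same edges, different order
print(edges_equal(G.edges(data=True), H.edges(data=True)))  # True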
env-llmeval/lib/python3.10/site-packages/networkx/utils/random_sequence.py
ADDED
@@ -0,0 +1,164 @@
"""
Utilities for generating random numbers, random sequences, and
random selections.
"""

import networkx as nx
from networkx.utils import py_random_state

__all__ = [
    "powerlaw_sequence",
    "zipf_rv",
    "cumulative_distribution",
    "discrete_sequence",
    "random_weighted_sample",
    "weighted_choice",
]


# The same helpers for choosing random sequences from distributions
# uses Python's random module
# https://docs.python.org/3/library/random.html


@py_random_state(2)
def powerlaw_sequence(n, exponent=2.0, seed=None):
    """
    Return sample sequence of length n from a power law distribution.
    """
    return [seed.paretovariate(exponent - 1) for i in range(n)]


@py_random_state(2)
def zipf_rv(alpha, xmin=1, seed=None):
    r"""Returns a random value chosen from the Zipf distribution.

    The return value is an integer drawn from the probability distribution

    .. math::

        p(x)=\frac{x^{-\alpha}}{\zeta(\alpha, x_{\min})},

    where $\zeta(\alpha, x_{\min})$ is the Hurwitz zeta function.

    Parameters
    ----------
    alpha : float
        Exponent value of the distribution
    xmin : int
        Minimum value
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    x : int
        Random value from Zipf distribution

    Raises
    ------
    ValueError:
        If xmin < 1 or
        If alpha <= 1

    Notes
    -----
    The rejection algorithm generates random values for the power-law
    distribution in uniformly bounded expected time dependent on
    parameters.  See [1]_ for details on its operation.

    Examples
    --------
    >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42)
    8

    References
    ----------
    .. [1] Luc Devroye, Non-Uniform Random Variate Generation,
       Springer-Verlag, New York, 1986.
    """
    if xmin < 1:
        raise ValueError("xmin < 1")
    if alpha <= 1:
        raise ValueError("a <= 1.0")
    a1 = alpha - 1.0
    b = 2**a1
    while True:
        u = 1.0 - seed.random()  # u in (0,1]
        v = seed.random()  # v in [0,1)
        x = int(xmin * u ** -(1.0 / a1))
        t = (1.0 + (1.0 / x)) ** a1
        if v * x * (t - 1.0) / (b - 1.0) <= t / b:
            break
    return x


def cumulative_distribution(distribution):
    """Returns normalized cumulative distribution from discrete distribution."""

    cdf = [0.0]
    psum = sum(distribution)
    for i in range(len(distribution)):
        cdf.append(cdf[i] + distribution[i] / psum)
    return cdf


@py_random_state(3)
def discrete_sequence(n, distribution=None, cdistribution=None, seed=None):
    """
    Return sample sequence of length n from a given discrete distribution
    or discrete cumulative distribution.

    One of the following must be specified.

    distribution = histogram of values, will be normalized

    cdistribution = normalized discrete cumulative distribution

    """
    import bisect

    if cdistribution is not None:
        cdf = cdistribution
    elif distribution is not None:
        cdf = cumulative_distribution(distribution)
    else:
        raise nx.NetworkXError(
            "discrete_sequence: distribution or cdistribution missing"
        )

    # get a uniform random number
    inputseq = [seed.random() for i in range(n)]

    # choose from CDF
    seq = [bisect.bisect_left(cdf, s) - 1 for s in inputseq]
    return seq


@py_random_state(2)
def random_weighted_sample(mapping, k, seed=None):
    """Returns k items without replacement from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    if k > len(mapping):
        raise ValueError("sample larger than population")
    sample = set()
    while len(sample) < k:
        sample.add(weighted_choice(mapping, seed))
    return list(sample)


@py_random_state(1)
def weighted_choice(mapping, seed=None):
    """Returns a single element from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    # use roulette method
    rnd = seed.random() * sum(mapping.values())
    for k, w in mapping.items():
        rnd -= w
        if rnd < 0:
            return k
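
A quick sketch of how these sampling helpers compose; the histogram, weights, and seed are arbitrary illustrations:

# Illustrative use of networkx.utils.random_sequence helpers
# (not part of the packaged file).
from networkx.utils import random_sequence as rs

# An integer seed is converted to a random.Random by @py_random_state.
degrees = rs.powerlaw_sequence(5, exponent=2.5, seed=42)
print(degrees)                                     # five floats with a Pareto(1.5) tail

cdf = rs.cumulative_distribution([1, 2, 3])
print(cdf)                                         # [0.0, 0.1666..., 0.5, 1.0]

picks = rs.discrete_sequence(4, distribution=[1, 2, 3], seed=42)
print(picks)                                       # indices drawn from the normalized histogram

weights = {"x": 0.7, "y": 0.2, "z": 0.1}
print(rs.weighted_choice(weights, seed=42))        # one key, chosen by roulette selection
print(rs.random_weighted_sample(weights, 2, seed=42))  # two distinct keys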
env-llmeval/lib/python3.10/site-packages/networkx/utils/rcm.py
ADDED
@@ -0,0 +1,158 @@
"""
Cuthill-McKee ordering of graph nodes to produce sparse matrices
"""
from collections import deque
from operator import itemgetter

import networkx as nx

from ..utils import arbitrary_element

__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"]


def cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    heuristic : function, optional
      Function to choose starting node for RCM algorithm.  If None
      a node from a pseudo-peripheral pair is used.  A user-defined function
      can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
       Generator of nodes in Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(cuthill_mckee_ordering(G, heuristic=smallest_degree))

    See Also
    --------
    reverse_cuthill_mckee_ordering

    Notes
    -----
    Finding the optimal solution to the bandwidth reduction problem is
    NP-complete [2]_.

    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    for c in nx.connected_components(G):
        yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic)


def reverse_cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the reverse Cuthill-McKee heuristic (based on breadth-first search)
    [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    heuristic : function, optional
      Function to choose starting node for RCM algorithm.  If None
      a node from a pseudo-peripheral pair is used.  A user-defined function
      can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
       Generator of nodes in reverse Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import reverse_cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))

    See Also
    --------
    cuthill_mckee_ordering

    Notes
    -----
    Finding the optimal solution to the bandwidth reduction problem is
    NP-complete [2]_.

    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-72, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    return reversed(list(cuthill_mckee_ordering(G, heuristic=heuristic)))


def connected_cuthill_mckee_ordering(G, heuristic=None):
    # the cuthill mckee algorithm for connected graphs
    if heuristic is None:
        start = pseudo_peripheral_node(G)
    else:
        start = heuristic(G)
    visited = {start}
    queue = deque([start])
    while queue:
        parent = queue.popleft()
        yield parent
        nd = sorted(G.degree(set(G[parent]) - visited), key=itemgetter(1))
        children = [n for n, d in nd]
        visited.update(children)
        queue.extend(children)


def pseudo_peripheral_node(G):
    # helper for cuthill-mckee to find a node in a "pseudo peripheral pair"
    # to use as good starting node
    u = arbitrary_element(G)
    lp = 0
    v = u
    while True:
        spl = dict(nx.shortest_path_length(G, v))
        l = max(spl.values())
        if l <= lp:
            break
        lp = l
        farthest = (n for n, dist in spl.items() if dist == l)
        v, deg = min(G.degree(farthest), key=itemgetter(1))
    return v
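
A small sketch showing how the ordering is typically used to check matrix bandwidth; it assumes numpy and scipy are available (nx.adjacency_matrix needs scipy), and the grid graph plus the bandwidth helper are illustrative choices rather than part of the packaged file:

# Illustrative use of reverse Cuthill-McKee ordering (not part of the packaged file).
import networkx as nx
import numpy as np
from networkx.utils import reverse_cuthill_mckee_ordering


def bandwidth(A):
    # largest |i - j| over the nonzero entries of a dense adjacency matrix
    i, j = np.nonzero(np.asarray(A))
    return int(np.abs(i - j).max())


G = nx.grid_2d_graph(4, 4)  # 16-node grid; node labels are (row, col) tuples
rcm = list(reverse_cuthill_mckee_ordering(G))

A_before = nx.adjacency_matrix(G).todense()
A_after = nx.adjacency_matrix(G, nodelist=rcm).todense()
print("bandwidth, default order:", bandwidth(A_before))
print("bandwidth, RCM order:    ", bandwidth(A_after))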
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (185 Bytes).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-310.pyc
ADDED
Binary file (688 Bytes).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-310.pyc
ADDED
Binary file (2.63 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-310.pyc
ADDED
Binary file (6.44 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-310.pyc
ADDED
Binary file (19.3 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-310.pyc
ADDED
Binary file (3.22 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-310.pyc
ADDED
Binary file (10 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-310.pyc
ADDED
Binary file (8.88 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-310.pyc
ADDED
Binary file (1.31 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-310.pyc
ADDED
Binary file (1.68 kB).
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test__init.py
ADDED
@@ -0,0 +1,11 @@
import pytest


def test_utils_namespace():
    """Ensure objects are not unintentionally exposed in utils namespace."""
    with pytest.raises(ImportError):
        from networkx.utils import nx
    with pytest.raises(ImportError):
        from networkx.utils import sys
    with pytest.raises(ImportError):
        from networkx.utils import defaultdict, deque
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_config.py
ADDED
@@ -0,0 +1,186 @@
import collections
import pickle

import pytest

import networkx as nx
from networkx.utils.configs import Config


# Define this at module level so we can test pickling
class ExampleConfig(Config):
    """Example configuration."""

    x: int
    y: str

    def _check_config(self, key, value):
        if key == "x" and value <= 0:
            raise ValueError("x must be positive")
        if key == "y" and not isinstance(value, str):
            raise TypeError("y must be a str")


class EmptyConfig(Config):
    pass


@pytest.mark.parametrize("cfg", [EmptyConfig(), Config()])
def test_config_empty(cfg):
    assert dir(cfg) == []
    with pytest.raises(AttributeError):
        cfg.x = 1
    with pytest.raises(KeyError):
        cfg["x"] = 1
    with pytest.raises(AttributeError):
        cfg.x
    with pytest.raises(KeyError):
        cfg["x"]
    assert len(cfg) == 0
    assert "x" not in cfg
    assert cfg == cfg
    assert cfg.get("x", 2) == 2
    assert set(cfg.keys()) == set()
    assert set(cfg.values()) == set()
    assert set(cfg.items()) == set()
    cfg2 = pickle.loads(pickle.dumps(cfg))
    assert cfg == cfg2
    assert isinstance(cfg, collections.abc.Collection)
    assert isinstance(cfg, collections.abc.Mapping)


def test_config_subclass():
    with pytest.raises(TypeError, match="missing 2 required keyword-only"):
        ExampleConfig()
    with pytest.raises(ValueError, match="x must be positive"):
        ExampleConfig(x=0, y="foo")
    with pytest.raises(TypeError, match="unexpected keyword"):
        ExampleConfig(x=1, y="foo", z="bad config")
    with pytest.raises(TypeError, match="unexpected keyword"):
        EmptyConfig(z="bad config")
    cfg = ExampleConfig(x=1, y="foo")
    assert cfg.x == 1
    assert cfg["x"] == 1
    assert cfg["y"] == "foo"
    assert cfg.y == "foo"
    assert "x" in cfg
    assert "y" in cfg
    assert "z" not in cfg
    assert len(cfg) == 2
    assert set(iter(cfg)) == {"x", "y"}
    assert set(cfg.keys()) == {"x", "y"}
    assert set(cfg.values()) == {1, "foo"}
    assert set(cfg.items()) == {("x", 1), ("y", "foo")}
    assert dir(cfg) == ["x", "y"]
    cfg.x = 2
    cfg["y"] = "bar"
    assert cfg["x"] == 2
    assert cfg.y == "bar"
    with pytest.raises(TypeError, match="can't be deleted"):
        del cfg.x
    with pytest.raises(TypeError, match="can't be deleted"):
        del cfg["y"]
    assert cfg.x == 2
    assert cfg == cfg
    assert cfg == ExampleConfig(x=2, y="bar")
    assert cfg != ExampleConfig(x=3, y="baz")
    assert cfg != Config(x=2, y="bar")
    with pytest.raises(TypeError, match="y must be a str"):
        cfg["y"] = 5
    with pytest.raises(ValueError, match="x must be positive"):
        cfg.x = -5
    assert cfg.get("x", 10) == 2
    with pytest.raises(AttributeError):
        cfg.z = 5
    with pytest.raises(KeyError):
        cfg["z"] = 5
    with pytest.raises(AttributeError):
        cfg.z
    with pytest.raises(KeyError):
        cfg["z"]
    cfg2 = pickle.loads(pickle.dumps(cfg))
    assert cfg == cfg2
    assert cfg.__doc__ == "Example configuration."
    assert cfg2.__doc__ == "Example configuration."


def test_config_defaults():
    class DefaultConfig(Config):
        x: int = 0
        y: int

    cfg = DefaultConfig(y=1)
    assert cfg.x == 0
    cfg = DefaultConfig(x=2, y=1)
    assert cfg.x == 2


def test_nxconfig():
    assert isinstance(nx.config.backend_priority, list)
    assert isinstance(nx.config.backends, Config)
    with pytest.raises(TypeError, match="must be a list of backend names"):
        nx.config.backend_priority = "nx_loopback"
    with pytest.raises(ValueError, match="Unknown backend when setting"):
        nx.config.backend_priority = ["this_almost_certainly_is_not_a_backend"]
    with pytest.raises(TypeError, match="must be a Config of backend configs"):
        nx.config.backends = {}
    with pytest.raises(TypeError, match="must be a Config of backend configs"):
        nx.config.backends = Config(plausible_backend_name={})
    with pytest.raises(ValueError, match="Unknown backend when setting"):
        nx.config.backends = Config(this_almost_certainly_is_not_a_backend=Config())
    with pytest.raises(TypeError, match="must be True or False"):
        nx.config.cache_converted_graphs = "bad value"


def test_not_strict():
    class FlexibleConfig(Config, strict=False):
        x: int

    cfg = FlexibleConfig(x=1)
    assert "_strict" not in cfg
    assert len(cfg) == 1
    assert list(cfg) == ["x"]
    assert list(cfg.keys()) == ["x"]
    assert list(cfg.values()) == [1]
    assert list(cfg.items()) == [("x", 1)]
    assert cfg.x == 1
    assert cfg["x"] == 1
    assert "x" in cfg
    assert hasattr(cfg, "x")
    assert "FlexibleConfig(x=1)" in repr(cfg)
    assert cfg == FlexibleConfig(x=1)
    del cfg.x
    assert "FlexibleConfig()" in repr(cfg)
    assert len(cfg) == 0
    assert not hasattr(cfg, "x")
    assert "x" not in cfg
    assert not hasattr(cfg, "y")
    assert "y" not in cfg
    cfg.y = 2
    assert len(cfg) == 1
    assert list(cfg) == ["y"]
    assert list(cfg.keys()) == ["y"]
    assert list(cfg.values()) == [2]
    assert list(cfg.items()) == [("y", 2)]
    assert cfg.y == 2
    assert cfg["y"] == 2
    assert hasattr(cfg, "y")
    assert "y" in cfg
    del cfg["y"]
    assert len(cfg) == 0
    assert list(cfg) == []
    with pytest.raises(AttributeError, match="y"):
        del cfg.y
    with pytest.raises(KeyError, match="y"):
        del cfg["y"]
    with pytest.raises(TypeError, match="missing 1 required keyword-only"):
        FlexibleConfig()
    # Be strict when first creating the config object
    with pytest.raises(TypeError, match="unexpected keyword argument 'y'"):
        FlexibleConfig(x=1, y=2)

    class FlexibleConfigWithDefault(Config, strict=False):
        x: int = 0

    assert FlexibleConfigWithDefault().x == 0
    assert FlexibleConfigWithDefault(x=1)["x"] == 1
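
The tests above exercise the Config base class from networkx.utils.configs; a compact usage sketch follows (SolverConfig and its fields are hypothetical, chosen only to mirror the behavior the tests check):

# Illustrative Config subclass, mirroring the patterns tested above
# (not part of the packaged file).
import networkx as nx
from networkx.utils.configs import Config


class SolverConfig(Config):
    """Settings for an imaginary solver (hypothetical example)."""

    tolerance: float = 1e-6
    max_iter: int

    def _check_config(self, key, value):
        if key == "max_iter" and value <= 0:
            raise ValueError("max_iter must be positive")


cfg = SolverConfig(max_iter=100)            # keyword-only construction
cfg.max_iter = 500                          # validated on assignment via _check_config
assert cfg["tolerance"] == 1e-6             # mapping-style access also works
assert set(cfg.keys()) == {"tolerance", "max_iter"}

# The library's global configuration object uses the same machinery:
print(list(nx.config))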
env-llmeval/lib/python3.10/site-packages/networkx/utils/tests/test_decorators.py
ADDED
@@ -0,0 +1,510 @@
+import os
+import pathlib
+import random
+import tempfile
+
+import pytest
+
+import networkx as nx
+from networkx.utils.decorators import (
+    argmap,
+    not_implemented_for,
+    np_random_state,
+    open_file,
+    py_random_state,
+)
+from networkx.utils.misc import PythonRandomInterface, PythonRandomViaNumpyBits
+
+
+def test_not_implemented_decorator():
+    @not_implemented_for("directed")
+    def test_d(G):
+        pass
+
+    test_d(nx.Graph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_d(nx.DiGraph())
+
+    @not_implemented_for("undirected")
+    def test_u(G):
+        pass
+
+    test_u(nx.DiGraph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_u(nx.Graph())
+
+    @not_implemented_for("multigraph")
+    def test_m(G):
+        pass
+
+    test_m(nx.Graph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_m(nx.MultiGraph())
+
+    @not_implemented_for("graph")
+    def test_g(G):
+        pass
+
+    test_g(nx.MultiGraph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_g(nx.Graph())
+
+    # not MultiDiGraph (multiple arguments => AND)
+    @not_implemented_for("directed", "multigraph")
+    def test_not_md(G):
+        pass
+
+    test_not_md(nx.Graph())
+    test_not_md(nx.DiGraph())
+    test_not_md(nx.MultiGraph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_not_md(nx.MultiDiGraph())
+
+    # Graph only (multiple decorators => OR)
+    @not_implemented_for("directed")
+    @not_implemented_for("multigraph")
+    def test_graph_only(G):
+        pass
+
+    test_graph_only(nx.Graph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_graph_only(nx.DiGraph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_graph_only(nx.MultiGraph())
+    with pytest.raises(nx.NetworkXNotImplemented):
+        test_graph_only(nx.MultiDiGraph())
+
+    with pytest.raises(ValueError):
+        not_implemented_for("directed", "undirected")
+
+    with pytest.raises(ValueError):
+        not_implemented_for("multigraph", "graph")
+
+
+def test_not_implemented_decorator_key():
+    with pytest.raises(KeyError):
+
+        @not_implemented_for("foo")
+        def test1(G):
+            pass
+
+        test1(nx.Graph())
+
+
+def test_not_implemented_decorator_raise():
+    with pytest.raises(nx.NetworkXNotImplemented):
+
+        @not_implemented_for("graph")
+        def test1(G):
+            pass
+
+        test1(nx.Graph())
+
+
+class TestOpenFileDecorator:
+    def setup_method(self):
+        self.text = ["Blah... ", "BLAH ", "BLAH!!!!"]
+        self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False)
+        self.name = self.fobj.name
+
+    def teardown_method(self):
+        self.fobj.close()
+        os.unlink(self.name)
+
+    def write(self, path):
+        for text in self.text:
+            path.write(text.encode("ascii"))
+
+    @open_file(1, "r")
+    def read(self, path):
+        return path.readlines()[0]
+
+    @staticmethod
+    @open_file(0, "wb")
+    def writer_arg0(path):
+        path.write(b"demo")
+
+    @open_file(1, "wb+")
+    def writer_arg1(self, path):
+        self.write(path)
+
+    @open_file(2, "wb")
+    def writer_arg2default(self, x, path=None):
+        if path is None:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
+                self.write(fh)
+        else:
+            self.write(path)
+
+    @open_file(4, "wb")
+    def writer_arg4default(self, x, y, other="hello", path=None, **kwargs):
+        if path is None:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
+                self.write(fh)
+        else:
+            self.write(path)
+
+    @open_file("path", "wb")
+    def writer_kwarg(self, **kwargs):
+        path = kwargs.get("path", None)
+        if path is None:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
+                self.write(fh)
+        else:
+            self.write(path)
+
+    def test_writer_arg0_str(self):
+        self.writer_arg0(self.name)
+
+    def test_writer_arg0_fobj(self):
+        self.writer_arg0(self.fobj)
+
+    def test_writer_arg0_pathlib(self):
+        self.writer_arg0(pathlib.Path(self.name))
+
+    def test_writer_arg1_str(self):
+        self.writer_arg1(self.name)
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_arg1_fobj(self):
+        self.writer_arg1(self.fobj)
+        assert not self.fobj.closed
+        self.fobj.close()
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_arg2default_str(self):
+        self.writer_arg2default(0, path=None)
+        self.writer_arg2default(0, path=self.name)
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_arg2default_fobj(self):
+        self.writer_arg2default(0, path=self.fobj)
+        assert not self.fobj.closed
+        self.fobj.close()
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_arg2default_fobj_path_none(self):
+        self.writer_arg2default(0, path=None)
+
+    def test_writer_arg4default_fobj(self):
+        self.writer_arg4default(0, 1, dog="dog", other="other")
+        self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name)
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_kwarg_str(self):
+        self.writer_kwarg(path=self.name)
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_kwarg_fobj(self):
+        self.writer_kwarg(path=self.fobj)
+        self.fobj.close()
+        assert self.read(self.name) == "".join(self.text)
+
+    def test_writer_kwarg_path_none(self):
+        self.writer_kwarg(path=None)
+
+
+class TestRandomState:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+
+    @np_random_state(1)
+    def instantiate_np_random_state(self, random_state):
+        allowed = (np.random.RandomState, np.random.Generator)
+        assert isinstance(random_state, allowed)
+        return random_state.random()
+
+    @py_random_state(1)
+    def instantiate_py_random_state(self, random_state):
+        allowed = (random.Random, PythonRandomInterface, PythonRandomViaNumpyBits)
+        assert isinstance(random_state, allowed)
+        return random_state.random()
+
+    def test_random_state_None(self):
+        np.random.seed(42)
+        rv = np.random.random()
+        np.random.seed(42)
+        assert rv == self.instantiate_np_random_state(None)
+
+        random.seed(42)
+        rv = random.random()
+        random.seed(42)
+        assert rv == self.instantiate_py_random_state(None)
+
+    def test_random_state_np_random(self):
+        np.random.seed(42)
+        rv = np.random.random()
+        np.random.seed(42)
+        assert rv == self.instantiate_np_random_state(np.random)
+        np.random.seed(42)
+        assert rv == self.instantiate_py_random_state(np.random)
+
+    def test_random_state_int(self):
+        np.random.seed(42)
+        np_rv = np.random.random()
+        random.seed(42)
+        py_rv = random.random()
+
+        np.random.seed(42)
+        seed = 1
+        rval = self.instantiate_np_random_state(seed)
+        rval_expected = np.random.RandomState(seed).rand()
+        assert rval == rval_expected
+        # test that global seed wasn't changed in function
+        assert np_rv == np.random.random()
+
+        random.seed(42)
+        rval = self.instantiate_py_random_state(seed)
+        rval_expected = random.Random(seed).random()
+        assert rval == rval_expected
+        # test that global seed wasn't changed in function
+        assert py_rv == random.random()
+
+    def test_random_state_np_random_Generator(self):
+        np.random.seed(42)
+        np_rv = np.random.random()
+        np.random.seed(42)
+        seed = 1
+
+        rng = np.random.default_rng(seed)
+        rval = self.instantiate_np_random_state(rng)
+        rval_expected = np.random.default_rng(seed).random()
+        assert rval == rval_expected
+
+        rval = self.instantiate_py_random_state(rng)
+        rval_expected = np.random.default_rng(seed).random(size=2)[1]
+        assert rval == rval_expected
+        # test that global seed wasn't changed in function
+        assert np_rv == np.random.random()
+
+    def test_random_state_np_random_RandomState(self):
+        np.random.seed(42)
+        np_rv = np.random.random()
+        np.random.seed(42)
+        seed = 1
+
+        rng = np.random.RandomState(seed)
+        rval = self.instantiate_np_random_state(rng)
+        rval_expected = np.random.RandomState(seed).random()
+        assert rval == rval_expected
+
+        rval = self.instantiate_py_random_state(rng)
+        rval_expected = np.random.RandomState(seed).random(size=2)[1]
+        assert rval == rval_expected
+        # test that global seed wasn't changed in function
+        assert np_rv == np.random.random()
+
+    def test_random_state_py_random(self):
+        seed = 1
+        rng = random.Random(seed)
+        rv = self.instantiate_py_random_state(rng)
+        assert rv == random.Random(seed).random()
+
+        pytest.raises(ValueError, self.instantiate_np_random_state, rng)
+
+
+def test_random_state_string_arg_index():
+    with pytest.raises(nx.NetworkXError):
+
+        @np_random_state("a")
+        def make_random_state(rs):
+            pass
+
+        rstate = make_random_state(1)
+
+
+def test_py_random_state_string_arg_index():
+    with pytest.raises(nx.NetworkXError):
+
+        @py_random_state("a")
+        def make_random_state(rs):
+            pass
+
+        rstate = make_random_state(1)
+
+
+def test_random_state_invalid_arg_index():
+    with pytest.raises(nx.NetworkXError):
+
+        @np_random_state(2)
+        def make_random_state(rs):
+            pass
+
+        rstate = make_random_state(1)
+
+
+def test_py_random_state_invalid_arg_index():
+    with pytest.raises(nx.NetworkXError):
+
+        @py_random_state(2)
+        def make_random_state(rs):
+            pass
+
+        rstate = make_random_state(1)
+
+
+class TestArgmap:
+    class ArgmapError(RuntimeError):
+        pass
+
+    def test_trivial_function(self):
+        def do_not_call(x):
+            raise ArgmapError("do not call this function")
+
+        @argmap(do_not_call)
+        def trivial_argmap():
+            return 1
+
+        assert trivial_argmap() == 1
+
+    def test_trivial_iterator(self):
+        def do_not_call(x):
+            raise ArgmapError("do not call this function")
+
+        @argmap(do_not_call)
+        def trivial_argmap():
+            yield from (1, 2, 3)
+
+        assert tuple(trivial_argmap()) == (1, 2, 3)
+
+    def test_contextmanager(self):
+        container = []
+
+        def contextmanager(x):
+            nonlocal container
+            return x, lambda: container.append(x)
+
+        @argmap(contextmanager, 0, 1, 2, try_finally=True)
+        def foo(x, y, z):
+            return x, y, z
+
+        x, y, z = foo("a", "b", "c")
+
+        # context exits are called in reverse
+        assert container == ["c", "b", "a"]
+
+    def test_tryfinally_generator(self):
+        container = []
+
+        def singleton(x):
+            return (x,)
+
+        with pytest.raises(nx.NetworkXError):
+
+            @argmap(singleton, 0, 1, 2, try_finally=True)
+            def foo(x, y, z):
+                yield from (x, y, z)
+
+        @argmap(singleton, 0, 1, 2)
+        def foo(x, y, z):
+            return x + y + z
+
+        q = foo("a", "b", "c")
+
+        assert q == ("a", "b", "c")
+
+    def test_actual_vararg(self):
+        @argmap(lambda x: -x, 4)
+        def foo(x, y, *args):
+            return (x, y) + tuple(args)
+
+        assert foo(1, 2, 3, 4, 5, 6) == (1, 2, 3, 4, -5, 6)
+
+    def test_signature_destroying_intermediate_decorator(self):
+        def add_one_to_first_bad_decorator(f):
+            """Bad because it doesn't wrap the f signature (clobbers it)"""
+
+            def decorated(a, *args, **kwargs):
+                return f(a + 1, *args, **kwargs)
+
+            return decorated
+
+        add_two_to_second = argmap(lambda b: b + 2, 1)
+
+        @add_two_to_second
+        @add_one_to_first_bad_decorator
+        def add_one_and_two(a, b):
+            return a, b
+
+        assert add_one_and_two(5, 5) == (6, 7)
+
+    def test_actual_kwarg(self):
+        @argmap(lambda x: -x, "arg")
+        def foo(*, arg):
+            return arg
+
+        assert foo(arg=3) == -3
+
+    def test_nested_tuple(self):
+        def xform(x, y):
+            u, v = y
+            return x + u + v, (x + u, x + v)
+
+        # we're testing args and kwargs here, too
+        @argmap(xform, (0, ("t", 2)))
+        def foo(a, *args, **kwargs):
+            return a, args, kwargs
+
+        a, args, kwargs = foo(1, 2, 3, t=4)
+
+        assert a == 1 + 4 + 3
+        assert args == (2, 1 + 3)
+        assert kwargs == {"t": 1 + 4}
+
+    def test_flatten(self):
+        assert tuple(argmap._flatten([[[[[], []], [], []], [], [], []]], set())) == ()
+
+        rlist = ["a", ["b", "c"], [["d"], "e"], "f"]
+        assert "".join(argmap._flatten(rlist, set())) == "abcdef"
+
+    def test_indent(self):
+        code = "\n".join(
+            argmap._indent(
+                *[
+                    "try:",
+                    "try:",
+                    "pass#",
+                    "finally:",
+                    "pass#",
+                    "#",
+                    "finally:",
+                    "pass#",
+                ]
+            )
+        )
+        assert (
+            code
+            == """try:
+ try:
+  pass#
+ finally:
+  pass#
+ #
+finally:
+ pass#"""
+        )
+
+    def test_immediate_raise(self):
+        @not_implemented_for("directed")
+        def yield_nodes(G):
+            yield from G
+
+        G = nx.Graph([(1, 2)])
+        D = nx.DiGraph()
+
+        # test first call (argmap is compiled and executed)
+        with pytest.raises(nx.NetworkXNotImplemented):
+            node_iter = yield_nodes(D)
+
+        # test second call (argmap is only executed)
+        with pytest.raises(nx.NetworkXNotImplemented):
+            node_iter = yield_nodes(D)
+
+        # ensure that generators still make generators
+        node_iter = yield_nodes(G)
+        next(node_iter)
+        next(node_iter)
+        with pytest.raises(StopIteration):
+            next(node_iter)
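A short usage sketch of the decorators exercised in this module; illustrative only: the function random_node below is hypothetical (not part of networkx), and the sketch assumes py_random_state accepts a keyword name (as tested above) and normalizes an int seed to a random.Random-compatible object:

    import networkx as nx
    from networkx.utils.decorators import not_implemented_for, py_random_state

    @not_implemented_for("directed")   # raises nx.NetworkXNotImplemented for directed graphs
    @py_random_state("seed")           # seed=42 becomes a Random-like RNG before the call
    def random_node(G, seed=None):     # hypothetical helper, for illustration only
        return seed.choice(list(G))

    G = nx.path_graph(3)
    print(random_node(G, seed=42))     # deterministic for a fixed seed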