mehhl committed
Commit ef5ff9c · verified · 1 Parent(s): b7940a9

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. temp_venv/lib/python3.13/site-packages/IPython/__pycache__/__init__.cpython-313.pyc +0 -0
  2. temp_venv/lib/python3.13/site-packages/IPython/__pycache__/display.cpython-313.pyc +0 -0
  3. temp_venv/lib/python3.13/site-packages/IPython/__pycache__/paths.cpython-313.pyc +0 -0
  4. temp_venv/lib/python3.13/site-packages/IPython/extensions/__init__.py +2 -0
  5. temp_venv/lib/python3.13/site-packages/IPython/extensions/__pycache__/__init__.cpython-313.pyc +0 -0
  6. temp_venv/lib/python3.13/site-packages/IPython/extensions/__pycache__/storemagic.cpython-313.pyc +0 -0
  7. temp_venv/lib/python3.13/site-packages/IPython/extensions/autoreload.py +752 -0
  8. temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/__init__.py +0 -0
  9. temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/deduperreload.py +608 -0
  10. temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/deduperreload_patching.py +141 -0
  11. temp_venv/lib/python3.13/site-packages/IPython/extensions/storemagic.py +236 -0
  12. temp_venv/lib/python3.13/site-packages/IPython/external/__init__.py +7 -0
  13. temp_venv/lib/python3.13/site-packages/IPython/external/__pycache__/__init__.cpython-313.pyc +0 -0
  14. temp_venv/lib/python3.13/site-packages/IPython/external/__pycache__/pickleshare.cpython-313.pyc +0 -0
  15. temp_venv/lib/python3.13/site-packages/IPython/external/pickleshare.py +361 -0
  16. temp_venv/lib/python3.13/site-packages/IPython/external/qt_for_kernel.py +124 -0
  17. temp_venv/lib/python3.13/site-packages/IPython/external/qt_loaders.py +423 -0
  18. temp_venv/lib/python3.13/site-packages/IPython/lib/__init__.py +11 -0
  19. temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/__init__.cpython-313.pyc +0 -0
  20. temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/clipboard.cpython-313.pyc +0 -0
  21. temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/display.cpython-313.pyc +0 -0
  22. temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/pretty.cpython-313.pyc +0 -0
  23. temp_venv/lib/python3.13/site-packages/IPython/lib/backgroundjobs.py +491 -0
  24. temp_venv/lib/python3.13/site-packages/IPython/lib/clipboard.py +102 -0
  25. temp_venv/lib/python3.13/site-packages/IPython/lib/deepreload.py +310 -0
  26. temp_venv/lib/python3.13/site-packages/IPython/lib/demo.py +672 -0
  27. temp_venv/lib/python3.13/site-packages/IPython/lib/display.py +677 -0
  28. temp_venv/lib/python3.13/site-packages/IPython/lib/editorhooks.py +127 -0
  29. temp_venv/lib/python3.13/site-packages/IPython/lib/guisupport.py +155 -0
  30. temp_venv/lib/python3.13/site-packages/IPython/lib/latextools.py +257 -0
  31. temp_venv/lib/python3.13/site-packages/IPython/lib/lexers.py +32 -0
  32. temp_venv/lib/python3.13/site-packages/IPython/lib/pretty.py +954 -0
  33. temp_venv/lib/python3.13/site-packages/IPython/testing/__init__.py +20 -0
  34. temp_venv/lib/python3.13/site-packages/IPython/testing/__pycache__/__init__.cpython-313.pyc +0 -0
  35. temp_venv/lib/python3.13/site-packages/IPython/testing/__pycache__/skipdoctest.cpython-313.pyc +0 -0
  36. temp_venv/lib/python3.13/site-packages/IPython/testing/decorators.py +148 -0
  37. temp_venv/lib/python3.13/site-packages/IPython/testing/globalipapp.py +114 -0
  38. temp_venv/lib/python3.13/site-packages/IPython/testing/ipunittest.py +187 -0
  39. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/__init__.py +0 -0
  40. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/dtexample.py +167 -0
  41. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/ipdoctest.py +299 -0
  42. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/pytest_ipdoctest.py +880 -0
  43. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/setup.py +18 -0
  44. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/simple.py +45 -0
  45. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/simplevars.py +2 -0
  46. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_combo.txt +36 -0
  47. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_example.txt +24 -0
  48. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_exampleip.txt +30 -0
  49. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_ipdoctest.py +92 -0
  50. temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_refs.py +39 -0
temp_venv/lib/python3.13/site-packages/IPython/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (4.82 kB).
 
temp_venv/lib/python3.13/site-packages/IPython/__pycache__/display.cpython-313.pyc ADDED
Binary file (905 Bytes).
 
temp_venv/lib/python3.13/site-packages/IPython/__pycache__/paths.cpython-313.pyc ADDED
Binary file (5.69 kB).
 
temp_venv/lib/python3.13/site-packages/IPython/extensions/__init__.py ADDED
@@ -0,0 +1,2 @@
+ # -*- coding: utf-8 -*-
+ """This directory is meant for IPython extensions."""
temp_venv/lib/python3.13/site-packages/IPython/extensions/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (268 Bytes).
 
temp_venv/lib/python3.13/site-packages/IPython/extensions/__pycache__/storemagic.cpython-313.pyc ADDED
Binary file (10 kB).
 
temp_venv/lib/python3.13/site-packages/IPython/extensions/autoreload.py ADDED
@@ -0,0 +1,752 @@
1
+ """IPython extension to reload modules before executing user code.
2
+
3
+ ``autoreload`` reloads modules automatically before entering the execution of
4
+ code typed at the IPython prompt.
5
+
6
+ This makes for example the following workflow possible:
7
+
8
+ .. sourcecode:: ipython
9
+
10
+ In [1]: %load_ext autoreload
11
+
12
+ In [2]: %autoreload 2
13
+
14
+ In [3]: from foo import some_function
15
+
16
+ In [4]: some_function()
17
+ Out[4]: 42
18
+
19
+ In [5]: # open foo.py in an editor and change some_function to return 43
20
+
21
+ In [6]: some_function()
22
+ Out[6]: 43
23
+
24
+ The module was reloaded without reloading it explicitly, and the object
25
+ imported with ``from foo import ...`` was also updated.
26
+
27
+ Usage
28
+ =====
29
+
30
+ The following magic commands are provided:
31
+
32
+ ``%autoreload``, ``%autoreload now``
33
+
34
+ Reload all modules (except those excluded by ``%aimport``)
35
+ automatically now.
36
+
37
+ ``%autoreload 0``, ``%autoreload off``
38
+
39
+ Disable automatic reloading.
40
+
41
+ ``%autoreload 1``, ``%autoreload explicit``
42
+
43
+ Reload all modules imported with ``%aimport`` every time before
44
+ executing the Python code typed.
45
+
46
+ ``%autoreload 2``, ``%autoreload all``
47
+
48
+ Reload all modules (except those excluded by ``%aimport``) every
49
+ time before executing the Python code typed.
50
+
51
+ ``%autoreload 3``, ``%autoreload complete``
52
+
53
+ Same as 2/all, but also adds any new objects in the module. See
54
+ unit test at IPython/extensions/tests/test_autoreload.py::test_autoload_newly_added_objects
55
+
56
+ Adding ``--print`` or ``-p`` to the ``%autoreload`` line will print autoreload activity to
57
+ standard out. ``--log`` or ``-l`` will do it to the log at INFO level; both can be used
58
+ simultaneously.
59
+
60
+ ``%aimport``
61
+
62
+ List modules which are to be automatically imported or not to be imported.
63
+
64
+ ``%aimport foo``
65
+
66
+ Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
67
+
68
+ ``%aimport foo, bar``
69
+
70
+ Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1``
71
+
72
+ ``%aimport -foo``
73
+
74
+ Mark module 'foo' to not be autoreloaded.
75
+
76
+ Caveats
77
+ =======
78
+
79
+ Reloading Python modules in a reliable way is in general difficult,
80
+ and unexpected things may occur. ``%autoreload`` tries to work around
81
+ common pitfalls by replacing function code objects and parts of
82
+ classes previously in the module with new versions. This makes the
83
+ following things to work:
84
+
85
+ - Functions and classes imported via 'from xxx import foo' are upgraded
86
+ to new versions when 'xxx' is reloaded.
87
+
88
+ - Methods and properties of classes are upgraded on reload, so that
89
+ calling 'c.foo()' on an object 'c' created before the reload causes
90
+ the new code for 'foo' to be executed.
91
+
92
+ Some of the known remaining caveats are:
93
+
94
+ - Replacing code objects does not always succeed: changing a @property
95
+ in a class to an ordinary method or a method to a member variable
96
+ can cause problems (but in old objects only).
97
+
98
+ - Functions that are removed (eg. via monkey-patching) from a module
99
+ before it is reloaded are not upgraded.
100
+
101
+ - C extension modules cannot be reloaded, and so cannot be autoreloaded.
102
+
103
+ - While comparing Enum and Flag, the 'is' Identity Operator is used (even in the case '==' has been used (Similar to the 'None' keyword)).
104
+
105
+ - Reloading a module, or importing the same module by a different name, creates new Enums. These may look the same, but are not.
106
+ """
107
+
108
+ from IPython.core import magic_arguments
109
+ from IPython.core.magic import Magics, magics_class, line_magic
110
+ from IPython.extensions.deduperreload.deduperreload import DeduperReloader
111
+
112
+ __skip_doctest__ = True
113
+
114
+ # -----------------------------------------------------------------------------
115
+ # Copyright (C) 2000 Thomas Heller
116
+ # Copyright (C) 2008 Pauli Virtanen <[email protected]>
117
+ # Copyright (C) 2012 The IPython Development Team
118
+ #
119
+ # Distributed under the terms of the BSD License. The full license is in
120
+ # the file COPYING, distributed as part of this software.
121
+ # -----------------------------------------------------------------------------
122
+ #
123
+ # This IPython module is written by Pauli Virtanen, based on the autoreload
124
+ # code by Thomas Heller.
125
+
126
+ # -----------------------------------------------------------------------------
127
+ # Imports
128
+ # -----------------------------------------------------------------------------
129
+
130
+ import os
131
+ import sys
132
+ import traceback
133
+ import types
134
+ import weakref
135
+ import gc
136
+ import logging
137
+ from importlib import import_module, reload
138
+ from importlib.util import source_from_cache
139
+
140
+ # ------------------------------------------------------------------------------
141
+ # Autoreload functionality
142
+ # ------------------------------------------------------------------------------
143
+
144
+
145
+ class ModuleReloader:
146
+ enabled = False
147
+ """Whether this reloader is enabled"""
148
+
149
+ check_all = True
150
+ """Autoreload all modules, not just those listed in 'modules'"""
151
+
152
+ autoload_obj = False
153
+ """Autoreload all modules AND autoload all new objects"""
154
+
155
+ def __init__(self, shell=None):
156
+ # Modules that failed to reload: {module: mtime-on-failed-reload, ...}
157
+ self.failed = {}
158
+ # Modules specially marked as autoreloadable.
159
+ self.modules = {}
160
+ # Modules specially marked as not autoreloadable.
161
+ self.skip_modules = {}
162
+ # (module-name, name) -> weakref, for replacing old code objects
163
+ self.old_objects = {}
164
+ # Module modification timestamps
165
+ self.modules_mtimes = {}
166
+ self.shell = shell
167
+
168
+ # Reporting callable for verbosity
169
+ self._report = lambda msg: None # by default, be quiet.
170
+
171
+ # Deduper reloader
172
+ self.deduper_reloader = DeduperReloader()
173
+
174
+ # Cache module modification times
175
+ self.check(check_all=True, do_reload=False)
176
+
177
+ # To hide autoreload errors
178
+ self.hide_errors = False
179
+
180
+ def mark_module_skipped(self, module_name):
181
+ """Skip reloading the named module in the future"""
182
+ try:
183
+ del self.modules[module_name]
184
+ except KeyError:
185
+ pass
186
+ self.skip_modules[module_name] = True
187
+
188
+ def mark_module_reloadable(self, module_name):
189
+ """Reload the named module in the future (if it is imported)"""
190
+ try:
191
+ del self.skip_modules[module_name]
192
+ except KeyError:
193
+ pass
194
+ self.modules[module_name] = True
195
+
196
+ def aimport_module(self, module_name):
197
+ """Import a module, and mark it reloadable
198
+
199
+ Returns
200
+ -------
201
+ top_module : module
202
+ The imported module if it is top-level, or the top-level
203
+ top_name : module
204
+ Name of top_module
205
+
206
+ """
207
+ self.mark_module_reloadable(module_name)
208
+
209
+ import_module(module_name)
210
+ top_name = module_name.split(".")[0]
211
+ top_module = sys.modules[top_name]
212
+ return top_module, top_name
213
+
214
+ def filename_and_mtime(self, module):
215
+ if not hasattr(module, "__file__") or module.__file__ is None:
216
+ return None, None
217
+
218
+ if getattr(module, "__name__", None) in [None, "__mp_main__", "__main__"]:
219
+ # we cannot reload(__main__) or reload(__mp_main__)
220
+ return None, None
221
+
222
+ filename = module.__file__
223
+ path, ext = os.path.splitext(filename)
224
+
225
+ if ext.lower() == ".py":
226
+ py_filename = filename
227
+ else:
228
+ try:
229
+ py_filename = source_from_cache(filename)
230
+ except ValueError:
231
+ return None, None
232
+
233
+ try:
234
+ pymtime = os.stat(py_filename).st_mtime
235
+ except OSError:
236
+ return None, None
237
+
238
+ return py_filename, pymtime
239
+
240
+ def check(self, check_all=False, do_reload=True):
241
+ """Check whether some modules need to be reloaded."""
242
+
243
+ if not self.enabled and not check_all:
244
+ return
245
+
246
+ if check_all or self.check_all:
247
+ modules = list(sys.modules.keys())
248
+ else:
249
+ modules = list(self.modules.keys())
250
+
251
+ for modname in modules:
252
+ m = sys.modules.get(modname, None)
253
+
254
+ if modname in self.skip_modules:
255
+ continue
256
+
257
+ py_filename, pymtime = self.filename_and_mtime(m)
258
+ if py_filename is None:
259
+ continue
260
+
261
+ try:
262
+ if pymtime <= self.modules_mtimes[modname]:
263
+ continue
264
+ except KeyError:
265
+ self.modules_mtimes[modname] = pymtime
266
+ continue
267
+ else:
268
+ if self.failed.get(py_filename, None) == pymtime:
269
+ continue
270
+
271
+ self.modules_mtimes[modname] = pymtime
272
+
273
+ # If we've reached this point, we should try to reload the module
274
+ if do_reload:
275
+ self._report(f"Reloading '{modname}'.")
276
+ try:
277
+ if self.autoload_obj:
278
+ superreload(m, reload, self.old_objects, self.shell)
279
+ # if not using autoload, check if deduperreload is viable for this module
280
+ elif self.deduper_reloader.maybe_reload_module(m):
281
+ pass
282
+ else:
283
+ superreload(m, reload, self.old_objects)
284
+ if py_filename in self.failed:
285
+ del self.failed[py_filename]
286
+ except:
287
+ if not self.hide_errors:
288
+ print(
289
+ "[autoreload of {} failed: {}]".format(
290
+ modname, traceback.format_exc(10)
291
+ ),
292
+ file=sys.stderr,
293
+ )
294
+ self.failed[py_filename] = pymtime
295
+ self.deduper_reloader.update_sources()
296
+
297
+
298
+ # ------------------------------------------------------------------------------
299
+ # superreload
300
+ # ------------------------------------------------------------------------------
301
+
302
+
303
+ func_attrs = [
304
+ "__code__",
305
+ "__defaults__",
306
+ "__doc__",
307
+ "__closure__",
308
+ "__globals__",
309
+ "__dict__",
310
+ ]
311
+
312
+
313
+ def update_function(old, new):
314
+ """Upgrade the code object of a function"""
315
+ for name in func_attrs:
316
+ try:
317
+ setattr(old, name, getattr(new, name))
318
+ except (AttributeError, TypeError):
319
+ pass
320
+
321
+
322
+ def update_instances(old, new):
323
+ """Use garbage collector to find all instances that refer to the old
324
+ class definition and update their __class__ to point to the new class
325
+ definition"""
326
+
327
+ refs = gc.get_referrers(old)
328
+
329
+ for ref in refs:
330
+ if type(ref) is old:
331
+ object.__setattr__(ref, "__class__", new)
332
+
333
+
334
+ def update_class(old, new):
335
+ """Replace stuff in the __dict__ of a class, and upgrade
336
+ method code objects, and add new methods, if any"""
337
+ for key in list(old.__dict__.keys()):
338
+ old_obj = getattr(old, key)
339
+ try:
340
+ new_obj = getattr(new, key)
341
+ # explicitly checking that comparison returns True to handle
342
+ # cases where `==` doesn't return a boolean.
343
+ if (old_obj == new_obj) is True:
344
+ continue
345
+ except AttributeError:
346
+ # obsolete attribute: remove it
347
+ try:
348
+ delattr(old, key)
349
+ except (AttributeError, TypeError):
350
+ pass
351
+ continue
352
+ except ValueError:
353
+ # can't compare nested structures containing
354
+ # numpy arrays using `==`
355
+ pass
356
+
357
+ if update_generic(old_obj, new_obj):
358
+ continue
359
+
360
+ try:
361
+ setattr(old, key, getattr(new, key))
362
+ except (AttributeError, TypeError):
363
+ pass # skip non-writable attributes
364
+
365
+ for key in list(new.__dict__.keys()):
366
+ if key not in list(old.__dict__.keys()):
367
+ try:
368
+ setattr(old, key, getattr(new, key))
369
+ except (AttributeError, TypeError):
370
+ pass # skip non-writable attributes
371
+
372
+ # update all instances of class
373
+ update_instances(old, new)
374
+
375
+
376
+ def update_property(old, new):
377
+ """Replace get/set/del functions of a property"""
378
+ update_generic(old.fdel, new.fdel)
379
+ update_generic(old.fget, new.fget)
380
+ update_generic(old.fset, new.fset)
381
+
382
+
383
+ def isinstance2(a, b, typ):
384
+ return isinstance(a, typ) and isinstance(b, typ)
385
+
386
+
387
+ UPDATE_RULES = [
388
+ (lambda a, b: isinstance2(a, b, type), update_class),
389
+ (lambda a, b: isinstance2(a, b, types.FunctionType), update_function),
390
+ (lambda a, b: isinstance2(a, b, property), update_property),
391
+ ]
392
+ UPDATE_RULES.extend(
393
+ [
394
+ (
395
+ lambda a, b: isinstance2(a, b, types.MethodType),
396
+ lambda a, b: update_function(a.__func__, b.__func__),
397
+ ),
398
+ ]
399
+ )
400
+
401
+
402
+ def update_generic(a, b):
403
+ for type_check, update in UPDATE_RULES:
404
+ if type_check(a, b):
405
+ update(a, b)
406
+ return True
407
+ return False
408
+
409
+
410
+ class StrongRef:
411
+ def __init__(self, obj):
412
+ self.obj = obj
413
+
414
+ def __call__(self):
415
+ return self.obj
416
+
417
+
418
+ mod_attrs = [
419
+ "__name__",
420
+ "__doc__",
421
+ "__package__",
422
+ "__loader__",
423
+ "__spec__",
424
+ "__file__",
425
+ "__cached__",
426
+ "__builtins__",
427
+ ]
428
+
429
+
430
+ def append_obj(module, d, name, obj, autoload=False):
431
+ in_module = hasattr(obj, "__module__") and obj.__module__ == module.__name__
432
+ if autoload:
433
+ # check needed for module global built-ins
434
+ if not in_module and name in mod_attrs:
435
+ return False
436
+ else:
437
+ if not in_module:
438
+ return False
439
+
440
+ key = (module.__name__, name)
441
+ try:
442
+ d.setdefault(key, []).append(weakref.ref(obj))
443
+ except TypeError:
444
+ pass
445
+ return True
446
+
447
+
448
+ def superreload(module, reload=reload, old_objects=None, shell=None):
449
+ """Enhanced version of the builtin reload function.
450
+
451
+ superreload remembers objects previously in the module, and
452
+
453
+ - upgrades the class dictionary of every old class in the module
454
+ - upgrades the code object of every old function and method
455
+ - clears the module's namespace before reloading
456
+
457
+ """
458
+ if old_objects is None:
459
+ old_objects = {}
460
+
461
+ # collect old objects in the module
462
+ for name, obj in list(module.__dict__.items()):
463
+ if not append_obj(module, old_objects, name, obj):
464
+ continue
465
+ key = (module.__name__, name)
466
+ try:
467
+ old_objects.setdefault(key, []).append(weakref.ref(obj))
468
+ except TypeError:
469
+ pass
470
+
471
+ # reload module
472
+ try:
473
+ # clear namespace first from old cruft
474
+ old_dict = module.__dict__.copy()
475
+ old_name = module.__name__
476
+ module.__dict__.clear()
477
+ module.__dict__["__name__"] = old_name
478
+ module.__dict__["__loader__"] = old_dict["__loader__"]
479
+ except (TypeError, AttributeError, KeyError):
480
+ pass
481
+
482
+ try:
483
+ module = reload(module)
484
+ except:
485
+ # restore module dictionary on failed reload
486
+ module.__dict__.update(old_dict)
487
+ raise
488
+
489
+ # iterate over all objects and update functions & classes
490
+ for name, new_obj in list(module.__dict__.items()):
491
+ key = (module.__name__, name)
492
+ if key not in old_objects:
493
+ # here 'shell' acts both as a flag and as an output var
494
+ if (
495
+ shell is None
496
+ or name == "Enum"
497
+ or not append_obj(module, old_objects, name, new_obj, True)
498
+ ):
499
+ continue
500
+ shell.user_ns[name] = new_obj
501
+
502
+ new_refs = []
503
+ for old_ref in old_objects[key]:
504
+ old_obj = old_ref()
505
+ if old_obj is None:
506
+ continue
507
+ new_refs.append(old_ref)
508
+ update_generic(old_obj, new_obj)
509
+
510
+ if new_refs:
511
+ old_objects[key] = new_refs
512
+ else:
513
+ del old_objects[key]
514
+
515
+ return module
516
+
517
+
518
+ # ------------------------------------------------------------------------------
519
+ # IPython connectivity
520
+ # ------------------------------------------------------------------------------
521
+
522
+
523
+ @magics_class
524
+ class AutoreloadMagics(Magics):
525
+ def __init__(self, *a, **kw):
526
+ super().__init__(*a, **kw)
527
+ self._reloader = ModuleReloader(self.shell)
528
+ self._reloader.check_all = False
529
+ self._reloader.autoload_obj = False
530
+ self.loaded_modules = set(sys.modules)
531
+
532
+ @line_magic
533
+ @magic_arguments.magic_arguments()
534
+ @magic_arguments.argument(
535
+ "mode",
536
+ type=str,
537
+ default="now",
538
+ nargs="?",
539
+ help="""blank or 'now' - Reload all modules (except those excluded by %%aimport)
540
+ automatically now.
541
+
542
+ '0' or 'off' - Disable automatic reloading.
543
+
544
+ '1' or 'explicit' - Reload only modules imported with %%aimport every
545
+ time before executing the Python code typed.
546
+
547
+ '2' or 'all' - Reload all modules (except those excluded by %%aimport)
548
+ every time before executing the Python code typed.
549
+
550
+ '3' or 'complete' - Same as 2/all, but also adds any new
551
+ objects in the module.
552
+
553
+ By default, a newer autoreload algorithm that diffs the module's source code
554
+ with the previous version and only reloads changed parts is applied for modes
555
+ 2 and below. To use the original algorithm, add the `-` suffix to the mode,
556
+ e.g. '%autoreload 2-', or pass in --full.
557
+ """,
558
+ )
559
+ @magic_arguments.argument(
560
+ "-p",
561
+ "--print",
562
+ action="store_true",
563
+ default=False,
564
+ help="Show autoreload activity using `print` statements",
565
+ )
566
+ @magic_arguments.argument(
567
+ "-l",
568
+ "--log",
569
+ action="store_true",
570
+ default=False,
571
+ help="Show autoreload activity using the logger",
572
+ )
573
+ @magic_arguments.argument(
574
+ "--hide-errors",
575
+ action="store_true",
576
+ default=False,
577
+ help="Hide autoreload errors",
578
+ )
579
+ @magic_arguments.argument(
580
+ "--full",
581
+ action="store_true",
582
+ default=False,
583
+ help="Don't ever use new diffing algorithm",
584
+ )
585
+ def autoreload(self, line=""):
586
+ r"""%autoreload => Reload modules automatically
587
+
588
+ %autoreload or %autoreload now
589
+ Reload all modules (except those excluded by %aimport) automatically
590
+ now.
591
+
592
+ %autoreload 0 or %autoreload off
593
+ Disable automatic reloading.
594
+
595
+ %autoreload 1 or %autoreload explicit
596
+ Reload only modules imported with %aimport every time before executing
597
+ the Python code typed.
598
+
599
+ %autoreload 2 or %autoreload all
600
+ Reload all modules (except those excluded by %aimport) every time
601
+ before executing the Python code typed.
602
+
603
+ %autoreload 3 or %autoreload complete
604
+ Same as 2/all, but also but also adds any new objects in the module. See
605
+ unit test at IPython/extensions/tests/test_autoreload.py::test_autoload_newly_added_objects
606
+
607
+ The optional arguments --print and --log control display of autoreload activity. The default
608
+ is to act silently; --print (or -p) will print out the names of modules that are being
609
+ reloaded, and --log (or -l) outputs them to the log at INFO level.
610
+
611
+ The optional argument --hide-errors hides any errors that can happen when trying to
612
+ reload code.
613
+
614
+ Reloading Python modules in a reliable way is in general
615
+ difficult, and unexpected things may occur. %autoreload tries to
616
+ work around common pitfalls by replacing function code objects and
617
+ parts of classes previously in the module with new versions. This
618
+ makes the following things to work:
619
+
620
+ - Functions and classes imported via 'from xxx import foo' are upgraded
621
+ to new versions when 'xxx' is reloaded.
622
+
623
+ - Methods and properties of classes are upgraded on reload, so that
624
+ calling 'c.foo()' on an object 'c' created before the reload causes
625
+ the new code for 'foo' to be executed.
626
+
627
+ Some of the known remaining caveats are:
628
+
629
+ - Replacing code objects does not always succeed: changing a @property
630
+ in a class to an ordinary method or a method to a member variable
631
+ can cause problems (but in old objects only).
632
+
633
+ - Functions that are removed (eg. via monkey-patching) from a module
634
+ before it is reloaded are not upgraded.
635
+
636
+ - C extension modules cannot be reloaded, and so cannot be
637
+ autoreloaded.
638
+
639
+ """
640
+ args = magic_arguments.parse_argstring(self.autoreload, line)
641
+ mode = args.mode.lower()
642
+
643
+ enable_deduperreload = not args.full
644
+ if mode.endswith("-"):
645
+ enable_deduperreload = False
646
+ mode = mode[:-1]
647
+ self._reloader.deduper_reloader.enabled = enable_deduperreload
648
+
649
+ p = print
650
+
651
+ logger = logging.getLogger("autoreload")
652
+
653
+ l = logger.info
654
+
655
+ def pl(msg):
656
+ p(msg)
657
+ l(msg)
658
+
659
+ if args.print is False and args.log is False:
660
+ self._reloader._report = lambda msg: None
661
+ elif args.print is True:
662
+ if args.log is True:
663
+ self._reloader._report = pl
664
+ else:
665
+ self._reloader._report = p
666
+ elif args.log is True:
667
+ self._reloader._report = l
668
+
669
+ self._reloader.hide_errors = args.hide_errors
670
+
671
+ if mode == "" or mode == "now":
672
+ self._reloader.check(True)
673
+ elif mode == "0" or mode == "off":
674
+ self._reloader.enabled = False
675
+ elif mode == "1" or mode == "explicit":
676
+ self._reloader.enabled = True
677
+ self._reloader.check_all = False
678
+ self._reloader.autoload_obj = False
679
+ elif mode == "2" or mode == "all":
680
+ self._reloader.enabled = True
681
+ self._reloader.check_all = True
682
+ self._reloader.autoload_obj = False
683
+ elif mode == "3" or mode == "complete":
684
+ self._reloader.enabled = True
685
+ self._reloader.check_all = True
686
+ self._reloader.autoload_obj = True
687
+ else:
688
+ raise ValueError(f'Unrecognized autoreload mode "{mode}".')
689
+
690
+ @line_magic
691
+ def aimport(self, parameter_s="", stream=None):
692
+ """%aimport => Import modules for automatic reloading.
693
+
694
+ %aimport
695
+ List modules to automatically import and not to import.
696
+
697
+ %aimport foo
698
+ Import module 'foo' and mark it to be autoreloaded for %autoreload explicit
699
+
700
+ %aimport foo, bar
701
+ Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload explicit
702
+
703
+ %aimport -foo, bar
704
+ Mark module 'foo' to not be autoreloaded for %autoreload explicit, all, or complete, and 'bar'
705
+ to be autoreloaded for mode explicit.
706
+ """
707
+ modname = parameter_s
708
+ if not modname:
709
+ to_reload = sorted(self._reloader.modules.keys())
710
+ to_skip = sorted(self._reloader.skip_modules.keys())
711
+ if stream is None:
712
+ stream = sys.stdout
713
+ if self._reloader.check_all:
714
+ stream.write("Modules to reload:\nall-except-skipped\n")
715
+ else:
716
+ stream.write("Modules to reload:\n%s\n" % " ".join(to_reload))
717
+ stream.write("\nModules to skip:\n%s\n" % " ".join(to_skip))
718
+ else:
719
+ for _module in [_.strip() for _ in modname.split(",")]:
720
+ if _module.startswith("-"):
721
+ _module = _module[1:].strip()
722
+ self._reloader.mark_module_skipped(_module)
723
+ else:
724
+ top_module, top_name = self._reloader.aimport_module(_module)
725
+
726
+ # Inject module to user namespace
727
+ self.shell.push({top_name: top_module})
728
+
729
+ def pre_run_cell(self, info):
730
+ if self._reloader.enabled:
731
+ try:
732
+ self._reloader.check()
733
+ except:
734
+ pass
735
+
736
+ def post_execute_hook(self):
737
+ """Cache the modification times of any modules imported in this execution"""
738
+ newly_loaded_modules = set(sys.modules) - self.loaded_modules
739
+ for modname in newly_loaded_modules:
740
+ _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
741
+ if pymtime is not None:
742
+ self._reloader.modules_mtimes[modname] = pymtime
743
+
744
+ self.loaded_modules.update(newly_loaded_modules)
745
+
746
+
747
+ def load_ipython_extension(ip):
748
+ """Load the extension in IPython."""
749
+ auto_reload = AutoreloadMagics(ip)
750
+ ip.register_magics(auto_reload)
751
+ ip.events.register("pre_run_cell", auto_reload.pre_run_cell)
752
+ ip.events.register("post_execute", auto_reload.post_execute_hook)
temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/__init__.py ADDED
File without changes
temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/deduperreload.py ADDED
@@ -0,0 +1,608 @@
1
+ from __future__ import annotations
2
+ import ast
3
+ import builtins
4
+ import contextlib
5
+ import itertools
6
+ import os
7
+ import platform
8
+ import sys
9
+ import textwrap
10
+ from types import ModuleType
11
+ from typing import TYPE_CHECKING, Any, Generator, Iterable, NamedTuple, cast
12
+
13
+ from IPython.extensions.deduperreload.deduperreload_patching import (
14
+ DeduperReloaderPatchingMixin,
15
+ )
16
+
17
+ if TYPE_CHECKING:
18
+ TDefinitionAst = (
19
+ ast.FunctionDef
20
+ | ast.AsyncFunctionDef
21
+ | ast.Import
22
+ | ast.ImportFrom
23
+ | ast.Assign
24
+ | ast.AnnAssign
25
+ )
26
+
27
+
28
+ def get_module_file_name(module: ModuleType | str) -> str:
29
+ """Returns the module's file path, or the empty string if it's inaccessible"""
30
+ if (mod := sys.modules.get(module) if isinstance(module, str) else module) is None:
31
+ return ""
32
+ return getattr(mod, "__file__", "") or ""
33
+
34
+
35
+ def compare_ast(node1: ast.AST | list[ast.AST], node2: ast.AST | list[ast.AST]) -> bool:
36
+ """Checks if node1 and node2 have identical AST structure/values, apart from some attributes"""
37
+ if type(node1) is not type(node2):
38
+ return False
39
+
40
+ if isinstance(node1, ast.AST):
41
+ for k, v in node1.__dict__.items():
42
+ if k in (
43
+ "lineno",
44
+ "end_lineno",
45
+ "col_offset",
46
+ "end_col_offset",
47
+ "ctx",
48
+ "parent",
49
+ ):
50
+ continue
51
+ if not hasattr(node2, k) or not compare_ast(v, getattr(node2, k)):
52
+ return False
53
+ return True
54
+
55
+ elif isinstance(node1, list) and isinstance( # type:ignore [redundant-expr]
56
+ node2, list
57
+ ):
58
+ return len(node1) == len(node2) and all(
59
+ compare_ast(n1, n2) for n1, n2 in zip(node1, node2)
60
+ )
61
+ else:
62
+ return node1 == node2
63
+
64
+
65
+ class DependencyNode(NamedTuple):
66
+ """
67
+ Each node represents a function.
68
+ qualified_name: string which represents the namespace/name of the function
69
+ abstract_syntax_tree: subtree of the overall module which corresponds to this function
70
+
71
+ qualified_name is of the structure: (namespace1, namespace2, ..., name)
72
+
73
+ For example, foo() in the following would be represented as (A, B, foo):
74
+
75
+ class A:
76
+ class B:
77
+ def foo():
78
+ pass
79
+ """
80
+
81
+ qualified_name: tuple[str, ...]
82
+ abstract_syntax_tree: ast.AST
83
+
84
+
85
+ class GatherResult(NamedTuple):
86
+ import_defs: list[tuple[tuple[str, ...], ast.Import | ast.ImportFrom]] = []
87
+ assign_defs: list[tuple[tuple[str, ...], ast.Assign | ast.AnnAssign]] = []
88
+ function_defs: list[
89
+ tuple[tuple[str, ...], ast.FunctionDef | ast.AsyncFunctionDef]
90
+ ] = []
91
+ classes: dict[str, ast.ClassDef] = {}
92
+ unfixable: list[ast.AST] = []
93
+
94
+ @classmethod
95
+ def create(cls) -> GatherResult:
96
+ return cls([], [], [], {}, [])
97
+
98
+ def all_defs(self) -> Iterable[tuple[tuple[str, ...], TDefinitionAst]]:
99
+ return itertools.chain(self.import_defs, self.assign_defs, self.function_defs)
100
+
101
+ def inplace_merge(self, other: GatherResult) -> None:
102
+ self.import_defs.extend(other.import_defs)
103
+ self.assign_defs.extend(other.assign_defs)
104
+ self.function_defs.extend(other.function_defs)
105
+ self.classes.update(other.classes)
106
+ self.unfixable.extend(other.unfixable)
107
+
108
+
109
+ class ConstexprDetector(ast.NodeVisitor):
110
+ def __init__(self) -> None:
111
+ self.is_constexpr = True
112
+ self._allow_builtins_exceptions = True
113
+
114
+ @contextlib.contextmanager
115
+ def disallow_builtins_exceptions(self) -> Generator[None, None, None]:
116
+ prev_allow = self._allow_builtins_exceptions
117
+ self._allow_builtins_exceptions = False
118
+ try:
119
+ yield
120
+ finally:
121
+ self._allow_builtins_exceptions = prev_allow
122
+
123
+ def visit_Attribute(self, node: ast.Attribute) -> None:
124
+ with self.disallow_builtins_exceptions():
125
+ self.visit(node.value)
126
+
127
+ def visit_Name(self, node: ast.Name) -> None:
128
+ if self._allow_builtins_exceptions and hasattr(builtins, node.id):
129
+ return
130
+ self.is_constexpr = False
131
+
132
+ def visit(self, node: ast.AST) -> None:
133
+ if not self.is_constexpr:
134
+ # can short-circuit if we've already detected that it's not a constexpr
135
+ return
136
+ super().visit(node)
137
+
138
+ def __call__(self, node: ast.AST) -> bool:
139
+ self.is_constexpr = True
140
+ self.visit(node)
141
+ return self.is_constexpr
142
+
143
+
144
+ class AutoreloadTree:
145
+ """
146
+ Recursive data structure to keep track of reloadable functions/methods. Each object corresponds to a specific scope level.
147
+ children: classes inside given scope, maps class name to autoreload tree for that class's scope
148
+ funcs_to_autoreload: list of function names that can be autoreloaded in given scope.
149
+ new_nested_classes: Classes getting added in new autoreload cycle
150
+ """
151
+
152
+ def __init__(self) -> None:
153
+ self.children: dict[str, AutoreloadTree] = {}
154
+ self.defs_to_reload: list[tuple[tuple[str, ...], ast.AST]] = []
155
+ self.defs_to_delete: set[str] = set()
156
+ self.new_nested_classes: dict[str, ast.AST] = {}
157
+
158
+ def traverse_prefixes(self, prefixes: list[str]) -> AutoreloadTree:
159
+ """
160
+ Return ref to the AutoreloadTree at the namespace specified by prefixes
161
+ """
162
+ cur = self
163
+ for prefix in prefixes:
164
+ if prefix not in cur.children:
165
+ cur.children[prefix] = AutoreloadTree()
166
+ cur = cur.children[prefix]
167
+ return cur
168
+
169
+
170
+ class DeduperReloader(DeduperReloaderPatchingMixin):
171
+ """
172
+ This version of autoreload detects when we can leverage targeted recompilation of a subset of a module and patching
173
+ existing function/method objects to reflect these changes.
174
+
175
+ Detects what functions/methods can be reloaded by recursively comparing the old/new AST of module-level classes,
176
+ module-level classes' methods, recursing through nested classes' methods. If other changes are made, original
177
+ autoreload algorithm is called directly.
178
+ """
179
+
180
+ def __init__(self) -> None:
181
+ self._to_autoreload: AutoreloadTree = AutoreloadTree()
182
+ self.source_by_modname: dict[str, str] = {}
183
+ self.dependency_graph: dict[tuple[str, ...], list[DependencyNode]] = {}
184
+ self._enabled = True
185
+
186
+ @property
187
+ def enabled(self) -> bool:
188
+ return self._enabled and platform.python_implementation() == "CPython"
189
+
190
+ @enabled.setter
191
+ def enabled(self, value: bool) -> None:
192
+ self._enabled = value
193
+
194
+ def update_sources(self) -> None:
195
+ """
196
+ Update dictionary source_by_modname with current modules' source codes.
197
+ """
198
+ if not self.enabled:
199
+ return
200
+ for new_modname in sys.modules.keys() - self.source_by_modname.keys():
201
+ new_module = sys.modules[new_modname]
202
+ if (
203
+ (fname := get_module_file_name(new_module))
204
+ is None # type:ignore [redundant-expr]
205
+ or "site-packages" in fname
206
+ or "dist-packages" in fname
207
+ or not os.access(fname, os.R_OK)
208
+ ):
209
+ self.source_by_modname[new_modname] = ""
210
+ continue
211
+ with open(fname, "r") as f:
212
+ try:
213
+ self.source_by_modname[new_modname] = f.read()
214
+ except Exception:
215
+ self.source_by_modname[new_modname] = ""
216
+
217
+ constexpr_detector = ConstexprDetector()
218
+
219
+ @staticmethod
220
+ def is_enum_subclass(node: ast.Module | ast.ClassDef) -> bool:
221
+ if isinstance(node, ast.Module):
222
+ return False
223
+ for base in node.bases:
224
+ if isinstance(base, ast.Name) and base.id == "Enum":
225
+ return True
226
+ elif (
227
+ isinstance(base, ast.Attribute)
228
+ and base.attr == "Enum"
229
+ and isinstance(base.value, ast.Name)
230
+ and base.value.id == "enum"
231
+ ):
232
+ return True
233
+ return False
234
+
235
+ @classmethod
236
+ def is_constexpr_assign(
237
+ cls, node: ast.AST, parent_node: ast.Module | ast.ClassDef
238
+ ) -> bool:
239
+ if not isinstance(node, (ast.Assign, ast.AnnAssign)) or node.value is None:
240
+ return False
241
+ if cls.is_enum_subclass(parent_node):
242
+ return False
243
+ for target in node.targets if isinstance(node, ast.Assign) else [node.target]:
244
+ if not isinstance(target, ast.Name):
245
+ return False
246
+ return cls.constexpr_detector(node.value)
247
+
248
+ @classmethod
249
+ def _gather_children(
250
+ cls, body: list[ast.stmt], parent_node: ast.Module | ast.ClassDef
251
+ ) -> GatherResult:
252
+ """
253
+ Given list of ast elements, return:
254
+ 1. dict mapping function names to their ASTs.
255
+ 2. dict mapping class names to their ASTs.
256
+ 3. list of any other ASTs.
257
+ """
258
+ result = GatherResult.create()
259
+ for ast_node in body:
260
+ ast_elt: ast.expr | ast.stmt = ast_node
261
+ while isinstance(ast_elt, ast.Expr):
262
+ ast_elt = ast_elt.value
263
+ if isinstance(ast_elt, (ast.FunctionDef, ast.AsyncFunctionDef)):
264
+ result.function_defs.append(((ast_elt.name,), ast_elt))
265
+ elif isinstance(ast_elt, (ast.Import, ast.ImportFrom)):
266
+ result.import_defs.append(
267
+ (tuple(name.asname or name.name for name in ast_elt.names), ast_elt)
268
+ )
269
+ elif isinstance(ast_elt, ast.ClassDef):
270
+ result.classes[ast_elt.name] = ast_elt
271
+ elif isinstance(ast_elt, ast.If):
272
+ result.unfixable.append(ast_elt.test)
273
+ result.inplace_merge(cls._gather_children(ast_elt.body, parent_node))
274
+ result.inplace_merge(cls._gather_children(ast_elt.orelse, parent_node))
275
+ elif isinstance(ast_elt, (ast.AsyncWith, ast.With)):
276
+ result.unfixable.extend(ast_elt.items)
277
+ result.inplace_merge(cls._gather_children(ast_elt.body, parent_node))
278
+ elif isinstance(ast_elt, ast.Try):
279
+ result.inplace_merge(cls._gather_children(ast_elt.body, parent_node))
280
+ result.inplace_merge(cls._gather_children(ast_elt.orelse, parent_node))
281
+ result.inplace_merge(
282
+ cls._gather_children(ast_elt.finalbody, parent_node)
283
+ )
284
+ for handler in ast_elt.handlers:
285
+ if handler.type is not None:
286
+ result.unfixable.append(handler.type)
287
+ result.inplace_merge(
288
+ cls._gather_children(handler.body, parent_node)
289
+ )
290
+ elif not isinstance(ast_elt, (ast.Ellipsis, ast.Pass)):
291
+ if cls.is_constexpr_assign(ast_elt, parent_node):
292
+ assert isinstance(ast_elt, (ast.Assign, ast.AnnAssign))
293
+ targets = (
294
+ ast_elt.targets
295
+ if isinstance(ast_elt, ast.Assign)
296
+ else [ast_elt.target]
297
+ )
298
+ result.assign_defs.append(
299
+ (
300
+ tuple(cast(ast.Name, target).id for target in targets),
301
+ ast_elt,
302
+ )
303
+ )
304
+ else:
305
+ result.unfixable.append(ast_elt)
306
+ return result
307
+
308
+ def detect_autoreload(
309
+ self,
310
+ old_node: ast.Module | ast.ClassDef,
311
+ new_node: ast.Module | ast.ClassDef,
312
+ prefixes: list[str] | None = None,
313
+ ) -> bool:
314
+ """
315
+ Returns
316
+ -------
317
+ `True` if we can run our targeted autoreload algorithm safely.
318
+ `False` if we should instead use IPython's original autoreload implementation.
319
+ """
320
+ if not self.enabled:
321
+ return False
322
+ prefixes = prefixes or []
323
+
324
+ old_result = self._gather_children(old_node.body, old_node)
325
+ new_result = self._gather_children(new_node.body, new_node)
326
+ old_defs_by_name: dict[str, ast.AST] = {
327
+ name: ast_def for names, ast_def in old_result.all_defs() for name in names
328
+ }
329
+ new_defs_by_name: dict[str, ast.AST] = {
330
+ name: ast_def for names, ast_def in new_result.all_defs() for name in names
331
+ }
332
+
333
+ if not compare_ast(old_result.unfixable, new_result.unfixable):
334
+ return False
335
+
336
+ cur = self._to_autoreload.traverse_prefixes(prefixes)
337
+ for names, new_ast_def in new_result.all_defs():
338
+ names_to_reload = []
339
+ for name in names:
340
+ if new_defs_by_name[name] is not new_ast_def:
341
+ continue
342
+ if name not in old_defs_by_name or not compare_ast(
343
+ new_ast_def, old_defs_by_name[name]
344
+ ):
345
+ names_to_reload.append(name)
346
+ if names_to_reload:
347
+ cur.defs_to_reload.append((tuple(names), new_ast_def))
348
+ cur.defs_to_delete |= set(old_defs_by_name.keys()) - set(
349
+ new_defs_by_name.keys()
350
+ )
351
+ for name, new_ast_def_class in new_result.classes.items():
352
+ if name not in old_result.classes:
353
+ cur.new_nested_classes[name] = new_ast_def_class
354
+ elif not compare_ast(
355
+ new_ast_def_class, old_result.classes[name]
356
+ ) and not self.detect_autoreload(
357
+ old_result.classes[name], new_ast_def_class, prefixes + [name]
358
+ ):
359
+ return False
360
+ return True
361
+
362
+ def _check_dependents(self) -> bool:
363
+ """
364
+ If a decorator function is modified, we should similarly reload the functions which are decorated by this
365
+ decorator. Iterate through the Dependency Graph to find such cases in the given AutoreloadTree.
366
+ """
367
+ for node in self._check_dependents_inner():
368
+ self._add_node_to_autoreload_tree(node)
369
+ return True
370
+
371
+ def _add_node_to_autoreload_tree(self, node: DependencyNode) -> None:
372
+ """
373
+ Given a node of the dependency graph, add decorator dependencies to the autoreload tree.
374
+ """
375
+ if len(node.qualified_name) == 0:
376
+ return
377
+ cur = self._to_autoreload.traverse_prefixes(list(node.qualified_name[:-1]))
378
+ if node.abstract_syntax_tree is not None:
379
+ cur.defs_to_reload.append(
380
+ ((node.qualified_name[-1],), node.abstract_syntax_tree)
381
+ )
382
+
383
+ def _check_dependents_inner(
384
+ self, prefixes: list[str] | None = None
385
+ ) -> list[DependencyNode]:
386
+ prefixes = prefixes or []
387
+ cur = self._to_autoreload.traverse_prefixes(prefixes)
388
+ ans = []
389
+ for (func_name, *_), _ in cur.defs_to_reload:
390
+ node = tuple(prefixes + [func_name])
391
+ ans.extend(self._gen_dependents(node))
392
+ for class_name in cur.new_nested_classes:
393
+ ans.extend(self._check_dependents_inner(prefixes + [class_name]))
394
+ return ans
395
+
396
+ def _gen_dependents(self, qualname: tuple[str, ...]) -> list[DependencyNode]:
397
+ ans = []
398
+ if qualname not in self.dependency_graph:
399
+ return []
400
+ for elt in self.dependency_graph[qualname]:
401
+ ans.extend(self._gen_dependents(elt.qualified_name))
402
+ ans.append(elt)
403
+ return ans
404
+
405
+ def _patch_namespace_inner(
406
+ self, ns: ModuleType | type, prefixes: list[str] | None = None
407
+ ) -> bool:
408
+ """
409
+ This function patches module functions and methods. Specifically, only objects with their name in
410
+ self.to_autoreload will be considered for patching. If an object has been marked to be autoreloaded,
411
+ new_source_code gets executed in the old version's global environment. Then, replace the old function's
412
+ attributes with the new function's attributes.
413
+ """
414
+ prefixes = prefixes or []
415
+ cur = self._to_autoreload.traverse_prefixes(prefixes)
416
+ namespace_to_check = ns
417
+ for prefix in prefixes:
418
+ namespace_to_check = namespace_to_check.__dict__[prefix]
419
+ for names, new_ast_def in cur.defs_to_reload:
420
+ local_env: dict[str, Any] = {}
421
+ if (
422
+ isinstance(new_ast_def, (ast.FunctionDef, ast.AsyncFunctionDef))
423
+ and (name := names[0]) in namespace_to_check.__dict__
424
+ ):
425
+ assert len(names) == 1
426
+ to_patch_to = namespace_to_check.__dict__[name]
427
+ if isinstance(to_patch_to, (staticmethod, classmethod)):
428
+ to_patch_to = to_patch_to.__func__
429
+ # exec new source code using old function's (obj) globals environment.
430
+ func_code = textwrap.dedent(ast.unparse(new_ast_def))
431
+ if is_method := (len(prefixes) > 0):
432
+ func_code = "class __autoreload_class__:\n" + textwrap.indent(
433
+ func_code, " "
434
+ )
435
+ global_env = namespace_to_check.__dict__
436
+ if hasattr(to_patch_to, "__globals__"):
437
+ global_env = to_patch_to.__globals__
438
+ elif isinstance(to_patch_to, property):
439
+ if to_patch_to.fget is not None:
440
+ global_env = to_patch_to.fget.__globals__
441
+ elif to_patch_to.fset is not None:
442
+ global_env = to_patch_to.fset.__globals__
443
+ elif to_patch_to.fdel is not None:
444
+ global_env = to_patch_to.fdel.__globals__
445
+ if not isinstance(global_env, dict):
446
+ global_env = dict(global_env)
447
+ exec(func_code, global_env, local_env) # type: ignore[arg-type]
448
+ # local_env contains the function exec'd from new version of function
449
+ if is_method:
450
+ to_patch_from = getattr(local_env["__autoreload_class__"], name)
451
+ else:
452
+ to_patch_from = local_env[name]
453
+ if isinstance(to_patch_from, (staticmethod, classmethod)):
454
+ to_patch_from = to_patch_from.__func__
455
+ if isinstance(to_patch_to, property) and isinstance(
456
+ to_patch_from, property
457
+ ):
458
+ for attr in ("fget", "fset", "fdel"):
459
+ if (
460
+ getattr(to_patch_to, attr) is None
461
+ or getattr(to_patch_from, attr) is None
462
+ ):
463
+ self.try_patch_attr(to_patch_to, to_patch_from, attr)
464
+ else:
465
+ self.patch_function(
466
+ getattr(to_patch_to, attr),
467
+ getattr(to_patch_from, attr),
468
+ is_method,
469
+ )
470
+ elif not isinstance(to_patch_to, property) and not isinstance(
471
+ to_patch_from, property
472
+ ):
473
+ self.patch_function(to_patch_to, to_patch_from, is_method)
474
+ else:
475
+ raise ValueError(
476
+ "adding or removing property decorations not supported"
477
+ )
478
+ else:
479
+ exec(
480
+ ast.unparse(new_ast_def),
481
+ ns.__dict__ | namespace_to_check.__dict__,
482
+ local_env,
483
+ )
484
+ for name in names:
485
+ setattr(namespace_to_check, name, local_env[name])
486
+ cur.defs_to_reload.clear()
487
+ for name in cur.defs_to_delete:
488
+ try:
489
+ delattr(namespace_to_check, name)
490
+ except (AttributeError, TypeError, ValueError):
491
+ # give up on deleting the attribute, let the stale one dangle
492
+ pass
493
+ cur.defs_to_delete.clear()
494
+ for class_name, class_ast_node in cur.new_nested_classes.items():
495
+ local_env_class: dict[str, Any] = {}
496
+ exec(
497
+ ast.unparse(class_ast_node),
498
+ ns.__dict__ | namespace_to_check.__dict__,
499
+ local_env_class,
500
+ )
501
+ setattr(namespace_to_check, class_name, local_env_class[class_name])
502
+ cur.new_nested_classes.clear()
503
+ for class_name in cur.children.keys():
504
+ if not self._patch_namespace(ns, prefixes + [class_name]):
505
+ return False
506
+ cur.children.clear()
507
+ return True
508
+
509
+ def _patch_namespace(
510
+ self, ns: ModuleType | type, prefixes: list[str] | None = None
511
+ ) -> bool:
512
+ """
513
+ Wrapper for patching all elements in a namespace as specified by the to_autoreload member variable.
514
+ Returns `true` if patching was successful, and `false` if unsuccessful.
515
+ """
516
+ try:
517
+ return self._patch_namespace_inner(ns, prefixes=prefixes)
518
+ except Exception:
519
+ return False
520
+
521
+ def maybe_reload_module(self, module: ModuleType) -> bool:
522
+ """
523
+ Uses Deduperreload to try to update a module.
524
+ Returns `true` on success and `false` on failure.
525
+ """
526
+ if not self.enabled:
527
+ return False
528
+ if not (modname := getattr(module, "__name__", None)):
529
+ return False
530
+ if (fname := get_module_file_name(module)) is None:
531
+ return False
532
+ with open(fname, "r") as f:
533
+ new_source_code = f.read()
534
+ patched_flag = False
535
+ if old_source_code := self.source_by_modname.get(modname):
536
+ # get old/new module ast
537
+ try:
538
+ old_module_ast = ast.parse(old_source_code)
539
+ new_module_ast = ast.parse(new_source_code)
540
+ except Exception:
541
+ return False
542
+ # detect if we are able to use our autoreload algorithm
543
+ ctx = contextlib.suppress()
544
+ with ctx:
545
+ self._build_dependency_graph(new_module_ast)
546
+ if (
547
+ self.detect_autoreload(old_module_ast, new_module_ast)
548
+ and self._check_dependents()
549
+ and self._patch_namespace(module)
550
+ ):
551
+ patched_flag = True
552
+
553
+ self.source_by_modname[modname] = new_source_code
554
+ self._to_autoreload = AutoreloadTree()
555
+ return patched_flag
556
+
557
+ def _separate_name(
558
+ self,
559
+ decorator: ast.Attribute | ast.Name | ast.Call | ast.expr,
560
+ accept_calls: bool,
561
+ ) -> list[str] | None:
562
+ """
563
+ Generates a qualified name for a given decorator by finding its relative namespace.
564
+ """
565
+ if isinstance(decorator, ast.Name):
566
+ return [decorator.id]
567
+ elif isinstance(decorator, ast.Call):
568
+ if accept_calls:
569
+ return self._separate_name(decorator.func, False)
570
+ else:
571
+ return None
572
+ if not isinstance(decorator, ast.Attribute):
573
+ return None
574
+ if pref := self._separate_name(decorator.value, False):
575
+ return pref + [decorator.attr]
576
+ else:
577
+ return None
578
+
579
+ def _gather_dependents(
580
+ self, body: list[ast.stmt], body_prefixes: list[str] | None = None
581
+ ) -> bool:
582
+ body_prefixes = body_prefixes or []
583
+ for ast_node in body:
584
+ ast_elt: ast.expr | ast.stmt = ast_node
585
+ if isinstance(ast_elt, ast.ClassDef):
586
+ self._gather_dependents(ast_elt.body, body_prefixes + [ast_elt.name])
587
+ continue
588
+ if not isinstance(ast_elt, (ast.FunctionDef, ast.AsyncFunctionDef)):
589
+ continue
590
+ qualified_name = tuple(body_prefixes + [ast_elt.name])
591
+ cur_dependency_node = DependencyNode(qualified_name, ast_elt)
592
+ for decorator in ast_elt.decorator_list:
593
+ decorator_path = self._separate_name(decorator, True)
594
+ if not decorator_path:
595
+ continue
596
+ decorator_path_tuple = tuple(decorator_path)
597
+ self.dependency_graph.setdefault(decorator_path_tuple, []).append(
598
+ cur_dependency_node
599
+ )
600
+ return True
601
+
602
+ def _build_dependency_graph(self, new_ast: ast.Module | ast.ClassDef) -> bool:
603
+ """
604
+ Wrapper function for generating dependency graph given some AST.
605
+ Returns `true` on success. Returns `false` on failure.
606
+ Currently, only returns `true` as we do not block on failure to build this graph.
607
+ """
608
+ return self._gather_dependents(new_ast.body)
temp_venv/lib/python3.13/site-packages/IPython/extensions/deduperreload/deduperreload_patching.py ADDED
@@ -0,0 +1,141 @@
1
+ from __future__ import annotations
2
+ import ctypes
3
+ import sys
4
+ from typing import Any
5
+
6
+ NOT_FOUND: object = object()
7
+ _MAX_FIELD_SEARCH_OFFSET = 50
8
+
9
+ if sys.maxsize > 2**32:
10
+ WORD_TYPE: type[ctypes.c_int32] | type[ctypes.c_int64] = ctypes.c_int64
11
+ WORD_N_BYTES = 8
12
+ else:
13
+ WORD_TYPE = ctypes.c_int32
14
+ WORD_N_BYTES = 4
15
+
16
+
17
+ class DeduperReloaderPatchingMixin:
18
+ @staticmethod
19
+ def infer_field_offset(
20
+ obj: object,
21
+ field: str,
22
+ ) -> int:
23
+ field_value = getattr(obj, field, NOT_FOUND)
24
+ if field_value is NOT_FOUND:
25
+ return -1
26
+ obj_addr = ctypes.c_void_p.from_buffer(ctypes.py_object(obj)).value
27
+ field_addr = ctypes.c_void_p.from_buffer(ctypes.py_object(field_value)).value
28
+ if obj_addr is None or field_addr is None:
29
+ return -1
30
+ ret = -1
31
+ for offset in range(1, _MAX_FIELD_SEARCH_OFFSET):
32
+ if (
33
+ ctypes.cast(
34
+ obj_addr + WORD_N_BYTES * offset, ctypes.POINTER(WORD_TYPE)
35
+ ).contents.value
36
+ == field_addr
37
+ ):
38
+ ret = offset
39
+ break
40
+ return ret
41
+
42
+ @classmethod
43
+ def try_write_readonly_attr(
44
+ cls,
45
+ obj: object,
46
+ field: str,
47
+ new_value: object,
48
+ offset: int | None = None,
49
+ ) -> None:
50
+ prev_value = getattr(obj, field, NOT_FOUND)
51
+ if prev_value is NOT_FOUND:
52
+ return
53
+ if offset is None:
54
+ offset = cls.infer_field_offset(obj, field)
55
+ if offset == -1:
56
+ return
57
+ obj_addr = ctypes.c_void_p.from_buffer(ctypes.py_object(obj)).value
58
+ new_value_addr = ctypes.c_void_p.from_buffer(ctypes.py_object(new_value)).value
59
+ if obj_addr is None or new_value_addr is None:
60
+ return
61
+ if prev_value is not None:
62
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(prev_value))
63
+ if new_value is not None:
64
+ ctypes.pythonapi.Py_IncRef(ctypes.py_object(new_value))
65
+ ctypes.cast(
66
+ obj_addr + WORD_N_BYTES * offset, ctypes.POINTER(WORD_TYPE)
67
+ ).contents.value = new_value_addr
68
+
69
+ @classmethod
70
+ def try_patch_readonly_attr(
71
+ cls,
72
+ old: object,
73
+ new: object,
74
+ field: str,
75
+ new_is_value: bool = False,
76
+ offset: int = -1,
77
+ ) -> None:
78
+
79
+ old_value = getattr(old, field, NOT_FOUND)
80
+ new_value = new if new_is_value else getattr(new, field, NOT_FOUND)
81
+ if old_value is NOT_FOUND or new_value is NOT_FOUND:
82
+ return
83
+ elif old_value is new_value:
84
+ return
85
+ elif old_value is not None and offset < 0:
86
+ offset = cls.infer_field_offset(old, field)
87
+ elif offset < 0:
88
+ assert not new_is_value
89
+ assert new_value is not None
90
+ offset = cls.infer_field_offset(new, field)
91
+ cls.try_write_readonly_attr(old, field, new_value, offset=offset)
92
+
93
+ @classmethod
94
+ def try_patch_attr(
95
+ cls,
96
+ old: object,
97
+ new: object,
98
+ field: str,
99
+ new_is_value: bool = False,
100
+ offset: int = -1,
101
+ ) -> None:
102
+ try:
103
+ setattr(old, field, new if new_is_value else getattr(new, field))
104
+ except (AttributeError, TypeError, ValueError):
105
+ cls.try_patch_readonly_attr(old, new, field, new_is_value, offset)
106
+
107
+ @classmethod
108
+ def patch_function(
109
+ cls, to_patch_to: Any, to_patch_from: Any, is_method: bool
110
+ ) -> None:
111
+ new_freevars = []
112
+ new_closure = []
113
+ for i, v in enumerate(to_patch_to.__code__.co_freevars):
114
+ if v not in to_patch_from.__code__.co_freevars or v == "__class__":
115
+ new_freevars.append(v)
116
+ new_closure.append(to_patch_to.__closure__[i])
117
+ for i, v in enumerate(to_patch_from.__code__.co_freevars):
118
+ if v not in new_freevars:
119
+ new_freevars.append(v)
120
+ new_closure.append(to_patch_from.__closure__[i])
121
+ code_with_new_freevars = to_patch_from.__code__.replace(
122
+ co_freevars=tuple(new_freevars)
123
+ )
124
+ # lambdas may complain if there is more than one freevar
125
+ cls.try_patch_attr(
126
+ to_patch_to, code_with_new_freevars, "__code__", new_is_value=True
127
+ )
128
+ offset = -1
129
+ if to_patch_to.__closure__ is None and to_patch_from.__closure__ is not None:
130
+ offset = cls.infer_field_offset(to_patch_from, "__closure__")
131
+ cls.try_patch_readonly_attr(
132
+ to_patch_to,
133
+ tuple(new_closure) or None,
134
+ "__closure__",
135
+ new_is_value=True,
136
+ offset=offset,
137
+ )
138
+ for attr in ("__defaults__", "__kwdefaults__", "__doc__", "__dict__"):
139
+ cls.try_patch_attr(to_patch_to, to_patch_from, attr)
140
+ if is_method:
141
+ cls.try_patch_readonly_attr(to_patch_to, to_patch_from, "__self__")
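A deliberately simplified illustration (not taken from the file above, and skipping the ctypes fallback for read-only fields) of the core idea in `patch_function`: re-pointing an existing function object at new code so that every alias sees the update. It assumes plain module-level functions with no closures.

def old_version(x, scale=2):
    return x * scale

def new_version(x, scale=3):
    return x * scale + 1

alias = old_version               # any existing reference to the old function

# Swap the new behaviour onto the *existing* function object.
old_version.__code__ = new_version.__code__
old_version.__defaults__ = new_version.__defaults__

print(alias(10))                  # 31 -- the alias picks up the new code too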
temp_venv/lib/python3.13/site-packages/IPython/extensions/storemagic.py ADDED
@@ -0,0 +1,236 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ %store magic for lightweight persistence.
4
+
5
+ Stores variables, aliases and macros in IPython's database.
6
+
7
+ To automatically restore stored variables at startup, add this to your
8
+ :file:`ipython_config.py` file::
9
+
10
+ c.StoreMagics.autorestore = True
11
+ """
12
+
13
+ # Copyright (c) IPython Development Team.
14
+ # Distributed under the terms of the Modified BSD License.
15
+
16
+ import inspect, os, sys, textwrap
17
+
18
+ from IPython.core.error import UsageError
19
+ from IPython.core.magic import Magics, magics_class, line_magic
20
+ from IPython.testing.skipdoctest import skip_doctest
21
+ from traitlets import Bool
22
+
23
+
24
+ def restore_aliases(ip, alias=None):
25
+ staliases = ip.db.get('stored_aliases', {})
26
+ if alias is None:
27
+ for k,v in staliases.items():
28
+ # print("restore alias",k,v) # dbg
29
+ #self.alias_table[k] = v
30
+ ip.alias_manager.define_alias(k,v)
31
+ else:
32
+ ip.alias_manager.define_alias(alias, staliases[alias])
33
+
34
+
35
+ def refresh_variables(ip):
36
+ db = ip.db
37
+ for key in db.keys('autorestore/*'):
38
+ # strip autorestore
39
+ justkey = os.path.basename(key)
40
+ try:
41
+ obj = db[key]
42
+ except KeyError:
43
+ print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey)
44
+ print("The error was:", sys.exc_info()[0])
45
+ else:
46
+ # print("restored",justkey,"=",obj) # dbg
47
+ ip.user_ns[justkey] = obj
48
+
49
+
50
+ def restore_dhist(ip):
51
+ ip.user_ns['_dh'] = ip.db.get('dhist',[])
52
+
53
+
54
+ def restore_data(ip):
55
+ refresh_variables(ip)
56
+ restore_aliases(ip)
57
+ restore_dhist(ip)
58
+
59
+
60
+ @magics_class
61
+ class StoreMagics(Magics):
62
+ """Lightweight persistence for python variables.
63
+
64
+ Provides the %store magic."""
65
+
66
+ autorestore = Bool(False, help=
67
+ """If True, any %store-d variables will be automatically restored
68
+ when IPython starts.
69
+ """
70
+ ).tag(config=True)
71
+
72
+ def __init__(self, shell):
73
+ super(StoreMagics, self).__init__(shell=shell)
74
+ self.shell.configurables.append(self)
75
+ if self.autorestore:
76
+ restore_data(self.shell)
77
+
78
+ @skip_doctest
79
+ @line_magic
80
+ def store(self, parameter_s=''):
81
+ """Lightweight persistence for python variables.
82
+
83
+ Example::
84
+
85
+ In [1]: l = ['hello',10,'world']
86
+ In [2]: %store l
87
+ Stored 'l' (list)
88
+ In [3]: exit
89
+
90
+ (IPython session is closed and started again...)
91
+
92
+ ville@badger:~$ ipython
93
+ In [1]: l
94
+ NameError: name 'l' is not defined
95
+ In [2]: %store -r
96
+ In [3]: l
97
+ Out[3]: ['hello', 10, 'world']
98
+
99
+ Usage:
100
+
101
+ * ``%store`` - Show list of all variables and their current
102
+ values
103
+ * ``%store spam bar`` - Store the *current* value of the variables spam
104
+ and bar to disk
105
+ * ``%store -d spam`` - Remove the variable and its value from storage
106
+ * ``%store -z`` - Remove all variables from storage
107
+ * ``%store -r`` - Refresh all variables, aliases and directory history
108
+ from store (overwrite current vals)
109
+ * ``%store -r spam bar`` - Refresh specified variables and aliases from store
110
+ (delete current val)
111
+ * ``%store foo >a.txt`` - Store value of foo to new file a.txt
112
+ * ``%store foo >>a.txt`` - Append value of foo to file a.txt
113
+
114
+ It should be noted that if you change the value of a variable, you
115
+ need to %store it again if you want to persist the new value.
116
+
117
+ Note also that the variables will need to be pickleable; most basic
118
+ python types can be safely %store'd.
119
+
120
+ Also aliases can be %store'd across sessions.
121
+ To remove an alias from the storage, use the %unalias magic.
122
+ """
123
+
124
+ opts,argsl = self.parse_options(parameter_s,'drz',mode='string')
125
+ args = argsl.split()
126
+ ip = self.shell
127
+ db = ip.db
128
+ # delete
129
+ if 'd' in opts:
130
+ try:
131
+ todel = args[0]
132
+ except IndexError as e:
133
+ raise UsageError('You must provide the variable to forget') from e
134
+ else:
135
+ try:
136
+ del db['autorestore/' + todel]
137
+ except BaseException as e:
138
+ raise UsageError("Can't delete variable '%s'" % todel) from e
139
+ # reset
140
+ elif 'z' in opts:
141
+ for k in db.keys('autorestore/*'):
142
+ del db[k]
143
+
144
+ elif 'r' in opts:
145
+ if args:
146
+ for arg in args:
147
+ try:
148
+ obj = db["autorestore/" + arg]
149
+ except KeyError:
150
+ try:
151
+ restore_aliases(ip, alias=arg)
152
+ except KeyError:
153
+ print("no stored variable or alias %s" % arg)
154
+ else:
155
+ ip.user_ns[arg] = obj
156
+ else:
157
+ restore_data(ip)
158
+
159
+ # run without arguments -> list variables & values
160
+ elif not args:
161
+ vars = db.keys('autorestore/*')
162
+ vars.sort()
163
+ if vars:
164
+ size = max(map(len, vars))
165
+ else:
166
+ size = 0
167
+
168
+ print('Stored variables and their in-db values:')
169
+ fmt = '%-'+str(size)+'s -> %s'
170
+ get = db.get
171
+ for var in vars:
172
+ justkey = os.path.basename(var)
173
+ # print the first 50 characters of every var
174
+ print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50]))
175
+
176
+ # default action - store the variable
177
+ else:
178
+ # %store foo >file.txt or >>file.txt
179
+ if len(args) > 1 and args[1].startswith(">"):
180
+ fnam = os.path.expanduser(args[1].lstrip(">").lstrip())
181
+ if args[1].startswith(">>"):
182
+ fil = open(fnam, "a", encoding="utf-8")
183
+ else:
184
+ fil = open(fnam, "w", encoding="utf-8")
185
+ with fil:
186
+ obj = ip.ev(args[0])
187
+ print("Writing '%s' (%s) to file '%s'." % (args[0],
188
+ obj.__class__.__name__, fnam))
189
+
190
+ if not isinstance (obj, str):
191
+ from pprint import pprint
192
+ pprint(obj, fil)
193
+ else:
194
+ fil.write(obj)
195
+ if not obj.endswith('\n'):
196
+ fil.write('\n')
197
+
198
+ return
199
+
200
+ # %store foo
201
+ for arg in args:
202
+ try:
203
+ obj = ip.user_ns[arg]
204
+ except KeyError:
205
+ # it might be an alias
206
+ name = arg
207
+ try:
208
+ cmd = ip.alias_manager.retrieve_alias(name)
209
+ except ValueError as e:
210
+ raise UsageError("Unknown variable '%s'" % name) from e
211
+
212
+ staliases = db.get('stored_aliases',{})
213
+ staliases[name] = cmd
214
+ db['stored_aliases'] = staliases
215
+ print("Alias stored: %s (%s)" % (name, cmd))
216
+ return
217
+
218
+ else:
219
+ modname = getattr(inspect.getmodule(obj), '__name__', '')
220
+ if modname == '__main__':
221
+ print(textwrap.dedent("""\
222
+ Warning: %s is %s
223
+ Proper storage of interactively declared classes (or instances
224
+ of those classes) is not possible! Only instances
225
+ of classes in real modules on file system can be %%store'd.
226
+ """ % (arg, obj) ))
227
+ return
228
+ #pickled = pickle.dumps(obj)
229
+ db[ 'autorestore/' + arg ] = obj
230
+ print("Stored '%s' (%s)" % (arg, obj.__class__.__name__))
231
+
232
+
233
+ def load_ipython_extension(ip):
234
+ """Load the extension in IPython."""
235
+ ip.register_magics(StoreMagics)
236
+
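A brief, hedged usage sketch of the persistence flow this magic provides; the variable name and the startup-file idea are illustrative and assume the storemagic extension is already loaded in the running shell.

# Inside IPython:
#   In [1]: results = {"run": 42}
#   In [2]: %store results        # pickled into the profile's database
#   ...restart IPython...
#   In [3]: %store -r results     # restored into the user namespace
#
# The same restore can be triggered programmatically, e.g. from a startup file:
from IPython import get_ipython

ip = get_ipython()
if ip is not None:
    ip.run_line_magic("store", "-r")   # refresh stored variables and aliases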
temp_venv/lib/python3.13/site-packages/IPython/external/__init__.py ADDED
@@ -0,0 +1,7 @@
1
+ """
2
+ This package contains all third-party modules bundled with IPython.
3
+ """
4
+
5
+ from typing import List
6
+
7
+ __all__: List[str] = []
temp_venv/lib/python3.13/site-packages/IPython/external/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (405 Bytes). View file
 
temp_venv/lib/python3.13/site-packages/IPython/external/__pycache__/pickleshare.cpython-313.pyc ADDED
Binary file (15.1 kB). View file
 
temp_venv/lib/python3.13/site-packages/IPython/external/pickleshare.py ADDED
@@ -0,0 +1,361 @@
1
+ #!/usr/bin/env python
2
+
3
+ """PickleShare - a small 'shelve' like datastore with concurrency support
4
+
5
+ Like shelve, a PickleShareDB object acts like a normal dictionary. Unlike
6
+ shelve, many processes can access the database simultaneously. Changing a
7
+ value in database is immediately visible to other processes accessing the
8
+ same database.
9
+
10
+ Concurrency is possible because the values are stored in separate files. Hence
11
+ the "database" is a directory where *all* files are governed by PickleShare.
12
+
13
+ Example usage::
14
+
15
+ from pickleshare import *
16
+ db = PickleShareDB('~/testpickleshare')
17
+ db.clear()
18
+ print "Should be empty:",db.items()
19
+ db['hello'] = 15
20
+ db['aku ankka'] = [1,2,313]
21
+ db['paths/are/ok/key'] = [1,(5,46)]
22
+ print(db.keys())
23
+ del db['aku ankka']
24
+
25
+ This module is certainly not ZODB, but can be used for low-load
26
+ (non-mission-critical) situations where tiny code size trumps the
27
+ advanced features of a "real" object database.
28
+
29
+ Installation guide: pip install pickleshare
30
+
31
+ Author: Ville Vainio <[email protected]>
32
+ License: MIT open source license.
33
+
34
+ """
35
+
36
+ from __future__ import print_function
37
+
38
+
39
+ __version__ = "0.7.5"
40
+
41
+ try:
42
+ from pathlib import Path
43
+ except ImportError:
44
+ # Python 2 backport
45
+ from pathlib2 import Path
46
+
47
+ import os, stat, time
48
+
49
+ try:
50
+ import collections.abc as collections_abc
51
+ except ImportError:
52
+ import collections as collections_abc
53
+ try:
54
+ import cPickle as pickle
55
+ except ImportError:
56
+ import pickle
57
+ import errno
58
+ import sys
59
+
60
+ if sys.version_info[0] >= 3:
61
+ string_types = (str,)
62
+ else:
63
+ string_types = (str, unicode)
64
+
65
+
66
+ def gethashfile(key):
67
+ return ("%02x" % abs(hash(key) % 256))[-2:]
68
+
69
+
70
+ _sentinel = object()
71
+
72
+
73
+ class PickleShareDB(collections_abc.MutableMapping):
74
+ """The main 'connection' object for PickleShare database"""
75
+
76
+ def __init__(self, root):
77
+ """Return a db object that will manage the specied directory"""
78
+ if not isinstance(root, string_types):
79
+ root = str(root)
80
+ root = os.path.abspath(os.path.expanduser(root))
81
+ self.root = Path(root)
82
+ if not self.root.is_dir():
83
+ # catching the exception is necessary if multiple processes are concurrently trying to create a folder
84
+ # exists_ok keyword argument of mkdir does the same but only from Python 3.5
85
+ try:
86
+ self.root.mkdir(parents=True)
87
+ except OSError as e:
88
+ if e.errno != errno.EEXIST:
89
+ raise
90
+ # cache has { 'key' : (obj, orig_mod_time) }
91
+ self.cache = {}
92
+
93
+ def __getitem__(self, key):
94
+ """db['key'] reading"""
95
+ fil = self.root / key
96
+ try:
97
+ mtime = fil.stat()[stat.ST_MTIME]
98
+ except OSError:
99
+ raise KeyError(key)
100
+
101
+ if fil in self.cache and mtime == self.cache[fil][1]:
102
+ return self.cache[fil][0]
103
+ try:
104
+ # The cached item has expired, need to read
105
+ with fil.open("rb") as f:
106
+ obj = pickle.loads(f.read())
107
+ except:
108
+ raise KeyError(key)
109
+
110
+ self.cache[fil] = (obj, mtime)
111
+ return obj
112
+
113
+ def __setitem__(self, key, value):
114
+ """db['key'] = 5"""
115
+ fil = self.root / key
116
+ parent = fil.parent
117
+ if parent and not parent.is_dir():
118
+ parent.mkdir(parents=True)
119
+ # We specify protocol 2, so that we can mostly go between Python 2
120
+ # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
121
+ with fil.open("wb") as f:
122
+ pickle.dump(value, f, protocol=2)
123
+ try:
124
+ self.cache[fil] = (value, fil.stat().st_mtime)
125
+ except OSError as e:
126
+ if e.errno != errno.ENOENT:
127
+ raise
128
+
129
+ def hset(self, hashroot, key, value):
130
+ """hashed set"""
131
+ hroot = self.root / hashroot
132
+ if not hroot.is_dir():
133
+ hroot.mkdir()
134
+ hfile = hroot / gethashfile(key)
135
+ d = self.get(hfile, {})
136
+ d.update({key: value})
137
+ self[hfile] = d
138
+
139
+ def hget(self, hashroot, key, default=_sentinel, fast_only=True):
140
+ """hashed get"""
141
+ hroot = self.root / hashroot
142
+ hfile = hroot / gethashfile(key)
143
+
144
+ d = self.get(hfile, _sentinel)
145
+ # print "got dict",d,"from",hfile
146
+ if d is _sentinel:
147
+ if fast_only:
148
+ if default is _sentinel:
149
+ raise KeyError(key)
150
+
151
+ return default
152
+
153
+ # slow mode ok, works even after hcompress()
154
+ d = self.hdict(hashroot)
155
+
156
+ return d.get(key, default)
157
+
158
+ def hdict(self, hashroot):
159
+ """Get all data contained in hashed category 'hashroot' as dict"""
160
+ hfiles = self.keys(hashroot + "/*")
161
+ hfiles.sort()
162
+ last = len(hfiles) and hfiles[-1] or ""
163
+ if last.endswith("xx"):
164
+ # print "using xx"
165
+ hfiles = [last] + hfiles[:-1]
166
+
167
+ all = {}
168
+
169
+ for f in hfiles:
170
+ # print "using",f
171
+ try:
172
+ all.update(self[f])
173
+ except KeyError:
174
+ print("Corrupt", f, "deleted - hset is not threadsafe!")
175
+ del self[f]
176
+
177
+ self.uncache(f)
178
+
179
+ return all
180
+
181
+ def hcompress(self, hashroot):
182
+ """Compress category 'hashroot', so hset is fast again
183
+
184
+ hget will fail if fast_only is True for compressed items (that were
185
+ hset before hcompress).
186
+
187
+ """
188
+ hfiles = self.keys(hashroot + "/*")
189
+ all = {}
190
+ for f in hfiles:
191
+ # print "using",f
192
+ all.update(self[f])
193
+ self.uncache(f)
194
+
195
+ self[hashroot + "/xx"] = all
196
+ for f in hfiles:
197
+ p = self.root / f
198
+ if p.name == "xx":
199
+ continue
200
+ p.unlink()
201
+
202
+ def __delitem__(self, key):
203
+ """del db["key"]"""
204
+ fil = self.root / key
205
+ self.cache.pop(fil, None)
206
+ try:
207
+ fil.unlink()
208
+ except OSError:
209
+ # notfound and permission denied are ok - we
210
+ # lost, the other process wins the conflict
211
+ pass
212
+
213
+ def _normalized(self, p):
214
+ """Make a key suitable for user's eyes"""
215
+ return str(p.relative_to(self.root)).replace("\\", "/")
216
+
217
+ def keys(self, globpat=None):
218
+ """All keys in DB, or all keys matching a glob"""
219
+
220
+ if globpat is None:
221
+ files = self.root.rglob("*")
222
+ else:
223
+ files = self.root.glob(globpat)
224
+ return [self._normalized(p) for p in files if p.is_file()]
225
+
226
+ def __iter__(self):
227
+ return iter(self.keys())
228
+
229
+ def __len__(self):
230
+ return len(self.keys())
231
+
232
+ def uncache(self, *items):
233
+ """Removes all, or specified items from cache
234
+
235
+ Use this after reading a large amount of large objects
236
+ to free up memory, when you won't be needing the objects
237
+ for a while.
238
+
239
+ """
240
+ if not items:
241
+ self.cache = {}
242
+ for it in items:
243
+ self.cache.pop(it, None)
244
+
245
+ def waitget(self, key, maxwaittime=60):
246
+ """Wait (poll) for a key to get a value
247
+
248
+ Will wait for `maxwaittime` seconds before raising a KeyError.
249
+ The call exits normally if the `key` field in db gets a value
250
+ within the timeout period.
251
+
252
+ Use this for synchronizing different processes or for ensuring
253
+ that an unfortunately timed "db['key'] = newvalue" operation
254
+ in another process (which causes all 'get' operation to cause a
255
+ KeyError for the duration of pickling) won't screw up your program
256
+ logic.
257
+ """
258
+
259
+ wtimes = [0.2] * 3 + [0.5] * 2 + [1]
260
+ tries = 0
261
+ waited = 0
262
+ while 1:
263
+ try:
264
+ val = self[key]
265
+ return val
266
+ except KeyError:
267
+ pass
268
+
269
+ if waited > maxwaittime:
270
+ raise KeyError(key)
271
+
272
+ time.sleep(wtimes[tries])
273
+ waited += wtimes[tries]
274
+ if tries < len(wtimes) - 1:
275
+ tries += 1
276
+
277
+ def getlink(self, folder):
278
+ """Get a convenient link for accessing items"""
279
+ return PickleShareLink(self, folder)
280
+
281
+ def __repr__(self):
282
+ return "PickleShareDB('%s')" % self.root
283
+
284
+
285
+ class PickleShareLink:
286
+ """A shortdand for accessing nested PickleShare data conveniently.
287
+
288
+ Created through PickleShareDB.getlink(), example::
289
+
290
+ lnk = db.getlink('myobjects/test')
291
+ lnk.foo = 2
292
+ lnk.bar = lnk.foo + 5
293
+
294
+ """
295
+
296
+ def __init__(self, db, keydir):
297
+ self.__dict__.update(locals())
298
+
299
+ def __getattr__(self, key):
300
+ return self.__dict__["db"][self.__dict__["keydir"] + "/" + key]
301
+
302
+ def __setattr__(self, key, val):
303
+ self.db[self.keydir + "/" + key] = val
304
+
305
+ def __repr__(self):
306
+ db = self.__dict__["db"]
307
+ keys = db.keys(self.__dict__["keydir"] + "/*")
308
+ return "<PickleShareLink '%s': %s>" % (
309
+ self.__dict__["keydir"],
310
+ ";".join([Path(k).basename() for k in keys]),
311
+ )
312
+
313
+
314
+ def main():
315
+ import textwrap
316
+
317
+ usage = textwrap.dedent(
318
+ """\
319
+ pickleshare - manage PickleShare databases
320
+
321
+ Usage:
322
+
323
+ pickleshare dump /path/to/db > dump.txt
324
+ pickleshare load /path/to/db < dump.txt
325
+ pickleshare test /path/to/db
326
+ """
327
+ )
328
+ DB = PickleShareDB
329
+ import sys
330
+
331
+ if len(sys.argv) < 2:
332
+ print(usage)
333
+ return
334
+
335
+ cmd = sys.argv[1]
336
+ args = sys.argv[2:]
337
+ if cmd == "dump":
338
+ if not args:
339
+ args = ["."]
340
+ db = DB(args[0])
341
+ import pprint
342
+
343
+ pprint.pprint(db.items())
344
+ elif cmd == "load":
345
+ cont = sys.stdin.read()
346
+ db = DB(args[0])
347
+ data = eval(cont)
348
+ db.clear()
349
+ for k, v in data.items():
350
+ db[k] = v
351
+ elif cmd == "testwait":
352
+ db = DB(args[0])
353
+ db.clear()
354
+ print(db.waitget("250"))
355
+ elif cmd == "test":
356
+ test()
357
+ stress()
358
+
359
+
360
+ if __name__ == "__main__":
361
+ main()
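A minimal round trip through PickleShareDB, kept separate from the vendored module; it uses a temporary directory instead of the home-directory path shown in the docstring.

import tempfile
from IPython.external.pickleshare import PickleShareDB

with tempfile.TemporaryDirectory() as tmp:
    db = PickleShareDB(tmp)            # keys behave like relative file paths
    db["hello"] = 15
    db["paths/are/ok/key"] = [1, (5, 46)]
    print(sorted(db.keys()))           # ['hello', 'paths/are/ok/key']
    print(db["paths/are/ok/key"])      # [1, (5, 46)]
    del db["hello"]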
temp_venv/lib/python3.13/site-packages/IPython/external/qt_for_kernel.py ADDED
@@ -0,0 +1,124 @@
1
+ """Import Qt in a manner suitable for an IPython kernel.
2
+
3
+ This is the import used for the `gui=qt` or `matplotlib=qt` initialization.
4
+
5
+ Import Priority:
6
+
7
+ if Qt has been imported anywhere else:
8
+ use that
9
+
10
+ if matplotlib has been imported and doesn't support v2 (<= 1.0.1):
11
+ use PyQt4 @v1
12
+
13
+ Next, ask QT_API env variable
14
+
15
+ if QT_API not set:
16
+ ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the
17
+ version matplotlib is configured with
18
+
19
+ else: (matplotlib said nothing)
20
+ # this is the default path - nobody told us anything
21
+ try in this order:
22
+ PyQt default version, PyQt6, PySide6, PyQt5, PySide2
23
+ else:
24
+ use what QT_API says
25
+
26
+ Note that %gui's implementation will always set a `QT_API`, see
27
+ `IPython.terminal.pt_inputhooks.get_inputhook_name_and_func`
28
+
29
+ """
30
+ # NOTE: This is no longer an external, third-party module, and should be
31
+ # considered part of IPython. For compatibility however, it is being kept in
32
+ # IPython/external.
33
+
34
+ import os
35
+ import sys
36
+
37
+ from IPython.external.qt_loaders import (
38
+ load_qt,
39
+ loaded_api,
40
+ enum_factory,
41
+ # QT6
42
+ QT_API_PYQT6,
43
+ QT_API_PYSIDE6,
44
+ # QT5
45
+ QT_API_PYQT5,
46
+ QT_API_PYSIDE2,
47
+ # QT4
48
+ QT_API_PYQT,
49
+ QT_API_PYSIDE,
50
+ # default
51
+ QT_API_PYQT_DEFAULT,
52
+ )
53
+
54
+ _qt_apis = (
55
+ # QT6
56
+ QT_API_PYQT6,
57
+ QT_API_PYSIDE6,
58
+ # QT5
59
+ QT_API_PYQT5,
60
+ QT_API_PYSIDE2,
61
+ # default
62
+ QT_API_PYQT_DEFAULT,
63
+ )
64
+
65
+
66
+ def matplotlib_options(mpl):
67
+ """Constraints placed on an imported matplotlib."""
68
+ if mpl is None:
69
+ return
70
+ backend = mpl.rcParams.get('backend', None)
71
+ if backend == 'Qt4Agg':
72
+ mpqt = mpl.rcParams.get('backend.qt4', None)
73
+ if mpqt is None:
74
+ return None
75
+ if mpqt.lower() == 'pyside':
76
+ return [QT_API_PYSIDE]
77
+ elif mpqt.lower() == 'pyqt4':
78
+ return [QT_API_PYQT_DEFAULT]
79
+ elif mpqt.lower() == 'pyqt4v2':
80
+ return [QT_API_PYQT]
81
+ raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" %
82
+ mpqt)
83
+ elif backend == 'Qt5Agg':
84
+ mpqt = mpl.rcParams.get('backend.qt5', None)
85
+ if mpqt is None:
86
+ return None
87
+ if mpqt.lower() == 'pyqt5':
88
+ return [QT_API_PYQT5]
89
+ raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" %
90
+ mpqt)
91
+
92
+ def get_options():
93
+ """Return a list of acceptable QT APIs, in decreasing order of preference."""
94
+ #already imported Qt somewhere. Use that
95
+ loaded = loaded_api()
96
+ if loaded is not None:
97
+ return [loaded]
98
+
99
+ mpl = sys.modules.get("matplotlib", None)
100
+
101
+ if mpl is not None and tuple(mpl.__version__.split(".")) < ("1", "0", "2"):
102
+ # 1.0.1 only supports PyQt4 v1
103
+ return [QT_API_PYQT_DEFAULT]
104
+
105
+ qt_api = os.environ.get('QT_API', None)
106
+ if qt_api is None:
107
+ #no ETS variable. Ask mpl, then use default fallback path
108
+ return matplotlib_options(mpl) or [
109
+ QT_API_PYQT_DEFAULT,
110
+ QT_API_PYQT6,
111
+ QT_API_PYSIDE6,
112
+ QT_API_PYQT5,
113
+ QT_API_PYSIDE2,
114
+ ]
115
+ elif qt_api not in _qt_apis:
116
+ raise RuntimeError("Invalid Qt API %r, valid values are: %r" %
117
+ (qt_api, ', '.join(_qt_apis)))
118
+ else:
119
+ return [qt_api]
120
+
121
+
122
+ api_opts = get_options()
123
+ QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts)
124
+ enum_helper = enum_factory(QT_API, QtCore)
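A hedged example, not part of the module, of what `enum_helper` gives downstream code: one spelling that resolves to the right enum container under both PyQt6-style and older-style bindings. It assumes some supported Qt binding is installed; otherwise the import itself raises.

from IPython.external.qt_for_kernel import QT_API, QtCore, enum_helper

print("Loaded binding:", QT_API)

# PyQt6 exposes QtCore.Qt.ItemDataRole.DisplayRole, while the older bindings
# expose QtCore.Qt.DisplayRole; enum_helper hides that difference.
display_role = enum_helper("QtCore.Qt.ItemDataRole").DisplayRole
print(display_role)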
temp_venv/lib/python3.13/site-packages/IPython/external/qt_loaders.py ADDED
@@ -0,0 +1,423 @@
1
+ """
2
+ This module contains factory functions that attempt
3
+ to return Qt submodules from the various python Qt bindings.
4
+
5
+ It also protects against double-importing Qt with different
6
+ bindings, which is unstable and likely to crash
7
+
8
+ This is used primarily by qt and qt_for_kernel, and shouldn't
9
+ be accessed directly from the outside
10
+ """
11
+
12
+ import importlib.abc
13
+ import sys
14
+ import os
15
+ import types
16
+ from functools import partial, lru_cache
17
+ import operator
18
+
19
+ # ### Available APIs.
20
+ # Qt6
21
+ QT_API_PYQT6 = "pyqt6"
22
+ QT_API_PYSIDE6 = "pyside6"
23
+
24
+ # Qt5
25
+ QT_API_PYQT5 = 'pyqt5'
26
+ QT_API_PYSIDE2 = 'pyside2'
27
+
28
+ # Qt4
29
+ # NOTE: Here for legacy matplotlib compatibility, but not really supported on the IPython side.
30
+ QT_API_PYQT = "pyqt" # Force version 2
31
+ QT_API_PYQTv1 = "pyqtv1" # Force version 2
32
+ QT_API_PYSIDE = "pyside"
33
+
34
+ QT_API_PYQT_DEFAULT = "pyqtdefault" # use system default for version 1 vs. 2
35
+
36
+ api_to_module = {
37
+ # Qt6
38
+ QT_API_PYQT6: "PyQt6",
39
+ QT_API_PYSIDE6: "PySide6",
40
+ # Qt5
41
+ QT_API_PYQT5: "PyQt5",
42
+ QT_API_PYSIDE2: "PySide2",
43
+ # Qt4
44
+ QT_API_PYSIDE: "PySide",
45
+ QT_API_PYQT: "PyQt4",
46
+ QT_API_PYQTv1: "PyQt4",
47
+ # default
48
+ QT_API_PYQT_DEFAULT: "PyQt6",
49
+ }
50
+
51
+
52
+ class ImportDenier(importlib.abc.MetaPathFinder):
53
+ """Import Hook that will guard against bad Qt imports
54
+ once IPython commits to a specific binding
55
+ """
56
+
57
+ def __init__(self):
58
+ self.__forbidden = set()
59
+
60
+ def forbid(self, module_name):
61
+ sys.modules.pop(module_name, None)
62
+ self.__forbidden.add(module_name)
63
+
64
+ def find_spec(self, fullname, path, target=None):
65
+ if path:
66
+ return
67
+ if fullname in self.__forbidden:
68
+ raise ImportError(
69
+ """
70
+ Importing %s disabled by IPython, which has
71
+ already imported an Incompatible QT Binding: %s
72
+ """
73
+ % (fullname, loaded_api())
74
+ )
75
+
76
+
77
+ ID = ImportDenier()
78
+ sys.meta_path.insert(0, ID)
79
+
80
+
81
+ def commit_api(api):
82
+ """Commit to a particular API, and trigger ImportErrors on subsequent
83
+ dangerous imports"""
84
+ modules = set(api_to_module.values())
85
+
86
+ modules.remove(api_to_module[api])
87
+ for mod in modules:
88
+ ID.forbid(mod)
89
+
90
+
91
+ def loaded_api():
92
+ """Return which API is loaded, if any
93
+
94
+ If this returns anything besides None,
95
+ importing any other Qt binding is unsafe.
96
+
97
+ Returns
98
+ -------
99
+ None, 'pyside6', 'pyqt6', 'pyside2', 'pyside', 'pyqt', 'pyqt5', 'pyqtv1'
100
+ """
101
+ if sys.modules.get("PyQt6.QtCore"):
102
+ return QT_API_PYQT6
103
+ elif sys.modules.get("PySide6.QtCore"):
104
+ return QT_API_PYSIDE6
105
+ elif sys.modules.get("PyQt5.QtCore"):
106
+ return QT_API_PYQT5
107
+ elif sys.modules.get("PySide2.QtCore"):
108
+ return QT_API_PYSIDE2
109
+ elif sys.modules.get("PyQt4.QtCore"):
110
+ if qtapi_version() == 2:
111
+ return QT_API_PYQT
112
+ else:
113
+ return QT_API_PYQTv1
114
+ elif sys.modules.get("PySide.QtCore"):
115
+ return QT_API_PYSIDE
116
+
117
+ return None
118
+
119
+
120
+ def has_binding(api):
121
+ """Safely check for PyQt4/5, PySide or PySide2, without importing submodules
122
+
123
+ Parameters
124
+ ----------
125
+ api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyqt6' | 'pyside' | 'pyside2' | 'pyside6' | 'pyqtdefault']
126
+ Which module to check for
127
+
128
+ Returns
129
+ -------
130
+ True if the relevant module appears to be importable
131
+ """
132
+ module_name = api_to_module[api]
133
+ from importlib.util import find_spec
134
+
135
+ required = ['QtCore', 'QtGui', 'QtSvg']
136
+ if api in (QT_API_PYQT5, QT_API_PYSIDE2, QT_API_PYQT6, QT_API_PYSIDE6):
137
+ # QT5 requires QtWidgets too
138
+ required.append('QtWidgets')
139
+
140
+ for submod in required:
141
+ try:
142
+ spec = find_spec('%s.%s' % (module_name, submod))
143
+ except ImportError:
144
+ # Package (e.g. PyQt5) not found
145
+ return False
146
+ else:
147
+ if spec is None:
148
+ # Submodule (e.g. PyQt5.QtCore) not found
149
+ return False
150
+
151
+ if api == QT_API_PYSIDE:
152
+ # We can also safely check PySide version
153
+ import PySide
154
+
155
+ return PySide.__version_info__ >= (1, 0, 3)
156
+
157
+ return True
158
+
159
+
160
+ def qtapi_version():
161
+ """Return which QString API has been set, if any
162
+
163
+ Returns
164
+ -------
165
+ The QString API version (1 or 2), or None if not set
166
+ """
167
+ try:
168
+ import sip
169
+ except ImportError:
170
+ # as of PyQt5 5.11, sip is no longer available as a top-level
171
+ # module and needs to be imported from the PyQt5 namespace
172
+ try:
173
+ from PyQt5 import sip
174
+ except ImportError:
175
+ return
176
+ try:
177
+ return sip.getapi('QString')
178
+ except ValueError:
179
+ return
180
+
181
+
182
+ def can_import(api):
183
+ """Safely query whether an API is importable, without importing it"""
184
+ if not has_binding(api):
185
+ return False
186
+
187
+ current = loaded_api()
188
+ if api == QT_API_PYQT_DEFAULT:
189
+ return current in [QT_API_PYQT6, None]
190
+ else:
191
+ return current in [api, None]
192
+
193
+
194
+ def import_pyqt4(version=2):
195
+ """
196
+ Import PyQt4
197
+
198
+ Parameters
199
+ ----------
200
+ version : 1, 2, or None
201
+ Which QString/QVariant API to use. Set to None to use the system
202
+ default
203
+ ImportErrors raised within this function are non-recoverable
204
+ """
205
+ # The new-style string API (version=2) automatically
206
+ # converts QStrings to Unicode Python strings. Also, automatically unpacks
207
+ # QVariants to their underlying objects.
208
+ import sip
209
+
210
+ if version is not None:
211
+ sip.setapi('QString', version)
212
+ sip.setapi('QVariant', version)
213
+
214
+ from PyQt4 import QtGui, QtCore, QtSvg
215
+
216
+ if QtCore.PYQT_VERSION < 0x040700:
217
+ raise ImportError("IPython requires PyQt4 >= 4.7, found %s" %
218
+ QtCore.PYQT_VERSION_STR)
219
+
220
+ # Alias PyQt-specific functions for PySide compatibility.
221
+ QtCore.Signal = QtCore.pyqtSignal
222
+ QtCore.Slot = QtCore.pyqtSlot
223
+
224
+ # query for the API version (in case version == None)
225
+ version = sip.getapi('QString')
226
+ api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT
227
+ return QtCore, QtGui, QtSvg, api
228
+
229
+
230
+ def import_pyqt5():
231
+ """
232
+ Import PyQt5
233
+
234
+ ImportErrors raised within this function are non-recoverable
235
+ """
236
+
237
+ from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui
238
+
239
+ # Alias PyQt-specific functions for PySide compatibility.
240
+ QtCore.Signal = QtCore.pyqtSignal
241
+ QtCore.Slot = QtCore.pyqtSlot
242
+
243
+ # Join QtGui and QtWidgets for Qt4 compatibility.
244
+ QtGuiCompat = types.ModuleType('QtGuiCompat')
245
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
246
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
247
+
248
+ api = QT_API_PYQT5
249
+ return QtCore, QtGuiCompat, QtSvg, api
250
+
251
+
252
+ def import_pyqt6():
253
+ """
254
+ Import PyQt6
255
+
256
+ ImportErrors raised within this function are non-recoverable
257
+ """
258
+
259
+ from PyQt6 import QtCore, QtSvg, QtWidgets, QtGui
260
+
261
+ # Alias PyQt-specific functions for PySide compatibility.
262
+ QtCore.Signal = QtCore.pyqtSignal
263
+ QtCore.Slot = QtCore.pyqtSlot
264
+
265
+ # Join QtGui and QtWidgets for Qt4 compatibility.
266
+ QtGuiCompat = types.ModuleType("QtGuiCompat")
267
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
268
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
269
+
270
+ api = QT_API_PYQT6
271
+ return QtCore, QtGuiCompat, QtSvg, api
272
+
273
+
274
+ def import_pyside():
275
+ """
276
+ Import PySide
277
+
278
+ ImportErrors raised within this function are non-recoverable
279
+ """
280
+ from PySide import QtGui, QtCore, QtSvg
281
+ return QtCore, QtGui, QtSvg, QT_API_PYSIDE
282
+
283
+ def import_pyside2():
284
+ """
285
+ Import PySide2
286
+
287
+ ImportErrors raised within this function are non-recoverable
288
+ """
289
+ from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
290
+
291
+ # Join QtGui and QtWidgets for Qt4 compatibility.
292
+ QtGuiCompat = types.ModuleType('QtGuiCompat')
293
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
294
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
295
+ QtGuiCompat.__dict__.update(QtPrintSupport.__dict__)
296
+
297
+ return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2
298
+
299
+
300
+ def import_pyside6():
301
+ """
302
+ Import PySide6
303
+
304
+ ImportErrors raised within this function are non-recoverable
305
+ """
306
+
307
+ def get_attrs(module):
308
+ return {
309
+ name: getattr(module, name)
310
+ for name in dir(module)
311
+ if not name.startswith("_")
312
+ }
313
+
314
+ from PySide6 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
315
+
316
+ # Join QtGui and QtWidgets for Qt4 compatibility.
317
+ QtGuiCompat = types.ModuleType("QtGuiCompat")
318
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
319
+ if QtCore.__version_info__ < (6, 7):
320
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
321
+ QtGuiCompat.__dict__.update(QtPrintSupport.__dict__)
322
+ else:
323
+ QtGuiCompat.__dict__.update(get_attrs(QtWidgets))
324
+ QtGuiCompat.__dict__.update(get_attrs(QtPrintSupport))
325
+
326
+ return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE6
327
+
328
+
329
+ def load_qt(api_options):
330
+ """
331
+ Attempt to import Qt, given a preference list
332
+ of permissible bindings
333
+
334
+ It is safe to call this function multiple times.
335
+
336
+ Parameters
337
+ ----------
338
+ api_options : List of strings
339
+ The order of APIs to try. Valid items are 'pyside', 'pyside2', 'pyside6',
340
+ 'pyqt', 'pyqt5', 'pyqt6', 'pyqtv1' and 'pyqtdefault'
341
+
342
+ Returns
343
+ -------
344
+ A tuple of QtCore, QtGui, QtSvg, QT_API
345
+ The first three are the Qt modules. The last is the
346
+ string indicating which module was loaded.
347
+
348
+ Raises
349
+ ------
350
+ ImportError, if it isn't possible to import any requested
351
+ bindings (either because they aren't installed, or because
352
+ an incompatible library has already been installed)
353
+ """
354
+ loaders = {
355
+ # Qt6
356
+ QT_API_PYQT6: import_pyqt6,
357
+ QT_API_PYSIDE6: import_pyside6,
358
+ # Qt5
359
+ QT_API_PYQT5: import_pyqt5,
360
+ QT_API_PYSIDE2: import_pyside2,
361
+ # Qt4
362
+ QT_API_PYSIDE: import_pyside,
363
+ QT_API_PYQT: import_pyqt4,
364
+ QT_API_PYQTv1: partial(import_pyqt4, version=1),
365
+ # default
366
+ QT_API_PYQT_DEFAULT: import_pyqt6,
367
+ }
368
+
369
+ for api in api_options:
370
+
371
+ if api not in loaders:
372
+ raise RuntimeError(
373
+ "Invalid Qt API %r, valid values are: %s" %
374
+ (api, ", ".join(["%r" % k for k in loaders.keys()])))
375
+
376
+ if not can_import(api):
377
+ continue
378
+
379
+ #cannot safely recover from an ImportError during this
380
+ result = loaders[api]()
381
+ api = result[-1] # changed if api = QT_API_PYQT_DEFAULT
382
+ commit_api(api)
383
+ return result
384
+ else:
385
+ # Clear the environment variable since it doesn't work.
386
+ if "QT_API" in os.environ:
387
+ del os.environ["QT_API"]
388
+
389
+ raise ImportError(
390
+ """
391
+ Could not load requested Qt binding. Please ensure that
392
+ PyQt4 >= 4.7, PyQt5, PyQt6, PySide >= 1.0.3, PySide2, or
393
+ PySide6 is available, and only one is imported per session.
394
+
395
+ Currently-imported Qt library: %r
396
+ PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
397
+ PyQt6 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
398
+ PySide2 installed: %s
399
+ PySide6 installed: %s
400
+ Tried to load: %r
401
+ """
402
+ % (
403
+ loaded_api(),
404
+ has_binding(QT_API_PYQT5),
405
+ has_binding(QT_API_PYQT6),
406
+ has_binding(QT_API_PYSIDE2),
407
+ has_binding(QT_API_PYSIDE6),
408
+ api_options,
409
+ )
410
+ )
411
+
412
+
413
+ def enum_factory(QT_API, QtCore):
414
+ """Construct an enum helper to account for PyQt5 <-> PyQt6 changes."""
415
+
416
+ @lru_cache(None)
417
+ def _enum(name):
418
+ # foo.bar.Enum.Entry (PyQt6) <=> foo.bar.Entry (non-PyQt6).
419
+ return operator.attrgetter(
420
+ name if QT_API == QT_API_PYQT6 else name.rpartition(".")[0]
421
+ )(sys.modules[QtCore.__package__])
422
+
423
+ return _enum
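A short sketch, separate from the module, of probing installed bindings without importing them and then letting `load_qt` commit to the first importable choice; which bindings report as available obviously depends on the environment.

from IPython.external import qt_loaders as ql

candidates = [ql.QT_API_PYQT6, ql.QT_API_PYSIDE6, ql.QT_API_PYQT5, ql.QT_API_PYSIDE2]
print({api: ql.has_binding(api) for api in candidates})   # uses find_spec only

available = [api for api in candidates if ql.can_import(api)]
if available:
    QtCore, QtGui, QtSvg, api = ql.load_qt(available)
    print("Committed to:", api)   # importing a different binding now raises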
temp_venv/lib/python3.13/site-packages/IPython/lib/__init__.py ADDED
@@ -0,0 +1,11 @@
1
+ # encoding: utf-8
2
+ """
3
+ Extra capabilities for IPython
4
+ """
5
+
6
+ # -----------------------------------------------------------------------------
7
+ # Copyright (C) 2008-2011 The IPython Development Team
8
+ #
9
+ # Distributed under the terms of the BSD License. The full license is in
10
+ # the file COPYING, distributed as part of this software.
11
+ # -----------------------------------------------------------------------------
temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (248 Bytes). View file
 
temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/clipboard.cpython-313.pyc ADDED
Binary file (5.02 kB). View file
 
temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/display.cpython-313.pyc ADDED
Binary file (28.8 kB). View file
 
temp_venv/lib/python3.13/site-packages/IPython/lib/__pycache__/pretty.cpython-313.pyc ADDED
Binary file (44.5 kB). View file
 
temp_venv/lib/python3.13/site-packages/IPython/lib/backgroundjobs.py ADDED
@@ -0,0 +1,491 @@
1
+ # -*- coding: utf-8 -*-
2
+ """Manage background (threaded) jobs conveniently from an interactive shell.
3
+
4
+ This module provides a BackgroundJobManager class. This is the main class
5
+ meant for public usage, it implements an object which can create and manage
6
+ new background jobs.
7
+
8
+ It also provides the actual job classes managed by these BackgroundJobManager
9
+ objects, see their docstrings below.
10
+
11
+
12
+ This system was inspired by discussions with B. Granger and the
13
+ BackgroundCommand class described in the book Python Scripting for
14
+ Computational Science, by H. P. Langtangen:
15
+
16
+ http://folk.uio.no/hpl/scripting
17
+
18
+ (although ultimately no code from this text was used, as IPython's system is a
19
+ separate implementation).
20
+
21
+ An example notebook is provided in our documentation illustrating interactive
22
+ use of the system.
23
+ """
24
+
25
+ #*****************************************************************************
26
+ # Copyright (C) 2005-2006 Fernando Perez <[email protected]>
27
+ #
28
+ # Distributed under the terms of the BSD License. The full license is in
29
+ # the file COPYING, distributed as part of this software.
30
+ #*****************************************************************************
31
+
32
+ # Code begins
33
+ import sys
34
+ import threading
35
+
36
+ from IPython import get_ipython
37
+ from IPython.core.ultratb import AutoFormattedTB
38
+ from logging import error, debug
39
+
40
+
41
+ class BackgroundJobManager:
42
+ """Class to manage a pool of backgrounded threaded jobs.
43
+
44
+ Below, we assume that 'jobs' is a BackgroundJobManager instance.
45
+
46
+ Usage summary (see the method docstrings for details):
47
+
48
+ jobs.new(...) -> start a new job
49
+
50
+ jobs() or jobs.status() -> print status summary of all jobs
51
+
52
+ jobs[N] -> returns job number N.
53
+
54
+ foo = jobs[N].result -> assign to variable foo the result of job N
55
+
56
+ jobs[N].traceback() -> print the traceback of dead job N
57
+
58
+ jobs.remove(N) -> remove (finished) job N
59
+
60
+ jobs.flush() -> remove all finished jobs
61
+
62
+ As a convenience feature, BackgroundJobManager instances provide the
63
+ utility result and traceback methods which retrieve the corresponding
64
+ information from the jobs list:
65
+
66
+ jobs.result(N) <--> jobs[N].result
67
+ jobs.traceback(N) <--> jobs[N].traceback()
68
+
69
+ While this appears minor, it allows you to use tab completion
70
+ interactively on the job manager instance.
71
+ """
72
+
73
+ def __init__(self):
74
+ # Lists for job management, accessed via a property to ensure they're
75
+ # up to date.
76
+ self._running = []
77
+ self._completed = []
78
+ self._dead = []
79
+ # A dict of all jobs, so users can easily access any of them
80
+ self.all = {}
81
+ # For reporting
82
+ self._comp_report = []
83
+ self._dead_report = []
84
+ # Store status codes locally for fast lookups
85
+ self._s_created = BackgroundJobBase.stat_created_c
86
+ self._s_running = BackgroundJobBase.stat_running_c
87
+ self._s_completed = BackgroundJobBase.stat_completed_c
88
+ self._s_dead = BackgroundJobBase.stat_dead_c
89
+ self._current_job_id = 0
90
+
91
+ @property
92
+ def running(self):
93
+ self._update_status()
94
+ return self._running
95
+
96
+ @property
97
+ def dead(self):
98
+ self._update_status()
99
+ return self._dead
100
+
101
+ @property
102
+ def completed(self):
103
+ self._update_status()
104
+ return self._completed
105
+
106
+ def new(self, func_or_exp, *args, **kwargs):
107
+ """Add a new background job and start it in a separate thread.
108
+
109
+ There are two types of jobs which can be created:
110
+
111
+ 1. Jobs based on expressions which can be passed to an eval() call.
112
+ The expression must be given as a string. For example:
113
+
114
+ job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]])
115
+
116
+ The given expression is passed to eval(), along with the optional
117
+ global/local dicts provided. If no dicts are given, they are
118
+ extracted automatically from the caller's frame.
119
+
120
+ A Python statement is NOT a valid eval() expression. Basically, you
121
+ can only use as an eval() argument something which can go on the right
122
+ of an '=' sign and be assigned to a variable.
123
+
124
+ For example,"print 'hello'" is not valid, but '2+3' is.
125
+
126
+ 2. Jobs given a function object, optionally passing additional
127
+ positional arguments:
128
+
129
+ job_manager.new(myfunc, x, y)
130
+
131
+ The function is called with the given arguments.
132
+
133
+ If you need to pass keyword arguments to your function, you must
134
+ supply them as a dict named kw:
135
+
136
+ job_manager.new(myfunc, x, y, kw=dict(z=1))
137
+
138
+ The reason for this asymmetry is that the new() method needs to
139
+ maintain access to its own keywords, and this prevents name collisions
140
+ between arguments to new() and arguments to your own functions.
141
+
142
+ In both cases, the result is stored in the job.result field of the
143
+ background job object.
144
+
145
+ You can set `daemon` attribute of the thread by giving the keyword
146
+ argument `daemon`.
147
+
148
+ Notes and caveats:
149
+
150
+ 1. All threads running share the same standard output. Thus, if your
151
+ background jobs generate output, it will come out on top of whatever
152
+ you are currently writing. For this reason, background jobs are best
153
+ used with silent functions which simply return their output.
154
+
155
+ 2. Threads also all work within the same global namespace, and this
156
+ system does not lock interactive variables. So if you send job to the
157
+ background which operates on a mutable object for a long time, and
158
+ start modifying that same mutable object interactively (or in another
159
+ backgrounded job), all sorts of bizarre behaviour will occur.
160
+
161
+ 3. If a background job is spending a lot of time inside a C extension
162
+ module which does not release the Python Global Interpreter Lock
163
+ (GIL), this will block the IPython prompt. This is simply because the
164
+ Python interpreter can only switch between threads at Python
165
+ bytecodes. While the execution is inside C code, the interpreter must
166
+ simply wait unless the extension module releases the GIL.
167
+
168
+ 4. There is no way, due to limitations in the Python threads library,
169
+ to kill a thread once it has started."""
170
+
171
+ if callable(func_or_exp):
172
+ kw = kwargs.get('kw',{})
173
+ job = BackgroundJobFunc(func_or_exp,*args,**kw)
174
+ elif isinstance(func_or_exp, str):
175
+ if not args:
176
+ frame = sys._getframe(1)
177
+ glob, loc = frame.f_globals, frame.f_locals
178
+ elif len(args)==1:
179
+ glob = loc = args[0]
180
+ elif len(args)==2:
181
+ glob,loc = args
182
+ else:
183
+ raise ValueError(
184
+ 'Expression jobs take at most 2 args (globals,locals)')
185
+ job = BackgroundJobExpr(func_or_exp, glob, loc)
186
+ else:
187
+ raise TypeError('invalid args for new job')
188
+
189
+ if kwargs.get('daemon', False):
190
+ job.daemon = True
191
+ job.num = self._current_job_id
192
+ self._current_job_id += 1
193
+ self.running.append(job)
194
+ self.all[job.num] = job
195
+ debug('Starting job # %s in a separate thread.' % job.num)
196
+ job.start()
197
+ return job
198
+
199
+ def __getitem__(self, job_key):
200
+ num = job_key if isinstance(job_key, int) else job_key.num
201
+ return self.all[num]
202
+
203
+ def __call__(self):
204
+ """An alias to self.status(),
205
+
206
+ This allows you to simply call a job manager instance much like the
207
+ Unix `jobs` shell command."""
208
+
209
+ return self.status()
210
+
211
+ def _update_status(self):
212
+ """Update the status of the job lists.
213
+
214
+ This method moves finished jobs to one of two lists:
215
+ - self.completed: jobs which completed successfully
216
+ - self.dead: jobs which finished but died.
217
+
218
+ It also copies those jobs to corresponding _report lists. These lists
219
+ are used to report jobs completed/dead since the last update, and are
220
+ then cleared by the reporting function after each call."""
221
+
222
+ # Status codes
223
+ srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
224
+ # State lists, use the actual lists b/c the public names are properties
225
+ # that call this very function on access
226
+ running, completed, dead = self._running, self._completed, self._dead
227
+
228
+ # Now, update all state lists
229
+ for num, job in enumerate(running):
230
+ stat = job.stat_code
231
+ if stat == srun:
232
+ continue
233
+ elif stat == scomp:
234
+ completed.append(job)
235
+ self._comp_report.append(job)
236
+ running[num] = False
237
+ elif stat == sdead:
238
+ dead.append(job)
239
+ self._dead_report.append(job)
240
+ running[num] = False
241
+ # Remove dead/completed jobs from running list
242
+ running[:] = filter(None, running)
243
+
244
+ def _group_report(self,group,name):
245
+ """Report summary for a given job group.
246
+
247
+ Return True if the group had any elements."""
248
+
249
+ if group:
250
+ print('%s jobs:' % name)
251
+ for job in group:
252
+ print('%s : %s' % (job.num,job))
253
+ print()
254
+ return True
255
+
256
+ def _group_flush(self,group,name):
257
+ """Flush a given job group
258
+
259
+ Return True if the group had any elements."""
260
+
261
+ njobs = len(group)
262
+ if njobs:
263
+ plural = {1:''}.setdefault(njobs,'s')
264
+ print('Flushing %s %s job%s.' % (njobs,name,plural))
265
+ group[:] = []
266
+ return True
267
+
268
+ def _status_new(self):
269
+ """Print the status of newly finished jobs.
270
+
271
+ Return True if any new jobs are reported.
272
+
273
+ This call resets its own state every time, so it only reports jobs
274
+ which have finished since the last time it was called."""
275
+
276
+ self._update_status()
277
+ new_comp = self._group_report(self._comp_report, 'Completed')
278
+ new_dead = self._group_report(self._dead_report,
279
+ 'Dead, call jobs.traceback() for details')
280
+ self._comp_report[:] = []
281
+ self._dead_report[:] = []
282
+ return new_comp or new_dead
283
+
284
+ def status(self,verbose=0):
285
+ """Print a status of all jobs currently being managed."""
286
+
287
+ self._update_status()
288
+ self._group_report(self.running,'Running')
289
+ self._group_report(self.completed,'Completed')
290
+ self._group_report(self.dead,'Dead')
291
+ # Also flush the report queues
292
+ self._comp_report[:] = []
293
+ self._dead_report[:] = []
294
+
295
+ def remove(self,num):
296
+ """Remove a finished (completed or dead) job."""
297
+
298
+ try:
299
+ job = self.all[num]
300
+ except KeyError:
301
+ error('Job #%s not found' % num)
302
+ else:
303
+ stat_code = job.stat_code
304
+ if stat_code == self._s_running:
305
+ error('Job #%s is still running, it can not be removed.' % num)
306
+ return
307
+ elif stat_code == self._s_completed:
308
+ self.completed.remove(job)
309
+ elif stat_code == self._s_dead:
310
+ self.dead.remove(job)
311
+
312
+ def flush(self):
313
+ """Flush all finished jobs (completed and dead) from lists.
314
+
315
+ Running jobs are never flushed.
316
+
317
+ It first calls _status_new(), to update info. If any jobs have
318
+ completed since the last _status_new() call, the flush operation
319
+ aborts."""
320
+
321
+ # Remove the finished jobs from the master dict
322
+ alljobs = self.all
323
+ for job in self.completed+self.dead:
324
+ del(alljobs[job.num])
325
+
326
+ # Now flush these lists completely
327
+ fl_comp = self._group_flush(self.completed, 'Completed')
328
+ fl_dead = self._group_flush(self.dead, 'Dead')
329
+ if not (fl_comp or fl_dead):
330
+ print('No jobs to flush.')
331
+
332
+ def result(self,num):
333
+ """result(N) -> return the result of job N."""
334
+ try:
335
+ return self.all[num].result
336
+ except KeyError:
337
+ error('Job #%s not found' % num)
338
+
339
+ def _traceback(self, job):
340
+ num = job if isinstance(job, int) else job.num
341
+ try:
342
+ self.all[num].traceback()
343
+ except KeyError:
344
+ error('Job #%s not found' % num)
345
+
346
+ def traceback(self, job=None):
347
+ if job is None:
348
+ self._update_status()
349
+ for deadjob in self.dead:
350
+ print("Traceback for: %r" % deadjob)
351
+ self._traceback(deadjob)
352
+ print()
353
+ else:
354
+ self._traceback(job)
355
+
356
+
357
+ class BackgroundJobBase(threading.Thread):
358
+ """Base class to build BackgroundJob classes.
359
+
360
+ The derived classes must implement:
361
+
362
+ - Their own __init__, since the one here raises NotImplementedError. The
363
+ derived constructor must call self._init() at the end, to provide common
364
+ initialization.
365
+
366
+ - A strform attribute used in calls to __str__.
367
+
368
+ - A call() method, which will make the actual execution call and must
369
+ return a value to be held in the 'result' field of the job object.
370
+ """
371
+
372
+ # Class constants for status, in string and as numerical codes (when
373
+ # updating jobs lists, we don't want to do string comparisons). This will
374
+ # be done at every user prompt, so it has to be as fast as possible
375
+ stat_created = 'Created'; stat_created_c = 0
376
+ stat_running = 'Running'; stat_running_c = 1
377
+ stat_completed = 'Completed'; stat_completed_c = 2
378
+ stat_dead = 'Dead (Exception), call jobs.traceback() for details'
379
+ stat_dead_c = -1
380
+
381
+ def __init__(self):
382
+ """Must be implemented in subclasses.
383
+
384
+ Subclasses must call :meth:`_init` for standard initialisation.
385
+ """
386
+ raise NotImplementedError("This class can not be instantiated directly.")
387
+
388
+ def _init(self):
389
+ """Common initialization for all BackgroundJob objects"""
390
+
391
+ for attr in ['call','strform']:
392
+ assert hasattr(self,attr), "Missing attribute <%s>" % attr
393
+
394
+ # The num tag can be set by an external job manager
395
+ self.num = None
396
+
397
+ self.status = BackgroundJobBase.stat_created
398
+ self.stat_code = BackgroundJobBase.stat_created_c
399
+ self.finished = False
400
+ self.result = '<BackgroundJob has not completed>'
401
+
402
+ # reuse the ipython traceback handler if we can get to it, otherwise
403
+ # make a new one
404
+ try:
405
+ make_tb = get_ipython().InteractiveTB.text
406
+ except:
407
+ make_tb = AutoFormattedTB(
408
+ mode="Context", color_scheme="nocolor", tb_offset=1
409
+ ).text
410
+ # Note that the actual API for text() requires the three args to be
411
+ # passed in, so we wrap it in a simple lambda.
412
+ self._make_tb = lambda : make_tb(None, None, None)
413
+
414
+ # Hold a formatted traceback if one is generated.
415
+ self._tb = None
416
+
417
+ threading.Thread.__init__(self)
418
+
419
+ def __str__(self):
420
+ return self.strform
421
+
422
+ def __repr__(self):
423
+ return '<BackgroundJob #%d: %s>' % (self.num, self.strform)
424
+
425
+ def traceback(self):
426
+ print(self._tb)
427
+
428
+ def run(self):
429
+ try:
430
+ self.status = BackgroundJobBase.stat_running
431
+ self.stat_code = BackgroundJobBase.stat_running_c
432
+ self.result = self.call()
433
+ except:
434
+ self.status = BackgroundJobBase.stat_dead
435
+ self.stat_code = BackgroundJobBase.stat_dead_c
436
+ self.finished = None
437
+ self.result = ('<BackgroundJob died, call jobs.traceback() for details>')
438
+ self._tb = self._make_tb()
439
+ else:
440
+ self.status = BackgroundJobBase.stat_completed
441
+ self.stat_code = BackgroundJobBase.stat_completed_c
442
+ self.finished = True
443
+
444
+
445
+ class BackgroundJobExpr(BackgroundJobBase):
446
+ """Evaluate an expression as a background job (uses a separate thread)."""
447
+
448
+ def __init__(self, expression, glob=None, loc=None):
449
+ """Create a new job from a string which can be fed to eval().
450
+
451
+ global/locals dicts can be provided, which will be passed to the eval
452
+ call."""
453
+
454
+ # fail immediately if the given expression can't be compiled
455
+ self.code = compile(expression,'<BackgroundJob compilation>','eval')
456
+
457
+ glob = {} if glob is None else glob
458
+ loc = {} if loc is None else loc
459
+ self.expression = self.strform = expression
460
+ self.glob = glob
461
+ self.loc = loc
462
+ self._init()
463
+
464
+ def call(self):
465
+ return eval(self.code,self.glob,self.loc)
466
+
467
+
468
+ class BackgroundJobFunc(BackgroundJobBase):
469
+ """Run a function call as a background job (uses a separate thread)."""
470
+
471
+ def __init__(self, func, *args, **kwargs):
472
+ """Create a new job from a callable object.
473
+
474
+ Any positional arguments and keyword args given to this constructor
475
+ after the initial callable are passed directly to it."""
476
+
477
+ if not callable(func):
478
+ raise TypeError(
479
+ 'first argument to BackgroundJobFunc must be callable')
480
+
481
+ self.func = func
482
+ self.args = args
483
+ self.kwargs = kwargs
484
+ # The string form will only include the function passed, because
485
+ # generating string representations of the arguments is a potentially
486
+ # _very_ expensive operation (e.g. with large arrays).
487
+ self.strform = str(func)
488
+ self._init()
489
+
490
+ def call(self):
491
+ return self.func(*self.args, **self.kwargs)
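
For context, a minimal usage sketch of the background-job classes added above, assuming the package is importable as IPython.lib.backgroundjobs; the helper slow_add is hypothetical and only for illustration.

    # Hedged sketch: run a slow function in a background thread and poll it.
    import time
    from IPython.lib.backgroundjobs import BackgroundJobFunc

    def slow_add(a, b, delay=0.5):
        time.sleep(delay)
        return a + b

    job = BackgroundJobFunc(slow_add, 2, 3)   # extra args/kwargs are forwarded to the callable
    job.start()                               # BackgroundJobBase subclasses threading.Thread
    job.join()                                # or poll job.finished / job.status instead
    print(job.status)                         # 'Completed' on success, the 'Dead ...' text on error
    print(job.result)                         # 5
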
temp_venv/lib/python3.13/site-packages/IPython/lib/clipboard.py ADDED
@@ -0,0 +1,102 @@
1
+ """ Utilities for accessing the platform's clipboard.
2
+ """
3
+
4
+ import os
5
+ import subprocess
6
+
7
+ from IPython.core.error import TryNext
8
+ import IPython.utils.py3compat as py3compat
9
+
10
+
11
+ class ClipboardEmpty(ValueError):
12
+ pass
13
+
14
+
15
+ def win32_clipboard_get():
16
+ """ Get the current clipboard's text on Windows.
17
+
18
+ Requires Mark Hammond's pywin32 extensions.
19
+ """
20
+ try:
21
+ import win32clipboard
22
+ except ImportError as e:
23
+ raise TryNext("Getting text from the clipboard requires the pywin32 "
24
+ "extensions: http://sourceforge.net/projects/pywin32/") from e
25
+ win32clipboard.OpenClipboard()
26
+ try:
27
+ text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT)
28
+ except (TypeError, win32clipboard.error):
29
+ try:
30
+ text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT)
31
+ text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
32
+ except (TypeError, win32clipboard.error) as e:
33
+ raise ClipboardEmpty from e
34
+ finally:
35
+ win32clipboard.CloseClipboard()
36
+ return text
37
+
38
+
39
+ def osx_clipboard_get() -> str:
40
+ """ Get the clipboard's text on OS X.
41
+ """
42
+ p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'],
43
+ stdout=subprocess.PIPE)
44
+ bytes_, stderr = p.communicate()
45
+ # Text comes in with old Mac \r line endings. Change them to \n.
46
+ bytes_ = bytes_.replace(b'\r', b'\n')
47
+ text = py3compat.decode(bytes_)
48
+ return text
49
+
50
+
51
+ def tkinter_clipboard_get():
52
+ """ Get the clipboard's text using Tkinter.
53
+
54
+ This is the default on systems that are not Windows or OS X. It may
55
+ interfere with other UI toolkits and should be replaced with an
56
+ implementation that uses that toolkit.
57
+ """
58
+ try:
59
+ from tkinter import Tk, TclError
60
+ except ImportError as e:
61
+ raise TryNext("Getting text from the clipboard on this platform requires tkinter.") from e
62
+
63
+ root = Tk()
64
+ root.withdraw()
65
+ try:
66
+ text = root.clipboard_get()
67
+ except TclError as e:
68
+ raise ClipboardEmpty from e
69
+ finally:
70
+ root.destroy()
71
+ text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
72
+ return text
73
+
74
+
75
+ def wayland_clipboard_get():
76
+ """Get the clipboard's text under Wayland using wl-paste command.
77
+
78
+ This requires Wayland and wl-clipboard installed and running.
79
+ """
80
+ if os.environ.get("XDG_SESSION_TYPE") != "wayland":
81
+ raise TryNext("wayland is not detected")
82
+
83
+ try:
84
+ with subprocess.Popen(["wl-paste"], stdout=subprocess.PIPE) as p:
85
+ raw, err = p.communicate()
86
+ if p.wait():
87
+ raise TryNext(err)
88
+ except FileNotFoundError as e:
89
+ raise TryNext(
90
+ "Getting text from the clipboard under Wayland requires the wl-clipboard "
91
+ "extension: https://github.com/bugaevc/wl-clipboard"
92
+ ) from e
93
+
94
+ if not raw:
95
+ raise ClipboardEmpty
96
+
97
+ try:
98
+ text = py3compat.decode(raw)
99
+ except UnicodeDecodeError as e:
100
+ raise ClipboardEmpty from e
101
+
102
+ return text
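
A minimal dispatch sketch tying the platform getters above together; the wrapper clipboard_get_any is hypothetical (IPython wires these functions up through its own hook machinery) and is shown only to illustrate the TryNext fall-through protocol.

    import sys
    from IPython.core.error import TryNext
    from IPython.lib import clipboard

    def clipboard_get_any() -> str:
        """Try each plausible backend for this platform, falling through on TryNext."""
        if sys.platform == "win32":
            getters = [clipboard.win32_clipboard_get]
        elif sys.platform == "darwin":
            getters = [clipboard.osx_clipboard_get]
        else:
            getters = [clipboard.wayland_clipboard_get, clipboard.tkinter_clipboard_get]
        for getter in getters:
            try:
                return getter()
            except TryNext:
                continue  # this backend is unavailable here, try the next one
        raise clipboard.ClipboardEmpty("no clipboard backend succeeded")
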
temp_venv/lib/python3.13/site-packages/IPython/lib/deepreload.py ADDED
@@ -0,0 +1,310 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Provides a reload() function that acts recursively.
4
+
5
+ Python's normal :func:`python:reload` function only reloads the module that it's
6
+ passed. The :func:`reload` function in this module also reloads everything
7
+ imported from that module, which is useful when you're changing files deep
8
+ inside a package.
9
+
10
+ To use this as your default reload function, type this::
11
+
12
+ import builtins
13
+ from IPython.lib import deepreload
14
+ builtins.reload = deepreload.reload
15
+
16
+ A reference to the original :func:`python:reload` is stored in this module as
17
+ :data:`original_reload`, so you can restore it later.
18
+
19
+ This code is almost entirely based on knee.py, which is a Python
20
+ re-implementation of hierarchical module import.
21
+ """
22
+ #*****************************************************************************
23
+ # Copyright (C) 2001 Nathaniel Gray <[email protected]>
24
+ #
25
+ # Distributed under the terms of the BSD License. The full license is in
26
+ # the file COPYING, distributed as part of this software.
27
+ #*****************************************************************************
28
+
29
+ import builtins as builtin_mod
30
+ from contextlib import contextmanager
31
+ import importlib
32
+ import sys
33
+
34
+ from types import ModuleType
35
+ from warnings import warn
36
+ import types
37
+
38
+ original_import = builtin_mod.__import__
39
+
40
+ @contextmanager
41
+ def replace_import_hook(new_import):
42
+ saved_import = builtin_mod.__import__
43
+ builtin_mod.__import__ = new_import
44
+ try:
45
+ yield
46
+ finally:
47
+ builtin_mod.__import__ = saved_import
48
+
49
+ def get_parent(globals, level):
50
+ """
51
+ parent, name = get_parent(globals, level)
52
+
53
+ Return the package that an import is being performed in. If globals comes
54
+ from the module foo.bar.bat (not itself a package), this returns the
55
+ sys.modules entry for foo.bar. If globals is from a package's __init__.py,
56
+ the package's entry in sys.modules is returned.
57
+
58
+ If globals doesn't come from a package or a module in a package, or a
59
+ corresponding entry is not found in sys.modules, None is returned.
60
+ """
61
+ orig_level = level
62
+
63
+ if not level or not isinstance(globals, dict):
64
+ return None, ''
65
+
66
+ pkgname = globals.get('__package__', None)
67
+
68
+ if pkgname is not None:
69
+ # __package__ is set, so use it
70
+ if not hasattr(pkgname, 'rindex'):
71
+ raise ValueError('__package__ set to non-string')
72
+ if len(pkgname) == 0:
73
+ if level > 0:
74
+ raise ValueError('Attempted relative import in non-package')
75
+ return None, ''
76
+ name = pkgname
77
+ else:
78
+ # __package__ not set, so figure it out and set it
79
+ if '__name__' not in globals:
80
+ return None, ''
81
+ modname = globals['__name__']
82
+
83
+ if '__path__' in globals:
84
+ # __path__ is set, so modname is already the package name
85
+ globals['__package__'] = name = modname
86
+ else:
87
+ # Normal module, so work out the package name if any
88
+ lastdot = modname.rfind('.')
89
+ if lastdot < 0 < level:
90
+ raise ValueError("Attempted relative import in non-package")
91
+ if lastdot < 0:
92
+ globals['__package__'] = None
93
+ return None, ''
94
+ globals['__package__'] = name = modname[:lastdot]
95
+
96
+ dot = len(name)
97
+ for x in range(level, 1, -1):
98
+ try:
99
+ dot = name.rindex('.', 0, dot)
100
+ except ValueError as e:
101
+ raise ValueError("attempted relative import beyond top-level "
102
+ "package") from e
103
+ name = name[:dot]
104
+
105
+ try:
106
+ parent = sys.modules[name]
107
+ except BaseException as e:
108
+ if orig_level < 1:
109
+ warn("Parent module '%.200s' not found while handling absolute "
110
+ "import" % name)
111
+ parent = None
112
+ else:
113
+ raise SystemError("Parent module '%.200s' not loaded, cannot "
114
+ "perform relative import" % name) from e
115
+
116
+ # We expect, but can't guarantee, if parent != None, that:
117
+ # - parent.__name__ == name
118
+ # - parent.__dict__ is globals
119
+ # If this is violated... Who cares?
120
+ return parent, name
121
+
122
+ def load_next(mod, altmod, name, buf):
123
+ """
124
+ mod, name, buf = load_next(mod, altmod, name, buf)
125
+
126
+ altmod is either None or same as mod
127
+ """
128
+
129
+ if len(name) == 0:
130
+ # completely empty module name should only happen in
131
+ # 'from . import' (or '__import__("")')
132
+ return mod, None, buf
133
+
134
+ dot = name.find('.')
135
+ if dot == 0:
136
+ raise ValueError('Empty module name')
137
+
138
+ if dot < 0:
139
+ subname = name
140
+ next = None
141
+ else:
142
+ subname = name[:dot]
143
+ next = name[dot+1:]
144
+
145
+ if buf != '':
146
+ buf += '.'
147
+ buf += subname
148
+
149
+ result = import_submodule(mod, subname, buf)
150
+ if result is None and mod != altmod:
151
+ result = import_submodule(altmod, subname, subname)
152
+ if result is not None:
153
+ buf = subname
154
+
155
+ if result is None:
156
+ raise ImportError("No module named %.200s" % name)
157
+
158
+ return result, next, buf
159
+
160
+
161
+ # Need to keep track of what we've already reloaded to prevent cyclic evil
162
+ found_now = {}
163
+
164
+ def import_submodule(mod, subname, fullname):
165
+ """m = import_submodule(mod, subname, fullname)"""
166
+ # Require:
167
+ # if mod == None: subname == fullname
168
+ # else: mod.__name__ + "." + subname == fullname
169
+
170
+ global found_now
171
+ if fullname in found_now and fullname in sys.modules:
172
+ m = sys.modules[fullname]
173
+ else:
174
+ print('Reloading', fullname)
175
+ found_now[fullname] = 1
176
+ oldm = sys.modules.get(fullname, None)
177
+ try:
178
+ if oldm is not None:
179
+ m = importlib.reload(oldm)
180
+ else:
181
+ m = importlib.import_module(subname, mod)
182
+ except:
183
+ # load_module probably removed name from modules because of
184
+ # the error. Put back the original module object.
185
+ if oldm:
186
+ sys.modules[fullname] = oldm
187
+ raise
188
+
189
+ add_submodule(mod, m, fullname, subname)
190
+
191
+ return m
192
+
193
+ def add_submodule(mod, submod, fullname, subname):
194
+ """mod.{subname} = submod"""
195
+ if mod is None:
196
+ return #Nothing to do here.
197
+
198
+ if submod is None:
199
+ submod = sys.modules[fullname]
200
+
201
+ setattr(mod, subname, submod)
202
+
203
+ return
204
+
205
+ def ensure_fromlist(mod, fromlist, buf, recursive):
206
+ """Handle 'from module import a, b, c' imports."""
207
+ if not hasattr(mod, '__path__'):
208
+ return
209
+ for item in fromlist:
210
+ if not hasattr(item, 'rindex'):
211
+ raise TypeError("Item in ``from list'' not a string")
212
+ if item == '*':
213
+ if recursive:
214
+ continue # avoid endless recursion
215
+ try:
216
+ all = mod.__all__
217
+ except AttributeError:
218
+ pass
219
+ else:
220
+ ret = ensure_fromlist(mod, all, buf, 1)
221
+ if not ret:
222
+ return 0
223
+ elif not hasattr(mod, item):
224
+ import_submodule(mod, item, buf + '.' + item)
225
+
226
+ def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1):
227
+ """Replacement for __import__()"""
228
+ parent, buf = get_parent(globals, level)
229
+
230
+ head, name, buf = load_next(parent, None if level < 0 else parent, name, buf)
231
+
232
+ tail = head
233
+ while name:
234
+ tail, name, buf = load_next(tail, tail, name, buf)
235
+
236
+ # If tail is None, both get_parent and load_next found
237
+ # an empty module name: someone called __import__("") or
238
+ # doctored faulty bytecode
239
+ if tail is None:
240
+ raise ValueError('Empty module name')
241
+
242
+ if not fromlist:
243
+ return head
244
+
245
+ ensure_fromlist(tail, fromlist, buf, 0)
246
+ return tail
247
+
248
+ modules_reloading = {}
249
+
250
+ def deep_reload_hook(m):
251
+ """Replacement for reload()."""
252
+ # Hardcode this one as it would raise a NotImplementedError from the
253
+ # bowels of Python and screw up the import machinery after.
254
+ # unlike other imports the `exclude` list already in place is not enough.
255
+
256
+ if m is types:
257
+ return m
258
+ if not isinstance(m, ModuleType):
259
+ raise TypeError("reload() argument must be module")
260
+
261
+ name = m.__name__
262
+
263
+ if name not in sys.modules:
264
+ raise ImportError("reload(): module %.200s not in sys.modules" % name)
265
+
266
+ global modules_reloading
267
+ try:
268
+ return modules_reloading[name]
269
+ except:
270
+ modules_reloading[name] = m
271
+
272
+ try:
273
+ newm = importlib.reload(m)
274
+ except:
275
+ sys.modules[name] = m
276
+ raise
277
+ finally:
278
+ modules_reloading.clear()
279
+ return newm
280
+
281
+ # Save the original hooks
282
+ original_reload = importlib.reload
283
+
284
+ # Replacement for reload()
285
+ def reload(
286
+ module,
287
+ exclude=(
288
+ *sys.builtin_module_names,
289
+ "sys",
290
+ "os.path",
291
+ "builtins",
292
+ "__main__",
293
+ "numpy",
294
+ "numpy._globals",
295
+ ),
296
+ ):
297
+ """Recursively reload all modules used in the given module. Optionally
298
+ takes a list of modules to exclude from reloading. The default exclude
299
+ list contains modules listed in sys.builtin_module_names with additional
300
+ sys, os.path, builtins and __main__, to prevent, e.g., resetting
301
+ display, exception, and io hooks.
302
+ """
303
+ global found_now
304
+ for i in exclude:
305
+ found_now[i] = 1
306
+ try:
307
+ with replace_import_hook(deep_import_hook):
308
+ return deep_reload_hook(module)
309
+ finally:
310
+ found_now = {}
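
A minimal usage sketch of the recursive reload defined above; mypkg is a placeholder for a package being edited, not a real module.

    from IPython.lib import deepreload
    import mypkg  # placeholder package under active development

    # Reload mypkg and everything it imports, keeping the default exclude list
    # (builtin modules, sys, os.path, builtins, __main__, numpy) so that core
    # display/exception/io hooks are not reset.
    mypkg = deepreload.reload(mypkg)
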
temp_venv/lib/python3.13/site-packages/IPython/lib/demo.py ADDED
@@ -0,0 +1,672 @@
1
+ """Module for interactive demos using IPython.
2
+
3
+ This module implements a few classes for running Python scripts interactively
4
+ in IPython for demonstrations. With very simple markup (a few tags in
5
+ comments), you can control points where the script stops executing and returns
6
+ control to IPython.
7
+
8
+
9
+ Provided classes
10
+ ----------------
11
+
12
+ The classes are (see their docstrings for further details):
13
+
14
+ - Demo: pure python demos
15
+
16
+ - IPythonDemo: demos with input to be processed by IPython as if it had been
17
+ typed interactively (so magics work, as well as any other special syntax you
18
+ may have added via input prefilters).
19
+
20
+ - LineDemo: single-line version of the Demo class. These demos are executed
21
+ one line at a time, and require no markup.
22
+
23
+ - IPythonLineDemo: IPython version of the LineDemo class (the demo is
24
+ executed a line at a time, but processed via IPython).
25
+
26
+ - ClearMixin: mixin to make Demo classes with less visual clutter. It
27
+ declares an empty marquee and a pre_cmd that clears the screen before each
28
+ block (see Subclassing below).
29
+
30
+ - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo
31
+ classes.
32
+
33
+ Inheritance diagram:
34
+
35
+ .. inheritance-diagram:: IPython.lib.demo
36
+ :parts: 3
37
+
38
+ Subclassing
39
+ -----------
40
+
41
+ The classes here all include a few methods meant to make customization by
42
+ subclassing more convenient. Their docstrings below have some more details:
43
+
44
+ - highlight(): format every block and optionally highlight comments and
45
+ docstring content.
46
+
47
+ - marquee(): generates a marquee to provide visible on-screen markers at each
48
+ block start and end.
49
+
50
+ - pre_cmd(): run right before the execution of each block.
51
+
52
+ - post_cmd(): run right after the execution of each block. If the block
53
+ raises an exception, this is NOT called.
54
+
55
+
56
+ Operation
57
+ ---------
58
+
59
+ The file is run in its own empty namespace (though you can pass it a string of
60
+ arguments as if in a command line environment, and it will see those as
61
+ sys.argv). But at each stop, the global IPython namespace is updated with the
62
+ current internal demo namespace, so you can work interactively with the data
63
+ accumulated so far.
64
+
65
+ By default, each block of code is printed (with syntax highlighting) before
66
+ executing it and you have to confirm execution. This is intended to show the
67
+ code to an audience first so you can discuss it, and only proceed with
68
+ execution once you agree. There are a few tags which allow you to modify this
69
+ behavior.
70
+
71
+ The supported tags are:
72
+
73
+ # <demo> stop
74
+
75
+ Defines block boundaries, the points where IPython stops execution of the
76
+ file and returns to the interactive prompt.
77
+
78
+ You can optionally mark the stop tag with extra dashes before and after the
79
+ word 'stop', to help visually distinguish the blocks in a text editor:
80
+
81
+ # <demo> --- stop ---
82
+
83
+
84
+ # <demo> silent
85
+
86
+ Make a block execute silently (and hence automatically). Typically used in
87
+ cases where you have some boilerplate or initialization code which you need
88
+ executed but do not want to be seen in the demo.
89
+
90
+ # <demo> auto
91
+
92
+ Make a block execute automatically, but still being printed. Useful for
93
+ simple code which does not warrant discussion, since it avoids the extra
94
+ manual confirmation.
95
+
96
+ # <demo> auto_all
97
+
98
+ This tag can _only_ be in the first block, and if given it overrides the
99
+ individual auto tags to make the whole demo fully automatic (no block asks
100
+ for confirmation). It can also be given at creation time (or the attribute
101
+ set later) to override what's in the file.
102
+
103
+ While _any_ python file can be run as a Demo instance, if there are no stop
104
+ tags the whole file will run in a single block (no different than calling
105
+ first %pycat and then %run). The minimal markup to make this useful is to
106
+ place a set of stop tags; the other tags are only there to let you fine-tune
107
+ the execution.
108
+
109
+ This is probably best explained with the simple example file below. You can
110
+ copy this into a file named ex_demo.py, and try running it via::
111
+
112
+ from IPython.lib.demo import Demo
113
+ d = Demo('ex_demo.py')
114
+ d()
115
+
116
+ Each time you call the demo object, it runs the next block. The demo object
117
+ has a few useful methods for navigation, like again(), edit(), jump(), seek()
118
+ and back(). It can be reset for a new run via reset() or reloaded from disk
119
+ (in case you've edited the source) via reload(). See their docstrings below.
120
+
121
+ Note: To make this simpler to explore, a file called "demo-exercizer.py" has
122
+ been added to the "docs/examples/core" directory. Just cd to this directory in
123
+ an IPython session, and type::
124
+
125
+ %run demo-exercizer.py
126
+
127
+ and then follow the directions.
128
+
129
+ Example
130
+ -------
131
+
132
+ The following is a very simple example of a valid demo file.
133
+
134
+ ::
135
+
136
+ #################### EXAMPLE DEMO <ex_demo.py> ###############################
137
+ '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
138
+
139
+ print('Hello, welcome to an interactive IPython demo.')
140
+
141
+ # The mark below defines a block boundary, which is a point where IPython will
142
+ # stop execution and return to the interactive prompt. The dashes are actually
143
+ # optional and used only as a visual aid to clearly separate blocks while
144
+ # editing the demo code.
145
+ # <demo> stop
146
+
147
+ x = 1
148
+ y = 2
149
+
150
+ # <demo> stop
151
+
152
+ # the mark below marks this block as silent
153
+ # <demo> silent
154
+
155
+ print('This is a silent block, which gets executed but not printed.')
156
+
157
+ # <demo> stop
158
+ # <demo> auto
159
+ print('This is an automatic block.')
160
+ print('It is executed without asking for confirmation, but printed.')
161
+ z = x + y
162
+
163
+ print('z =', z)
164
+
165
+ # <demo> stop
166
+ # This is just another normal block.
167
+ print('z is now:', z)
168
+
169
+ print('bye!')
170
+ ################### END EXAMPLE DEMO <ex_demo.py> ############################
171
+ """
172
+
173
+
174
+ #*****************************************************************************
175
+ # Copyright (C) 2005-2006 Fernando Perez. <[email protected]>
176
+ #
177
+ # Distributed under the terms of the BSD License. The full license is in
178
+ # the file COPYING, distributed as part of this software.
179
+ #
180
+ #*****************************************************************************
181
+
182
+ import os
183
+ import re
184
+ import shlex
185
+ import sys
186
+ import pygments
187
+ from pathlib import Path
188
+
189
+ from IPython.utils.text import marquee
190
+ from IPython.utils import openpy
191
+ from IPython.utils import py3compat
192
+ __all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError']
193
+
194
+ class DemoError(Exception): pass
195
+
196
+ def re_mark(mark):
197
+ return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE)
198
+
199
+ class Demo:
200
+
201
+ re_stop = re_mark(r'-*\s?stop\s?-*')
202
+ re_silent = re_mark('silent')
203
+ re_auto = re_mark('auto')
204
+ re_auto_all = re_mark('auto_all')
205
+
206
+ def __init__(self,src,title='',arg_str='',auto_all=None, format_rst=False,
207
+ formatter='terminal', style='default'):
208
+ """Make a new demo object. To run the demo, simply call the object.
209
+
210
+ See the module docstring for full details and an example (you can use
211
+ IPython.Demo? in IPython to see it).
212
+
213
+ Inputs:
214
+
215
+ - src is either a file, or file-like object, or a
216
+ string that can be resolved to a filename.
217
+
218
+ Optional inputs:
219
+
220
+ - title: a string to use as the demo name. Of most use when the demo
221
+ you are making comes from an object that has no filename, or if you
222
+ want an alternate denotation distinct from the filename.
223
+
224
+ - arg_str(''): a string of arguments, internally converted to a list
225
+ just like sys.argv, so the demo script can see a similar
226
+ environment.
227
+
228
+ - auto_all(None): global flag to run all blocks automatically without
229
+ confirmation. This attribute overrides the block-level tags and
230
+ applies to the whole demo. It is an attribute of the object, and
231
+ can be changed at runtime simply by reassigning it to a boolean
232
+ value.
233
+
234
+ - format_rst(False): a bool to enable comments and doc strings
235
+ formatting with pygments rst lexer
236
+
237
+ - formatter('terminal'): a string of pygments formatter name to be
238
+ used. Useful values for terminals: terminal, terminal256,
239
+ terminal16m
240
+
241
+ - style('default'): a string of pygments style name to be used.
242
+ """
243
+ if hasattr(src, "read"):
244
+ # It seems to be a file or a file-like object
245
+ self.fname = "from a file-like object"
246
+ if title == '':
247
+ self.title = "from a file-like object"
248
+ else:
249
+ self.title = title
250
+ else:
251
+ # Assume it's a string or something that can be converted to one
252
+ self.fname = src
253
+ if title == '':
254
+ (filepath, filename) = os.path.split(src)
255
+ self.title = filename
256
+ else:
257
+ self.title = title
258
+ self.sys_argv = [src] + shlex.split(arg_str)
259
+ self.auto_all = auto_all
260
+ self.src = src
261
+
262
+ try:
263
+ ip = get_ipython() # this is in builtins whenever IPython is running
264
+ self.inside_ipython = True
265
+ except NameError:
266
+ self.inside_ipython = False
267
+
268
+ if self.inside_ipython:
269
+ # get a few things from ipython. While it's a bit ugly design-wise,
270
+ # it ensures that things like color scheme and the like are always in
271
+ # sync with the ipython mode being used. This class is only meant to
272
+ # be used inside ipython anyways, so it's OK.
273
+ self.ip_ns = ip.user_ns
274
+ self.ip_colorize = ip.pycolorize
275
+ self.ip_showtb = ip.showtraceback
276
+ self.ip_run_cell = ip.run_cell
277
+ self.shell = ip
278
+
279
+ self.formatter = pygments.formatters.get_formatter_by_name(formatter,
280
+ style=style)
281
+ self.python_lexer = pygments.lexers.get_lexer_by_name("py3")
282
+ self.format_rst = format_rst
283
+ if format_rst:
284
+ self.rst_lexer = pygments.lexers.get_lexer_by_name("rst")
285
+
286
+ # load user data and initialize data structures
287
+ self.reload()
288
+
289
+ def fload(self):
290
+ """Load file object."""
291
+ # read data and parse into blocks
292
+ if hasattr(self, 'fobj') and self.fobj is not None:
293
+ self.fobj.close()
294
+ if hasattr(self.src, "read"):
295
+ # It seems to be a file or a file-like object
296
+ self.fobj = self.src
297
+ else:
298
+ # Assume it's a string or something that can be converted to one
299
+ self.fobj = openpy.open(self.fname)
300
+
301
+ def reload(self):
302
+ """Reload source from disk and initialize state."""
303
+ self.fload()
304
+
305
+ self.src = "".join(openpy.strip_encoding_cookie(self.fobj))
306
+ src_b = [b.strip() for b in self.re_stop.split(self.src) if b]
307
+ self._silent = [bool(self.re_silent.findall(b)) for b in src_b]
308
+ self._auto = [bool(self.re_auto.findall(b)) for b in src_b]
309
+
310
+ # if auto_all is not given (def. None), we read it from the file
311
+ if self.auto_all is None:
312
+ self.auto_all = bool(self.re_auto_all.findall(src_b[0]))
313
+ else:
314
+ self.auto_all = bool(self.auto_all)
315
+
316
+ # Clean the sources from all markup so it doesn't get displayed when
317
+ # running the demo
318
+ src_blocks = []
319
+ auto_strip = lambda s: self.re_auto.sub('',s)
320
+ for i,b in enumerate(src_b):
321
+ if self._auto[i]:
322
+ src_blocks.append(auto_strip(b))
323
+ else:
324
+ src_blocks.append(b)
325
+ # remove the auto_all marker
326
+ src_blocks[0] = self.re_auto_all.sub('',src_blocks[0])
327
+
328
+ self.nblocks = len(src_blocks)
329
+ self.src_blocks = src_blocks
330
+
331
+ # also build syntax-highlighted source
332
+ self.src_blocks_colored = list(map(self.highlight,self.src_blocks))
333
+
334
+ # ensure clean namespace and seek offset
335
+ self.reset()
336
+
337
+ def reset(self):
338
+ """Reset the namespace and seek pointer to restart the demo"""
339
+ self.user_ns = {}
340
+ self.finished = False
341
+ self.block_index = 0
342
+
343
+ def _validate_index(self,index):
344
+ if index<0 or index>=self.nblocks:
345
+ raise ValueError('invalid block index %s' % index)
346
+
347
+ def _get_index(self,index):
348
+ """Get the current block index, validating and checking status.
349
+
350
+ Returns None if the demo is finished"""
351
+
352
+ if index is None:
353
+ if self.finished:
354
+ print('Demo finished. Use <demo_name>.reset() if you want to rerun it.')
355
+ return None
356
+ index = self.block_index
357
+ else:
358
+ self._validate_index(index)
359
+ return index
360
+
361
+ def seek(self,index):
362
+ """Move the current seek pointer to the given block.
363
+
364
+ You can use negative indices to seek from the end, with identical
365
+ semantics to those of Python lists."""
366
+ if index<0:
367
+ index = self.nblocks + index
368
+ self._validate_index(index)
369
+ self.block_index = index
370
+ self.finished = False
371
+
372
+ def back(self,num=1):
373
+ """Move the seek pointer back num blocks (default is 1)."""
374
+ self.seek(self.block_index-num)
375
+
376
+ def jump(self,num=1):
377
+ """Jump a given number of blocks relative to the current one.
378
+
379
+ The offset can be positive or negative, defaults to 1."""
380
+ self.seek(self.block_index+num)
381
+
382
+ def again(self):
383
+ """Move the seek pointer back one block and re-execute."""
384
+ self.back(1)
385
+ self()
386
+
387
+ def edit(self,index=None):
388
+ """Edit a block.
389
+
390
+ If no number is given, use the last block executed.
391
+
392
+ This edits the in-memory copy of the demo, it does NOT modify the
393
+ original source file. If you want to do that, simply open the file in
394
+ an editor and use reload() when you make changes to the file. This
395
+ method is meant to let you change a block during a demonstration for
396
+ explanatory purposes, without damaging your original script."""
397
+
398
+ index = self._get_index(index)
399
+ if index is None:
400
+ return
401
+ # decrease the index by one (unless we're at the very beginning), so
402
+ # that the default demo.edit() call opens up the block we've last run
403
+ if index>0:
404
+ index -= 1
405
+
406
+ filename = self.shell.mktempfile(self.src_blocks[index])
407
+ self.shell.hooks.editor(filename, 1)
408
+ with open(Path(filename), "r", encoding="utf-8") as f:
409
+ new_block = f.read()
410
+ # update the source and colored block
411
+ self.src_blocks[index] = new_block
412
+ self.src_blocks_colored[index] = self.highlight(new_block)
413
+ self.block_index = index
414
+ # call to run with the newly edited index
415
+ self()
416
+
417
+ def show(self,index=None):
418
+ """Show a single block on screen"""
419
+
420
+ index = self._get_index(index)
421
+ if index is None:
422
+ return
423
+
424
+ print(self.marquee('<%s> block # %s (%s remaining)' %
425
+ (self.title,index,self.nblocks-index-1)))
426
+ print(self.src_blocks_colored[index])
427
+ sys.stdout.flush()
428
+
429
+ def show_all(self):
430
+ """Show entire demo on screen, block by block"""
431
+
432
+ fname = self.title
433
+ title = self.title
434
+ nblocks = self.nblocks
435
+ silent = self._silent
436
+ marquee = self.marquee
437
+ for index,block in enumerate(self.src_blocks_colored):
438
+ if silent[index]:
439
+ print(marquee('<%s> SILENT block # %s (%s remaining)' %
440
+ (title,index,nblocks-index-1)))
441
+ else:
442
+ print(marquee('<%s> block # %s (%s remaining)' %
443
+ (title,index,nblocks-index-1)))
444
+ print(block, end=' ')
445
+ sys.stdout.flush()
446
+
447
+ def run_cell(self,source):
448
+ """Execute a string with one or more lines of code"""
449
+
450
+ exec(source, self.user_ns)
451
+
452
+ def __call__(self,index=None):
453
+ """run a block of the demo.
454
+
455
+ If index is given, it should be an integer >=1 and <= nblocks. This
456
+ means that the calling convention is one off from typical Python
457
+ lists. The reason for the inconsistency is that the demo always
458
+ prints 'Block n/N', and N is the total, so it would be very odd to use
459
+ zero-indexing here."""
460
+
461
+ index = self._get_index(index)
462
+ if index is None:
463
+ return
464
+ try:
465
+ marquee = self.marquee
466
+ next_block = self.src_blocks[index]
467
+ self.block_index += 1
468
+ if self._silent[index]:
469
+ print(marquee('Executing silent block # %s (%s remaining)' %
470
+ (index,self.nblocks-index-1)))
471
+ else:
472
+ self.pre_cmd()
473
+ self.show(index)
474
+ if self.auto_all or self._auto[index]:
475
+ print(marquee('output:'))
476
+ else:
477
+ print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ')
478
+ ans = py3compat.input().strip()
479
+ if ans:
480
+ print(marquee('Block NOT executed'))
481
+ return
482
+ try:
483
+ save_argv = sys.argv
484
+ sys.argv = self.sys_argv
485
+ self.run_cell(next_block)
486
+ self.post_cmd()
487
+ finally:
488
+ sys.argv = save_argv
489
+
490
+ except:
491
+ if self.inside_ipython:
492
+ self.ip_showtb(filename=self.fname)
493
+ else:
494
+ if self.inside_ipython:
495
+ self.ip_ns.update(self.user_ns)
496
+
497
+ if self.block_index == self.nblocks:
498
+ mq1 = self.marquee('END OF DEMO')
499
+ if mq1:
500
+ # avoid spurious print if empty marquees are used
501
+ print()
502
+ print(mq1)
503
+ print(self.marquee('Use <demo_name>.reset() if you want to rerun it.'))
504
+ self.finished = True
505
+
506
+ # These methods are meant to be overridden by subclasses who may wish to
507
+ # customize the behavior of their demos.
508
+ def marquee(self,txt='',width=78,mark='*'):
509
+ """Return the input string centered in a 'marquee'."""
510
+ return marquee(txt,width,mark)
511
+
512
+ def pre_cmd(self):
513
+ """Method called before executing each block."""
514
+ pass
515
+
516
+ def post_cmd(self):
517
+ """Method called after executing each block."""
518
+ pass
519
+
520
+ def highlight(self, block):
521
+ """Method called on each block to highlight it content"""
522
+ tokens = pygments.lex(block, self.python_lexer)
523
+ if self.format_rst:
524
+ from pygments.token import Token
525
+ toks = []
526
+ for token in tokens:
527
+ if token[0] == Token.String.Doc and len(token[1]) > 6:
528
+ toks += pygments.lex(token[1][:3], self.python_lexer)
529
+ # parse doc string content by rst lexer
530
+ toks += pygments.lex(token[1][3:-3], self.rst_lexer)
531
+ toks += pygments.lex(token[1][-3:], self.python_lexer)
532
+ elif token[0] == Token.Comment.Single:
533
+ toks.append((Token.Comment.Single, token[1][0]))
534
+ # parse comment content by rst lexer
535
+ # remove the extra newline added by rst lexer
536
+ toks += list(pygments.lex(token[1][1:], self.rst_lexer))[:-1]
537
+ else:
538
+ toks.append(token)
539
+ tokens = toks
540
+ return pygments.format(tokens, self.formatter)
541
+
542
+
543
+ class IPythonDemo(Demo):
544
+ """Class for interactive demos with IPython's input processing applied.
545
+
546
+ This subclasses Demo, but instead of executing each block by the Python
547
+ interpreter (via exec), it actually calls IPython on it, so that any input
548
+ filters which may be in place are applied to the input block.
549
+
550
+ If you have an interactive environment which exposes special input
551
+ processing, you can use this class instead to write demo scripts which
552
+ operate exactly as if you had typed them interactively. The default Demo
553
+ class requires the input to be valid, pure Python code.
554
+ """
555
+
556
+ def run_cell(self,source):
557
+ """Execute a string with one or more lines of code"""
558
+
559
+ self.shell.run_cell(source)
560
+
561
+ class LineDemo(Demo):
562
+ """Demo where each line is executed as a separate block.
563
+
564
+ The input script should be valid Python code.
565
+
566
+ This class doesn't require any markup at all, and it's meant for simple
567
+ scripts (with no nesting or any kind of indentation) which consist of
568
+ multiple lines of input to be executed, one at a time, as if they had been
569
+ typed in the interactive prompt.
570
+
571
+ Note: the input can not have *any* indentation, which means that only
572
+ single lines of input are accepted; not even function definitions are
573
+ valid."""
574
+
575
+ def reload(self):
576
+ """Reload source from disk and initialize state."""
577
+ # read data and parse into blocks
578
+ self.fload()
579
+ lines = self.fobj.readlines()
580
+ src_b = [l for l in lines if l.strip()]
581
+ nblocks = len(src_b)
582
+ self.src = ''.join(lines)
583
+ self._silent = [False]*nblocks
584
+ self._auto = [True]*nblocks
585
+ self.auto_all = True
586
+ self.nblocks = nblocks
587
+ self.src_blocks = src_b
588
+
589
+ # also build syntax-highlighted source
590
+ self.src_blocks_colored = list(map(self.highlight,self.src_blocks))
591
+
592
+ # ensure clean namespace and seek offset
593
+ self.reset()
594
+
595
+
596
+ class IPythonLineDemo(IPythonDemo,LineDemo):
597
+ """Variant of the LineDemo class whose input is processed by IPython."""
598
+ pass
599
+
600
+
601
+ class ClearMixin:
602
+ """Use this mixin to make Demo classes with less visual clutter.
603
+
604
+ Demos using this mixin will clear the screen before every block and use
605
+ blank marquees.
606
+
607
+ Note that in order for the methods defined here to actually override those
608
+ of the classes it's mixed with, it must go /first/ in the inheritance
609
+ tree. For example:
610
+
611
+ class ClearIPDemo(ClearMixin,IPythonDemo): pass
612
+
613
+ will provide an IPythonDemo class with the mixin's features.
614
+ """
615
+
616
+ def marquee(self,txt='',width=78,mark='*'):
617
+ """Blank marquee that returns '' no matter what the input."""
618
+ return ''
619
+
620
+ def pre_cmd(self):
621
+ """Method called before executing each block.
622
+
623
+ This one simply clears the screen."""
624
+ from IPython.utils.terminal import _term_clear
625
+ _term_clear()
626
+
627
+ class ClearDemo(ClearMixin,Demo):
628
+ pass
629
+
630
+
631
+ class ClearIPDemo(ClearMixin,IPythonDemo):
632
+ pass
633
+
634
+
635
+ def slide(file_path, noclear=False, format_rst=True, formatter="terminal",
636
+ style="native", auto_all=False, delimiter='...'):
637
+ if noclear:
638
+ demo_class = Demo
639
+ else:
640
+ demo_class = ClearDemo
641
+ demo = demo_class(file_path, format_rst=format_rst, formatter=formatter,
642
+ style=style, auto_all=auto_all)
643
+ while not demo.finished:
644
+ demo()
645
+ try:
646
+ py3compat.input('\n' + delimiter)
647
+ except KeyboardInterrupt:
648
+ exit(1)
649
+
650
+ if __name__ == '__main__':
651
+ import argparse
652
+ parser = argparse.ArgumentParser(description='Run python demos')
653
+ parser.add_argument('--noclear', '-C', action='store_true',
654
+ help='Do not clear terminal on each slide')
655
+ parser.add_argument('--rst', '-r', action='store_true',
656
+ help='Highlight comments and docstrings as rst')
657
+ parser.add_argument('--formatter', '-f', default='terminal',
658
+ help='pygments formatter name could be: terminal, '
659
+ 'terminal256, terminal16m')
660
+ parser.add_argument('--style', '-s', default='default',
661
+ help='pygments style name')
662
+ parser.add_argument('--auto', '-a', action='store_true',
663
+ help='Run all blocks automatically without'
664
+ 'confirmation')
665
+ parser.add_argument('--delimiter', '-d', default='...',
666
+ help='slides delimiter added after each slide run')
667
+ parser.add_argument('file', nargs=1,
668
+ help='python demo file')
669
+ args = parser.parse_args()
670
+ slide(args.file[0], noclear=args.noclear, format_rst=args.rst,
671
+ formatter=args.formatter, style=args.style, auto_all=args.auto,
672
+ delimiter=args.delimiter)
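
A minimal driver sketch for the Demo class above, reusing the ex_demo.py example file described in the module docstring.

    from IPython.lib.demo import Demo

    d = Demo('ex_demo.py')    # the example file from the docstring above
    while not d.finished:
        d()                   # run the next block (prompts unless the block is auto/silent)

    d.reset()                 # rewind to the first block
    d.seek(1)                 # or jump straight to block index 1
    d()
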
temp_venv/lib/python3.13/site-packages/IPython/lib/display.py ADDED
@@ -0,0 +1,677 @@
1
+ """Various display related classes.
2
+
3
+ Authors : MinRK, gregcaporaso, dannystaple
4
+ """
5
+ from html import escape as html_escape
6
+ from os.path import exists, isfile, splitext, abspath, join, isdir
7
+ from os import walk, sep, fsdecode
8
+
9
+ from IPython.core.display import DisplayObject, TextDisplayObject
10
+
11
+ from typing import Tuple, Iterable, Optional
12
+
13
+ __all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument',
14
+ 'FileLink', 'FileLinks', 'Code']
15
+
16
+
17
+ class Audio(DisplayObject):
18
+ """Create an audio object.
19
+
20
+ When this object is returned by an input cell or passed to the
21
+ display function, it will result in Audio controls being displayed
22
+ in the frontend (only works in the notebook).
23
+
24
+ Parameters
25
+ ----------
26
+ data : numpy array, list, unicode, str or bytes
27
+ Can be one of
28
+
29
+ * Numpy 1d array containing the desired waveform (mono)
30
+ * Numpy 2d array containing waveforms for each channel.
31
+ Shape=(NCHAN, NSAMPLES). For the standard channel order, see
32
+ http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
33
+ * List of float or integer representing the waveform (mono)
34
+ * String containing the filename
35
+ * Bytestring containing raw PCM data or
36
+ * URL pointing to a file on the web.
37
+
38
+ If the array option is used, the waveform will be normalized.
39
+
40
+ If a filename or url is used, the format support will be browser
41
+ dependent.
42
+ url : unicode
43
+ A URL to download the data from.
44
+ filename : unicode
45
+ Path to a local file to load the data from.
46
+ embed : boolean
47
+ Should the audio data be embedded using a data URI (True) or should
48
+ the original source be referenced. Set this to True if you want the
49
+ audio to be playable later with no internet connection in the notebook.
50
+
51
+ Default is `True`, unless the keyword argument `url` is set, then
52
+ default value is `False`.
53
+ rate : integer
54
+ The sampling rate of the raw data.
55
+ Only required when data parameter is being used as an array
56
+ autoplay : bool
57
+ Set to True if the audio should immediately start playing.
58
+ Default is `False`.
59
+ normalize : bool
60
+ Whether audio should be normalized (rescaled) to the maximum possible
61
+ range. Default is `True`. When set to `False`, `data` must be between
62
+ -1 and 1 (inclusive), otherwise an error is raised.
63
+ Applies only when `data` is a list or array of samples; other types of
64
+ audio are never normalized.
65
+
66
+ Examples
67
+ --------
68
+
69
+ >>> import pytest
70
+ >>> np = pytest.importorskip("numpy")
71
+
72
+ Generate a sound
73
+
74
+ >>> import numpy as np
75
+ >>> framerate = 44100
76
+ >>> t = np.linspace(0,5,framerate*5)
77
+ >>> data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t)
78
+ >>> Audio(data, rate=framerate)
79
+ <IPython.lib.display.Audio object>
80
+
81
+ Can also do stereo or more channels
82
+
83
+ >>> dataleft = np.sin(2*np.pi*220*t)
84
+ >>> dataright = np.sin(2*np.pi*224*t)
85
+ >>> Audio([dataleft, dataright], rate=framerate)
86
+ <IPython.lib.display.Audio object>
87
+
88
+ From URL:
89
+
90
+ >>> Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # doctest: +SKIP
91
+ >>> Audio(url="http://www.w3schools.com/html/horse.ogg") # doctest: +SKIP
92
+
93
+ From a File:
94
+
95
+ >>> Audio('IPython/lib/tests/test.wav') # doctest: +SKIP
96
+ >>> Audio(filename='IPython/lib/tests/test.wav') # doctest: +SKIP
97
+
98
+ From Bytes:
99
+
100
+ >>> Audio(b'RAW_WAV_DATA..') # doctest: +SKIP
101
+ >>> Audio(data=b'RAW_WAV_DATA..') # doctest: +SKIP
102
+
103
+ See Also
104
+ --------
105
+ ipywidgets.Audio
106
+
107
+ Audio widget with more flexibility and options.
108
+
109
+ """
110
+ _read_flags = 'rb'
111
+
112
+ def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False, normalize=True, *,
113
+ element_id=None):
114
+ if filename is None and url is None and data is None:
115
+ raise ValueError("No audio data found. Expecting filename, url, or data.")
116
+ if embed is False and url is None:
117
+ raise ValueError("No url found. Expecting url when embed=False")
118
+
119
+ if url is not None and embed is not True:
120
+ self.embed = False
121
+ else:
122
+ self.embed = True
123
+ self.autoplay = autoplay
124
+ self.element_id = element_id
125
+ super(Audio, self).__init__(data=data, url=url, filename=filename)
126
+
127
+ if self.data is not None and not isinstance(self.data, bytes):
128
+ if rate is None:
129
+ raise ValueError("rate must be specified when data is a numpy array or list of audio samples.")
130
+ self.data = Audio._make_wav(data, rate, normalize)
131
+
132
+ def reload(self):
133
+ """Reload the raw data from file or URL."""
134
+ import mimetypes
135
+ if self.embed:
136
+ super(Audio, self).reload()
137
+
138
+ if self.filename is not None:
139
+ self.mimetype = mimetypes.guess_type(self.filename)[0]
140
+ elif self.url is not None:
141
+ self.mimetype = mimetypes.guess_type(self.url)[0]
142
+ else:
143
+ self.mimetype = "audio/wav"
144
+
145
+ @staticmethod
146
+ def _make_wav(data, rate, normalize):
147
+ """ Transform a numpy array to a PCM bytestring """
148
+ from io import BytesIO
149
+ import wave
150
+
151
+ try:
152
+ scaled, nchan = Audio._validate_and_normalize_with_numpy(data, normalize)
153
+ except ImportError:
154
+ scaled, nchan = Audio._validate_and_normalize_without_numpy(data, normalize)
155
+
156
+ fp = BytesIO()
157
+ waveobj = wave.open(fp,mode='wb')
158
+ waveobj.setnchannels(nchan)
159
+ waveobj.setframerate(rate)
160
+ waveobj.setsampwidth(2)
161
+ waveobj.setcomptype('NONE','NONE')
162
+ waveobj.writeframes(scaled)
163
+ val = fp.getvalue()
164
+ waveobj.close()
165
+
166
+ return val
167
+
168
+ @staticmethod
169
+ def _validate_and_normalize_with_numpy(data, normalize) -> Tuple[bytes, int]:
170
+ import numpy as np
171
+
172
+ data = np.array(data, dtype=float)
173
+ if len(data.shape) == 1:
174
+ nchan = 1
175
+ elif len(data.shape) == 2:
176
+ # In wave files, channels are interleaved. E.g.,
177
+ # "L1R1L2R2..." for stereo. See
178
+ # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
179
+ # for channel ordering
180
+ nchan = data.shape[0]
181
+ data = data.T.ravel()
182
+ else:
183
+ raise ValueError('Array audio input must be a 1D or 2D array')
184
+
185
+ max_abs_value = np.max(np.abs(data))
186
+ normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize)
187
+ scaled = data / normalization_factor * 32767
188
+ return scaled.astype("<h").tobytes(), nchan
189
+
190
+ @staticmethod
191
+ def _validate_and_normalize_without_numpy(data, normalize):
192
+ import array
193
+ import sys
194
+
195
+ data = array.array('f', data)
196
+
197
+ try:
198
+ max_abs_value = float(max([abs(x) for x in data]))
199
+ except TypeError as e:
200
+ raise TypeError('Only lists of mono audio are '
201
+ 'supported if numpy is not installed') from e
202
+
203
+ normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize)
204
+ scaled = array.array('h', [int(x / normalization_factor * 32767) for x in data])
205
+ if sys.byteorder == 'big':
206
+ scaled.byteswap()
207
+ nchan = 1
208
+ return scaled.tobytes(), nchan
209
+
210
+ @staticmethod
211
+ def _get_normalization_factor(max_abs_value, normalize):
212
+ if not normalize and max_abs_value > 1:
213
+ raise ValueError('Audio data must be between -1 and 1 when normalize=False.')
214
+ return max_abs_value if normalize else 1
215
+
216
+ def _data_and_metadata(self):
217
+ """shortcut for returning metadata with url information, if defined"""
218
+ md = {}
219
+ if self.url:
220
+ md['url'] = self.url
221
+ if md:
222
+ return self.data, md
223
+ else:
224
+ return self.data
225
+
226
+ def _repr_html_(self):
227
+ src = """
228
+ <audio {element_id} controls="controls" {autoplay}>
229
+ <source src="{src}" type="{type}" />
230
+ Your browser does not support the audio element.
231
+ </audio>
232
+ """
233
+ return src.format(src=self.src_attr(), type=self.mimetype, autoplay=self.autoplay_attr(),
234
+ element_id=self.element_id_attr())
235
+
236
+ def src_attr(self):
237
+ import base64
238
+ if self.embed and (self.data is not None):
239
+ data = base64.b64encode(self.data).decode('ascii')
240
+ return """data:{type};base64,{base64}""".format(type=self.mimetype,
241
+ base64=data)
242
+ elif self.url is not None:
243
+ return self.url
244
+ else:
245
+ return ""
246
+
247
+ def autoplay_attr(self):
248
+ if self.autoplay:
249
+ return 'autoplay="autoplay"'
250
+ else:
251
+ return ''
252
+
253
+ def element_id_attr(self):
254
+ if self.element_id:
255
+ return 'id="{element_id}"'.format(element_id=self.element_id)
256
+ else:
257
+ return ''
258
+
259
+ class IFrame:
260
+ """
261
+ Generic class to embed an iframe in an IPython notebook
262
+ """
263
+
264
+ iframe = """
265
+ <iframe
266
+ width="{width}"
267
+ height="{height}"
268
+ src="{src}{params}"
269
+ frameborder="0"
270
+ allowfullscreen
271
+ {extras}
272
+ ></iframe>
273
+ """
274
+
275
+ def __init__(
276
+ self, src, width, height, extras: Optional[Iterable[str]] = None, **kwargs
277
+ ):
278
+ if extras is None:
279
+ extras = []
280
+
281
+ self.src = src
282
+ self.width = width
283
+ self.height = height
284
+ self.extras = extras
285
+ self.params = kwargs
286
+
287
+ def _repr_html_(self):
288
+ """return the embed iframe"""
289
+ if self.params:
290
+ from urllib.parse import urlencode
291
+ params = "?" + urlencode(self.params)
292
+ else:
293
+ params = ""
294
+ return self.iframe.format(
295
+ src=self.src,
296
+ width=self.width,
297
+ height=self.height,
298
+ params=params,
299
+ extras=" ".join(self.extras),
300
+ )
301
+
302
+
303
+ class YouTubeVideo(IFrame):
304
+ """Class for embedding a YouTube Video in an IPython session, based on its video id.
305
+
306
+ e.g. to embed the video from https://www.youtube.com/watch?v=foo , you would
307
+ do::
308
+
309
+ vid = YouTubeVideo("foo")
310
+ display(vid)
311
+
312
+ To start from 30 seconds::
313
+
314
+ vid = YouTubeVideo("abc", start=30)
315
+ display(vid)
316
+
317
+ To calculate seconds from time as hours, minutes, seconds use
318
+ :class:`datetime.timedelta`::
319
+
320
+ start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds())
321
+
322
+ Other parameters can be provided as documented at
323
+ https://developers.google.com/youtube/player_parameters#Parameters
324
+
325
+ When converting the notebook using nbconvert, a jpeg representation of the video
326
+ will be inserted in the document.
327
+ """
328
+
329
+ def __init__(self, id, width=400, height=300, allow_autoplay=False, **kwargs):
330
+ self.id=id
331
+ src = "https://www.youtube.com/embed/{0}".format(id)
332
+ if allow_autoplay:
333
+ extras = list(kwargs.get("extras", [])) + ['allow="autoplay"']
334
+ kwargs.update(autoplay=1, extras=extras)
335
+ super(YouTubeVideo, self).__init__(src, width, height, **kwargs)
336
+
337
+ def _repr_jpeg_(self):
338
+ # Deferred import
339
+ from urllib.request import urlopen
340
+
341
+ try:
342
+ return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read()
343
+ except IOError:
344
+ return None
345
+
346
+ class VimeoVideo(IFrame):
347
+ """
348
+ Class for embedding a Vimeo video in an IPython session, based on its video id.
349
+ """
350
+
351
+ def __init__(self, id, width=400, height=300, **kwargs):
352
+ src="https://player.vimeo.com/video/{0}".format(id)
353
+ super(VimeoVideo, self).__init__(src, width, height, **kwargs)
354
+
355
+ class ScribdDocument(IFrame):
356
+ """
357
+ Class for embedding a Scribd document in an IPython session
358
+
359
+ Use the start_page param to specify a starting point in the document
360
+ Use the view_mode param to specify the display type: one of scroll | slideshow | book
361
+
362
+ e.g. to display Wes' foundational paper about PANDAS in book mode from page 3
363
+
364
+ ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book")
365
+ """
366
+
367
+ def __init__(self, id, width=400, height=300, **kwargs):
368
+ src="https://www.scribd.com/embeds/{0}/content".format(id)
369
+ super(ScribdDocument, self).__init__(src, width, height, **kwargs)
370
+
371
+ class FileLink:
372
+ """Class for embedding a local file link in an IPython session, based on path
373
+
374
+ e.g. to embed a link that was generated in the IPython notebook as my/data.txt
375
+
376
+ you would do::
377
+
378
+ local_file = FileLink("my/data.txt")
379
+ display(local_file)
380
+
381
+ or in the HTML notebook, just::
382
+
383
+ FileLink("my/data.txt")
384
+ """
385
+
386
+ html_link_str = "<a href='%s' target='_blank'>%s</a>"
387
+
388
+ def __init__(self,
389
+ path,
390
+ url_prefix='',
391
+ result_html_prefix='',
392
+ result_html_suffix='<br>'):
393
+ """
394
+ Parameters
395
+ ----------
396
+ path : str
397
+ path to the file or directory that should be formatted
398
+ url_prefix : str
399
+ prefix to be prepended to all files to form a working link [default:
400
+ '']
401
+ result_html_prefix : str
402
+ text to prepend to the link [default: '']
403
+ result_html_suffix : str
404
+ text to append to the end of the link [default: '<br>']
405
+ """
406
+ if isdir(path):
407
+ raise ValueError("Cannot display a directory using FileLink. "
408
+ "Use FileLinks to display '%s'." % path)
409
+ self.path = fsdecode(path)
410
+ self.url_prefix = url_prefix
411
+ self.result_html_prefix = result_html_prefix
412
+ self.result_html_suffix = result_html_suffix
413
+
414
+ def _format_path(self):
415
+ fp = ''.join([self.url_prefix, html_escape(self.path)])
416
+ return ''.join([self.result_html_prefix,
417
+ self.html_link_str % \
418
+ (fp, html_escape(self.path, quote=False)),
419
+ self.result_html_suffix])
420
+
421
+ def _repr_html_(self):
422
+ """return html link to file
423
+ """
424
+ if not exists(self.path):
425
+ return ("Path (<tt>%s</tt>) doesn't exist. "
426
+ "It may still be in the process of "
427
+ "being generated, or you may have the "
428
+ "incorrect path." % self.path)
429
+
430
+ return self._format_path()
431
+
432
+ def __repr__(self):
433
+ """return absolute path to file
434
+ """
435
+ return abspath(self.path)
436
+
437
+ class FileLinks(FileLink):
438
+ """Class for embedding local file links in an IPython session, based on path
439
+
440
+ e.g. to embed links to files that were generated in the IPython notebook
441
+ under ``my/data``, you would do::
442
+
443
+ local_files = FileLinks("my/data")
444
+ display(local_files)
445
+
446
+ or in the HTML notebook, just::
447
+
448
+ FileLinks("my/data")
449
+ """
450
+ def __init__(self,
451
+ path,
452
+ url_prefix='',
453
+ included_suffixes=None,
454
+ result_html_prefix='',
455
+ result_html_suffix='<br>',
456
+ notebook_display_formatter=None,
457
+ terminal_display_formatter=None,
458
+ recursive=True):
459
+ """
460
+ See :class:`FileLink` for the ``path``, ``url_prefix``,
461
+ ``result_html_prefix`` and ``result_html_suffix`` parameters.
462
+
463
+ included_suffixes : list
464
+ Filename suffixes to include when formatting output [default: include
465
+ all files]
466
+
467
+ notebook_display_formatter : function
468
+ Used to format links for display in the notebook. See discussion of
469
+ formatter functions below.
470
+
471
+ terminal_display_formatter : function
472
+ Used to format links for display in the terminal. See discussion of
473
+ formatter functions below.
474
+
475
+ Formatter functions must be of the form::
476
+
477
+ f(dirname, fnames, included_suffixes)
478
+
479
+ dirname : str
480
+ The name of a directory
481
+ fnames : list
482
+ The files in that directory
483
+ included_suffixes : list
484
+ The file suffixes that should be included in the output (passing None
485
+ means to include all suffixes in the output in the built-in formatters)
486
+ recursive : boolean
487
+ Whether to recurse into subdirectories. Default is True.
488
+
489
+ The function should return a list of lines that will be printed in the
490
+ notebook (if passing notebook_display_formatter) or the terminal (if
491
+ passing terminal_display_formatter). This function is iterated over for
492
+ each directory in self.path. Default formatters are provided; custom ones can be
493
+ passed here to support alternative formatting (see the sketch after this file's listing).
494
+
495
+ """
496
+ if isfile(path):
497
+ raise ValueError("Cannot display a file using FileLinks. "
498
+ "Use FileLink to display '%s'." % path)
499
+ self.included_suffixes = included_suffixes
500
+ # remove trailing slashes for more consistent output formatting
501
+ path = path.rstrip('/')
502
+
503
+ self.path = path
504
+ self.url_prefix = url_prefix
505
+ self.result_html_prefix = result_html_prefix
506
+ self.result_html_suffix = result_html_suffix
507
+
508
+ self.notebook_display_formatter = \
509
+ notebook_display_formatter or self._get_notebook_display_formatter()
510
+ self.terminal_display_formatter = \
511
+ terminal_display_formatter or self._get_terminal_display_formatter()
512
+
513
+ self.recursive = recursive
514
+
515
+ def _get_display_formatter(
516
+ self, dirname_output_format, fname_output_format, fp_format, fp_cleaner=None
517
+ ):
518
+ """generate built-in formatter function
519
+
520
+ this is used to define both the notebook and terminal built-in
521
+ formatters as they only differ by some wrapper text for each entry
522
+
523
+ dirname_output_format: string to use for formatting directory
524
+ names, dirname will be substituted for a single "%s" which
525
+ must appear in this string
526
+ fname_output_format: string to use for formatting file names,
527
+ if a single "%s" appears in the string, fname will be substituted
528
+ if two "%s" appear in the string, the path to fname will be
529
+ substituted for the first and fname will be substituted for the
530
+ second
531
+ fp_format: string to use for formatting filepaths, must contain
532
+ exactly two "%s" and the dirname will be substituted for the first
533
+ and fname will be substituted for the second
534
+ """
535
+ def f(dirname, fnames, included_suffixes=None):
536
+ result = []
537
+ # begin by figuring out which filenames, if any,
538
+ # are going to be displayed
539
+ display_fnames = []
540
+ for fname in fnames:
541
+ if (isfile(join(dirname,fname)) and
542
+ (included_suffixes is None or
543
+ splitext(fname)[1] in included_suffixes)):
544
+ display_fnames.append(fname)
545
+
546
+ if len(display_fnames) == 0:
547
+ # if there are no filenames to display, don't print anything
548
+ # (not even the directory name)
549
+ pass
550
+ else:
551
+ # otherwise print the formatted directory name followed by
552
+ # the formatted filenames
553
+ dirname_output_line = dirname_output_format % dirname
554
+ result.append(dirname_output_line)
555
+ for fname in display_fnames:
556
+ fp = fp_format % (dirname,fname)
557
+ if fp_cleaner is not None:
558
+ fp = fp_cleaner(fp)
559
+ try:
560
+ # output can include both a filepath and a filename...
561
+ fname_output_line = fname_output_format % (fp, fname)
562
+ except TypeError:
563
+ # ... or just a single filepath
564
+ fname_output_line = fname_output_format % fname
565
+ result.append(fname_output_line)
566
+ return result
567
+ return f
568
+
569
+ def _get_notebook_display_formatter(self,
570
+ spacer="&nbsp;&nbsp;"):
571
+ """ generate function to use for notebook formatting
572
+ """
573
+ dirname_output_format = \
574
+ self.result_html_prefix + "%s/" + self.result_html_suffix
575
+ fname_output_format = \
576
+ self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix
577
+ fp_format = self.url_prefix + '%s/%s'
578
+ if sep == "\\":
579
+ # Working on a platform where the path separator is "\", so
580
+ # must convert these to "/" for generating a URI
581
+ def fp_cleaner(fp):
582
+ # Replace all occurrences of backslash ("\") with a forward
583
+ # slash ("/") - this is necessary on windows when a path is
584
+ # provided as input, but we must link to a URI
585
+ return fp.replace('\\','/')
586
+ else:
587
+ fp_cleaner = None
588
+
589
+ return self._get_display_formatter(dirname_output_format,
590
+ fname_output_format,
591
+ fp_format,
592
+ fp_cleaner)
593
+
594
+ def _get_terminal_display_formatter(self,
595
+ spacer=" "):
596
+ """ generate function to use for terminal formatting
597
+ """
598
+ dirname_output_format = "%s/"
599
+ fname_output_format = spacer + "%s"
600
+ fp_format = '%s/%s'
601
+
602
+ return self._get_display_formatter(dirname_output_format,
603
+ fname_output_format,
604
+ fp_format)
605
+
606
+ def _format_path(self):
607
+ result_lines = []
608
+ if self.recursive:
609
+ walked_dir = list(walk(self.path))
610
+ else:
611
+ walked_dir = [next(walk(self.path))]
612
+ walked_dir.sort()
613
+ for dirname, subdirs, fnames in walked_dir:
614
+ result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes)
615
+ return '\n'.join(result_lines)
616
+
617
+ def __repr__(self):
618
+ """return newline-separated absolute paths
619
+ """
620
+ result_lines = []
621
+ if self.recursive:
622
+ walked_dir = list(walk(self.path))
623
+ else:
624
+ walked_dir = [next(walk(self.path))]
625
+ walked_dir.sort()
626
+ for dirname, subdirs, fnames in walked_dir:
627
+ result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes)
628
+ return '\n'.join(result_lines)
629
+
630
+
631
+ class Code(TextDisplayObject):
632
+ """Display syntax-highlighted source code.
633
+
634
+ This uses Pygments to highlight the code for HTML and Latex output.
635
+
636
+ Parameters
637
+ ----------
638
+ data : str
639
+ The code as a string
640
+ url : str
641
+ A URL to fetch the code from
642
+ filename : str
643
+ A local filename to load the code from
644
+ language : str
645
+ The short name of a Pygments lexer to use for highlighting.
646
+ If not specified, it will guess the lexer based on the filename
647
+ or the code. Available lexers: http://pygments.org/docs/lexers/
648
+ """
649
+ def __init__(self, data=None, url=None, filename=None, language=None):
650
+ self.language = language
651
+ super().__init__(data=data, url=url, filename=filename)
652
+
653
+ def _get_lexer(self):
654
+ if self.language:
655
+ from pygments.lexers import get_lexer_by_name
656
+ return get_lexer_by_name(self.language)
657
+ elif self.filename:
658
+ from pygments.lexers import get_lexer_for_filename
659
+ return get_lexer_for_filename(self.filename)
660
+ else:
661
+ from pygments.lexers import guess_lexer
662
+ return guess_lexer(self.data)
663
+
664
+ def __repr__(self):
665
+ return self.data
666
+
667
+ def _repr_html_(self):
668
+ from pygments import highlight
669
+ from pygments.formatters import HtmlFormatter
670
+ fmt = HtmlFormatter()
671
+ style = '<style>{}</style>'.format(fmt.get_style_defs('.output_html'))
672
+ return style + highlight(self.data, self._get_lexer(), fmt)
673
+
674
+ def _repr_latex_(self):
675
+ from pygments import highlight
676
+ from pygments.formatters import LatexFormatter
677
+ return highlight(self.data, self._get_lexer(), LatexFormatter())
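The formatter-function contract described in the FileLinks docstring above is easiest to see with a small example. The following is a minimal sketch, not part of the file: the directory name and the formatter are hypothetical, and it only assumes FileLinks is importable from IPython.display.

from IPython.display import FileLinks

def dirs_with_counts(dirname, fnames, included_suffixes):
    # Return the lines to display for one directory: here just the directory
    # name and how many files it contains (the suffix filter is ignored).
    return ["%s/ (%d files)" % (dirname, len(fnames))]

links = FileLinks("my/data",  # hypothetical directory
                  notebook_display_formatter=dirs_with_counts,
                  terminal_display_formatter=dirs_with_counts)
links  # rich display in a notebook; repr(links) gives the terminal rendering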
temp_venv/lib/python3.13/site-packages/IPython/lib/editorhooks.py ADDED
@@ -0,0 +1,127 @@
1
+ """ 'editor' hooks for common editors that work well with ipython
2
+
3
+ They should honor the line number argument, at least.
4
+
5
+ Contributions are *very* welcome.
6
+ """
7
+
8
+ import os
9
+ import shlex
10
+ import subprocess
11
+ import sys
12
+
13
+ from IPython import get_ipython
14
+ from IPython.core.error import TryNext
15
+ from IPython.utils import py3compat
16
+
17
+
18
+ def install_editor(template, wait=False):
19
+ """Installs the editor that is called by IPython for the %edit magic.
20
+
21
+ This overrides the default editor, which is generally set by your EDITOR
22
+ environment variable or is notepad (windows) or vi (linux). By supplying a
23
+ template string ``template``, you can control how the editor is invoked
24
+ by IPython (e.g. the format in which it accepts command-line options).
25
+
26
+ Parameters
27
+ ----------
28
+ template : str
29
+ ``template`` acts as a template for how your editor is invoked by
30
+ the shell. It should contain '{filename}', which will be replaced on
31
+ invocation with the file name, and '{line}', which will be replaced by the
32
+ line number (or 0) at which to open the file.
33
+ wait : bool
34
+ If `wait` is true, wait until the user presses enter before returning,
35
+ to facilitate non-blocking editors that exit immediately after
36
+ the call.
37
+ """
38
+
39
+ # not all editors support $line, so we'll leave out this check
40
+ # for substitution in ['$file', '$line']:
41
+ # if not substitution in run_template:
42
+ # raise ValueError(('run_template should contain %s'
43
+ # ' for string substitution. You supplied "%s"' % (substitution,
44
+ # run_template)))
45
+
46
+ def call_editor(self, filename, line=0):
47
+ if line is None:
48
+ line = 0
49
+ cmd = template.format(filename=shlex.quote(filename), line=line)
50
+ print(">", cmd)
51
+ # shlex.quote doesn't work right on Windows, but it does after splitting
52
+ if sys.platform.startswith('win'):
53
+ cmd = shlex.split(cmd)
54
+ proc = subprocess.Popen(cmd, shell=True)
55
+ if proc.wait() != 0:
56
+ raise TryNext()
57
+ if wait:
58
+ py3compat.input("Press Enter when done editing:")
59
+
60
+ get_ipython().set_hook('editor', call_editor)
61
+ get_ipython().editor = template
62
+
63
+
64
+ # in these, exe is always the path/name of the executable. Useful
65
+ # if you don't have the editor directory in your path
66
+ def komodo(exe=u'komodo'):
67
+ """ Activestate Komodo [Edit] """
68
+ install_editor(exe + u' -l {line} {filename}', wait=True)
69
+
70
+
71
+ def scite(exe=u"scite"):
72
+ """ SciTE or Sc1 """
73
+ install_editor(exe + u' {filename} -goto:{line}')
74
+
75
+
76
+ def notepadplusplus(exe=u'notepad++'):
77
+ """ Notepad++ http://notepad-plus.sourceforge.net """
78
+ install_editor(exe + u' -n{line} {filename}')
79
+
80
+
81
+ def jed(exe=u'jed'):
82
+ """ JED, the lightweight emacsish editor """
83
+ install_editor(exe + u' +{line} {filename}')
84
+
85
+
86
+ def idle(exe=u'idle'):
87
+ """ Idle, the editor bundled with python
88
+
89
+ Parameters
90
+ ----------
91
+ exe : str, None
92
+ If None, should be pretty smart about finding the executable.
93
+ """
94
+ if exe is None:
95
+ import idlelib
96
+ p = os.path.dirname(idlelib.__file__)
97
+ # i'm not sure if this actually works. Is this idle.py script
98
+ # guaranteed to be executable?
99
+ exe = os.path.join(p, 'idle.py')
100
+ install_editor(exe + u' {filename}')
101
+
102
+
103
+ def mate(exe=u'mate'):
104
+ """ TextMate, the missing editor"""
105
+ # wait=True is not required since we're using the -w flag to mate
106
+ install_editor(exe + u' -w -l {line} {filename}')
107
+
108
+
109
+ # ##########################################
110
+ # these are untested, report any problems
111
+ # ##########################################
112
+
113
+
114
+ def emacs(exe=u'emacs'):
115
+ install_editor(exe + u' +{line} {filename}')
116
+
117
+
118
+ def gnuclient(exe=u'gnuclient'):
119
+ install_editor(exe + u' -nw +{line} {filename}')
120
+
121
+
122
+ def crimson_editor(exe=u'cedt.exe'):
123
+ install_editor(exe + u' /L:{line} {filename}')
124
+
125
+
126
+ def kate(exe=u'kate'):
127
+ install_editor(exe + u' -u -l {line} {filename}')
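As a usage sketch for the template mechanism above (not part of the file): register a custom editor for %edit. It assumes an active IPython session and a `code` command-line launcher (e.g. VS Code's CLI) on PATH; any command that accepts a file name and a line number works the same way.

from IPython.lib.editorhooks import install_editor

# '{filename}' and '{line}' are substituted when %edit invokes the editor.
# wait=True makes IPython pause until Enter is pressed, which suits editors
# that return to the shell immediately after launching.
install_editor('code --goto {filename}:{line}', wait=True)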
temp_venv/lib/python3.13/site-packages/IPython/lib/guisupport.py ADDED
@@ -0,0 +1,155 @@
1
+ # coding: utf-8
2
+ """
3
+ Support for creating GUI apps and starting event loops.
4
+
5
+ IPython's GUI integration allows interactive plotting and GUI usage in an IPython
6
+ session. IPython has two different types of GUI integration:
7
+
8
+ 1. The terminal based IPython supports GUI event loops through Python's
9
+ PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically
10
+ whenever raw_input is waiting for a user to type code. We implement GUI
11
+ support in the terminal by setting PyOS_InputHook to a function that
12
+ iterates the event loop for a short while. It is important to note that
13
+ in this situation, the real GUI event loop is NOT run in the normal
14
+ manner, so you can't use the normal means to detect that it is running.
15
+ 2. In the two process IPython kernel/frontend, the GUI event loop is run in
16
+ the kernel. In this case, the event loop is run in the normal manner by
17
+ calling the function or method of the GUI toolkit that starts the event
18
+ loop.
19
+
20
+ In addition to starting the GUI event loops in one of these two ways, IPython
21
+ will *always* create an appropriate GUI application object when GUI
22
+ integration is enabled.
23
+
24
+ If you want your GUI apps to run in IPython you need to do two things:
25
+
26
+ 1. Test to see if there is already an existing main application object. If
27
+ there is, you should use it. If there is not an existing application object
28
+ you should create one.
29
+ 2. Test to see if the GUI event loop is running. If it is, you should not
30
+ start it. If the event loop is not running you may start it.
31
+
32
+ This module contains functions for each toolkit that perform these things
33
+ in a consistent manner. Because of how PyOS_InputHook runs the event loop
34
+ you cannot detect if the event loop is running using the traditional calls
35
+ (such as ``wx.GetApp().IsMainLoopRunning()`` in wxPython). If PyOS_InputHook is
36
+ set, these methods will return a false negative. That is, they will say the
37
+ event loop is not running, when it actually is. To work around this limitation
38
+ we proposed the following informal protocol:
39
+
40
+ * Whenever someone starts the event loop, they *must* set the ``_in_event_loop``
41
+ attribute of the main application object to ``True``. This should be done
42
+ regardless of how the event loop is actually run.
43
+ * Whenever someone stops the event loop, they *must* set the ``_in_event_loop``
44
+ attribute of the main application object to ``False``.
45
+ * If you want to see if the event loop is running, you *must* use ``hasattr``
46
+ to see if ``_in_event_loop`` attribute has been set. If it is set, you
47
+ *must* use its value. If it has not been set, you can query the toolkit
48
+ in the normal manner.
49
+ * If you want GUI support and no one else has created an application or
50
+ started the event loop you *must* do this. We don't want projects to
51
+ attempt to defer these things to someone else if they themselves need it.
52
+
53
+ The functions below implement this logic for each GUI toolkit. If you need
54
+ to create custom application subclasses, you will likely have to modify this
55
+ code for your own purposes. This code can be copied into your own project
56
+ so you don't have to depend on IPython.
57
+
58
+ """
59
+
60
+ # Copyright (c) IPython Development Team.
61
+ # Distributed under the terms of the Modified BSD License.
62
+
63
+ from IPython.core.getipython import get_ipython
64
+
65
+ #-----------------------------------------------------------------------------
66
+ # wx
67
+ #-----------------------------------------------------------------------------
68
+
69
+ def get_app_wx(*args, **kwargs):
70
+ """Create a new wx app or return an exiting one."""
71
+ import wx
72
+ app = wx.GetApp()
73
+ if app is None:
74
+ if 'redirect' not in kwargs:
75
+ kwargs['redirect'] = False
76
+ app = wx.PySimpleApp(*args, **kwargs)
77
+ return app
78
+
79
+ def is_event_loop_running_wx(app=None):
80
+ """Is the wx event loop running."""
81
+ # New way: check attribute on shell instance
82
+ ip = get_ipython()
83
+ if ip is not None:
84
+ if ip.active_eventloop and ip.active_eventloop == 'wx':
85
+ return True
86
+ # Fall through to checking the application, because Wx has a native way
87
+ # to check if the event loop is running, unlike Qt.
88
+
89
+ # Old way: check Wx application
90
+ if app is None:
91
+ app = get_app_wx()
92
+ if hasattr(app, '_in_event_loop'):
93
+ return app._in_event_loop
94
+ else:
95
+ return app.IsMainLoopRunning()
96
+
97
+ def start_event_loop_wx(app=None):
98
+ """Start the wx event loop in a consistent manner."""
99
+ if app is None:
100
+ app = get_app_wx()
101
+ if not is_event_loop_running_wx(app):
102
+ app._in_event_loop = True
103
+ app.MainLoop()
104
+ app._in_event_loop = False
105
+ else:
106
+ app._in_event_loop = True
107
+
108
+ #-----------------------------------------------------------------------------
109
+ # Qt
110
+ #-----------------------------------------------------------------------------
111
+
112
+ def get_app_qt4(*args, **kwargs):
113
+ """Create a new Qt app or return an existing one."""
114
+ from IPython.external.qt_for_kernel import QtGui
115
+ app = QtGui.QApplication.instance()
116
+ if app is None:
117
+ if not args:
118
+ args = ([""],)
119
+ app = QtGui.QApplication(*args, **kwargs)
120
+ return app
121
+
122
+ def is_event_loop_running_qt4(app=None):
123
+ """Is the qt event loop running."""
124
+ # New way: check attribute on shell instance
125
+ ip = get_ipython()
126
+ if ip is not None:
127
+ return ip.active_eventloop and ip.active_eventloop.startswith('qt')
128
+
129
+ # Old way: check attribute on QApplication singleton
130
+ if app is None:
131
+ app = get_app_qt4([""])
132
+ if hasattr(app, '_in_event_loop'):
133
+ return app._in_event_loop
134
+ else:
135
+ # Does Qt provide another way to detect this?
136
+ return False
137
+
138
+ def start_event_loop_qt4(app=None):
139
+ """Start the qt event loop in a consistent manner."""
140
+ if app is None:
141
+ app = get_app_qt4([""])
142
+ if not is_event_loop_running_qt4(app):
143
+ app._in_event_loop = True
144
+ app.exec_()
145
+ app._in_event_loop = False
146
+ else:
147
+ app._in_event_loop = True
148
+
149
+ #-----------------------------------------------------------------------------
150
+ # Tk
151
+ #-----------------------------------------------------------------------------
152
+
153
+ #-----------------------------------------------------------------------------
154
+ # gtk
155
+ #-----------------------------------------------------------------------------
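A minimal sketch (not part of the file) of the informal ``_in_event_loop`` protocol described in the module docstring, using the wx helpers defined above; it assumes wxPython is installed.

from IPython.lib.guisupport import (
    get_app_wx,
    is_event_loop_running_wx,
    start_event_loop_wx,
)

app = get_app_wx()                     # reuse an existing wx.App if there is one
if not is_event_loop_running_wx(app):  # honours _in_event_loop / IsMainLoopRunning
    start_event_loop_wx(app)           # sets _in_event_loop around MainLoop()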
temp_venv/lib/python3.13/site-packages/IPython/lib/latextools.py ADDED
@@ -0,0 +1,257 @@
1
+ # -*- coding: utf-8 -*-
2
+ """Tools for handling LaTeX."""
3
+
4
+ # Copyright (c) IPython Development Team.
5
+ # Distributed under the terms of the Modified BSD License.
6
+
7
+ from io import BytesIO
8
+ import os
9
+ import tempfile
10
+ import shutil
11
+ import subprocess
12
+ from base64 import encodebytes
13
+ import textwrap
14
+
15
+ from pathlib import Path
16
+
17
+ from IPython.utils.process import find_cmd, FindCmdError
18
+ from traitlets.config import get_config
19
+ from traitlets.config.configurable import SingletonConfigurable
20
+ from traitlets import List, Bool, Unicode
21
+
22
+
23
+ class LaTeXTool(SingletonConfigurable):
24
+ """An object to store configuration of the LaTeX tool."""
25
+ def _config_default(self):
26
+ return get_config()
27
+
28
+ backends = List(
29
+ Unicode(), ["matplotlib", "dvipng"],
30
+ help="Preferred backend to draw LaTeX math equations. "
31
+ "Backends in the list are checked one by one and the first "
32
+ "usable one is used. Note that `matplotlib` backend "
33
+ "is usable only for inline style equations. To draw "
34
+ "display style equations, `dvipng` backend must be specified. ",
35
+ # It is a List instead of Enum, to make configuration more
36
+ # flexible. For example, to use matplotlib mainly but dvipng
37
+ # for display style, the default ["matplotlib", "dvipng"] can
38
+ # be used. To NOT use dvipng so that other repr such as
39
+ # unicode pretty printing is used, you can use ["matplotlib"].
40
+ ).tag(config=True)
41
+
42
+ use_breqn = Bool(
43
+ True,
44
+ help="Use breqn.sty to automatically break long equations. "
45
+ "This configuration takes effect only for dvipng backend.",
46
+ ).tag(config=True)
47
+
48
+ packages = List(
49
+ ['amsmath', 'amsthm', 'amssymb', 'bm'],
50
+ help="A list of packages to use for dvipng backend. "
51
+ "'breqn' will be automatically appended when use_breqn=True.",
52
+ ).tag(config=True)
53
+
54
+ preamble = Unicode(
55
+ help="Additional preamble to use when generating LaTeX source "
56
+ "for dvipng backend.",
57
+ ).tag(config=True)
58
+
59
+
60
+ def latex_to_png(
61
+ s: str, encode=False, backend=None, wrap=False, color="Black", scale=1.0
62
+ ):
63
+ """Render a LaTeX string to PNG.
64
+
65
+ Parameters
66
+ ----------
67
+ s : str
68
+ The raw string containing valid inline LaTeX.
69
+ encode : bool, optional
70
+ Should the PNG data be base64 encoded to make it JSON'able.
71
+ backend : {matplotlib, dvipng}
72
+ Backend for producing PNG data.
73
+ wrap : bool
74
+ If true, automatically wrap `s` as a LaTeX equation.
75
+ color : string
76
+ Foreground color name among dvipsnames, e.g. 'Maroon', or in hex RGB
77
+ format, e.g. '#AA20FA'.
78
+ scale : float
79
+ Scale factor for the resulting PNG.
80
+ None is returned when the backend cannot be used.
81
+
82
+ """
83
+ assert isinstance(s, str)
84
+ allowed_backends = LaTeXTool.instance().backends
85
+ if backend is None:
86
+ backend = allowed_backends[0]
87
+ if backend not in allowed_backends:
88
+ return None
89
+ if backend == 'matplotlib':
90
+ f = latex_to_png_mpl
91
+ elif backend == 'dvipng':
92
+ f = latex_to_png_dvipng
93
+ if color.startswith('#'):
94
+ # Convert hex RGB color to LaTeX RGB color.
95
+ if len(color) == 7:
96
+ try:
97
+ color = "RGB {}".format(" ".join([str(int(x, 16)) for x in
98
+ textwrap.wrap(color[1:], 2)]))
99
+ except ValueError as e:
100
+ raise ValueError('Invalid color specification {}.'.format(color)) from e
101
+ else:
102
+ raise ValueError('Invalid color specification {}.'.format(color))
103
+ else:
104
+ raise ValueError('No such backend {0}'.format(backend))
105
+ bin_data = f(s, wrap, color, scale)
106
+ if encode and bin_data:
107
+ bin_data = encodebytes(bin_data)
108
+ return bin_data
109
+
110
+
111
+ def latex_to_png_mpl(s, wrap, color='Black', scale=1.0):
112
+ try:
113
+ from matplotlib import figure, font_manager, mathtext
114
+ from matplotlib.backends import backend_agg
115
+ from pyparsing import ParseFatalException
116
+ except ImportError:
117
+ return None
118
+
119
+ # mpl mathtext doesn't support display math, force inline
120
+ s = s.replace('$$', '$')
121
+ if wrap:
122
+ s = u'${0}$'.format(s)
123
+
124
+ try:
125
+ prop = font_manager.FontProperties(size=12)
126
+ dpi = 120 * scale
127
+ buffer = BytesIO()
128
+
129
+ # Adapted from mathtext.math_to_image
130
+ parser = mathtext.MathTextParser("path")
131
+ width, height, depth, _, _ = parser.parse(s, dpi=72, prop=prop)
132
+ fig = figure.Figure(figsize=(width / 72, height / 72))
133
+ fig.text(0, depth / height, s, fontproperties=prop, color=color)
134
+ backend_agg.FigureCanvasAgg(fig)
135
+ fig.savefig(buffer, dpi=dpi, format="png", transparent=True)
136
+ return buffer.getvalue()
137
+ except (ValueError, RuntimeError, ParseFatalException):
138
+ return None
139
+
140
+
141
+ def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0):
142
+ try:
143
+ find_cmd('latex')
144
+ find_cmd('dvipng')
145
+ except FindCmdError:
146
+ return None
147
+
148
+ startupinfo = None
149
+ if os.name == "nt":
150
+ # prevent popup-windows
151
+ startupinfo = subprocess.STARTUPINFO()
152
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
153
+
154
+ try:
155
+ workdir = Path(tempfile.mkdtemp())
156
+ tmpfile = "tmp.tex"
157
+ dvifile = "tmp.dvi"
158
+ outfile = "tmp.png"
159
+
160
+ with workdir.joinpath(tmpfile).open("w", encoding="utf8") as f:
161
+ f.writelines(genelatex(s, wrap))
162
+
163
+ subprocess.check_call(
164
+ ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile],
165
+ cwd=workdir,
166
+ stdout=subprocess.DEVNULL,
167
+ stderr=subprocess.DEVNULL,
168
+ startupinfo=startupinfo,
169
+ )
170
+
171
+ resolution = round(150 * scale)
172
+ subprocess.check_call(
173
+ [
174
+ "dvipng",
175
+ "-T",
176
+ "tight",
177
+ "-D",
178
+ str(resolution),
179
+ "-z",
180
+ "9",
181
+ "-bg",
182
+ "Transparent",
183
+ "-o",
184
+ outfile,
185
+ dvifile,
186
+ "-fg",
187
+ color,
188
+ ],
189
+ cwd=workdir,
190
+ stdout=subprocess.DEVNULL,
191
+ stderr=subprocess.DEVNULL,
192
+ startupinfo=startupinfo,
193
+ )
194
+
195
+ with workdir.joinpath(outfile).open("rb") as f:
196
+ return f.read()
197
+ except subprocess.CalledProcessError:
198
+ return None
199
+ finally:
200
+ shutil.rmtree(workdir)
201
+
202
+
203
+ def kpsewhich(filename):
204
+ """Invoke kpsewhich command with an argument `filename`."""
205
+ try:
206
+ find_cmd("kpsewhich")
207
+ proc = subprocess.Popen(
208
+ ["kpsewhich", filename],
209
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
210
+ (stdout, stderr) = proc.communicate()
211
+ return stdout.strip().decode('utf8', 'replace')
212
+ except FindCmdError:
213
+ pass
214
+
215
+
216
+ def genelatex(body, wrap):
217
+ """Generate LaTeX document for dvipng backend."""
218
+ lt = LaTeXTool.instance()
219
+ breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty")
220
+ yield r'\documentclass{article}'
221
+ packages = lt.packages
222
+ if breqn:
223
+ packages = packages + ['breqn']
224
+ for pack in packages:
225
+ yield r'\usepackage{{{0}}}'.format(pack)
226
+ yield r'\pagestyle{empty}'
227
+ if lt.preamble:
228
+ yield lt.preamble
229
+ yield r'\begin{document}'
230
+ if breqn:
231
+ yield r'\begin{dmath*}'
232
+ yield body
233
+ yield r'\end{dmath*}'
234
+ elif wrap:
235
+ yield u'$${0}$$'.format(body)
236
+ else:
237
+ yield body
238
+ yield u'\\end{document}'
239
+
240
+
241
+ _data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
242
+
243
+ def latex_to_html(s, alt='image'):
244
+ """Render LaTeX to HTML with embedded PNG data using data URIs.
245
+
246
+ Parameters
247
+ ----------
248
+ s : str
249
+ The raw string containing valid inline LaTeX.
250
+ alt : str
251
+ The alt text to use for the HTML.
252
+ """
253
+ base64_data = latex_to_png(s, encode=True).decode('ascii')
254
+ if base64_data:
255
+ return _data_uri_template_png % (base64_data, alt)
256
+
257
+
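A usage sketch for latex_to_png above (not part of the file): render a small inline equation, trying the dvipng backend first (which needs `latex` and `dvipng` on PATH) and falling back to matplotlib; each backend returns None when it cannot be used.

from IPython.lib.latextools import latex_to_png

expr = r"\frac{\alpha}{\beta}"
png_bytes = latex_to_png(expr, wrap=True, backend="dvipng")
if png_bytes is None:
    # latex/dvipng unavailable or rendering failed; try the matplotlib backend
    png_bytes = latex_to_png(expr, wrap=True, backend="matplotlib")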
temp_venv/lib/python3.13/site-packages/IPython/lib/lexers.py ADDED
@@ -0,0 +1,32 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ The IPython lexers are now a separate package, ipython-pygments-lexers.
4
+
5
+ Importing from here is deprecated and may break in the future.
6
+ """
7
+ # -----------------------------------------------------------------------------
8
+ # Copyright (c) 2013, the IPython Development Team.
9
+ #
10
+ # Distributed under the terms of the Modified BSD License.
11
+ #
12
+ # The full license is in the file COPYING.txt, distributed with this software.
13
+ # -----------------------------------------------------------------------------
14
+
15
+ from ipython_pygments_lexers import (
16
+ IPythonLexer,
17
+ IPython3Lexer,
18
+ IPythonPartialTracebackLexer,
19
+ IPythonTracebackLexer,
20
+ IPythonConsoleLexer,
21
+ IPyLexer,
22
+ )
23
+
24
+
25
+ __all__ = [
26
+ "IPython3Lexer",
27
+ "IPythonLexer",
28
+ "IPythonPartialTracebackLexer",
29
+ "IPythonTracebackLexer",
30
+ "IPythonConsoleLexer",
31
+ "IPyLexer",
32
+ ]
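Per the deprecation note in this shim, the supported import path going forward is the standalone package (assuming ipython-pygments-lexers is installed):

from ipython_pygments_lexers import IPythonLexer, IPyLexer  # preferred over IPython.lib.lexers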
temp_venv/lib/python3.13/site-packages/IPython/lib/pretty.py ADDED
@@ -0,0 +1,954 @@
1
+ """
2
+ Python advanced pretty printer. This pretty printer is intended to
3
+ replace the old `pprint` python module which does not allow developers
4
+ to provide their own pretty print callbacks.
5
+
6
+ This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`.
7
+
8
+
9
+ Example Usage
10
+ -------------
11
+
12
+ To directly print the representation of an object use `pprint`::
13
+
14
+ from pretty import pprint
15
+ pprint(complex_object)
16
+
17
+ To get a string of the output use `pretty`::
18
+
19
+ from pretty import pretty
20
+ string = pretty(complex_object)
21
+
22
+
23
+ Extending
24
+ ---------
25
+
26
+ The pretty library allows developers to add pretty printing rules for their
27
+ own objects. This process is straightforward. All you have to do is to
28
+ add a `_repr_pretty_` method to your object and call the methods on the
29
+ pretty printer passed::
30
+
31
+ class MyObject(object):
32
+
33
+ def _repr_pretty_(self, p, cycle):
34
+ ...
35
+
36
+ Here's an example for a class with a simple constructor::
37
+
38
+ class MySimpleObject:
39
+
40
+ def __init__(self, a, b, *, c=None):
41
+ self.a = a
42
+ self.b = b
43
+ self.c = c
44
+
45
+ def _repr_pretty_(self, p, cycle):
46
+ ctor = CallExpression.factory(self.__class__.__name__)
47
+ if self.c is None:
48
+ p.pretty(ctor(self.a, self.b))
49
+ else:
50
+ p.pretty(ctor(self.a, self.b, c=self.c))
51
+
52
+ Here is an example implementation of a `_repr_pretty_` method for a list
53
+ subclass::
54
+
55
+ class MyList(list):
56
+
57
+ def _repr_pretty_(self, p, cycle):
58
+ if cycle:
59
+ p.text('MyList(...)')
60
+ else:
61
+ with p.group(8, 'MyList([', '])'):
62
+ for idx, item in enumerate(self):
63
+ if idx:
64
+ p.text(',')
65
+ p.breakable()
66
+ p.pretty(item)
67
+
68
+ The `cycle` parameter is `True` if pretty detected a cycle. You *have* to
69
+ react to that or the result is an infinite loop. `p.text()` just adds
70
+ non breaking text to the output, `p.breakable()` either adds a whitespace
71
+ or breaks here. If you pass it an argument it's used instead of the
72
+ default space. `p.pretty` prettyprints another object using the pretty print
73
+ method.
74
+
75
+ The first parameter to the `group` function specifies the extra indentation
76
+ of the next line. In this example the next item will either be on the same
77
+ line (if the items are short enough) or aligned with the right edge of the
78
+ opening bracket of `MyList`.
79
+
80
+ If you just want to indent something you can use the group function
81
+ without open / close parameters. You can also use this code::
82
+
83
+ with p.indent(2):
84
+ ...
85
+
86
+ Inheritance diagram:
87
+
88
+ .. inheritance-diagram:: IPython.lib.pretty
89
+ :parts: 3
90
+
91
+ :copyright: 2007 by Armin Ronacher.
92
+ Portions (c) 2009 by Robert Kern.
93
+ :license: BSD License.
94
+ """
95
+
96
+ from contextlib import contextmanager
97
+ import datetime
98
+ import os
99
+ import re
100
+ import sys
101
+ import types
102
+ from collections import deque
103
+ from inspect import signature
104
+ from io import StringIO
105
+ from warnings import warn
106
+
107
+ from IPython.utils.decorators import undoc
108
+ from IPython.utils.py3compat import PYPY
109
+
110
+ from typing import Dict
111
+
112
+ __all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter',
113
+ 'for_type', 'for_type_by_name', 'RawText', 'RawStringLiteral', 'CallExpression']
114
+
115
+
116
+ MAX_SEQ_LENGTH = 1000
117
+ _re_pattern_type = type(re.compile(''))
118
+
119
+ def _safe_getattr(obj, attr, default=None):
120
+ """Safe version of getattr.
121
+
122
+ Same as getattr, but will return ``default`` on any Exception,
123
+ rather than raising.
124
+ """
125
+ try:
126
+ return getattr(obj, attr, default)
127
+ except Exception:
128
+ return default
129
+
130
+ def _sorted_for_pprint(items):
131
+ """
132
+ Sort the given items for pretty printing. Since some predictable
133
+ sorting is better than no sorting at all, we sort on the string
134
+ representation if normal sorting fails.
135
+ """
136
+ items = list(items)
137
+ try:
138
+ return sorted(items)
139
+ except Exception:
140
+ try:
141
+ return sorted(items, key=str)
142
+ except Exception:
143
+ return items
144
+
145
+ def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
146
+ """
147
+ Pretty print the object's representation.
148
+ """
149
+ stream = StringIO()
150
+ printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length)
151
+ printer.pretty(obj)
152
+ printer.flush()
153
+ return stream.getvalue()
154
+
155
+
156
+ def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
157
+ """
158
+ Like `pretty` but print to stdout.
159
+ """
160
+ printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length)
161
+ printer.pretty(obj)
162
+ printer.flush()
163
+ sys.stdout.write(newline)
164
+ sys.stdout.flush()
165
+
166
+ class _PrettyPrinterBase:
167
+
168
+ @contextmanager
169
+ def indent(self, indent):
170
+ """with statement support for indenting/dedenting."""
171
+ self.indentation += indent
172
+ try:
173
+ yield
174
+ finally:
175
+ self.indentation -= indent
176
+
177
+ @contextmanager
178
+ def group(self, indent=0, open='', close=''):
179
+ """like begin_group / end_group but for the with statement."""
180
+ self.begin_group(indent, open)
181
+ try:
182
+ yield
183
+ finally:
184
+ self.end_group(indent, close)
185
+
186
+ class PrettyPrinter(_PrettyPrinterBase):
187
+ """
188
+ Baseclass for the `RepresentationPrinter` prettyprinter that is used to
189
+ generate pretty reprs of objects. Contrary to the `RepresentationPrinter`
190
+ this printer knows nothing about the default pprinters or the `_repr_pretty_`
191
+ callback method.
192
+ """
193
+
194
+ def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
195
+ self.output = output
196
+ self.max_width = max_width
197
+ self.newline = newline
198
+ self.max_seq_length = max_seq_length
199
+ self.output_width = 0
200
+ self.buffer_width = 0
201
+ self.buffer = deque()
202
+
203
+ root_group = Group(0)
204
+ self.group_stack = [root_group]
205
+ self.group_queue = GroupQueue(root_group)
206
+ self.indentation = 0
207
+
208
+ def _break_one_group(self, group):
209
+ while group.breakables:
210
+ x = self.buffer.popleft()
211
+ self.output_width = x.output(self.output, self.output_width)
212
+ self.buffer_width -= x.width
213
+ while self.buffer and isinstance(self.buffer[0], Text):
214
+ x = self.buffer.popleft()
215
+ self.output_width = x.output(self.output, self.output_width)
216
+ self.buffer_width -= x.width
217
+
218
+ def _break_outer_groups(self):
219
+ while self.max_width < self.output_width + self.buffer_width:
220
+ group = self.group_queue.deq()
221
+ if not group:
222
+ return
223
+ self._break_one_group(group)
224
+
225
+ def text(self, obj):
226
+ """Add literal text to the output."""
227
+ width = len(obj)
228
+ if self.buffer:
229
+ text = self.buffer[-1]
230
+ if not isinstance(text, Text):
231
+ text = Text()
232
+ self.buffer.append(text)
233
+ text.add(obj, width)
234
+ self.buffer_width += width
235
+ self._break_outer_groups()
236
+ else:
237
+ self.output.write(obj)
238
+ self.output_width += width
239
+
240
+ def breakable(self, sep=' '):
241
+ """
242
+ Add a breakable separator to the output. This does not mean that it
243
+ will automatically break here. If no breaking on this position takes
244
+ place, the `sep` is inserted, which defaults to one space.
245
+ """
246
+ width = len(sep)
247
+ group = self.group_stack[-1]
248
+ if group.want_break:
249
+ self.flush()
250
+ self.output.write(self.newline)
251
+ self.output.write(' ' * self.indentation)
252
+ self.output_width = self.indentation
253
+ self.buffer_width = 0
254
+ else:
255
+ self.buffer.append(Breakable(sep, width, self))
256
+ self.buffer_width += width
257
+ self._break_outer_groups()
258
+
259
+ def break_(self):
260
+ """
261
+ Explicitly insert a newline into the output, maintaining correct indentation.
262
+ """
263
+ group = self.group_queue.deq()
264
+ if group:
265
+ self._break_one_group(group)
266
+ self.flush()
267
+ self.output.write(self.newline)
268
+ self.output.write(' ' * self.indentation)
269
+ self.output_width = self.indentation
270
+ self.buffer_width = 0
271
+
272
+
273
+ def begin_group(self, indent=0, open=''):
274
+ """
275
+ Begin a group.
276
+ The first parameter specifies the indentation for the next line (usually
277
+ the width of the opening text), the second the opening text. All
278
+ parameters are optional.
279
+ """
280
+ if open:
281
+ self.text(open)
282
+ group = Group(self.group_stack[-1].depth + 1)
283
+ self.group_stack.append(group)
284
+ self.group_queue.enq(group)
285
+ self.indentation += indent
286
+
287
+ def _enumerate(self, seq):
288
+ """like enumerate, but with an upper limit on the number of items"""
289
+ for idx, x in enumerate(seq):
290
+ if self.max_seq_length and idx >= self.max_seq_length:
291
+ self.text(',')
292
+ self.breakable()
293
+ self.text('...')
294
+ return
295
+ yield idx, x
296
+
297
+ def end_group(self, dedent=0, close=''):
298
+ """End a group. See `begin_group` for more details."""
299
+ self.indentation -= dedent
300
+ group = self.group_stack.pop()
301
+ if not group.breakables:
302
+ self.group_queue.remove(group)
303
+ if close:
304
+ self.text(close)
305
+
306
+ def flush(self):
307
+ """Flush data that is left in the buffer."""
308
+ for data in self.buffer:
309
+ self.output_width += data.output(self.output, self.output_width)
310
+ self.buffer.clear()
311
+ self.buffer_width = 0
312
+
313
+
314
+ def _get_mro(obj_class):
315
+ """ Get a reasonable method resolution order of a class and its superclasses
316
+ for both old-style and new-style classes.
317
+ """
318
+ if not hasattr(obj_class, '__mro__'):
319
+ # Old-style class. Mix in object to make a fake new-style class.
320
+ try:
321
+ obj_class = type(obj_class.__name__, (obj_class, object), {})
322
+ except TypeError:
323
+ # Old-style extension type that does not descend from object.
324
+ # FIXME: try to construct a more thorough MRO.
325
+ mro = [obj_class]
326
+ else:
327
+ mro = obj_class.__mro__[1:-1]
328
+ else:
329
+ mro = obj_class.__mro__
330
+ return mro
331
+
332
+
333
+ class RepresentationPrinter(PrettyPrinter):
334
+ """
335
+ Special pretty printer that has a `pretty` method that calls the pretty
336
+ printer for a python object.
337
+
338
+ This class stores processing data on `self` so you must *never* use
339
+ this class in a threaded environment. Always lock it or reinstantiate
340
+ it.
341
+
342
+ Instances also have a verbose flag callbacks can access to control their
343
+ output. For example the default instance repr prints all attributes and
344
+ methods that are not prefixed by an underscore if the printer is in
345
+ verbose mode.
346
+ """
347
+
348
+ def __init__(self, output, verbose=False, max_width=79, newline='\n',
349
+ singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None,
350
+ max_seq_length=MAX_SEQ_LENGTH):
351
+
352
+ PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length)
353
+ self.verbose = verbose
354
+ self.stack = []
355
+ if singleton_pprinters is None:
356
+ singleton_pprinters = _singleton_pprinters.copy()
357
+ self.singleton_pprinters = singleton_pprinters
358
+ if type_pprinters is None:
359
+ type_pprinters = _type_pprinters.copy()
360
+ self.type_pprinters = type_pprinters
361
+ if deferred_pprinters is None:
362
+ deferred_pprinters = _deferred_type_pprinters.copy()
363
+ self.deferred_pprinters = deferred_pprinters
364
+
365
+ def pretty(self, obj):
366
+ """Pretty print the given object."""
367
+ obj_id = id(obj)
368
+ cycle = obj_id in self.stack
369
+ self.stack.append(obj_id)
370
+ self.begin_group()
371
+ try:
372
+ obj_class = _safe_getattr(obj, '__class__', None) or type(obj)
373
+ # First try to find registered singleton printers for the type.
374
+ try:
375
+ printer = self.singleton_pprinters[obj_id]
376
+ except (TypeError, KeyError):
377
+ pass
378
+ else:
379
+ return printer(obj, self, cycle)
380
+ # Next walk the mro and check for either:
381
+ # 1) a registered printer
382
+ # 2) a _repr_pretty_ method
383
+ for cls in _get_mro(obj_class):
384
+ if cls in self.type_pprinters:
385
+ # printer registered in self.type_pprinters
386
+ return self.type_pprinters[cls](obj, self, cycle)
387
+ else:
388
+ # deferred printer
389
+ printer = self._in_deferred_types(cls)
390
+ if printer is not None:
391
+ return printer(obj, self, cycle)
392
+ else:
393
+ # Finally look for special method names.
394
+ # Some objects automatically create any requested
395
+ # attribute. Try to ignore most of them by checking for
396
+ # callability.
397
+ if '_repr_pretty_' in cls.__dict__:
398
+ meth = cls._repr_pretty_
399
+ if callable(meth):
400
+ return meth(obj, self, cycle)
401
+ if (
402
+ cls is not object
403
+ # check if cls defines __repr__
404
+ and "__repr__" in cls.__dict__
405
+ # check if __repr__ is callable.
406
+ # Note: we need to test getattr(cls, '__repr__')
407
+ # instead of cls.__dict__['__repr__']
408
+ # in order to work with descriptors like partialmethod,
409
+ and callable(_safe_getattr(cls, "__repr__", None))
410
+ ):
411
+ return _repr_pprint(obj, self, cycle)
412
+
413
+ return _default_pprint(obj, self, cycle)
414
+ finally:
415
+ self.end_group()
416
+ self.stack.pop()
417
+
418
+ def _in_deferred_types(self, cls):
419
+ """
420
+ Check if the given class is specified in the deferred type registry.
421
+
422
+ Returns the printer from the registry if it exists, and None if the
423
+ class is not in the registry. Successful matches will be moved to the
424
+ regular type registry for future use.
425
+ """
426
+ mod = _safe_getattr(cls, '__module__', None)
427
+ name = _safe_getattr(cls, '__name__', None)
428
+ key = (mod, name)
429
+ printer = None
430
+ if key in self.deferred_pprinters:
431
+ # Move the printer over to the regular registry.
432
+ printer = self.deferred_pprinters.pop(key)
433
+ self.type_pprinters[cls] = printer
434
+ return printer
435
+
436
+
437
+ class Printable:
438
+
439
+ def output(self, stream, output_width):
440
+ return output_width
441
+
442
+
443
+ class Text(Printable):
444
+
445
+ def __init__(self):
446
+ self.objs = []
447
+ self.width = 0
448
+
449
+ def output(self, stream, output_width):
450
+ for obj in self.objs:
451
+ stream.write(obj)
452
+ return output_width + self.width
453
+
454
+ def add(self, obj, width):
455
+ self.objs.append(obj)
456
+ self.width += width
457
+
458
+
459
+ class Breakable(Printable):
460
+
461
+ def __init__(self, seq, width, pretty):
462
+ self.obj = seq
463
+ self.width = width
464
+ self.pretty = pretty
465
+ self.indentation = pretty.indentation
466
+ self.group = pretty.group_stack[-1]
467
+ self.group.breakables.append(self)
468
+
469
+ def output(self, stream, output_width):
470
+ self.group.breakables.popleft()
471
+ if self.group.want_break:
472
+ stream.write(self.pretty.newline)
473
+ stream.write(' ' * self.indentation)
474
+ return self.indentation
475
+ if not self.group.breakables:
476
+ self.pretty.group_queue.remove(self.group)
477
+ stream.write(self.obj)
478
+ return output_width + self.width
479
+
480
+
481
+ class Group(Printable):
482
+
483
+ def __init__(self, depth):
484
+ self.depth = depth
485
+ self.breakables = deque()
486
+ self.want_break = False
487
+
488
+
489
+ class GroupQueue:
490
+
491
+ def __init__(self, *groups):
492
+ self.queue = []
493
+ for group in groups:
494
+ self.enq(group)
495
+
496
+ def enq(self, group):
497
+ depth = group.depth
498
+ while depth > len(self.queue) - 1:
499
+ self.queue.append([])
500
+ self.queue[depth].append(group)
501
+
502
+ def deq(self):
503
+ for stack in self.queue:
504
+ for idx, group in enumerate(reversed(stack)):
505
+ if group.breakables:
506
+ del stack[idx]
507
+ group.want_break = True
508
+ return group
509
+ for group in stack:
510
+ group.want_break = True
511
+ del stack[:]
512
+
513
+ def remove(self, group):
514
+ try:
515
+ self.queue[group.depth].remove(group)
516
+ except ValueError:
517
+ pass
518
+
519
+
520
+ class RawText:
521
+ """ Object such that ``p.pretty(RawText(value))`` is the same as ``p.text(value)``.
522
+
523
+ An example usage of this would be to show a list as binary numbers, using
524
+ ``p.pretty([RawText(bin(i)) for i in integers])``.
525
+ """
526
+ def __init__(self, value):
527
+ self.value = value
528
+
529
+ def _repr_pretty_(self, p, cycle):
530
+ p.text(self.value)
531
+
532
+
533
+ class CallExpression:
534
+ """ Object which emits a line-wrapped call expression in the form `__name(*args, **kwargs)` """
535
+ def __init__(__self, __name, *args, **kwargs):
536
+ # dunders are to avoid clashes with kwargs, as python's name mangling
537
+ # will kick in.
538
+ self = __self
539
+ self.name = __name
540
+ self.args = args
541
+ self.kwargs = kwargs
542
+
543
+ @classmethod
544
+ def factory(cls, name):
545
+ def inner(*args, **kwargs):
546
+ return cls(name, *args, **kwargs)
547
+ return inner
548
+
549
+ def _repr_pretty_(self, p, cycle):
550
+ # dunders are to avoid clashes with kwargs, as python's name mangling
551
+ # will kick in.
552
+
553
+ started = False
554
+ def new_item():
555
+ nonlocal started
556
+ if started:
557
+ p.text(",")
558
+ p.breakable()
559
+ started = True
560
+
561
+ prefix = self.name + "("
562
+ with p.group(len(prefix), prefix, ")"):
563
+ for arg in self.args:
564
+ new_item()
565
+ p.pretty(arg)
566
+ for arg_name, arg in self.kwargs.items():
567
+ new_item()
568
+ arg_prefix = arg_name + "="
569
+ with p.group(len(arg_prefix), arg_prefix):
570
+ p.pretty(arg)
571
+
572
+
573
+ class RawStringLiteral:
574
+ """ Wrapper that shows a string with a `r` prefix """
575
+ def __init__(self, value):
576
+ self.value = value
577
+
578
+ def _repr_pretty_(self, p, cycle):
579
+ base_repr = repr(self.value)
580
+ if base_repr[:1] in 'uU':
581
+ base_repr = base_repr[1:]
582
+ prefix = 'ur'
583
+ else:
584
+ prefix = 'r'
585
+ base_repr = prefix + base_repr.replace('\\\\', '\\')
586
+ p.text(base_repr)
587
+
588
+
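# A minimal sketch of how the CallExpression and RawText helpers above are
# typically used from a _repr_pretty_ method; the Point class is hypothetical.
from IPython.lib.pretty import CallExpression, RawText, pretty

class Point:
    def __init__(self, x, y, label=None):
        self.x, self.y, self.label = x, y, label

    def _repr_pretty_(self, p, cycle):
        ctor = CallExpression.factory(type(self).__name__)
        if cycle:
            p.pretty(ctor(RawText("...")))   # guard against self-reference
        else:
            p.pretty(ctor(self.x, self.y, label=self.label))

print(pretty(Point(1, 2, label="origin")))   # -> Point(1, 2, label='origin')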
589
+ def _default_pprint(obj, p, cycle):
590
+ """
591
+ The default print function. Used if an object does not provide one and
592
+ it's none of the builtin objects.
593
+ """
594
+ klass = _safe_getattr(obj, '__class__', None) or type(obj)
595
+ if _safe_getattr(klass, '__repr__', None) is not object.__repr__:
596
+ # A user-provided repr. Find newlines and replace them with p.break_()
597
+ _repr_pprint(obj, p, cycle)
598
+ return
599
+ p.begin_group(1, '<')
600
+ p.pretty(klass)
601
+ p.text(' at 0x%x' % id(obj))
602
+ if cycle:
603
+ p.text(' ...')
604
+ elif p.verbose:
605
+ first = True
606
+ for key in dir(obj):
607
+ if not key.startswith('_'):
608
+ try:
609
+ value = getattr(obj, key)
610
+ except AttributeError:
611
+ continue
612
+ if isinstance(value, types.MethodType):
613
+ continue
614
+ if not first:
615
+ p.text(',')
616
+ p.breakable()
617
+ p.text(key)
618
+ p.text('=')
619
+ step = len(key) + 1
620
+ p.indentation += step
621
+ p.pretty(value)
622
+ p.indentation -= step
623
+ first = False
624
+ p.end_group(1, '>')
625
+
626
+
627
+ def _seq_pprinter_factory(start, end):
628
+ """
629
+ Factory that returns a pprint function useful for sequences. Used by
630
+ the default pprint for tuples and lists.
631
+ """
632
+ def inner(obj, p, cycle):
633
+ if cycle:
634
+ return p.text(start + '...' + end)
635
+ step = len(start)
636
+ p.begin_group(step, start)
637
+ for idx, x in p._enumerate(obj):
638
+ if idx:
639
+ p.text(',')
640
+ p.breakable()
641
+ p.pretty(x)
642
+ if len(obj) == 1 and isinstance(obj, tuple):
643
+ # Special case for 1-item tuples.
644
+ p.text(',')
645
+ p.end_group(step, end)
646
+ return inner
647
+
648
+
649
+ def _set_pprinter_factory(start, end):
650
+ """
651
+ Factory that returns a pprint function useful for sets and frozensets.
652
+ """
653
+ def inner(obj, p, cycle):
654
+ if cycle:
655
+ return p.text(start + '...' + end)
656
+ if len(obj) == 0:
657
+ # Special case.
658
+ p.text(type(obj).__name__ + '()')
659
+ else:
660
+ step = len(start)
661
+ p.begin_group(step, start)
662
+ # Like dictionary keys, we will try to sort the items if there aren't too many
663
+ if not (p.max_seq_length and len(obj) >= p.max_seq_length):
664
+ items = _sorted_for_pprint(obj)
665
+ else:
666
+ items = obj
667
+ for idx, x in p._enumerate(items):
668
+ if idx:
669
+ p.text(',')
670
+ p.breakable()
671
+ p.pretty(x)
672
+ p.end_group(step, end)
673
+ return inner
674
+
675
+
676
+ def _dict_pprinter_factory(start, end):
677
+ """
678
+ Factory that returns a pprint function used by the default pprint of
679
+ dicts and dict proxies.
680
+ """
681
+ def inner(obj, p, cycle):
682
+ if cycle:
683
+ return p.text('{...}')
684
+ step = len(start)
685
+ p.begin_group(step, start)
686
+ keys = obj.keys()
687
+ for idx, key in p._enumerate(keys):
688
+ if idx:
689
+ p.text(',')
690
+ p.breakable()
691
+ p.pretty(key)
692
+ p.text(': ')
693
+ p.pretty(obj[key])
694
+ p.end_group(step, end)
695
+ return inner
696
+
697
+
698
+ def _super_pprint(obj, p, cycle):
699
+ """The pprint for the super type."""
700
+ p.begin_group(8, '<super: ')
701
+ p.pretty(obj.__thisclass__)
702
+ p.text(',')
703
+ p.breakable()
704
+ if PYPY: # In PyPy, super() objects don't have __self__ attributes
705
+ dself = obj.__repr__.__self__
706
+ p.pretty(None if dself is obj else dself)
707
+ else:
708
+ p.pretty(obj.__self__)
709
+ p.end_group(8, '>')
710
+
711
+
712
+
713
+ class _ReFlags:
714
+ def __init__(self, value):
715
+ self.value = value
716
+
717
+ def _repr_pretty_(self, p, cycle):
718
+ done_one = False
719
+ for flag in (
720
+ "IGNORECASE",
721
+ "LOCALE",
722
+ "MULTILINE",
723
+ "DOTALL",
724
+ "UNICODE",
725
+ "VERBOSE",
726
+ "DEBUG",
727
+ ):
728
+ if self.value & getattr(re, flag):
729
+ if done_one:
730
+ p.text('|')
731
+ p.text('re.' + flag)
732
+ done_one = True
733
+
734
+
735
+ def _re_pattern_pprint(obj, p, cycle):
736
+ """The pprint function for regular expression patterns."""
737
+ re_compile = CallExpression.factory('re.compile')
738
+ if obj.flags:
739
+ p.pretty(re_compile(RawStringLiteral(obj.pattern), _ReFlags(obj.flags)))
740
+ else:
741
+ p.pretty(re_compile(RawStringLiteral(obj.pattern)))
742
+
743
+
744
+ def _types_simplenamespace_pprint(obj, p, cycle):
745
+ """The pprint function for types.SimpleNamespace."""
746
+ namespace = CallExpression.factory('namespace')
747
+ if cycle:
748
+ p.pretty(namespace(RawText("...")))
749
+ else:
750
+ p.pretty(namespace(**obj.__dict__))
751
+
752
+
753
+ def _type_pprint(obj, p, cycle):
754
+ """The pprint for classes and types."""
755
+ # Heap allocated types might not have the module attribute,
756
+ # and others may set it to None.
757
+
758
+ # Checks for a __repr__ override in the metaclass. Can't compare the
759
+ # type(obj).__repr__ directly because in PyPy the representation function
760
+ # inherited from type isn't the same type.__repr__
761
+ if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]:
762
+ _repr_pprint(obj, p, cycle)
763
+ return
764
+
765
+ mod = _safe_getattr(obj, '__module__', None)
766
+ try:
767
+ name = obj.__qualname__
768
+ if not isinstance(name, str):
769
+ # This can happen if the type implements __qualname__ as a property
770
+ # or other descriptor in Python 2.
771
+ raise Exception("Try __name__")
772
+ except Exception:
773
+ name = obj.__name__
774
+ if not isinstance(name, str):
775
+ name = '<unknown type>'
776
+
777
+ if mod in (None, '__builtin__', 'builtins', 'exceptions'):
778
+ p.text(name)
779
+ else:
780
+ p.text(mod + '.' + name)
781
+
782
+
783
+ def _repr_pprint(obj, p, cycle):
784
+ """A pprint that just redirects to the normal repr function."""
785
+ # Find newlines and replace them with p.break_()
786
+ output = repr(obj)
787
+ lines = output.splitlines()
788
+ with p.group():
789
+ for idx, output_line in enumerate(lines):
790
+ if idx:
791
+ p.break_()
792
+ p.text(output_line)
793
+
794
+
795
+ def _function_pprint(obj, p, cycle):
796
+ """Base pprint for all functions and builtin functions."""
797
+ name = _safe_getattr(obj, '__qualname__', obj.__name__)
798
+ mod = obj.__module__
799
+ if mod and mod not in ('__builtin__', 'builtins', 'exceptions'):
800
+ name = mod + '.' + name
801
+ try:
802
+ func_def = name + str(signature(obj))
803
+ except ValueError:
804
+ func_def = name
805
+ p.text('<function %s>' % func_def)
806
+
807
+
808
+ def _exception_pprint(obj, p, cycle):
809
+ """Base pprint for all exceptions."""
810
+ name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__)
811
+ if obj.__class__.__module__ not in ('exceptions', 'builtins'):
812
+ name = '%s.%s' % (obj.__class__.__module__, name)
813
+
814
+ p.pretty(CallExpression(name, *getattr(obj, 'args', ())))
815
+
816
+
817
+ #: the exception base
818
+ _exception_base: type
819
+ try:
820
+ _exception_base = BaseException
821
+ except NameError:
822
+ _exception_base = Exception
823
+
824
+
825
+ #: printers for builtin types
826
+ _type_pprinters = {
827
+ int: _repr_pprint,
828
+ float: _repr_pprint,
829
+ str: _repr_pprint,
830
+ tuple: _seq_pprinter_factory('(', ')'),
831
+ list: _seq_pprinter_factory('[', ']'),
832
+ dict: _dict_pprinter_factory('{', '}'),
833
+ set: _set_pprinter_factory('{', '}'),
834
+ frozenset: _set_pprinter_factory('frozenset({', '})'),
835
+ super: _super_pprint,
836
+ _re_pattern_type: _re_pattern_pprint,
837
+ type: _type_pprint,
838
+ types.FunctionType: _function_pprint,
839
+ types.BuiltinFunctionType: _function_pprint,
840
+ types.MethodType: _repr_pprint,
841
+ types.SimpleNamespace: _types_simplenamespace_pprint,
842
+ datetime.datetime: _repr_pprint,
843
+ datetime.timedelta: _repr_pprint,
844
+ _exception_base: _exception_pprint
845
+ }
846
+
847
+ # render os.environ like a dict
848
+ _env_type = type(os.environ)
849
+ # future-proof in case os.environ becomes a plain dict?
850
+ if _env_type is not dict:
851
+ _type_pprinters[_env_type] = _dict_pprinter_factory('environ{', '}')
852
+
853
+ _type_pprinters[types.MappingProxyType] = _dict_pprinter_factory("mappingproxy({", "})")
854
+ _type_pprinters[slice] = _repr_pprint
855
+
856
+ _type_pprinters[range] = _repr_pprint
857
+ _type_pprinters[bytes] = _repr_pprint
858
+
859
+ #: printers for types specified by name
860
+ _deferred_type_pprinters: Dict = {}
861
+
862
+
863
+ def for_type(typ, func):
864
+ """
865
+ Add a pretty printer for a given type.
866
+ """
867
+ oldfunc = _type_pprinters.get(typ, None)
868
+ if func is not None:
869
+ # To support easy restoration of old pprinters, we need to ignore Nones.
870
+ _type_pprinters[typ] = func
871
+ return oldfunc
872
+
873
+ def for_type_by_name(type_module, type_name, func):
874
+ """
875
+ Add a pretty printer for a type specified by the module and name of a type
876
+ rather than the type object itself.
877
+ """
878
+ key = (type_module, type_name)
879
+ oldfunc = _deferred_type_pprinters.get(key, None)
880
+ if func is not None:
881
+ # To support easy restoration of old pprinters, we need to ignore Nones.
882
+ _deferred_type_pprinters[key] = func
883
+ return oldfunc
884
+
885
+
886
+ #: printers for the default singletons
887
+ _singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis,
888
+ NotImplemented]), _repr_pprint)
889
+
890
+
891
+ def _defaultdict_pprint(obj, p, cycle):
892
+ cls_ctor = CallExpression.factory(obj.__class__.__name__)
893
+ if cycle:
894
+ p.pretty(cls_ctor(RawText("...")))
895
+ else:
896
+ p.pretty(cls_ctor(obj.default_factory, dict(obj)))
897
+
898
+ def _ordereddict_pprint(obj, p, cycle):
899
+ cls_ctor = CallExpression.factory(obj.__class__.__name__)
900
+ if cycle:
901
+ p.pretty(cls_ctor(RawText("...")))
902
+ elif len(obj):
903
+ p.pretty(cls_ctor(list(obj.items())))
904
+ else:
905
+ p.pretty(cls_ctor())
906
+
907
+ def _deque_pprint(obj, p, cycle):
908
+ cls_ctor = CallExpression.factory(obj.__class__.__name__)
909
+ if cycle:
910
+ p.pretty(cls_ctor(RawText("...")))
911
+ elif obj.maxlen is not None:
912
+ p.pretty(cls_ctor(list(obj), maxlen=obj.maxlen))
913
+ else:
914
+ p.pretty(cls_ctor(list(obj)))
915
+
916
+ def _counter_pprint(obj, p, cycle):
917
+ cls_ctor = CallExpression.factory(obj.__class__.__name__)
918
+ if cycle:
919
+ p.pretty(cls_ctor(RawText("...")))
920
+ elif len(obj):
921
+ p.pretty(cls_ctor(dict(obj.most_common())))
922
+ else:
923
+ p.pretty(cls_ctor())
924
+
925
+
926
+ def _userlist_pprint(obj, p, cycle):
927
+ cls_ctor = CallExpression.factory(obj.__class__.__name__)
928
+ if cycle:
929
+ p.pretty(cls_ctor(RawText("...")))
930
+ else:
931
+ p.pretty(cls_ctor(obj.data))
932
+
933
+
934
+ for_type_by_name('collections', 'defaultdict', _defaultdict_pprint)
935
+ for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint)
936
+ for_type_by_name('collections', 'deque', _deque_pprint)
937
+ for_type_by_name('collections', 'Counter', _counter_pprint)
938
+ for_type_by_name("collections", "UserList", _userlist_pprint)
939
+
940
+ if __name__ == '__main__':
941
+ from random import randrange
942
+
943
+ class Foo:
944
+ def __init__(self):
945
+ self.foo = 1
946
+ self.bar = re.compile(r'\s+')
947
+ self.blub = dict.fromkeys(range(30), randrange(1, 40))
948
+ self.hehe = 23424.234234
949
+ self.list = ["blub", "blah", self]
950
+
951
+ def get_foo(self):
952
+ print("foo")
953
+
954
+ pprint(Foo(), verbose=True)
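The `for_type` registry and the module-level `pprint` shown above are the public hooks for plugging custom printers into `IPython.lib.pretty`. A minimal sketch of how they might be used together (the `Point` class and `_point_pprint` helper are hypothetical, introduced only for illustration):

    from IPython.lib.pretty import for_type, pprint

    class Point:
        # hypothetical example class, not part of IPython
        def __init__(self, x, y):
            self.x, self.y = x, y

    def _point_pprint(obj, p, cycle):
        # a printer receives the object, the pretty printer `p`, and a cycle flag
        if cycle:
            p.text("Point(...)")
        else:
            p.text("Point(%r, %r)" % (obj.x, obj.y))

    for_type(Point, _point_pprint)   # returns any previously registered printer
    pprint(Point(1, 2))              # prints: Point(1, 2)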
temp_venv/lib/python3.13/site-packages/IPython/testing/__init__.py ADDED
@@ -0,0 +1,20 @@
1
+ """Testing support (tools to test IPython itself).
2
+ """
3
+
4
+ #-----------------------------------------------------------------------------
5
+ # Copyright (C) 2009-2011 The IPython Development Team
6
+ #
7
+ # Distributed under the terms of the BSD License. The full license is in
8
+ # the file COPYING, distributed as part of this software.
9
+ #-----------------------------------------------------------------------------
10
+
11
+
12
+ import os
13
+
14
+ #-----------------------------------------------------------------------------
15
+ # Constants
16
+ #-----------------------------------------------------------------------------
17
+
18
+ # We scale all timeouts via this factor, slow machines can increase it
19
+ IPYTHON_TESTING_TIMEOUT_SCALE = float(os.getenv(
20
+ 'IPYTHON_TESTING_TIMEOUT_SCALE', 1))
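The scale factor above is meant to be multiplied into whatever timeout a test uses; a small sketch under that assumption (the 120-second base value is made up for illustration):

    import subprocess
    from IPython.testing import IPYTHON_TESTING_TIMEOUT_SCALE

    base_timeout = 120  # hypothetical per-test timeout, in seconds
    subprocess.run(
        ["ipython", "--version"],
        # slow machines export IPYTHON_TESTING_TIMEOUT_SCALE > 1 to stretch all timeouts
        timeout=base_timeout * IPYTHON_TESTING_TIMEOUT_SCALE,
    )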
temp_venv/lib/python3.13/site-packages/IPython/testing/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (427 Bytes).
 
temp_venv/lib/python3.13/site-packages/IPython/testing/__pycache__/skipdoctest.cpython-313.pyc ADDED
Binary file (899 Bytes).
 
temp_venv/lib/python3.13/site-packages/IPython/testing/decorators.py ADDED
@@ -0,0 +1,148 @@
1
+ import os
2
+ import shutil
3
+ import sys
4
+ import tempfile
5
+ from importlib import import_module
6
+
7
+ import pytest
8
+
9
+ # Expose the unittest-driven decorators
10
+ from .ipunittest import ipdoctest, ipdocstring
11
+
12
+ def skipif(skip_condition, msg=None):
13
+ """Make function raise SkipTest exception if skip_condition is true
14
+
15
+ Parameters
16
+ ----------
17
+
18
+ skip_condition : bool or callable
19
+ Flag to determine whether to skip test. If the condition is a
20
+ callable, it is used at runtime to dynamically make the decision. This
21
+ is useful for tests that may require costly imports, to delay the cost
22
+ until the test suite is actually executed.
23
+ msg : string
24
+ Message to give on raising a SkipTest exception.
25
+
26
+ Returns
27
+ -------
28
+ decorator : function
29
+ Decorator, which, when applied to a function, causes SkipTest
30
+ to be raised when the skip_condition was True, and the function
31
+ to be called normally otherwise.
32
+ """
33
+ if msg is None:
34
+ msg = "Test skipped due to test condition."
35
+
36
+ assert isinstance(skip_condition, bool)
37
+ return pytest.mark.skipif(skip_condition, reason=msg)
38
+
39
+
40
+ # A version with the condition set to true, common case just to attach a message
41
+ # to a skip decorator
42
+ def skip(msg=None):
43
+ """Decorator factory - mark a test function for skipping from test suite.
44
+
45
+ Parameters
46
+ ----------
47
+ msg : string
48
+ Optional message to be added.
49
+
50
+ Returns
51
+ -------
52
+ decorator : function
53
+ Decorator, which, when applied to a function, causes SkipTest
54
+ to be raised, with the optional message added.
55
+ """
56
+ if msg and not isinstance(msg, str):
57
+ raise ValueError(
58
+ "invalid object passed to `@skip` decorator, did you "
59
+            "mean `@skip()` with parentheses?"
60
+ )
61
+ return skipif(True, msg)
62
+
63
+
64
+ def onlyif(condition, msg):
65
+ """The reverse from skipif, see skipif for details."""
66
+
67
+ return skipif(not condition, msg)
68
+
69
+
70
+ # -----------------------------------------------------------------------------
71
+ # Utility functions for decorators
72
+ def module_not_available(module):
73
+ """Can module be imported? Returns true if module does NOT import.
74
+
75
+ This is used to make a decorator to skip tests that require module to be
76
+ available, but delay the 'import numpy' to test execution time.
77
+ """
78
+ try:
79
+ mod = import_module(module)
80
+ mod_not_avail = False
81
+ except ImportError:
82
+ mod_not_avail = True
83
+
84
+ return mod_not_avail
85
+
86
+
87
+ # -----------------------------------------------------------------------------
88
+ # Decorators for public use
89
+
90
+ # Decorators to skip certain tests on specific platforms.
91
+ skip_win32 = skipif(sys.platform == "win32", "This test does not run under Windows")
92
+
93
+
94
+ # Decorators to skip tests if not on specific platforms.
95
+ skip_if_not_win32 = skipif(sys.platform != "win32", "This test only runs under Windows")
96
+ skip_if_not_osx = skipif(
97
+ not sys.platform.startswith("darwin"), "This test only runs under macOS"
98
+ )
99
+
100
+ _x11_skip_cond = (
101
+ sys.platform not in ("darwin", "win32") and os.environ.get("DISPLAY", "") == ""
102
+ )
103
+ _x11_skip_msg = "Skipped under *nix when X11/XOrg not available"
104
+
105
+ skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg)
106
+
107
+ # Other skip decorators
108
+
109
+ # generic skip without module
110
+ skip_without = lambda mod: skipif(
111
+ module_not_available(mod), "This test requires %s" % mod
112
+ )
113
+
114
+ skipif_not_numpy = skip_without("numpy")
115
+
116
+ skipif_not_matplotlib = skip_without("matplotlib")
117
+
118
+ # A null 'decorator', useful to make more readable code that needs to pick
119
+ # between different decorators based on OS or other conditions
120
+ null_deco = lambda f: f
121
+
122
+ # Some tests only run where we can use unicode paths. Note that we can't just
123
+ # check os.path.supports_unicode_filenames, which is always False on Linux.
124
+ try:
125
+ f = tempfile.NamedTemporaryFile(prefix="tmp€")
126
+ except UnicodeEncodeError:
127
+ unicode_paths = False
128
+    # TODO: should this be finally?
129
+ else:
130
+ unicode_paths = True
131
+ f.close()
132
+
133
+ onlyif_unicode_paths = onlyif(
134
+ unicode_paths,
135
+ ("This test is only applicable where we can use unicode in filenames."),
136
+ )
137
+
138
+
139
+ def onlyif_cmds_exist(*commands):
140
+ """
141
+ Decorator to skip test when at least one of `commands` is not found.
142
+ """
143
+ for cmd in commands:
144
+ reason = f"This test runs only if command '{cmd}' is installed"
145
+ if not shutil.which(cmd):
146
+
147
+ return pytest.mark.skip(reason=reason)
148
+ return null_deco
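These decorators are thin wrappers over pytest marks, so they compose like ordinary marks; a hedged sketch of typical usage (the test functions below are hypothetical):

    import sys
    from IPython.testing.decorators import skip_win32, skip_without, onlyif_cmds_exist

    @skip_win32
    def test_posix_only():
        assert sys.platform != "win32"

    @skip_without("numpy")            # skipped when numpy cannot be imported
    def test_numpy_roundtrip():
        import numpy as np
        assert np.arange(3).sum() == 3

    @onlyif_cmds_exist("git")         # skipped when the `git` executable is missing
    def test_git_helper():
        pass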
temp_venv/lib/python3.13/site-packages/IPython/testing/globalipapp.py ADDED
@@ -0,0 +1,114 @@
1
+ """Global IPython app to support test running.
2
+
3
+ We must start our own ipython object and heavily muck with it so that all the
4
+ modifications IPython makes to system behavior don't send the doctest machinery
5
+ into a fit. This code should be considered a gross hack, but it gets the job
6
+ done.
7
+ """
8
+
9
+ # Copyright (c) IPython Development Team.
10
+ # Distributed under the terms of the Modified BSD License.
11
+
12
+ import builtins as builtin_mod
13
+ import sys
14
+ import types
15
+
16
+ from pathlib import Path
17
+
18
+ from . import tools
19
+
20
+ from IPython.core import page
21
+ from IPython.utils import io
22
+ from IPython.terminal.interactiveshell import TerminalInteractiveShell
23
+
24
+
25
+ def get_ipython():
26
+ # This will get replaced by the real thing once we start IPython below
27
+ return start_ipython()
28
+
29
+
30
+ # A couple of methods to override those in the running IPython to interact
31
+ # better with doctest (doctest captures on raw stdout, so we need to direct
32
+ # various types of output there otherwise it will miss them).
33
+
34
+ def xsys(self, cmd):
35
+ """Replace the default system call with a capturing one for doctest.
36
+ """
37
+ # We use getoutput, but we need to strip it because pexpect captures
38
+ # the trailing newline differently from commands.getoutput
39
+ print(self.getoutput(cmd, split=False, depth=1).rstrip(), end='', file=sys.stdout)
40
+ sys.stdout.flush()
41
+
42
+
43
+ def _showtraceback(self, etype, evalue, stb):
44
+ """Print the traceback purely on stdout for doctest to capture it.
45
+ """
46
+ print(self.InteractiveTB.stb2text(stb), file=sys.stdout)
47
+
48
+
49
+ def start_ipython():
50
+ """Start a global IPython shell, which we need for IPython-specific syntax.
51
+ """
52
+ global get_ipython
53
+
54
+ # This function should only ever run once!
55
+ if hasattr(start_ipython, 'already_called'):
56
+ return
57
+ start_ipython.already_called = True
58
+
59
+ # Store certain global objects that IPython modifies
60
+ _displayhook = sys.displayhook
61
+ _excepthook = sys.excepthook
62
+ _main = sys.modules.get('__main__')
63
+
64
+ # Create custom argv and namespaces for our IPython to be test-friendly
65
+ config = tools.default_config()
66
+ config.TerminalInteractiveShell.simple_prompt = True
67
+
68
+ # Create and initialize our test-friendly IPython instance.
69
+ shell = TerminalInteractiveShell.instance(config=config,
70
+ )
71
+
72
+ # A few more tweaks needed for playing nicely with doctests...
73
+
74
+ # remove history file
75
+ shell.tempfiles.append(Path(config.HistoryManager.hist_file))
76
+
77
+ # These traps are normally only active for interactive use, set them
78
+ # permanently since we'll be mocking interactive sessions.
79
+ shell.builtin_trap.activate()
80
+
81
+ # Modify the IPython system call with one that uses getoutput, so that we
82
+ # can capture subcommands and print them to Python's stdout, otherwise the
83
+ # doctest machinery would miss them.
84
+ shell.system = types.MethodType(xsys, shell)
85
+
86
+ shell._showtraceback = types.MethodType(_showtraceback, shell)
87
+
88
+ # IPython is ready, now clean up some global state...
89
+
90
+ # Deactivate the various python system hooks added by ipython for
91
+ # interactive convenience so we don't confuse the doctest system
92
+ sys.modules['__main__'] = _main
93
+ sys.displayhook = _displayhook
94
+ sys.excepthook = _excepthook
95
+
96
+ # So that ipython magics and aliases can be doctested (they work by making
97
+ # a call into a global _ip object). Also make the top-level get_ipython
98
+ # now return this without recursively calling here again.
99
+ _ip = shell
100
+ get_ipython = _ip.get_ipython
101
+ builtin_mod._ip = _ip
102
+ builtin_mod.ip = _ip
103
+ builtin_mod.get_ipython = get_ipython
104
+
105
+ # Override paging, so we don't require user interaction during the tests.
106
+ def nopage(strng, start=0, screen_lines=0, pager_cmd=None):
107
+ if isinstance(strng, dict):
108
+ strng = strng.get('text/plain', '')
109
+ print(strng)
110
+
111
+ page.orig_page = page.pager_page
112
+ page.pager_page = nopage
113
+
114
+ return _ip
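In use, the doctest machinery calls `start_ipython()` once and then drives the returned shell through the injected globals; a minimal sketch under that assumption:

    from IPython.testing.globalipapp import start_ipython

    ip = start_ipython()              # first call builds the shell and patches builtins
    assert start_ipython() is None    # later calls hit the guard above and return nothing
    ip.run_cell("x = 40 + 2")         # standard InteractiveShell API
    assert ip.user_ns["x"] == 42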
temp_venv/lib/python3.13/site-packages/IPython/testing/ipunittest.py ADDED
@@ -0,0 +1,187 @@
1
+ """Experimental code for cleaner support of IPython syntax with unittest.
2
+
3
+ In IPython up until 0.10, we've used very hacked up nose machinery for running
4
+ tests with IPython special syntax, and this has proved to be extremely slow.
5
+ This module provides decorators to try a different approach, stemming from a
6
+ conversation Brian and I (FP) had about this problem Sept/09.
7
+
8
+ The goal is to be able to easily write simple functions that can be seen by
9
+ unittest as tests, and ultimately for these to support doctests with full
10
+ IPython syntax. Nose already offers this based on naming conventions and our
11
+ hackish plugins, but we are seeking to move away from nose dependencies if
12
+ possible.
13
+
14
+ This module follows a different approach, based on decorators.
15
+
16
+ - A decorator called @ipdoctest can mark any function as having a docstring
17
+ that should be viewed as a doctest, but after syntax conversion.
18
+
19
+ Authors
20
+ -------
21
+
22
+ - Fernando Perez <[email protected]>
23
+ """
24
+
25
+
26
+ #-----------------------------------------------------------------------------
27
+ # Copyright (C) 2009-2011 The IPython Development Team
28
+ #
29
+ # Distributed under the terms of the BSD License. The full license is in
30
+ # the file COPYING, distributed as part of this software.
31
+ #-----------------------------------------------------------------------------
32
+
33
+ #-----------------------------------------------------------------------------
34
+ # Imports
35
+ #-----------------------------------------------------------------------------
36
+
37
+ # Stdlib
38
+ import re
39
+ import sys
40
+ import unittest
41
+ import builtins
42
+ from doctest import DocTestFinder, DocTestRunner, TestResults
43
+ from IPython.terminal.interactiveshell import InteractiveShell
44
+
45
+ #-----------------------------------------------------------------------------
46
+ # Classes and functions
47
+ #-----------------------------------------------------------------------------
48
+
49
+ def count_failures(runner):
50
+ """Count number of failures in a doctest runner.
51
+
52
+ Code modeled after the summarize() method in doctest.
53
+ """
54
+ if sys.version_info < (3, 13):
55
+ return [TestResults(f, t) for f, t in runner._name2ft.values() if f > 0]
56
+ else:
57
+ return [
58
+ TestResults(failure, try_)
59
+ for failure, try_, skip in runner._stats.values()
60
+ if failure > 0
61
+ ]
62
+
63
+
64
+ class IPython2PythonConverter:
65
+ """Convert IPython 'syntax' to valid Python.
66
+
67
+ Eventually this code may grow to be the full IPython syntax conversion
68
+ implementation, but for now it only does prompt conversion."""
69
+
70
+ def __init__(self):
71
+ self.rps1 = re.compile(r'In\ \[\d+\]: ')
72
+ self.rps2 = re.compile(r'\ \ \ \.\.\.+: ')
73
+ self.rout = re.compile(r'Out\[\d+\]: \s*?\n?')
74
+ self.pyps1 = '>>> '
75
+ self.pyps2 = '... '
76
+ self.rpyps1 = re.compile (r'(\s*%s)(.*)$' % self.pyps1)
77
+ self.rpyps2 = re.compile (r'(\s*%s)(.*)$' % self.pyps2)
78
+
79
+ def __call__(self, ds):
80
+ """Convert IPython prompts to python ones in a string."""
81
+ from . import globalipapp
82
+
83
+ pyps1 = '>>> '
84
+ pyps2 = '... '
85
+ pyout = ''
86
+
87
+ dnew = ds
88
+ dnew = self.rps1.sub(pyps1, dnew)
89
+ dnew = self.rps2.sub(pyps2, dnew)
90
+ dnew = self.rout.sub(pyout, dnew)
91
+ ip = InteractiveShell.instance()
92
+
93
+ # Convert input IPython source into valid Python.
94
+ out = []
95
+ newline = out.append
96
+ for line in dnew.splitlines():
97
+
98
+ mps1 = self.rpyps1.match(line)
99
+ if mps1 is not None:
100
+ prompt, text = mps1.groups()
101
+ newline(prompt+ip.prefilter(text, False))
102
+ continue
103
+
104
+ mps2 = self.rpyps2.match(line)
105
+ if mps2 is not None:
106
+ prompt, text = mps2.groups()
107
+ newline(prompt+ip.prefilter(text, True))
108
+ continue
109
+
110
+ newline(line)
111
+ newline('') # ensure a closing newline, needed by doctest
112
+ # print("PYSRC:", '\n'.join(out)) # dbg
113
+ return '\n'.join(out)
114
+
115
+ #return dnew
116
+
117
+
118
+ class Doc2UnitTester:
119
+ """Class whose instances act as a decorator for docstring testing.
120
+
121
+ In practice we're only likely to need one instance ever, made below (though
122
+ no attempt is made at turning it into a singleton, there is no need for
123
+ that).
124
+ """
125
+ def __init__(self, verbose=False):
126
+ """New decorator.
127
+
128
+ Parameters
129
+ ----------
130
+
131
+ verbose : boolean, optional (False)
132
+ Passed to the doctest finder and runner to control verbosity.
133
+ """
134
+ self.verbose = verbose
135
+ # We can reuse the same finder for all instances
136
+ self.finder = DocTestFinder(verbose=verbose, recurse=False)
137
+
138
+ def __call__(self, func):
139
+ """Use as a decorator: doctest a function's docstring as a unittest.
140
+
141
+ This version runs normal doctests, but the idea is to make it later run
142
+ ipython syntax instead."""
143
+
144
+ # Capture the enclosing instance with a different name, so the new
145
+ # class below can see it without confusion regarding its own 'self'
146
+ # that will point to the test instance at runtime
147
+ d2u = self
148
+
149
+ # Rewrite the function's docstring to have python syntax
150
+ if func.__doc__ is not None:
151
+ func.__doc__ = ip2py(func.__doc__)
152
+
153
+ # Now, create a tester object that is a real unittest instance, so
154
+ # normal unittest machinery (or Nose, or Trial) can find it.
155
+ class Tester(unittest.TestCase):
156
+ def test(self):
157
+ # Make a new runner per function to be tested
158
+ runner = DocTestRunner(verbose=d2u.verbose)
159
+ for the_test in d2u.finder.find(func, func.__name__):
160
+ runner.run(the_test)
161
+ failed = count_failures(runner)
162
+ if failed:
163
+ # Since we only looked at a single function's docstring,
164
+ # failed should contain at most one item. More than that
165
+ # is a case we can't handle and should error out on
166
+ if len(failed) > 1:
167
+ err = "Invalid number of test results: %s" % failed
168
+ raise ValueError(err)
169
+ # Report a normal failure.
170
+ self.fail('failed doctests: %s' % str(failed[0]))
171
+
172
+ # Rename it so test reports have the original signature.
173
+ Tester.__name__ = func.__name__
174
+ return Tester
175
+
176
+
177
+ def ipdocstring(func):
178
+ """Change the function docstring via ip2py.
179
+ """
180
+ if func.__doc__ is not None:
181
+ func.__doc__ = ip2py(func.__doc__)
182
+ return func
183
+
184
+
185
+ # Make an instance of the classes for public use
186
+ ipdoctest = Doc2UnitTester()
187
+ ip2py = IPython2PythonConverter()
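Because `ipdoctest` is already an instance of `Doc2UnitTester`, it is applied without parentheses; a hedged sketch of the intended usage (the `doubles` function is hypothetical, and running it assumes an IPython shell can be instantiated for the prompt conversion):

    from IPython.testing.ipunittest import ipdoctest

    @ipdoctest
    def doubles():
        """Docstring written with IPython prompts; ip2py rewrites it to plain doctest form.

        In [1]: 2 * 21
        Out[1]: 42
        """

    # `doubles` is now a unittest.TestCase subclass named after the original
    # function, so standard unittest discovery will collect and run it.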
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/__init__.py ADDED
File without changes
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/dtexample.py ADDED
@@ -0,0 +1,167 @@
1
+ """Simple example using doctests.
2
+
3
+ This file just contains doctests both using plain python and IPython prompts.
4
+ All tests should be loaded by nose.
5
+ """
6
+
7
+ import os
8
+
9
+
10
+ def pyfunc():
11
+ """Some pure python tests...
12
+
13
+ >>> pyfunc()
14
+ 'pyfunc'
15
+
16
+ >>> import os
17
+
18
+ >>> 2+3
19
+ 5
20
+
21
+ >>> for i in range(3):
22
+ ... print(i, end=' ')
23
+ ... print(i+1, end=' ')
24
+ ...
25
+ 0 1 1 2 2 3
26
+ """
27
+ return 'pyfunc'
28
+
29
+ def ipfunc():
30
+ """Some ipython tests...
31
+
32
+ In [1]: import os
33
+
34
+ In [3]: 2+3
35
+ Out[3]: 5
36
+
37
+ In [26]: for i in range(3):
38
+ ....: print(i, end=' ')
39
+ ....: print(i+1, end=' ')
40
+ ....:
41
+ 0 1 1 2 2 3
42
+
43
+
44
+ It's OK to use '_' for the last result, but do NOT try to use IPython's
45
+ numbered history of _NN outputs, since those won't exist under the
46
+ doctest environment:
47
+
48
+ In [7]: 'hi'
49
+ Out[7]: 'hi'
50
+
51
+ In [8]: print(repr(_))
52
+ 'hi'
53
+
54
+ In [7]: 3+4
55
+ Out[7]: 7
56
+
57
+ In [8]: _+3
58
+ Out[8]: 10
59
+
60
+ In [9]: ipfunc()
61
+ Out[9]: 'ipfunc'
62
+ """
63
+ return "ipfunc"
64
+
65
+
66
+ def ipos():
67
+ """Examples that access the operating system work:
68
+
69
+ In [1]: !echo hello
70
+ hello
71
+
72
+ In [2]: !echo hello > /tmp/foo_iptest
73
+
74
+ In [3]: !cat /tmp/foo_iptest
75
+ hello
76
+
77
+ In [4]: rm -f /tmp/foo_iptest
78
+ """
79
+ pass
80
+
81
+
82
+ ipos.__skip_doctest__ = os.name == "nt"
83
+
84
+
85
+ def ranfunc():
86
+ """A function with some random output.
87
+
88
+ Normal examples are verified as usual:
89
+ >>> 1+3
90
+ 4
91
+
92
+ But if you put '# random' in the output, it is ignored:
93
+ >>> 1+3
94
+ junk goes here... # random
95
+
96
+ >>> 1+2
97
+ again, anything goes #random
98
+ if multiline, the random mark is only needed once.
99
+
100
+ >>> 1+2
101
+ You can also put the random marker at the end:
102
+ # random
103
+
104
+ >>> 1+2
105
+ # random
106
+ .. or at the beginning.
107
+
108
+ More correct input is properly verified:
109
+ >>> ranfunc()
110
+ 'ranfunc'
111
+ """
112
+ return 'ranfunc'
113
+
114
+
115
+ def random_all():
116
+ """A function where we ignore the output of ALL examples.
117
+
118
+ Examples:
119
+
120
+ # all-random
121
+
122
+ This mark tells the testing machinery that all subsequent examples should
123
+ be treated as random (ignoring their output). They are still executed,
124
+    so if they raise an error, it will be detected as such, but their
125
+ output is completely ignored.
126
+
127
+ >>> 1+3
128
+ junk goes here...
129
+
130
+ >>> 1+3
131
+ klasdfj;
132
+
133
+ >>> 1+2
134
+ again, anything goes
135
+ blah...
136
+ """
137
+ pass
138
+
139
+ def iprand():
140
+ """Some ipython tests with random output.
141
+
142
+ In [7]: 3+4
143
+ Out[7]: 7
144
+
145
+ In [8]: print('hello')
146
+ world # random
147
+
148
+ In [9]: iprand()
149
+ Out[9]: 'iprand'
150
+ """
151
+ return 'iprand'
152
+
153
+ def iprand_all():
154
+ """Some ipython tests with fully random output.
155
+
156
+ # all-random
157
+
158
+ In [7]: 1
159
+ Out[7]: 99
160
+
161
+ In [8]: print('hello')
162
+ world
163
+
164
+ In [9]: iprand_all()
165
+ Out[9]: 'junk'
166
+ """
167
+ return 'iprand_all'
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/ipdoctest.py ADDED
@@ -0,0 +1,299 @@
1
+ """Nose Plugin that supports IPython doctests.
2
+
3
+ Limitations:
4
+
5
+ - When generating examples for use as doctests, make sure that you have
6
+ pretty-printing OFF. This can be done either by setting the
7
+ ``PlainTextFormatter.pprint`` option in your configuration file to False, or
8
+ by interactively disabling it with %Pprint. This is required so that IPython
9
+ output matches that of normal Python, which is used by doctest for internal
10
+ execution.
11
+
12
+ - Do not rely on specific prompt numbers for results (such as using
13
+ '_34==True', for example). For IPython tests run via an external process the
14
+ prompt numbers may be different, and IPython tests run as normal python code
15
+ won't even have these special _NN variables set at all.
16
+ """
17
+
18
+ #-----------------------------------------------------------------------------
19
+ # Module imports
20
+
21
+ # From the standard library
22
+ import doctest
23
+ import logging
24
+ import re
25
+
26
+ from testpath import modified_env
27
+
28
+ #-----------------------------------------------------------------------------
29
+ # Module globals and other constants
30
+ #-----------------------------------------------------------------------------
31
+
32
+ log = logging.getLogger(__name__)
33
+
34
+
35
+ #-----------------------------------------------------------------------------
36
+ # Classes and functions
37
+ #-----------------------------------------------------------------------------
38
+
39
+
40
+ class DocTestFinder(doctest.DocTestFinder):
41
+ def _get_test(self, obj, name, module, globs, source_lines):
42
+ test = super()._get_test(obj, name, module, globs, source_lines)
43
+
44
+ if bool(getattr(obj, "__skip_doctest__", False)) and test is not None:
45
+ for example in test.examples:
46
+ example.options[doctest.SKIP] = True
47
+
48
+ return test
49
+
50
+
51
+ class IPDoctestOutputChecker(doctest.OutputChecker):
52
+ """Second-chance checker with support for random tests.
53
+
54
+ If the default comparison doesn't pass, this checker looks in the expected
55
+ output string for flags that tell us to ignore the output.
56
+ """
57
+
58
+ random_re = re.compile(r'#\s*random\s+')
59
+
60
+ def check_output(self, want, got, optionflags):
61
+ """Check output, accepting special markers embedded in the output.
62
+
63
+ If the output didn't pass the default validation but the special string
64
+ '#random' is included, we accept it."""
65
+
66
+ # Let the original tester verify first, in case people have valid tests
67
+ # that happen to have a comment saying '#random' embedded in.
68
+ ret = doctest.OutputChecker.check_output(self, want, got,
69
+ optionflags)
70
+ if not ret and self.random_re.search(want):
71
+ # print('RANDOM OK:',want, file=sys.stderr) # dbg
72
+ return True
73
+
74
+ return ret
75
+
76
+
77
+ # A simple subclassing of the original with a different class name, so we can
78
+ # distinguish and treat differently IPython examples from pure python ones.
79
+ class IPExample(doctest.Example): pass
80
+
81
+
82
+ class IPDocTestParser(doctest.DocTestParser):
83
+ """
84
+ A class used to parse strings containing doctest examples.
85
+
86
+ Note: This is a version modified to properly recognize IPython input and
87
+ convert any IPython examples into valid Python ones.
88
+ """
89
+ # This regular expression is used to find doctest examples in a
90
+ # string. It defines three groups: `source` is the source code
91
+ # (including leading indentation and prompts); `indent` is the
92
+ # indentation of the first (PS1) line of the source code; and
93
+ # `want` is the expected output (including leading indentation).
94
+
95
+ # Classic Python prompts or default IPython ones
96
+ _PS1_PY = r'>>>'
97
+ _PS2_PY = r'\.\.\.'
98
+
99
+ _PS1_IP = r'In\ \[\d+\]:'
100
+ _PS2_IP = r'\ \ \ \.\.\.+:'
101
+
102
+ _RE_TPL = r'''
103
+ # Source consists of a PS1 line followed by zero or more PS2 lines.
104
+ (?P<source>
105
+ (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
106
+ (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
107
+ \n? # a newline
108
+ # Want consists of any non-blank lines that do not start with PS1.
109
+ (?P<want> (?:(?![ ]*$) # Not a blank line
110
+ (?![ ]*%s) # Not a line starting with PS1
111
+ (?![ ]*%s) # Not a line starting with PS2
112
+ .*$\n? # But any other line
113
+ )*)
114
+ '''
115
+
116
+ _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
117
+ re.MULTILINE | re.VERBOSE)
118
+
119
+ _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
120
+ re.MULTILINE | re.VERBOSE)
121
+
122
+ # Mark a test as being fully random. In this case, we simply append the
123
+ # random marker ('#random') to each individual example's output. This way
124
+ # we don't need to modify any other code.
125
+ _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
126
+
127
+ def ip2py(self,source):
128
+ """Convert input IPython source into valid Python."""
129
+ block = _ip.input_transformer_manager.transform_cell(source)
130
+ if len(block.splitlines()) == 1:
131
+ return _ip.prefilter(block)
132
+ else:
133
+ return block
134
+
135
+ def parse(self, string, name='<string>'):
136
+ """
137
+ Divide the given string into examples and intervening text,
138
+ and return them as a list of alternating Examples and strings.
139
+ Line numbers for the Examples are 0-based. The optional
140
+ argument `name` is a name identifying this string, and is only
141
+ used for error messages.
142
+ """
143
+
144
+ # print('Parse string:\n',string) # dbg
145
+
146
+ string = string.expandtabs()
147
+ # If all lines begin with the same indentation, then strip it.
148
+ min_indent = self._min_indent(string)
149
+ if min_indent > 0:
150
+ string = '\n'.join([l[min_indent:] for l in string.split('\n')])
151
+
152
+ output = []
153
+ charno, lineno = 0, 0
154
+
155
+ # We make 'all random' tests by adding the '# random' mark to every
156
+ # block of output in the test.
157
+ if self._RANDOM_TEST.search(string):
158
+ random_marker = '\n# random'
159
+ else:
160
+ random_marker = ''
161
+
162
+ # Whether to convert the input from ipython to python syntax
163
+ ip2py = False
164
+ # Find all doctest examples in the string. First, try them as Python
165
+ # examples, then as IPython ones
166
+ terms = list(self._EXAMPLE_RE_PY.finditer(string))
167
+ if terms:
168
+ # Normal Python example
169
+ Example = doctest.Example
170
+ else:
171
+ # It's an ipython example.
172
+ terms = list(self._EXAMPLE_RE_IP.finditer(string))
173
+ Example = IPExample
174
+ ip2py = True
175
+
176
+ for m in terms:
177
+ # Add the pre-example text to `output`.
178
+ output.append(string[charno:m.start()])
179
+ # Update lineno (lines before this example)
180
+ lineno += string.count('\n', charno, m.start())
181
+ # Extract info from the regexp match.
182
+ (source, options, want, exc_msg) = \
183
+ self._parse_example(m, name, lineno,ip2py)
184
+
185
+ # Append the random-output marker (it defaults to empty in most
186
+ # cases, it's only non-empty for 'all-random' tests):
187
+ want += random_marker
188
+
189
+ # Create an Example, and add it to the list.
190
+ if not self._IS_BLANK_OR_COMMENT(source):
191
+ output.append(Example(source, want, exc_msg,
192
+ lineno=lineno,
193
+ indent=min_indent+len(m.group('indent')),
194
+ options=options))
195
+ # Update lineno (lines inside this example)
196
+ lineno += string.count('\n', m.start(), m.end())
197
+ # Update charno.
198
+ charno = m.end()
199
+ # Add any remaining post-example text to `output`.
200
+ output.append(string[charno:])
201
+ return output
202
+
203
+ def _parse_example(self, m, name, lineno,ip2py=False):
204
+ """
205
+ Given a regular expression match from `_EXAMPLE_RE` (`m`),
206
+ return a pair `(source, want)`, where `source` is the matched
207
+ example's source code (with prompts and indentation stripped);
208
+ and `want` is the example's expected output (with indentation
209
+ stripped).
210
+
211
+ `name` is the string's name, and `lineno` is the line number
212
+ where the example starts; both are used for error messages.
213
+
214
+ Optional:
215
+ `ip2py`: if true, filter the input via IPython to convert the syntax
216
+ into valid python.
217
+ """
218
+
219
+ # Get the example's indentation level.
220
+ indent = len(m.group('indent'))
221
+
222
+ # Divide source into lines; check that they're properly
223
+ # indented; and then strip their indentation & prompts.
224
+ source_lines = m.group('source').split('\n')
225
+
226
+ # We're using variable-length input prompts
227
+ ps1 = m.group('ps1')
228
+ ps2 = m.group('ps2')
229
+ ps1_len = len(ps1)
230
+
231
+ self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
232
+ if ps2:
233
+ self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
234
+
235
+ source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
236
+
237
+ if ip2py:
238
+ # Convert source input from IPython into valid Python syntax
239
+ source = self.ip2py(source)
240
+
241
+ # Divide want into lines; check that it's properly indented; and
242
+ # then strip the indentation. Spaces before the last newline should
243
+ # be preserved, so plain rstrip() isn't good enough.
244
+ want = m.group('want')
245
+ want_lines = want.split('\n')
246
+ if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
247
+ del want_lines[-1] # forget final newline & spaces after it
248
+ self._check_prefix(want_lines, ' '*indent, name,
249
+ lineno + len(source_lines))
250
+
251
+ # Remove ipython output prompt that might be present in the first line
252
+ want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
253
+
254
+ want = '\n'.join([wl[indent:] for wl in want_lines])
255
+
256
+ # If `want` contains a traceback message, then extract it.
257
+ m = self._EXCEPTION_RE.match(want)
258
+ if m:
259
+ exc_msg = m.group('msg')
260
+ else:
261
+ exc_msg = None
262
+
263
+ # Extract options from the source.
264
+ options = self._find_options(source, name, lineno)
265
+
266
+ return source, options, want, exc_msg
267
+
268
+ def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
269
+ """
270
+ Given the lines of a source string (including prompts and
271
+ leading indentation), check to make sure that every prompt is
272
+ followed by a space character. If any line is not followed by
273
+ a space character, then raise ValueError.
274
+
275
+ Note: IPython-modified version which takes the input prompt length as a
276
+ parameter, so that prompts of variable length can be dealt with.
277
+ """
278
+ space_idx = indent+ps1_len
279
+ min_len = space_idx+1
280
+ for i, line in enumerate(lines):
281
+ if len(line) >= min_len and line[space_idx] != ' ':
282
+ raise ValueError('line %r of the docstring for %s '
283
+ 'lacks blank after %s: %r' %
284
+ (lineno+i+1, name,
285
+ line[indent:space_idx], line))
286
+
287
+
288
+ SKIP = doctest.register_optionflag('SKIP')
289
+
290
+
291
+ class IPDocTestRunner(doctest.DocTestRunner):
292
+ """Test runner that synchronizes the IPython namespace with test globals.
293
+ """
294
+
295
+ def run(self, test, compileflags=None, out=None, clear_globs=True):
296
+ # Override terminal size to standardise traceback format
297
+ with modified_env({'COLUMNS': '80', 'LINES': '24'}):
298
+ return super(IPDocTestRunner,self).run(test,
299
+ compileflags,out,clear_globs)
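The '# random' escape hatch implemented by `IPDoctestOutputChecker` can be exercised directly; a minimal sketch, assuming the module's optional `testpath` dependency is importable:

    from IPython.testing.plugin.ipdoctest import IPDoctestOutputChecker

    checker = IPDoctestOutputChecker()
    # a mismatch is forgiven once the expected output carries the '# random' marker
    assert checker.check_output("anything goes # random\n", "42\n", optionflags=0)
    # without the marker, the usual doctest comparison applies
    assert not checker.check_output("3\n", "42\n", optionflags=0)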
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/pytest_ipdoctest.py ADDED
@@ -0,0 +1,880 @@
1
+ # Based on Pytest doctest.py
2
+ # Original license:
3
+ # The MIT License (MIT)
4
+ #
5
+ # Copyright (c) 2004-2021 Holger Krekel and others
6
+ """Discover and run ipdoctests in modules and test files."""
7
+
8
+ import bdb
9
+ import builtins
10
+ import inspect
11
+ import os
12
+ import platform
13
+ import sys
14
+ import traceback
15
+ import types
16
+ import warnings
17
+ from contextlib import contextmanager
18
+ from pathlib import Path
19
+ from typing import (
20
+ TYPE_CHECKING,
21
+ Any,
22
+ Callable,
23
+ Dict,
24
+ Generator,
25
+ Iterable,
26
+ List,
27
+ Optional,
28
+ Pattern,
29
+ Sequence,
30
+ Tuple,
31
+ Type,
32
+ Union,
33
+ )
34
+
35
+ import pytest
36
+ from _pytest import outcomes
37
+ from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr
38
+ from _pytest._io import TerminalWriter
39
+ from _pytest.compat import safe_getattr
40
+ from _pytest.config import Config
41
+ from _pytest.config.argparsing import Parser
42
+
43
+ try:
44
+ from _pytest.fixtures import TopRequest as FixtureRequest
45
+ except ImportError:
46
+ from _pytest.fixtures import FixtureRequest
47
+ from _pytest.nodes import Collector
48
+ from _pytest.outcomes import OutcomeException
49
+ from _pytest.pathlib import fnmatch_ex, import_path
50
+ from _pytest.python_api import approx
51
+ from _pytest.warning_types import PytestWarning
52
+
53
+ if TYPE_CHECKING:
54
+ import doctest
55
+
56
+ from .ipdoctest import IPDoctestOutputChecker
57
+
58
+ DOCTEST_REPORT_CHOICE_NONE = "none"
59
+ DOCTEST_REPORT_CHOICE_CDIFF = "cdiff"
60
+ DOCTEST_REPORT_CHOICE_NDIFF = "ndiff"
61
+ DOCTEST_REPORT_CHOICE_UDIFF = "udiff"
62
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure"
63
+
64
+ DOCTEST_REPORT_CHOICES = (
65
+ DOCTEST_REPORT_CHOICE_NONE,
66
+ DOCTEST_REPORT_CHOICE_CDIFF,
67
+ DOCTEST_REPORT_CHOICE_NDIFF,
68
+ DOCTEST_REPORT_CHOICE_UDIFF,
69
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
70
+ )
71
+
72
+ # Lazy definition of runner class
73
+ RUNNER_CLASS = None
74
+ # Lazy definition of output checker class
75
+ CHECKER_CLASS: Optional[Type["IPDoctestOutputChecker"]] = None
76
+
77
+ pytest_version = tuple([int(part) for part in pytest.__version__.split(".")])
78
+
79
+
80
+ def pytest_addoption(parser: Parser) -> None:
81
+ parser.addini(
82
+ "ipdoctest_optionflags",
83
+ "option flags for ipdoctests",
84
+ type="args",
85
+ default=["ELLIPSIS"],
86
+ )
87
+ parser.addini(
88
+ "ipdoctest_encoding", "encoding used for ipdoctest files", default="utf-8"
89
+ )
90
+ group = parser.getgroup("collect")
91
+ group.addoption(
92
+ "--ipdoctest-modules",
93
+ action="store_true",
94
+ default=False,
95
+ help="run ipdoctests in all .py modules",
96
+ dest="ipdoctestmodules",
97
+ )
98
+ group.addoption(
99
+ "--ipdoctest-report",
100
+ type=str.lower,
101
+ default="udiff",
102
+ help="choose another output format for diffs on ipdoctest failure",
103
+ choices=DOCTEST_REPORT_CHOICES,
104
+ dest="ipdoctestreport",
105
+ )
106
+ group.addoption(
107
+ "--ipdoctest-glob",
108
+ action="append",
109
+ default=[],
110
+ metavar="pat",
111
+ help="ipdoctests file matching pattern, default: test*.txt",
112
+ dest="ipdoctestglob",
113
+ )
114
+ group.addoption(
115
+ "--ipdoctest-ignore-import-errors",
116
+ action="store_true",
117
+ default=False,
118
+ help="ignore ipdoctest ImportErrors",
119
+ dest="ipdoctest_ignore_import_errors",
120
+ )
121
+ group.addoption(
122
+ "--ipdoctest-continue-on-failure",
123
+ action="store_true",
124
+ default=False,
125
+ help="for a given ipdoctest, continue to run after the first failure",
126
+ dest="ipdoctest_continue_on_failure",
127
+ )
128
+
129
+
130
+ def pytest_unconfigure() -> None:
131
+ global RUNNER_CLASS
132
+
133
+ RUNNER_CLASS = None
134
+
135
+
136
+ def pytest_collect_file(
137
+ file_path: Path,
138
+ parent: Collector,
139
+ ) -> Optional[Union["IPDoctestModule", "IPDoctestTextfile"]]:
140
+ config = parent.config
141
+ if file_path.suffix == ".py":
142
+ if config.option.ipdoctestmodules and not any(
143
+ (_is_setup_py(file_path), _is_main_py(file_path))
144
+ ):
145
+ mod: IPDoctestModule = IPDoctestModule.from_parent(parent, path=file_path)
146
+ return mod
147
+ elif _is_ipdoctest(config, file_path, parent):
148
+ txt: IPDoctestTextfile = IPDoctestTextfile.from_parent(parent, path=file_path)
149
+ return txt
150
+ return None
151
+
152
+
153
+ if pytest_version[0] < 7:
154
+ _collect_file = pytest_collect_file
155
+
156
+ def pytest_collect_file(
157
+ path,
158
+ parent: Collector,
159
+ ) -> Optional[Union["IPDoctestModule", "IPDoctestTextfile"]]:
160
+ return _collect_file(Path(path), parent)
161
+
162
+ _import_path = import_path
163
+
164
+ def import_path(path, root):
165
+ import py.path
166
+
167
+ return _import_path(py.path.local(path))
168
+
169
+
170
+ def _is_setup_py(path: Path) -> bool:
171
+ if path.name != "setup.py":
172
+ return False
173
+ contents = path.read_bytes()
174
+ return b"setuptools" in contents or b"distutils" in contents
175
+
176
+
177
+ def _is_ipdoctest(config: Config, path: Path, parent: Collector) -> bool:
178
+ if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path):
179
+ return True
180
+ globs = config.getoption("ipdoctestglob") or ["test*.txt"]
181
+ return any(fnmatch_ex(glob, path) for glob in globs)
182
+
183
+
184
+ def _is_main_py(path: Path) -> bool:
185
+ return path.name == "__main__.py"
186
+
187
+
188
+ class ReprFailDoctest(TerminalRepr):
189
+ def __init__(
190
+ self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]]
191
+ ) -> None:
192
+ self.reprlocation_lines = reprlocation_lines
193
+
194
+ def toterminal(self, tw: TerminalWriter) -> None:
195
+ for reprlocation, lines in self.reprlocation_lines:
196
+ for line in lines:
197
+ tw.line(line)
198
+ reprlocation.toterminal(tw)
199
+
200
+
201
+ class MultipleDoctestFailures(Exception):
202
+ def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None:
203
+ super().__init__()
204
+ self.failures = failures
205
+
206
+
207
+ def _init_runner_class() -> Type["IPDocTestRunner"]:
208
+ import doctest
209
+ from .ipdoctest import IPDocTestRunner
210
+
211
+ class PytestDoctestRunner(IPDocTestRunner):
212
+ """Runner to collect failures.
213
+
214
+ Note that the out variable in this case is a list instead of a
215
+ stdout-like object.
216
+ """
217
+
218
+ def __init__(
219
+ self,
220
+ checker: Optional["IPDoctestOutputChecker"] = None,
221
+ verbose: Optional[bool] = None,
222
+ optionflags: int = 0,
223
+ continue_on_failure: bool = True,
224
+ ) -> None:
225
+ super().__init__(checker=checker, verbose=verbose, optionflags=optionflags)
226
+ self.continue_on_failure = continue_on_failure
227
+
228
+ def report_failure(
229
+ self,
230
+ out,
231
+ test: "doctest.DocTest",
232
+ example: "doctest.Example",
233
+ got: str,
234
+ ) -> None:
235
+ failure = doctest.DocTestFailure(test, example, got)
236
+ if self.continue_on_failure:
237
+ out.append(failure)
238
+ else:
239
+ raise failure
240
+
241
+ def report_unexpected_exception(
242
+ self,
243
+ out,
244
+ test: "doctest.DocTest",
245
+ example: "doctest.Example",
246
+ exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType],
247
+ ) -> None:
248
+ if isinstance(exc_info[1], OutcomeException):
249
+ raise exc_info[1]
250
+ if isinstance(exc_info[1], bdb.BdbQuit):
251
+ outcomes.exit("Quitting debugger")
252
+ failure = doctest.UnexpectedException(test, example, exc_info)
253
+ if self.continue_on_failure:
254
+ out.append(failure)
255
+ else:
256
+ raise failure
257
+
258
+ return PytestDoctestRunner
259
+
260
+
261
+ def _get_runner(
262
+ checker: Optional["IPDoctestOutputChecker"] = None,
263
+ verbose: Optional[bool] = None,
264
+ optionflags: int = 0,
265
+ continue_on_failure: bool = True,
266
+ ) -> "IPDocTestRunner":
267
+ # We need this in order to do a lazy import on doctest
268
+ global RUNNER_CLASS
269
+ if RUNNER_CLASS is None:
270
+ RUNNER_CLASS = _init_runner_class()
271
+ # Type ignored because the continue_on_failure argument is only defined on
272
+ # PytestDoctestRunner, which is lazily defined so can't be used as a type.
273
+ return RUNNER_CLASS( # type: ignore
274
+ checker=checker,
275
+ verbose=verbose,
276
+ optionflags=optionflags,
277
+ continue_on_failure=continue_on_failure,
278
+ )
279
+
280
+
281
+ class IPDoctestItem(pytest.Item):
282
+ _user_ns_orig: Dict[str, Any]
283
+
284
+ def __init__(
285
+ self,
286
+ name: str,
287
+ parent: "Union[IPDoctestTextfile, IPDoctestModule]",
288
+ runner: Optional["IPDocTestRunner"] = None,
289
+ dtest: Optional["doctest.DocTest"] = None,
290
+ ) -> None:
291
+ super().__init__(name, parent)
292
+ self.runner = runner
293
+ self.dtest = dtest
294
+ self.obj = None
295
+ self.fixture_request: Optional[FixtureRequest] = None
296
+ self._user_ns_orig = {}
297
+
298
+ @classmethod
299
+ def from_parent( # type: ignore
300
+ cls,
301
+ parent: "Union[IPDoctestTextfile, IPDoctestModule]",
302
+ *,
303
+ name: str,
304
+ runner: "IPDocTestRunner",
305
+ dtest: "doctest.DocTest",
306
+ ):
307
+ # incompatible signature due to imposed limits on subclass
308
+ """The public named constructor."""
309
+ return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest)
310
+
311
+ def setup(self) -> None:
312
+ if self.dtest is not None:
313
+ self.fixture_request = _setup_fixtures(self)
314
+ globs = dict(getfixture=self.fixture_request.getfixturevalue)
315
+ for name, value in self.fixture_request.getfixturevalue(
316
+ "ipdoctest_namespace"
317
+ ).items():
318
+ globs[name] = value
319
+ self.dtest.globs.update(globs)
320
+
321
+ from .ipdoctest import IPExample
322
+
323
+ if isinstance(self.dtest.examples[0], IPExample):
324
+ # for IPython examples *only*, we swap the globals with the ipython
325
+ # namespace, after updating it with the globals (which doctest
326
+ # fills with the necessary info from the module being tested).
327
+ self._user_ns_orig = {}
328
+ self._user_ns_orig.update(_ip.user_ns)
329
+ _ip.user_ns.update(self.dtest.globs)
330
+ # We must remove the _ key in the namespace, so that Python's
331
+ # doctest code sets it naturally
332
+ _ip.user_ns.pop("_", None)
333
+ _ip.user_ns["__builtins__"] = builtins
334
+ self.dtest.globs = _ip.user_ns
335
+
336
+ def teardown(self) -> None:
337
+ from .ipdoctest import IPExample
338
+
339
+ # Undo the test.globs reassignment we made
340
+ if isinstance(self.dtest.examples[0], IPExample):
341
+ self.dtest.globs = {}
342
+ _ip.user_ns.clear()
343
+ _ip.user_ns.update(self._user_ns_orig)
344
+ del self._user_ns_orig
345
+
346
+ self.dtest.globs.clear()
347
+
348
+ def runtest(self) -> None:
349
+ assert self.dtest is not None
350
+ assert self.runner is not None
351
+ _check_all_skipped(self.dtest)
352
+ self._disable_output_capturing_for_darwin()
353
+ failures: List[doctest.DocTestFailure] = []
354
+
355
+ # exec(compile(..., "single", ...), ...) puts result in builtins._
356
+ had_underscore_value = hasattr(builtins, "_")
357
+ underscore_original_value = getattr(builtins, "_", None)
358
+
359
+ # Save our current directory and switch out to the one where the
360
+ # test was originally created, in case another doctest did a
361
+ # directory change. We'll restore this in the finally clause.
362
+ curdir = os.getcwd()
363
+ os.chdir(self.fspath.dirname)
364
+ try:
365
+ # Type ignored because we change the type of `out` from what
366
+ # ipdoctest expects.
367
+ self.runner.run(self.dtest, out=failures, clear_globs=False) # type: ignore[arg-type]
368
+ finally:
369
+ os.chdir(curdir)
370
+ if had_underscore_value:
371
+ setattr(builtins, "_", underscore_original_value)
372
+ elif hasattr(builtins, "_"):
373
+ delattr(builtins, "_")
374
+
375
+ if failures:
376
+ raise MultipleDoctestFailures(failures)
377
+
378
+ def _disable_output_capturing_for_darwin(self) -> None:
379
+ """Disable output capturing. Otherwise, stdout is lost to ipdoctest (pytest#985)."""
380
+ if platform.system() != "Darwin":
381
+ return
382
+ capman = self.config.pluginmanager.getplugin("capturemanager")
383
+ if capman:
384
+ capman.suspend_global_capture(in_=True)
385
+ out, err = capman.read_global_capture()
386
+ sys.stdout.write(out)
387
+ sys.stderr.write(err)
388
+
389
+ # TODO: Type ignored -- breaks Liskov Substitution.
390
+ def repr_failure( # type: ignore[override]
391
+ self,
392
+ excinfo: ExceptionInfo[BaseException],
393
+ ) -> Union[str, TerminalRepr]:
394
+ import doctest
395
+
396
+ failures: Optional[
397
+ Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]]
398
+ ] = None
399
+ if isinstance(
400
+ excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException)
401
+ ):
402
+ failures = [excinfo.value]
403
+ elif isinstance(excinfo.value, MultipleDoctestFailures):
404
+ failures = excinfo.value.failures
405
+
406
+ if failures is None:
407
+ return super().repr_failure(excinfo)
408
+
409
+ reprlocation_lines = []
410
+ for failure in failures:
411
+ example = failure.example
412
+ test = failure.test
413
+ filename = test.filename
414
+ if test.lineno is None:
415
+ lineno = None
416
+ else:
417
+ lineno = test.lineno + example.lineno + 1
418
+ message = type(failure).__name__
419
+ # TODO: ReprFileLocation doesn't expect a None lineno.
420
+ reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type]
421
+ checker = _get_checker()
422
+ report_choice = _get_report_choice(self.config.getoption("ipdoctestreport"))
423
+ if lineno is not None:
424
+ assert failure.test.docstring is not None
425
+ lines = failure.test.docstring.splitlines(False)
426
+ # add line numbers to the left of the error message
427
+ assert test.lineno is not None
428
+ lines = [
429
+ "%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines)
430
+ ]
431
+ # trim docstring error lines to 10
432
+ lines = lines[max(example.lineno - 9, 0) : example.lineno + 1]
433
+ else:
434
+ lines = [
435
+ "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example"
436
+ ]
437
+ indent = ">>>"
438
+ for line in example.source.splitlines():
439
+ lines.append(f"??? {indent} {line}")
440
+ indent = "..."
441
+ if isinstance(failure, doctest.DocTestFailure):
442
+ lines += checker.output_difference(
443
+ example, failure.got, report_choice
444
+ ).split("\n")
445
+ else:
446
+ inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info)
447
+ lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)]
448
+ lines += [
449
+ x.strip("\n") for x in traceback.format_exception(*failure.exc_info)
450
+ ]
451
+ reprlocation_lines.append((reprlocation, lines))
452
+ return ReprFailDoctest(reprlocation_lines)
453
+
454
+ def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]:
455
+ assert self.dtest is not None
456
+ return self.path, self.dtest.lineno, "[ipdoctest] %s" % self.name
457
+
458
+ if pytest_version[0] < 7:
459
+
460
+ @property
461
+ def path(self) -> Path:
462
+ return Path(self.fspath)
463
+
464
+
465
+ def _get_flag_lookup() -> Dict[str, int]:
466
+ import doctest
467
+
468
+ return dict(
469
+ DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
470
+ DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
471
+ NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
472
+ ELLIPSIS=doctest.ELLIPSIS,
473
+ IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
474
+ COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
475
+ ALLOW_UNICODE=_get_allow_unicode_flag(),
476
+ ALLOW_BYTES=_get_allow_bytes_flag(),
477
+ NUMBER=_get_number_flag(),
478
+ )
479
+
480
+
481
+ def get_optionflags(parent):
482
+ optionflags_str = parent.config.getini("ipdoctest_optionflags")
483
+ flag_lookup_table = _get_flag_lookup()
484
+ flag_acc = 0
485
+ for flag in optionflags_str:
486
+ flag_acc |= flag_lookup_table[flag]
487
+ return flag_acc
488
+
489
+
490
+ def _get_continue_on_failure(config):
491
+ continue_on_failure = config.getvalue("ipdoctest_continue_on_failure")
492
+ if continue_on_failure:
493
+ # We need to turn off this if we use pdb since we should stop at
494
+ # the first failure.
495
+ if config.getvalue("usepdb"):
496
+ continue_on_failure = False
497
+ return continue_on_failure
498
+
499
+
500
+ class IPDoctestTextfile(pytest.Module):
501
+ obj = None
502
+
503
+ def collect(self) -> Iterable[IPDoctestItem]:
504
+ import doctest
505
+ from .ipdoctest import IPDocTestParser
506
+
507
+ # Inspired by doctest.testfile; ideally we would use it directly,
508
+ # but it doesn't support passing a custom checker.
509
+ encoding = self.config.getini("ipdoctest_encoding")
510
+ text = self.path.read_text(encoding)
511
+ filename = str(self.path)
512
+ name = self.path.name
513
+ globs = {"__name__": "__main__"}
514
+
515
+ optionflags = get_optionflags(self)
516
+
517
+ runner = _get_runner(
518
+ verbose=False,
519
+ optionflags=optionflags,
520
+ checker=_get_checker(),
521
+ continue_on_failure=_get_continue_on_failure(self.config),
522
+ )
523
+
524
+ parser = IPDocTestParser()
525
+ test = parser.get_doctest(text, globs, name, filename, 0)
526
+ if test.examples:
527
+ yield IPDoctestItem.from_parent(
528
+ self, name=test.name, runner=runner, dtest=test
529
+ )
530
+
531
+ if pytest_version[0] < 7:
532
+
533
+ @property
534
+ def path(self) -> Path:
535
+ return Path(self.fspath)
536
+
537
+ @classmethod
538
+ def from_parent(
539
+ cls,
540
+ parent,
541
+ *,
542
+ fspath=None,
543
+ path: Optional[Path] = None,
544
+ **kw,
545
+ ):
546
+ if path is not None:
547
+ import py.path
548
+
549
+ fspath = py.path.local(path)
550
+ return super().from_parent(parent=parent, fspath=fspath, **kw)
551
+
552
+
553
+ def _check_all_skipped(test: "doctest.DocTest") -> None:
554
+ """Raise pytest.skip() if all examples in the given DocTest have the SKIP
555
+ option set."""
556
+ import doctest
557
+
558
+ all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
559
+ if all_skipped:
560
+ pytest.skip("all docstests skipped by +SKIP option")
561
+
562
+
563
+ def _is_mocked(obj: object) -> bool:
564
+ """Return if an object is possibly a mock object by checking the
565
+ existence of a highly improbable attribute."""
566
+ return (
567
+ safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None)
568
+ is not None
569
+ )
570
+
571
+
572
+ @contextmanager
573
+ def _patch_unwrap_mock_aware() -> Generator[None, None, None]:
574
+ """Context manager which replaces ``inspect.unwrap`` with a version
575
+ that's aware of mock objects and doesn't recurse into them."""
576
+ real_unwrap = inspect.unwrap
577
+
578
+ def _mock_aware_unwrap(
579
+ func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None
580
+ ) -> Any:
581
+ try:
582
+ if stop is None or stop is _is_mocked:
583
+ return real_unwrap(func, stop=_is_mocked)
584
+ _stop = stop
585
+ return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(obj))
586
+ except Exception as e:
587
+ warnings.warn(
588
+ "Got %r when unwrapping %r. This is usually caused "
589
+ "by a violation of Python's object protocol; see e.g. "
590
+ "https://github.com/pytest-dev/pytest/issues/5080" % (e, func),
591
+ PytestWarning,
592
+ )
593
+ raise
594
+
595
+ inspect.unwrap = _mock_aware_unwrap
596
+ try:
597
+ yield
598
+ finally:
599
+ inspect.unwrap = real_unwrap
600
+
601
+
602
+ class IPDoctestModule(pytest.Module):
603
+ def collect(self) -> Iterable[IPDoctestItem]:
604
+ import doctest
605
+ from .ipdoctest import DocTestFinder, IPDocTestParser
606
+
607
+ class MockAwareDocTestFinder(DocTestFinder):
608
+ """A hackish ipdoctest finder that overrides stdlib internals to fix a stdlib bug.
609
+
610
+ https://github.com/pytest-dev/pytest/issues/3456
611
+ https://bugs.python.org/issue25532
612
+ """
613
+
614
+ def _find_lineno(self, obj, source_lines):
615
+ """Doctest code does not take into account `@property`, this
616
+ is a hackish way to fix it. https://bugs.python.org/issue17446
617
+
618
+ Wrapped Doctests will need to be unwrapped so the correct
619
+ line number is returned. This will be reported upstream. #8796
620
+ """
621
+ if isinstance(obj, property):
622
+ obj = getattr(obj, "fget", obj)
623
+
624
+ if hasattr(obj, "__wrapped__"):
625
+ # Get the main obj in case of it being wrapped
626
+ obj = inspect.unwrap(obj)
627
+
628
+ # Type ignored because this is a private function.
629
+ return super()._find_lineno( # type:ignore[misc]
630
+ obj,
631
+ source_lines,
632
+ )
633
+
634
+ def _find(
635
+ self, tests, obj, name, module, source_lines, globs, seen
636
+ ) -> None:
637
+ if _is_mocked(obj):
638
+ return
639
+ with _patch_unwrap_mock_aware():
640
+ # Type ignored because this is a private function.
641
+ super()._find( # type:ignore[misc]
642
+ tests, obj, name, module, source_lines, globs, seen
643
+ )
644
+
645
+ if self.path.name == "conftest.py":
646
+ if pytest_version[0] < 7:
647
+ module = self.config.pluginmanager._importconftest(
648
+ self.path,
649
+ self.config.getoption("importmode"),
650
+ )
651
+ else:
652
+ kwargs = {"rootpath": self.config.rootpath}
653
+ if pytest_version >= (8, 1):
654
+ kwargs["consider_namespace_packages"] = False
655
+ module = self.config.pluginmanager._importconftest(
656
+ self.path,
657
+ self.config.getoption("importmode"),
658
+ **kwargs,
659
+ )
660
+ else:
661
+ try:
662
+ kwargs = {"root": self.config.rootpath}
663
+ if pytest_version >= (8, 1):
664
+ kwargs["consider_namespace_packages"] = False
665
+ module = import_path(self.path, **kwargs)
666
+ except ImportError:
667
+ if self.config.getvalue("ipdoctest_ignore_import_errors"):
668
+ pytest.skip("unable to import module %r" % self.path)
669
+ else:
670
+ raise
671
+ # Uses internal doctest module parsing mechanism.
672
+ finder = MockAwareDocTestFinder(parser=IPDocTestParser())
673
+ optionflags = get_optionflags(self)
674
+ runner = _get_runner(
675
+ verbose=False,
676
+ optionflags=optionflags,
677
+ checker=_get_checker(),
678
+ continue_on_failure=_get_continue_on_failure(self.config),
679
+ )
680
+
681
+ for test in finder.find(module, module.__name__):
682
+ if test.examples: # skip empty ipdoctests
683
+ yield IPDoctestItem.from_parent(
684
+ self, name=test.name, runner=runner, dtest=test
685
+ )
686
+
687
+ if pytest_version[0] < 7:
688
+
689
+ @property
690
+ def path(self) -> Path:
691
+ return Path(self.fspath)
692
+
693
+ @classmethod
694
+ def from_parent(
695
+ cls,
696
+ parent,
697
+ *,
698
+ fspath=None,
699
+ path: Optional[Path] = None,
700
+ **kw,
701
+ ):
702
+ if path is not None:
703
+ import py.path
704
+
705
+ fspath = py.path.local(path)
706
+ return super().from_parent(parent=parent, fspath=fspath, **kw)
707
+
708
+
709
+ def _setup_fixtures(doctest_item: IPDoctestItem) -> FixtureRequest:
710
+ """Used by IPDoctestTextfile and IPDoctestItem to setup fixture information."""
711
+
712
+ def func() -> None:
713
+ pass
714
+
715
+ doctest_item.funcargs = {} # type: ignore[attr-defined]
716
+ fm = doctest_item.session._fixturemanager
717
+ kwargs = {"node": doctest_item, "func": func, "cls": None}
718
+ if pytest_version <= (8, 0):
719
+ kwargs["funcargs"] = False
720
+ doctest_item._fixtureinfo = fm.getfixtureinfo( # type: ignore[attr-defined]
721
+ **kwargs
722
+ )
723
+ fixture_request = FixtureRequest(doctest_item, _ispytest=True)
724
+ if pytest_version <= (8, 0):
725
+ fixture_request._fillfixtures()
726
+ return fixture_request
727
+
728
+
729
+ def _init_checker_class() -> Type["IPDoctestOutputChecker"]:
730
+ import doctest
731
+ import re
732
+ from .ipdoctest import IPDoctestOutputChecker
733
+
734
+ class LiteralsOutputChecker(IPDoctestOutputChecker):
735
+ # Based on doctest_nose_plugin.py from the nltk project
736
+ # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
737
+ # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
738
+
739
+ _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
740
+ _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
741
+ _number_re = re.compile(
742
+ r"""
743
+ (?P<number>
744
+ (?P<mantissa>
745
+ (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
746
+ |
747
+ (?P<integer2> [+-]?\d+)\.
748
+ )
749
+ (?:
750
+ [Ee]
751
+ (?P<exponent1> [+-]?\d+)
752
+ )?
753
+ |
754
+ (?P<integer3> [+-]?\d+)
755
+ (?:
756
+ [Ee]
757
+ (?P<exponent2> [+-]?\d+)
758
+ )
759
+ )
760
+ """,
761
+ re.VERBOSE,
762
+ )
763
+
764
+ def check_output(self, want: str, got: str, optionflags: int) -> bool:
765
+ if super().check_output(want, got, optionflags):
766
+ return True
767
+
768
+ allow_unicode = optionflags & _get_allow_unicode_flag()
769
+ allow_bytes = optionflags & _get_allow_bytes_flag()
770
+ allow_number = optionflags & _get_number_flag()
771
+
772
+ if not allow_unicode and not allow_bytes and not allow_number:
773
+ return False
774
+
775
+ def remove_prefixes(regex: Pattern[str], txt: str) -> str:
776
+ return re.sub(regex, r"\1\2", txt)
777
+
778
+ if allow_unicode:
779
+ want = remove_prefixes(self._unicode_literal_re, want)
780
+ got = remove_prefixes(self._unicode_literal_re, got)
781
+
782
+ if allow_bytes:
783
+ want = remove_prefixes(self._bytes_literal_re, want)
784
+ got = remove_prefixes(self._bytes_literal_re, got)
785
+
786
+ if allow_number:
787
+ got = self._remove_unwanted_precision(want, got)
788
+
789
+ return super().check_output(want, got, optionflags)
790
+
791
+ def _remove_unwanted_precision(self, want: str, got: str) -> str:
792
+ wants = list(self._number_re.finditer(want))
793
+ gots = list(self._number_re.finditer(got))
794
+ if len(wants) != len(gots):
795
+ return got
796
+ offset = 0
797
+ for w, g in zip(wants, gots):
798
+ fraction: Optional[str] = w.group("fraction")
799
+ exponent: Optional[str] = w.group("exponent1")
800
+ if exponent is None:
801
+ exponent = w.group("exponent2")
802
+ precision = 0 if fraction is None else len(fraction)
803
+ if exponent is not None:
804
+ precision -= int(exponent)
805
+ if float(w.group()) == approx(float(g.group()), abs=10**-precision):
806
+ # They're close enough. Replace the text we actually
807
+ # got with the text we want, so that it will match when we
808
+ # check the string literally.
809
+ got = (
810
+ got[: g.start() + offset] + w.group() + got[g.end() + offset :]
811
+ )
812
+ offset += w.end() - w.start() - (g.end() - g.start())
813
+ return got
814
+
815
+ return LiteralsOutputChecker
816
+
817
+
818
+ def _get_checker() -> "IPDoctestOutputChecker":
819
+ """Return a IPDoctestOutputChecker subclass that supports some
820
+ additional options:
821
+
822
+ * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''
823
+ prefixes (respectively) in string literals. Useful when the same
824
+ ipdoctest should run in Python 2 and Python 3.
825
+
826
+ * NUMBER to ignore floating-point differences smaller than the
827
+ precision of the literal number in the ipdoctest.
828
+
829
+ An inner class is used to avoid importing "ipdoctest" at the module
830
+ level.
831
+ """
832
+ global CHECKER_CLASS
833
+ if CHECKER_CLASS is None:
834
+ CHECKER_CLASS = _init_checker_class()
835
+ return CHECKER_CLASS()
836
+
837
+
838
+ def _get_allow_unicode_flag() -> int:
839
+ """Register and return the ALLOW_UNICODE flag."""
840
+ import doctest
841
+
842
+ return doctest.register_optionflag("ALLOW_UNICODE")
843
+
844
+
845
+ def _get_allow_bytes_flag() -> int:
846
+ """Register and return the ALLOW_BYTES flag."""
847
+ import doctest
848
+
849
+ return doctest.register_optionflag("ALLOW_BYTES")
850
+
851
+
852
+ def _get_number_flag() -> int:
853
+ """Register and return the NUMBER flag."""
854
+ import doctest
855
+
856
+ return doctest.register_optionflag("NUMBER")
857
+
858
+
859
+ def _get_report_choice(key: str) -> int:
860
+ """Return the actual `ipdoctest` module flag value.
861
+
862
+ We want to do it as late as possible to avoid importing `ipdoctest` and all
863
+ its dependencies when parsing options, as it adds overhead and breaks tests.
864
+ """
865
+ import doctest
866
+
867
+ return {
868
+ DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
869
+ DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
870
+ DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
871
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
872
+ DOCTEST_REPORT_CHOICE_NONE: 0,
873
+ }[key]
874
+
875
+
876
+ @pytest.fixture(scope="session")
877
+ def ipdoctest_namespace() -> Dict[str, Any]:
878
+ """Fixture that returns a :py:class:`dict` that will be injected into the
879
+ namespace of ipdoctests."""
880
+ return dict()
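Outside of the diff itself, here is a minimal sketch of how the option flags described in ``_get_checker`` behave, assuming IPython and pytest are installed and this file is importable as ``IPython.testing.plugin.pytest_ipdoctest`` (that module path is an assumption; the private helpers are used purely for illustration)::

    from IPython.testing.plugin.pytest_ipdoctest import (
        _get_allow_unicode_flag,
        _get_checker,
        _get_number_flag,
    )

    checker = _get_checker()  # LiteralsOutputChecker instance, built lazily above

    # NUMBER: the expected value only carries three decimal places, so
    # _remove_unwanted_precision trims the extra digits from the actual output
    # before the literal comparison is retried.
    print(checker.check_output("0.333\n", "0.3333333333333333\n", _get_number_flag()))  # True
    print(checker.check_output("0.333\n", "0.3333333333333333\n", 0))                   # False

    # ALLOW_UNICODE: u'' prefixes in the expected output are stripped before comparing.
    print(checker.check_output("u'abc'\n", "'abc'\n", _get_allow_unicode_flag()))       # True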
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/setup.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ """A Nose plugin to support IPython doctests.
3
+ """
4
+
5
+ from setuptools import setup
6
+
7
+ setup(name='IPython doctest plugin',
8
+ version='0.1',
9
+ author='The IPython Team',
10
+ description = 'Nose plugin to load IPython-extended doctests',
11
+ license = 'LGPL',
12
+ py_modules = ['ipdoctest'],
13
+ entry_points = {
14
+ 'nose.plugins.0.10': ['ipdoctest = ipdoctest:IPythonDoctest',
15
+ 'extdoctest = ipdoctest:ExtensionDoctest',
16
+ ],
17
+ },
18
+ )
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/simple.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Simple example using doctests.
2
+
3
+ This file just contains doctests both using plain python and IPython prompts.
4
+ All tests should be loaded by Pytest.
5
+ """
6
+
7
+
8
+ def pyfunc():
9
+ """Some pure python tests...
10
+
11
+ >>> pyfunc()
12
+ 'pyfunc'
13
+
14
+ >>> import os
15
+
16
+ >>> 2+3
17
+ 5
18
+
19
+ >>> for i in range(3):
20
+ ... print(i, end=' ')
21
+ ... print(i+1, end=' ')
22
+ ...
23
+ 0 1 1 2 2 3
24
+ """
25
+ return "pyfunc"
26
+
27
+
28
+ def ipyfunc():
29
+ """Some IPython tests...
30
+
31
+ In [1]: ipyfunc()
32
+ Out[1]: 'ipyfunc'
33
+
34
+ In [2]: import os
35
+
36
+ In [3]: 2+3
37
+ Out[3]: 5
38
+
39
+ In [4]: for i in range(3):
40
+ ...: print(i, end=' ')
41
+ ...: print(i+1, end=' ')
42
+ ...:
43
+ Out[4]: 0 1 1 2 2 3
44
+ """
45
+ return "ipyfunc"
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/simplevars.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ x = 1
2
+ print("x is:", x)
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_combo.txt ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ =======================
2
+ Combo testing example
3
+ =======================
4
+
5
+ This is a simple example that mixes ipython doctests::
6
+
7
+ In [1]: import code
8
+
9
+ In [2]: 2**12
10
+ Out[2]: 4096
11
+
12
+ with command-line example information that does *not* get executed::
13
+
14
+ $ mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0
15
+
16
+ and with literal examples of Python source code::
17
+
18
+ controller = dict(host='myhost',
19
+ engine_port=None, # default is 10105
20
+ control_port=None,
21
+ )
22
+
23
+ # keys are hostnames, values are the number of engine on that host
24
+ engines = dict(node1=2,
25
+ node2=2,
26
+ node3=2,
27
+ node3=2,
28
+ )
29
+
30
+ # Force failure to detect that this test is being run.
31
+ 1/0
32
+
33
+ These source code examples are executed but no output is compared at all. An
34
+ error or failure is reported only if an exception is raised.
35
+
36
+ NOTE: the execution of pure python blocks is not yet working!
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_example.txt ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ =====================================
2
+ Tests in example form - pure python
3
+ =====================================
4
+
5
+ This file contains doctest examples embedded as code blocks, using normal
6
+ Python prompts. See the accompanying file for similar examples using IPython
7
+ prompts (you can't mix both types within one file). The following will be run
8
+ as a test::
9
+
10
+ >>> 1+1
11
+ 2
12
+ >>> print ("hello")
13
+ hello
14
+
15
+ More than one example works::
16
+
17
+ >>> s="Hello World"
18
+
19
+ >>> s.upper()
20
+ 'HELLO WORLD'
21
+
22
+ but you should note that the *entire* test file is considered to be a single
23
+ test. Individual code blocks that fail are printed separately as ``example
24
+ failures``, but the whole file is still counted and reported as one test.
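For a quick check outside pytest, a plain-prompt file like the one above can also be fed to the stdlib runner; a minimal sketch, assuming the file is saved as ``test_example.txt`` in the current directory (the ``IPDoctestTextfile`` collector above performs the equivalent collection, but with the IPython parser and a custom checker, which ``doctest.testfile`` cannot take)::

    import doctest

    # Run every example in the text file; the whole file is reported as a
    # single test, mirroring how the plugin counts it.
    failures, attempted = doctest.testfile(
        "test_example.txt",
        module_relative=False,  # treat the name as a plain filesystem path
        optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
    )
    print(f"{failures} failed out of {attempted} examples")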
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_exampleip.txt ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ =================================
2
+ Tests in example form - IPython
3
+ =================================
4
+
5
+ You can write text files with examples that use IPython prompts (as long as you
6
+ use the nose ipython doctest plugin), but you can not mix and match prompt
7
+ styles in a single file. That is, you either use all ``>>>`` prompts or all
8
+ IPython-style prompts. Your test suite *can* have both types, you just need to
9
+ put each type of example in a separate. Using IPython prompts, you can paste
10
+ directly from your session::
11
+
12
+ In [5]: s="Hello World"
13
+
14
+ In [6]: s.upper()
15
+ Out[6]: 'HELLO WORLD'
16
+
17
+ Another example::
18
+
19
+ In [8]: 1+3
20
+ Out[8]: 4
21
+
22
+ Just like in IPython docstrings, you can use all IPython syntax and features::
23
+
24
+ In [9]: !echo hello
25
+ hello
26
+
27
+ In [10]: a='hi'
28
+
29
+ In [11]: !echo $a
30
+ hi
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_ipdoctest.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for the ipdoctest machinery itself.
2
+
3
+ Note: in a file named test_X, functions whose only test is their docstring (as
4
+ a doctest) and which have no test functionality of their own, should be called
5
+ 'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
6
+ empty function call is counted as a test, which just inflates test numbers
7
+ artificially).
8
+ """
9
+
10
+ def doctest_simple():
11
+ """ipdoctest must handle simple inputs
12
+
13
+ In [1]: 1
14
+ Out[1]: 1
15
+
16
+ In [2]: print(1)
17
+ 1
18
+ """
19
+
20
+ def doctest_multiline1():
21
+ """The ipdoctest machinery must handle multiline examples gracefully.
22
+
23
+ In [2]: for i in range(4):
24
+ ...: print(i)
25
+ ...:
26
+ 0
27
+ 1
28
+ 2
29
+ 3
30
+ """
31
+
32
+ def doctest_multiline2():
33
+ """Multiline examples that define functions and print output.
34
+
35
+ In [7]: def f(x):
36
+ ...: return x+1
37
+ ...:
38
+
39
+ In [8]: f(1)
40
+ Out[8]: 2
41
+
42
+ In [9]: def g(x):
43
+ ...: print('x is:',x)
44
+ ...:
45
+
46
+ In [10]: g(1)
47
+ x is: 1
48
+
49
+ In [11]: g('hello')
50
+ x is: hello
51
+ """
52
+
53
+
54
+ def doctest_multiline3():
55
+ """Multiline examples with blank lines.
56
+
57
+ In [12]: def h(x):
58
+ ....: if x>1:
59
+ ....: return x**2
60
+ ....: # To leave a blank line in the input, you must mark it
61
+ ....: # with a comment character:
62
+ ....: #
63
+ ....: # otherwise the doctest parser gets confused.
64
+ ....: else:
65
+ ....: return -1
66
+ ....:
67
+
68
+ In [13]: h(5)
69
+ Out[13]: 25
70
+
71
+ In [14]: h(1)
72
+ Out[14]: -1
73
+
74
+ In [15]: h(0)
75
+ Out[15]: -1
76
+ """
77
+
78
+
79
+ def doctest_builtin_underscore():
80
+ """Defining builtins._ should not break anything outside the doctest
81
+ while also should be working as expected inside the doctest.
82
+
83
+ In [1]: import builtins
84
+
85
+ In [2]: builtins._ = 42
86
+
87
+ In [3]: builtins._
88
+ Out[3]: 42
89
+
90
+ In [4]: _
91
+ Out[4]: 42
92
+ """
temp_venv/lib/python3.13/site-packages/IPython/testing/plugin/test_refs.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Some simple tests for the plugin while running scripts.
2
+ """
3
+ # Module imports
4
+ # Std lib
5
+ import inspect
6
+
7
+ # Our own
8
+
9
+ #-----------------------------------------------------------------------------
10
+ # Testing functions
11
+
12
+ def test_trivial():
13
+ """A trivial passing test."""
14
+ pass
15
+
16
+ def doctest_run():
17
+ """Test running a trivial script.
18
+
19
+ In [13]: run simplevars.py
20
+ x is: 1
21
+ """
22
+
23
+ def doctest_runvars():
24
+ """Test that variables defined in scripts get loaded correctly via %run.
25
+
26
+ In [13]: run simplevars.py
27
+ x is: 1
28
+
29
+ In [14]: x
30
+ Out[14]: 1
31
+ """
32
+
33
+ def doctest_ivars():
34
+ """Test that variables defined interactively are picked up.
35
+ In [5]: zz=1
36
+
37
+ In [6]: zz
38
+ Out[6]: 1
39
+ """