diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/__init__.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb45ca50eb66a8a84b8bb4ed2559ed634bbd120f Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/__init__.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/application.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/application.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1b1c66e37efe8b9dd99498fccac9069389d642b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/application.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/async_helpers.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/async_helpers.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32996e46d7f6b066740a308f7221bf07b4dce33e Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/async_helpers.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/autocall.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/autocall.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..952dd2f2031badacd9d5f54a56f2f56b3db19646 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/autocall.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/builtin_trap.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/builtin_trap.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..207e19c6eecf0cc0c44a362f6a21b60f89637834 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/builtin_trap.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/compilerop.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/compilerop.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6ef28c10254ff0b140bfcf5a7a2bd23aff9294f Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/compilerop.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/completerlib.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/completerlib.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81b71fa23fbdfba6cf815a04cb5198c62b60076c Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/completerlib.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/debugger.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/debugger.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be58294853df9e7c6dd03e96c1d286c185c175cc Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/debugger.cpython-313.pyc differ diff --git 
a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/display_functions.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/display_functions.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5c6a4b2e7cbc9da17d941dd6c70620dd92b2317 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/display_functions.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displayhook.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displayhook.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a7ed14b43240ad218c8912bd285f94d2ebc3bb1 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displayhook.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displaypub.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displaypub.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc27f952bcb0fe939ebf486fabf0e42ceb34f095 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/displaypub.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/doctb.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/doctb.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d823af2e48a290bcceb98dfdc7a3af18054ad01b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/doctb.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/error.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/error.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf6d1f1146958c94911eaac5de3c38e787830d75 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/error.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/events.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/events.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08cb85ee26a7202b1da583afb66d52b850802ed0 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/events.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/formatters.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/formatters.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b99dca106f2bbf3598a05ff064ea6fa288120879 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/formatters.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/guarded_eval.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/guarded_eval.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b909aca979917342f2219ba0fd658617961e60ef Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/guarded_eval.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/history.cpython-313.pyc 
b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/history.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf0bd420069683cbe503b85539b1df6af1628626 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/history.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/hooks.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/hooks.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13c3df11ff801d9c5b3ea55b6dbc76af3d27b04c Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/hooks.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64bf94879fc5dbcfd77639d1777bcb2c7df416ed Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/inputtransformer2.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/latex_symbols.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/latex_symbols.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b26a64cec8fe948c195a616c8d703bdecbaeacd Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/latex_symbols.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/logger.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/logger.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..682485900794e78cc779027eb3c47ee32ac020f9 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/logger.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/macro.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/macro.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..670e1ebba11138d9b48ade10880e7282a000e882 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/macro.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13e314729a7d25558e86002aadf4e4f043e83d78 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic_arguments.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic_arguments.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1e76d46b711440c952994231599762f218eb48b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/magic_arguments.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/page.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/page.cpython-313.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..a7990c735168cdbea23346b74b5a7b74fc2e478b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/page.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/prefilter.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/prefilter.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4c2c742a84d7970c3056139665824efd07b19cf Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/prefilter.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/profiledir.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/profiledir.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7cfad19f5e32f0a2f05c20cc48306f2221f01f76 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/profiledir.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/release.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/release.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e0feed4906989668e8247c67a89db4c11250e2b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/release.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/shellapp.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/shellapp.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28e3c171ca87ee1b0465062415fb41c86d8ea976 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/shellapp.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/splitinput.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/splitinput.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..239e1b1e875ccea40dde1012f41b00e838f808d0 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/splitinput.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tbtools.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tbtools.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f69a63f2ea400e89f91b3300ac6934b10c54b1f5 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tbtools.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tips.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tips.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82b09c790a8dedee40450e4ef8ac1c7b66110009 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/tips.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/usage.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/usage.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23d198b7842a8722c65de6055f959dbe2830164e Binary files /dev/null and 
b/temp_venv/lib/python3.13/site-packages/IPython/core/__pycache__/usage.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/__init__.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac9a46049d785c3fa5caa353f9c31d799f916e3a Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/__init__.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/ast_mod.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/ast_mod.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6910e66e4aa269661bfa4496df39f59f2f04c8fa Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/ast_mod.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/auto.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/auto.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..288b44c8f52f934c307dcf1c700eab7ce103cb65 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/auto.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/basic.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/basic.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59b382a4e412ad9e082d226ef6d1c081f6bd1d75 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/basic.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/code.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/code.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e191ab8d1b374917193dce68f8caf1737531e907 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/code.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/config.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/config.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77f280dbd938a436b054250d0604822dd2cf65bb Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/config.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/display.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/display.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e488c5ee41748e337e0b9e7f38dd3b24417a2a7 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/display.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/execution.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/execution.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64d7ec366e125192d259434f67113d3b61e081bc Binary files /dev/null and 
b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/execution.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/extension.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/extension.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ebf5d3f26dacad1998197a6ed8c765d725d97d89 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/extension.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/history.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/history.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52cbcb8a9e7324a29172ee5549159f43cd71482c Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/history.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/logging.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/logging.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..621ace1f90e72154ae83ea76843c4eb9275b813f Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/logging.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/namespace.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/namespace.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..673f476e8c53de5b0dc853d8f49cb2464a9dd663 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/namespace.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/osm.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/osm.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45eae364e0e35cf4c5588deb045658a710767d5b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/osm.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/packaging.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/packaging.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4aaa9b01fc7501dca7ed99c5a17c07b57b9ea420 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/packaging.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/pylab.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/pylab.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30a41a30ee5553f5c3d5e6aeb36ddc95e0e1bf0e Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/pylab.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/script.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/script.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84709573d35362d3a2b86fbf45a2226a58b063db Binary files 
/dev/null and b/temp_venv/lib/python3.13/site-packages/IPython/core/magics/__pycache__/script.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/IPython/core/profile/README_STARTUP b/temp_venv/lib/python3.13/site-packages/IPython/core/profile/README_STARTUP new file mode 100644 index 0000000000000000000000000000000000000000..61d470004218ae459ce7bfdc974f7c86e0790486 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/IPython/core/profile/README_STARTUP @@ -0,0 +1,11 @@ +This is the IPython startup directory + +.py and .ipy files in this directory will be run *prior* to any code or files specified +via the exec_lines or exec_files configurables whenever you load this profile. + +Files will be run in lexicographical order, so you can control the execution order of files +with a prefix, e.g.:: + + 00-first.py + 50-middle.py + 99-last.ipy diff --git a/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/__init__.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea7c5b47d6b4af3846bbcae3743260be9316ffa7 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/__init__.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/core.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/core.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ecd44b7246b9c88d092a6a258af1149859195df0 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/core.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/my_getattr_static.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/my_getattr_static.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a36b8c310813b2fa5fe4df3ff47d2a909564180 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/my_getattr_static.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/utils.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/utils.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7b8e6cbd2633e586ed66d5abb972221062b2544 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/utils.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/version.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/version.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..028e2a4a06b3360d12d8347fe4d07944565abeb4 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/pure_eval/__pycache__/version.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/pygments/filters/__init__.py b/temp_venv/lib/python3.13/site-packages/pygments/filters/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2fed761a01ee5e9e25918b9a0619fcb47c8811b1 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/filters/__init__.py @@ -0,0 +1,940 @@ +""" + pygments.filters + ~~~~~~~~~~~~~~~~ + + Module containing filter lookup functions and default + filters. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. 
+ :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \ + string_to_tokentype +from pygments.filter import Filter +from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \ + get_choice_opt, ClassNotFound, OptionError +from pygments.plugin import find_plugin_filters + + +def find_filter_class(filtername): + """Lookup a filter by name. Return None if not found.""" + if filtername in FILTERS: + return FILTERS[filtername] + for name, cls in find_plugin_filters(): + if name == filtername: + return cls + return None + + +def get_filter_by_name(filtername, **options): + """Return an instantiated filter. + + Options are passed to the filter initializer if wanted. + Raise a ClassNotFound if not found. + """ + cls = find_filter_class(filtername) + if cls: + return cls(**options) + else: + raise ClassNotFound(f'filter {filtername!r} not found') + + +def get_all_filters(): + """Return a generator of all filter names.""" + yield from FILTERS + for name, _ in find_plugin_filters(): + yield name + + +def _replace_special(ttype, value, regex, specialttype, + replacefunc=lambda x: x): + last = 0 + for match in regex.finditer(value): + start, end = match.start(), match.end() + if start != last: + yield ttype, value[last:start] + yield specialttype, replacefunc(value[start:end]) + last = end + if last != len(value): + yield ttype, value[last:] + + +class CodeTagFilter(Filter): + """Highlight special code tags in comments and docstrings. + + Options accepted: + + `codetags` : list of strings + A list of strings that are flagged as code tags. The default is to + highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``. + + .. versionchanged:: 2.13 + Now recognizes ``FIXME`` by default. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + tags = get_list_opt(options, 'codetags', + ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE']) + self.tag_re = re.compile(r'\b({})\b'.format('|'.join([ + re.escape(tag) for tag in tags if tag + ]))) + + def filter(self, lexer, stream): + regex = self.tag_re + for ttype, value in stream: + if ttype in String.Doc or \ + ttype in Comment and \ + ttype not in Comment.Preproc: + yield from _replace_special(ttype, value, regex, Comment.Special) + else: + yield ttype, value + + +class SymbolFilter(Filter): + """Convert mathematical symbols such as \\ in Isabelle + or \\longrightarrow in LaTeX into Unicode characters. + + This is mostly useful for HTML or console output when you want to + approximate the source rendering you'd see in an IDE. + + Options accepted: + + `lang` : string + The symbol language. Must be one of ``'isabelle'`` or + ``'latex'``. The default is ``'isabelle'``. 
+ """ + + latex_symbols = { + '\\alpha' : '\U000003b1', + '\\beta' : '\U000003b2', + '\\gamma' : '\U000003b3', + '\\delta' : '\U000003b4', + '\\varepsilon' : '\U000003b5', + '\\zeta' : '\U000003b6', + '\\eta' : '\U000003b7', + '\\vartheta' : '\U000003b8', + '\\iota' : '\U000003b9', + '\\kappa' : '\U000003ba', + '\\lambda' : '\U000003bb', + '\\mu' : '\U000003bc', + '\\nu' : '\U000003bd', + '\\xi' : '\U000003be', + '\\pi' : '\U000003c0', + '\\varrho' : '\U000003c1', + '\\sigma' : '\U000003c3', + '\\tau' : '\U000003c4', + '\\upsilon' : '\U000003c5', + '\\varphi' : '\U000003c6', + '\\chi' : '\U000003c7', + '\\psi' : '\U000003c8', + '\\omega' : '\U000003c9', + '\\Gamma' : '\U00000393', + '\\Delta' : '\U00000394', + '\\Theta' : '\U00000398', + '\\Lambda' : '\U0000039b', + '\\Xi' : '\U0000039e', + '\\Pi' : '\U000003a0', + '\\Sigma' : '\U000003a3', + '\\Upsilon' : '\U000003a5', + '\\Phi' : '\U000003a6', + '\\Psi' : '\U000003a8', + '\\Omega' : '\U000003a9', + '\\leftarrow' : '\U00002190', + '\\longleftarrow' : '\U000027f5', + '\\rightarrow' : '\U00002192', + '\\longrightarrow' : '\U000027f6', + '\\Leftarrow' : '\U000021d0', + '\\Longleftarrow' : '\U000027f8', + '\\Rightarrow' : '\U000021d2', + '\\Longrightarrow' : '\U000027f9', + '\\leftrightarrow' : '\U00002194', + '\\longleftrightarrow' : '\U000027f7', + '\\Leftrightarrow' : '\U000021d4', + '\\Longleftrightarrow' : '\U000027fa', + '\\mapsto' : '\U000021a6', + '\\longmapsto' : '\U000027fc', + '\\relbar' : '\U00002500', + '\\Relbar' : '\U00002550', + '\\hookleftarrow' : '\U000021a9', + '\\hookrightarrow' : '\U000021aa', + '\\leftharpoondown' : '\U000021bd', + '\\rightharpoondown' : '\U000021c1', + '\\leftharpoonup' : '\U000021bc', + '\\rightharpoonup' : '\U000021c0', + '\\rightleftharpoons' : '\U000021cc', + '\\leadsto' : '\U0000219d', + '\\downharpoonleft' : '\U000021c3', + '\\downharpoonright' : '\U000021c2', + '\\upharpoonleft' : '\U000021bf', + '\\upharpoonright' : '\U000021be', + '\\restriction' : '\U000021be', + '\\uparrow' : '\U00002191', + '\\Uparrow' : '\U000021d1', + '\\downarrow' : '\U00002193', + '\\Downarrow' : '\U000021d3', + '\\updownarrow' : '\U00002195', + '\\Updownarrow' : '\U000021d5', + '\\langle' : '\U000027e8', + '\\rangle' : '\U000027e9', + '\\lceil' : '\U00002308', + '\\rceil' : '\U00002309', + '\\lfloor' : '\U0000230a', + '\\rfloor' : '\U0000230b', + '\\flqq' : '\U000000ab', + '\\frqq' : '\U000000bb', + '\\bot' : '\U000022a5', + '\\top' : '\U000022a4', + '\\wedge' : '\U00002227', + '\\bigwedge' : '\U000022c0', + '\\vee' : '\U00002228', + '\\bigvee' : '\U000022c1', + '\\forall' : '\U00002200', + '\\exists' : '\U00002203', + '\\nexists' : '\U00002204', + '\\neg' : '\U000000ac', + '\\Box' : '\U000025a1', + '\\Diamond' : '\U000025c7', + '\\vdash' : '\U000022a2', + '\\models' : '\U000022a8', + '\\dashv' : '\U000022a3', + '\\surd' : '\U0000221a', + '\\le' : '\U00002264', + '\\ge' : '\U00002265', + '\\ll' : '\U0000226a', + '\\gg' : '\U0000226b', + '\\lesssim' : '\U00002272', + '\\gtrsim' : '\U00002273', + '\\lessapprox' : '\U00002a85', + '\\gtrapprox' : '\U00002a86', + '\\in' : '\U00002208', + '\\notin' : '\U00002209', + '\\subset' : '\U00002282', + '\\supset' : '\U00002283', + '\\subseteq' : '\U00002286', + '\\supseteq' : '\U00002287', + '\\sqsubset' : '\U0000228f', + '\\sqsupset' : '\U00002290', + '\\sqsubseteq' : '\U00002291', + '\\sqsupseteq' : '\U00002292', + '\\cap' : '\U00002229', + '\\bigcap' : '\U000022c2', + '\\cup' : '\U0000222a', + '\\bigcup' : '\U000022c3', + '\\sqcup' : '\U00002294', + '\\bigsqcup' : '\U00002a06', + 
'\\sqcap' : '\U00002293', + '\\Bigsqcap' : '\U00002a05', + '\\setminus' : '\U00002216', + '\\propto' : '\U0000221d', + '\\uplus' : '\U0000228e', + '\\bigplus' : '\U00002a04', + '\\sim' : '\U0000223c', + '\\doteq' : '\U00002250', + '\\simeq' : '\U00002243', + '\\approx' : '\U00002248', + '\\asymp' : '\U0000224d', + '\\cong' : '\U00002245', + '\\equiv' : '\U00002261', + '\\Join' : '\U000022c8', + '\\bowtie' : '\U00002a1d', + '\\prec' : '\U0000227a', + '\\succ' : '\U0000227b', + '\\preceq' : '\U0000227c', + '\\succeq' : '\U0000227d', + '\\parallel' : '\U00002225', + '\\mid' : '\U000000a6', + '\\pm' : '\U000000b1', + '\\mp' : '\U00002213', + '\\times' : '\U000000d7', + '\\div' : '\U000000f7', + '\\cdot' : '\U000022c5', + '\\star' : '\U000022c6', + '\\circ' : '\U00002218', + '\\dagger' : '\U00002020', + '\\ddagger' : '\U00002021', + '\\lhd' : '\U000022b2', + '\\rhd' : '\U000022b3', + '\\unlhd' : '\U000022b4', + '\\unrhd' : '\U000022b5', + '\\triangleleft' : '\U000025c3', + '\\triangleright' : '\U000025b9', + '\\triangle' : '\U000025b3', + '\\triangleq' : '\U0000225c', + '\\oplus' : '\U00002295', + '\\bigoplus' : '\U00002a01', + '\\otimes' : '\U00002297', + '\\bigotimes' : '\U00002a02', + '\\odot' : '\U00002299', + '\\bigodot' : '\U00002a00', + '\\ominus' : '\U00002296', + '\\oslash' : '\U00002298', + '\\dots' : '\U00002026', + '\\cdots' : '\U000022ef', + '\\sum' : '\U00002211', + '\\prod' : '\U0000220f', + '\\coprod' : '\U00002210', + '\\infty' : '\U0000221e', + '\\int' : '\U0000222b', + '\\oint' : '\U0000222e', + '\\clubsuit' : '\U00002663', + '\\diamondsuit' : '\U00002662', + '\\heartsuit' : '\U00002661', + '\\spadesuit' : '\U00002660', + '\\aleph' : '\U00002135', + '\\emptyset' : '\U00002205', + '\\nabla' : '\U00002207', + '\\partial' : '\U00002202', + '\\flat' : '\U0000266d', + '\\natural' : '\U0000266e', + '\\sharp' : '\U0000266f', + '\\angle' : '\U00002220', + '\\copyright' : '\U000000a9', + '\\textregistered' : '\U000000ae', + '\\textonequarter' : '\U000000bc', + '\\textonehalf' : '\U000000bd', + '\\textthreequarters' : '\U000000be', + '\\textordfeminine' : '\U000000aa', + '\\textordmasculine' : '\U000000ba', + '\\euro' : '\U000020ac', + '\\pounds' : '\U000000a3', + '\\yen' : '\U000000a5', + '\\textcent' : '\U000000a2', + '\\textcurrency' : '\U000000a4', + '\\textdegree' : '\U000000b0', + } + + isabelle_symbols = { + '\\' : '\U0001d7ec', + '\\' : '\U0001d7ed', + '\\' : '\U0001d7ee', + '\\' : '\U0001d7ef', + '\\' : '\U0001d7f0', + '\\' : '\U0001d7f1', + '\\' : '\U0001d7f2', + '\\' : '\U0001d7f3', + '\\' : '\U0001d7f4', + '\\' : '\U0001d7f5', + '\\' : '\U0001d49c', + '\\' : '\U0000212c', + '\\' : '\U0001d49e', + '\\' : '\U0001d49f', + '\\' : '\U00002130', + '\\' : '\U00002131', + '\\' : '\U0001d4a2', + '\\' : '\U0000210b', + '\\' : '\U00002110', + '\\' : '\U0001d4a5', + '\\' : '\U0001d4a6', + '\\' : '\U00002112', + '\\' : '\U00002133', + '\\' : '\U0001d4a9', + '\\' : '\U0001d4aa', + '\\
' : '\U0001d5c9', + '\\' : '\U0001d5ca', + '\\' : '\U0001d5cb', + '\\' : '\U0001d5cc', + '\\' : '\U0001d5cd', + '\\' : '\U0001d5ce', + '\\' : '\U0001d5cf', + '\\' : '\U0001d5d0', + '\\' : '\U0001d5d1', + '\\' : '\U0001d5d2', + '\\' : '\U0001d5d3', + '\\' : '\U0001d504', + '\\' : '\U0001d505', + '\\' : '\U0000212d', + '\\
' : '\U0001d507', + '\\' : '\U0001d508', + '\\' : '\U0001d509', + '\\' : '\U0001d50a', + '\\' : '\U0000210c', + '\\' : '\U00002111', + '\\' : '\U0001d50d', + '\\' : '\U0001d50e', + '\\' : '\U0001d50f', + '\\' : '\U0001d510', + '\\' : '\U0001d511', + '\\' : '\U0001d512', + '\\' : '\U0001d513', + '\\' : '\U0001d514', + '\\' : '\U0000211c', + '\\' : '\U0001d516', + '\\' : '\U0001d517', + '\\' : '\U0001d518', + '\\' : '\U0001d519', + '\\' : '\U0001d51a', + '\\' : '\U0001d51b', + '\\' : '\U0001d51c', + '\\' : '\U00002128', + '\\' : '\U0001d51e', + '\\' : '\U0001d51f', + '\\' : '\U0001d520', + '\\
' : '\U0001d521', + '\\' : '\U0001d522', + '\\' : '\U0001d523', + '\\' : '\U0001d524', + '\\' : '\U0001d525', + '\\' : '\U0001d526', + '\\' : '\U0001d527', + '\\' : '\U0001d528', + '\\' : '\U0001d529', + '\\' : '\U0001d52a', + '\\' : '\U0001d52b', + '\\' : '\U0001d52c', + '\\' : '\U0001d52d', + '\\' : '\U0001d52e', + '\\' : '\U0001d52f', + '\\' : '\U0001d530', + '\\' : '\U0001d531', + '\\' : '\U0001d532', + '\\' : '\U0001d533', + '\\' : '\U0001d534', + '\\' : '\U0001d535', + '\\' : '\U0001d536', + '\\' : '\U0001d537', + '\\' : '\U000003b1', + '\\' : '\U000003b2', + '\\' : '\U000003b3', + '\\' : '\U000003b4', + '\\' : '\U000003b5', + '\\' : '\U000003b6', + '\\' : '\U000003b7', + '\\' : '\U000003b8', + '\\' : '\U000003b9', + '\\' : '\U000003ba', + '\\' : '\U000003bb', + '\\' : '\U000003bc', + '\\' : '\U000003bd', + '\\' : '\U000003be', + '\\' : '\U000003c0', + '\\' : '\U000003c1', + '\\' : '\U000003c3', + '\\' : '\U000003c4', + '\\' : '\U000003c5', + '\\' : '\U000003c6', + '\\' : '\U000003c7', + '\\' : '\U000003c8', + '\\' : '\U000003c9', + '\\' : '\U00000393', + '\\' : '\U00000394', + '\\' : '\U00000398', + '\\' : '\U0000039b', + '\\' : '\U0000039e', + '\\' : '\U000003a0', + '\\' : '\U000003a3', + '\\' : '\U000003a5', + '\\' : '\U000003a6', + '\\' : '\U000003a8', + '\\' : '\U000003a9', + '\\' : '\U0001d539', + '\\' : '\U00002102', + '\\' : '\U00002115', + '\\' : '\U0000211a', + '\\' : '\U0000211d', + '\\' : '\U00002124', + '\\' : '\U00002190', + '\\' : '\U000027f5', + '\\' : '\U00002192', + '\\' : '\U000027f6', + '\\' : '\U000021d0', + '\\' : '\U000027f8', + '\\' : '\U000021d2', + '\\' : '\U000027f9', + '\\' : '\U00002194', + '\\' : '\U000027f7', + '\\' : '\U000021d4', + '\\' : '\U000027fa', + '\\' : '\U000021a6', + '\\' : '\U000027fc', + '\\' : '\U00002500', + '\\' : '\U00002550', + '\\' : '\U000021a9', + '\\' : '\U000021aa', + '\\' : '\U000021bd', + '\\' : '\U000021c1', + '\\' : '\U000021bc', + '\\' : '\U000021c0', + '\\' : '\U000021cc', + '\\' : '\U0000219d', + '\\' : '\U000021c3', + '\\' : '\U000021c2', + '\\' : '\U000021bf', + '\\' : '\U000021be', + '\\' : '\U000021be', + '\\' : '\U00002237', + '\\' : '\U00002191', + '\\' : '\U000021d1', + '\\' : '\U00002193', + '\\' : '\U000021d3', + '\\' : '\U00002195', + '\\' : '\U000021d5', + '\\' : '\U000027e8', + '\\' : '\U000027e9', + '\\' : '\U00002308', + '\\' : '\U00002309', + '\\' : '\U0000230a', + '\\' : '\U0000230b', + '\\' : '\U00002987', + '\\' : '\U00002988', + '\\' : '\U000027e6', + '\\' : '\U000027e7', + '\\' : '\U00002983', + '\\' : '\U00002984', + '\\' : '\U000000ab', + '\\' : '\U000000bb', + '\\' : '\U000022a5', + '\\' : '\U000022a4', + '\\' : '\U00002227', + '\\' : '\U000022c0', + '\\' : '\U00002228', + '\\' : '\U000022c1', + '\\' : '\U00002200', + '\\' : '\U00002203', + '\\' : '\U00002204', + '\\' : '\U000000ac', + '\\' : '\U000025a1', + '\\' : '\U000025c7', + '\\' : '\U000022a2', + '\\' : '\U000022a8', + '\\' : '\U000022a9', + '\\' : '\U000022ab', + '\\' : '\U000022a3', + '\\' : '\U0000221a', + '\\' : '\U00002264', + '\\' : '\U00002265', + '\\' : '\U0000226a', + '\\' : '\U0000226b', + '\\' : '\U00002272', + '\\' : '\U00002273', + '\\' : '\U00002a85', + '\\' : '\U00002a86', + '\\' : '\U00002208', + '\\' : '\U00002209', + '\\' : '\U00002282', + '\\' : '\U00002283', + '\\' : '\U00002286', + '\\' : '\U00002287', + '\\' : '\U0000228f', + '\\' : '\U00002290', + '\\' : '\U00002291', + '\\' : '\U00002292', + '\\' : '\U00002229', + '\\' : '\U000022c2', + '\\' : '\U0000222a', + '\\' : '\U000022c3', + '\\' : '\U00002294', + '\\' : 
'\U00002a06', + '\\' : '\U00002293', + '\\' : '\U00002a05', + '\\' : '\U00002216', + '\\' : '\U0000221d', + '\\' : '\U0000228e', + '\\' : '\U00002a04', + '\\' : '\U00002260', + '\\' : '\U0000223c', + '\\' : '\U00002250', + '\\' : '\U00002243', + '\\' : '\U00002248', + '\\' : '\U0000224d', + '\\' : '\U00002245', + '\\' : '\U00002323', + '\\' : '\U00002261', + '\\' : '\U00002322', + '\\' : '\U000022c8', + '\\' : '\U00002a1d', + '\\' : '\U0000227a', + '\\' : '\U0000227b', + '\\' : '\U0000227c', + '\\' : '\U0000227d', + '\\' : '\U00002225', + '\\' : '\U000000a6', + '\\' : '\U000000b1', + '\\' : '\U00002213', + '\\' : '\U000000d7', + '\\
' : '\U000000f7', + '\\' : '\U000022c5', + '\\' : '\U000022c6', + '\\' : '\U00002219', + '\\' : '\U00002218', + '\\' : '\U00002020', + '\\' : '\U00002021', + '\\' : '\U000022b2', + '\\' : '\U000022b3', + '\\' : '\U000022b4', + '\\' : '\U000022b5', + '\\' : '\U000025c3', + '\\' : '\U000025b9', + '\\' : '\U000025b3', + '\\' : '\U0000225c', + '\\' : '\U00002295', + '\\' : '\U00002a01', + '\\' : '\U00002297', + '\\' : '\U00002a02', + '\\' : '\U00002299', + '\\' : '\U00002a00', + '\\' : '\U00002296', + '\\' : '\U00002298', + '\\' : '\U00002026', + '\\' : '\U000022ef', + '\\' : '\U00002211', + '\\' : '\U0000220f', + '\\' : '\U00002210', + '\\' : '\U0000221e', + '\\' : '\U0000222b', + '\\' : '\U0000222e', + '\\' : '\U00002663', + '\\' : '\U00002662', + '\\' : '\U00002661', + '\\' : '\U00002660', + '\\' : '\U00002135', + '\\' : '\U00002205', + '\\' : '\U00002207', + '\\' : '\U00002202', + '\\' : '\U0000266d', + '\\' : '\U0000266e', + '\\' : '\U0000266f', + '\\' : '\U00002220', + '\\' : '\U000000a9', + '\\' : '\U000000ae', + '\\' : '\U000000ad', + '\\' : '\U000000af', + '\\' : '\U000000bc', + '\\' : '\U000000bd', + '\\' : '\U000000be', + '\\' : '\U000000aa', + '\\' : '\U000000ba', + '\\
' : '\U000000a7', + '\\' : '\U000000b6', + '\\' : '\U000000a1', + '\\' : '\U000000bf', + '\\' : '\U000020ac', + '\\' : '\U000000a3', + '\\' : '\U000000a5', + '\\' : '\U000000a2', + '\\' : '\U000000a4', + '\\' : '\U000000b0', + '\\' : '\U00002a3f', + '\\' : '\U00002127', + '\\' : '\U000025ca', + '\\' : '\U00002118', + '\\' : '\U00002240', + '\\' : '\U000022c4', + '\\' : '\U000000b4', + '\\' : '\U00000131', + '\\' : '\U000000a8', + '\\' : '\U000000b8', + '\\' : '\U000002dd', + '\\' : '\U000003f5', + '\\' : '\U000023ce', + '\\' : '\U00002039', + '\\' : '\U0000203a', + '\\' : '\U00002302', + '\\<^sub>' : '\U000021e9', + '\\<^sup>' : '\U000021e7', + '\\<^bold>' : '\U00002759', + '\\<^bsub>' : '\U000021d8', + '\\<^esub>' : '\U000021d9', + '\\<^bsup>' : '\U000021d7', + '\\<^esup>' : '\U000021d6', + } + + lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols} + + def __init__(self, **options): + Filter.__init__(self, **options) + lang = get_choice_opt(options, 'lang', + ['isabelle', 'latex'], 'isabelle') + self.symbols = self.lang_map[lang] + + def filter(self, lexer, stream): + for ttype, value in stream: + if value in self.symbols: + yield ttype, self.symbols[value] + else: + yield ttype, value + + +class KeywordCaseFilter(Filter): + """Convert keywords to lowercase or uppercase or capitalize them, which + means first letter uppercase, rest lowercase. + + This can be useful e.g. if you highlight Pascal code and want to adapt the + code to your styleguide. + + Options accepted: + + `case` : string + The casing to convert keywords to. Must be one of ``'lower'``, + ``'upper'`` or ``'capitalize'``. The default is ``'lower'``. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + case = get_choice_opt(options, 'case', + ['lower', 'upper', 'capitalize'], 'lower') + self.convert = getattr(str, case) + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Keyword: + yield ttype, self.convert(value) + else: + yield ttype, value + + +class NameHighlightFilter(Filter): + """Highlight a normal Name (and Name.*) token with a different token type. + + Example:: + + filter = NameHighlightFilter( + names=['foo', 'bar', 'baz'], + tokentype=Name.Function, + ) + + This would highlight the names "foo", "bar" and "baz" + as functions. `Name.Function` is the default token type. + + Options accepted: + + `names` : list of strings + A list of names that should be given the different token type. + There is no default. + `tokentype` : TokenType or string + A token type or a string containing a token type name that is + used for highlighting the strings in `names`. The default is + `Name.Function`. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.names = set(get_list_opt(options, 'names', [])) + tokentype = options.get('tokentype') + if tokentype: + self.tokentype = string_to_tokentype(tokentype) + else: + self.tokentype = Name.Function + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Name and value in self.names: + yield self.tokentype, value + else: + yield ttype, value + + +class ErrorToken(Exception): + pass + + +class RaiseOnErrorTokenFilter(Filter): + """Raise an exception when the lexer generates an error token. + + Options accepted: + + `excclass` : Exception class + The exception class to raise. + The default is `pygments.filters.ErrorToken`. + + .. 
versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.exception = options.get('excclass', ErrorToken) + try: + # issubclass() will raise TypeError if first argument is not a class + if not issubclass(self.exception, Exception): + raise TypeError + except TypeError: + raise OptionError('excclass option is not an exception class') + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype is Error: + raise self.exception(value) + yield ttype, value + + +class VisibleWhitespaceFilter(Filter): + """Convert tabs, newlines and/or spaces to visible characters. + + Options accepted: + + `spaces` : string or bool + If this is a one-character string, spaces will be replaces by this string. + If it is another true value, spaces will be replaced by ``·`` (unicode + MIDDLE DOT). If it is a false value, spaces will not be replaced. The + default is ``False``. + `tabs` : string or bool + The same as for `spaces`, but the default replacement character is ``»`` + (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value + is ``False``. Note: this will not work if the `tabsize` option for the + lexer is nonzero, as tabs will already have been expanded then. + `tabsize` : int + If tabs are to be replaced by this filter (see the `tabs` option), this + is the total number of characters that a tab should be expanded to. + The default is ``8``. + `newlines` : string or bool + The same as for `spaces`, but the default replacement character is ``¶`` + (unicode PILCROW SIGN). The default value is ``False``. + `wstokentype` : bool + If true, give whitespace the special `Whitespace` token type. This allows + styling the visible whitespace differently (e.g. greyed out), but it can + disrupt background colors. The default is ``True``. + + .. versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + for name, default in [('spaces', '·'), + ('tabs', '»'), + ('newlines', '¶')]: + opt = options.get(name, False) + if isinstance(opt, str) and len(opt) == 1: + setattr(self, name, opt) + else: + setattr(self, name, (opt and default or '')) + tabsize = get_int_opt(options, 'tabsize', 8) + if self.tabs: + self.tabs += ' ' * (tabsize - 1) + if self.newlines: + self.newlines += '\n' + self.wstt = get_bool_opt(options, 'wstokentype', True) + + def filter(self, lexer, stream): + if self.wstt: + spaces = self.spaces or ' ' + tabs = self.tabs or '\t' + newlines = self.newlines or '\n' + regex = re.compile(r'\s') + + def replacefunc(wschar): + if wschar == ' ': + return spaces + elif wschar == '\t': + return tabs + elif wschar == '\n': + return newlines + return wschar + + for ttype, value in stream: + yield from _replace_special(ttype, value, regex, Whitespace, + replacefunc) + else: + spaces, tabs, newlines = self.spaces, self.tabs, self.newlines + # simpler processing + for ttype, value in stream: + if spaces: + value = value.replace(' ', spaces) + if tabs: + value = value.replace('\t', tabs) + if newlines: + value = value.replace('\n', newlines) + yield ttype, value + + +class GobbleFilter(Filter): + """Gobbles source code lines (eats initial characters). + + This filter drops the first ``n`` characters off every line of code. This + may be useful when the source code fed to the lexer is indented by a fixed + amount of space that isn't desired in the output. + + Options accepted: + + `n` : int + The number of characters to gobble. + + .. 
versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + self.n = get_int_opt(options, 'n', 0) + + def gobble(self, value, left): + if left < len(value): + return value[left:], 0 + else: + return '', left - len(value) + + def filter(self, lexer, stream): + n = self.n + left = n # How many characters left to gobble. + for ttype, value in stream: + # Remove ``left`` tokens from first line, ``n`` from all others. + parts = value.split('\n') + (parts[0], left) = self.gobble(parts[0], left) + for i in range(1, len(parts)): + (parts[i], left) = self.gobble(parts[i], n) + value = '\n'.join(parts) + + if value != '': + yield ttype, value + + +class TokenMergeFilter(Filter): + """Merges consecutive tokens with the same token type in the output + stream of a lexer. + + .. versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + + def filter(self, lexer, stream): + current_type = None + current_value = None + for ttype, value in stream: + if ttype is current_type: + current_value += value + else: + if current_type is not None: + yield current_type, current_value + current_type = ttype + current_value = value + if current_type is not None: + yield current_type, current_value + + +FILTERS = { + 'codetagify': CodeTagFilter, + 'keywordcase': KeywordCaseFilter, + 'highlight': NameHighlightFilter, + 'raiseonerror': RaiseOnErrorTokenFilter, + 'whitespace': VisibleWhitespaceFilter, + 'gobble': GobbleFilter, + 'tokenmerge': TokenMergeFilter, + 'symbols': SymbolFilter, +} diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/_lua_builtins.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/_lua_builtins.py new file mode 100644 index 0000000000000000000000000000000000000000..e0977a0a4204fcda8bd216a8b1fb927842a0d620 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/_lua_builtins.py @@ -0,0 +1,285 @@ +""" + pygments.lexers._lua_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This file contains the names and modules of lua functions + It is able to re-generate itself, but for adding new functions you + probably have to add some callbacks (see function module_callbacks). + + Do not edit the MODULES dict by hand. + + Run with `python -I` to regenerate. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +MODULES = {'basic': ('_G', + '_VERSION', + 'assert', + 'collectgarbage', + 'dofile', + 'error', + 'getmetatable', + 'ipairs', + 'load', + 'loadfile', + 'next', + 'pairs', + 'pcall', + 'print', + 'rawequal', + 'rawget', + 'rawlen', + 'rawset', + 'select', + 'setmetatable', + 'tonumber', + 'tostring', + 'type', + 'warn', + 'xpcall'), + 'bit32': ('bit32.arshift', + 'bit32.band', + 'bit32.bnot', + 'bit32.bor', + 'bit32.btest', + 'bit32.bxor', + 'bit32.extract', + 'bit32.lrotate', + 'bit32.lshift', + 'bit32.replace', + 'bit32.rrotate', + 'bit32.rshift'), + 'coroutine': ('coroutine.close', + 'coroutine.create', + 'coroutine.isyieldable', + 'coroutine.resume', + 'coroutine.running', + 'coroutine.status', + 'coroutine.wrap', + 'coroutine.yield'), + 'debug': ('debug.debug', + 'debug.gethook', + 'debug.getinfo', + 'debug.getlocal', + 'debug.getmetatable', + 'debug.getregistry', + 'debug.getupvalue', + 'debug.getuservalue', + 'debug.sethook', + 'debug.setlocal', + 'debug.setmetatable', + 'debug.setupvalue', + 'debug.setuservalue', + 'debug.traceback', + 'debug.upvalueid', + 'debug.upvaluejoin'), + 'io': ('io.close', + 'io.flush', + 'io.input', + 'io.lines', + 'io.open', + 'io.output', + 'io.popen', + 'io.read', + 'io.stderr', + 'io.stdin', + 'io.stdout', + 'io.tmpfile', + 'io.type', + 'io.write'), + 'math': ('math.abs', + 'math.acos', + 'math.asin', + 'math.atan', + 'math.atan2', + 'math.ceil', + 'math.cos', + 'math.cosh', + 'math.deg', + 'math.exp', + 'math.floor', + 'math.fmod', + 'math.frexp', + 'math.huge', + 'math.ldexp', + 'math.log', + 'math.max', + 'math.maxinteger', + 'math.min', + 'math.mininteger', + 'math.modf', + 'math.pi', + 'math.pow', + 'math.rad', + 'math.random', + 'math.randomseed', + 'math.sin', + 'math.sinh', + 'math.sqrt', + 'math.tan', + 'math.tanh', + 'math.tointeger', + 'math.type', + 'math.ult'), + 'modules': ('package.config', + 'package.cpath', + 'package.loaded', + 'package.loadlib', + 'package.path', + 'package.preload', + 'package.searchers', + 'package.searchpath', + 'require'), + 'os': ('os.clock', + 'os.date', + 'os.difftime', + 'os.execute', + 'os.exit', + 'os.getenv', + 'os.remove', + 'os.rename', + 'os.setlocale', + 'os.time', + 'os.tmpname'), + 'string': ('string.byte', + 'string.char', + 'string.dump', + 'string.find', + 'string.format', + 'string.gmatch', + 'string.gsub', + 'string.len', + 'string.lower', + 'string.match', + 'string.pack', + 'string.packsize', + 'string.rep', + 'string.reverse', + 'string.sub', + 'string.unpack', + 'string.upper'), + 'table': ('table.concat', + 'table.insert', + 'table.move', + 'table.pack', + 'table.remove', + 'table.sort', + 'table.unpack'), + 'utf8': ('utf8.char', + 'utf8.charpattern', + 'utf8.codepoint', + 'utf8.codes', + 'utf8.len', + 'utf8.offset')} + +if __name__ == '__main__': # pragma: no cover + import re + from urllib.request import urlopen + import pprint + + # you can't generally find out what module a function belongs to if you + # have only its name. 
Because of this, here are some callback functions + # that recognize if a gioven function belongs to a specific module + def module_callbacks(): + def is_in_coroutine_module(name): + return name.startswith('coroutine.') + + def is_in_modules_module(name): + if name in ['require', 'module'] or name.startswith('package'): + return True + else: + return False + + def is_in_string_module(name): + return name.startswith('string.') + + def is_in_table_module(name): + return name.startswith('table.') + + def is_in_math_module(name): + return name.startswith('math') + + def is_in_io_module(name): + return name.startswith('io.') + + def is_in_os_module(name): + return name.startswith('os.') + + def is_in_debug_module(name): + return name.startswith('debug.') + + return {'coroutine': is_in_coroutine_module, + 'modules': is_in_modules_module, + 'string': is_in_string_module, + 'table': is_in_table_module, + 'math': is_in_math_module, + 'io': is_in_io_module, + 'os': is_in_os_module, + 'debug': is_in_debug_module} + + + + def get_newest_version(): + f = urlopen('http://www.lua.org/manual/') + r = re.compile(r'^(Lua )?\1') + for line in f: + m = r.match(line.decode('iso-8859-1')) + if m is not None: + return m.groups()[0] + + def get_lua_functions(version): + f = urlopen(f'http://www.lua.org/manual/{version}/') + r = re.compile(r'^\1') + functions = [] + for line in f: + m = r.match(line.decode('iso-8859-1')) + if m is not None: + functions.append(m.groups()[0]) + return functions + + def get_function_module(name): + for mod, cb in module_callbacks().items(): + if cb(name): + return mod + if '.' in name: + return name.split('.')[0] + else: + return 'basic' + + def regenerate(filename, modules): + with open(filename, encoding='utf-8') as fp: + content = fp.read() + + header = content[:content.find('MODULES = {')] + footer = content[content.find("if __name__ == '__main__':"):] + + + with open(filename, 'w', encoding='utf-8') as fp: + fp.write(header) + fp.write(f'MODULES = {pprint.pformat(modules)}\n\n') + fp.write(footer) + + def run(): + version = get_newest_version() + functions = set() + for v in ('5.2', version): + print(f'> Downloading function index for Lua {v}') + f = get_lua_functions(v) + print('> %d functions found, %d new:' % + (len(f), len(set(f) - functions))) + functions |= set(f) + + functions = sorted(functions) + + modules = {} + for full_function_name in functions: + print(f'>> {full_function_name}') + m = get_function_module(full_function_name) + modules.setdefault(m, []).append(full_function_name) + modules = {k: tuple(v) for k, v in modules.items()} + + regenerate(__file__, modules) + + run() diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/_postgres_builtins.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/_postgres_builtins.py new file mode 100644 index 0000000000000000000000000000000000000000..80bc728e96f0be12914cf480017075b61dc1936f --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/_postgres_builtins.py @@ -0,0 +1,739 @@ +""" + pygments.lexers._postgres_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Self-updating data files for PostgreSQL lexer. + + Run with `python -I` to update itself. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +# Autogenerated: please edit them if you like wasting your time. 
+ +KEYWORDS = ( + 'ABORT', + 'ABSOLUTE', + 'ACCESS', + 'ACTION', + 'ADD', + 'ADMIN', + 'AFTER', + 'AGGREGATE', + 'ALL', + 'ALSO', + 'ALTER', + 'ALWAYS', + 'ANALYSE', + 'ANALYZE', + 'AND', + 'ANY', + 'ARRAY', + 'AS', + 'ASC', + 'ASENSITIVE', + 'ASSERTION', + 'ASSIGNMENT', + 'ASYMMETRIC', + 'AT', + 'ATOMIC', + 'ATTACH', + 'ATTRIBUTE', + 'AUTHORIZATION', + 'BACKWARD', + 'BEFORE', + 'BEGIN', + 'BETWEEN', + 'BIGINT', + 'BINARY', + 'BIT', + 'BOOLEAN', + 'BOTH', + 'BREADTH', + 'BY', + 'CACHE', + 'CALL', + 'CALLED', + 'CASCADE', + 'CASCADED', + 'CASE', + 'CAST', + 'CATALOG', + 'CHAIN', + 'CHAR', + 'CHARACTER', + 'CHARACTERISTICS', + 'CHECK', + 'CHECKPOINT', + 'CLASS', + 'CLOSE', + 'CLUSTER', + 'COALESCE', + 'COLLATE', + 'COLLATION', + 'COLUMN', + 'COLUMNS', + 'COMMENT', + 'COMMENTS', + 'COMMIT', + 'COMMITTED', + 'COMPRESSION', + 'CONCURRENTLY', + 'CONFIGURATION', + 'CONFLICT', + 'CONNECTION', + 'CONSTRAINT', + 'CONSTRAINTS', + 'CONTENT', + 'CONTINUE', + 'CONVERSION', + 'COPY', + 'COST', + 'CREATE', + 'CROSS', + 'CSV', + 'CUBE', + 'CURRENT', + 'CURRENT_CATALOG', + 'CURRENT_DATE', + 'CURRENT_ROLE', + 'CURRENT_SCHEMA', + 'CURRENT_TIME', + 'CURRENT_TIMESTAMP', + 'CURRENT_USER', + 'CURSOR', + 'CYCLE', + 'DATA', + 'DATABASE', + 'DAY', + 'DEALLOCATE', + 'DEC', + 'DECIMAL', + 'DECLARE', + 'DEFAULT', + 'DEFAULTS', + 'DEFERRABLE', + 'DEFERRED', + 'DEFINER', + 'DELETE', + 'DELIMITER', + 'DELIMITERS', + 'DEPENDS', + 'DEPTH', + 'DESC', + 'DETACH', + 'DICTIONARY', + 'DISABLE', + 'DISCARD', + 'DISTINCT', + 'DO', + 'DOCUMENT', + 'DOMAIN', + 'DOUBLE', + 'DROP', + 'EACH', + 'ELSE', + 'ENABLE', + 'ENCODING', + 'ENCRYPTED', + 'END', + 'ENUM', + 'ESCAPE', + 'EVENT', + 'EXCEPT', + 'EXCLUDE', + 'EXCLUDING', + 'EXCLUSIVE', + 'EXECUTE', + 'EXISTS', + 'EXPLAIN', + 'EXPRESSION', + 'EXTENSION', + 'EXTERNAL', + 'EXTRACT', + 'FALSE', + 'FAMILY', + 'FETCH', + 'FILTER', + 'FINALIZE', + 'FIRST', + 'FLOAT', + 'FOLLOWING', + 'FOR', + 'FORCE', + 'FOREIGN', + 'FORWARD', + 'FREEZE', + 'FROM', + 'FULL', + 'FUNCTION', + 'FUNCTIONS', + 'GENERATED', + 'GLOBAL', + 'GRANT', + 'GRANTED', + 'GREATEST', + 'GROUP', + 'GROUPING', + 'GROUPS', + 'HANDLER', + 'HAVING', + 'HEADER', + 'HOLD', + 'HOUR', + 'IDENTITY', + 'IF', + 'ILIKE', + 'IMMEDIATE', + 'IMMUTABLE', + 'IMPLICIT', + 'IMPORT', + 'IN', + 'INCLUDE', + 'INCLUDING', + 'INCREMENT', + 'INDEX', + 'INDEXES', + 'INHERIT', + 'INHERITS', + 'INITIALLY', + 'INLINE', + 'INNER', + 'INOUT', + 'INPUT', + 'INSENSITIVE', + 'INSERT', + 'INSTEAD', + 'INT', + 'INTEGER', + 'INTERSECT', + 'INTERVAL', + 'INTO', + 'INVOKER', + 'IS', + 'ISNULL', + 'ISOLATION', + 'JOIN', + 'KEY', + 'LABEL', + 'LANGUAGE', + 'LARGE', + 'LAST', + 'LATERAL', + 'LEADING', + 'LEAKPROOF', + 'LEAST', + 'LEFT', + 'LEVEL', + 'LIKE', + 'LIMIT', + 'LISTEN', + 'LOAD', + 'LOCAL', + 'LOCALTIME', + 'LOCALTIMESTAMP', + 'LOCATION', + 'LOCK', + 'LOCKED', + 'LOGGED', + 'MAPPING', + 'MATCH', + 'MATERIALIZED', + 'MAXVALUE', + 'METHOD', + 'MINUTE', + 'MINVALUE', + 'MODE', + 'MONTH', + 'MOVE', + 'NAME', + 'NAMES', + 'NATIONAL', + 'NATURAL', + 'NCHAR', + 'NEW', + 'NEXT', + 'NFC', + 'NFD', + 'NFKC', + 'NFKD', + 'NO', + 'NONE', + 'NORMALIZE', + 'NORMALIZED', + 'NOT', + 'NOTHING', + 'NOTIFY', + 'NOTNULL', + 'NOWAIT', + 'NULL', + 'NULLIF', + 'NULLS', + 'NUMERIC', + 'OBJECT', + 'OF', + 'OFF', + 'OFFSET', + 'OIDS', + 'OLD', + 'ON', + 'ONLY', + 'OPERATOR', + 'OPTION', + 'OPTIONS', + 'OR', + 'ORDER', + 'ORDINALITY', + 'OTHERS', + 'OUT', + 'OUTER', + 'OVER', + 'OVERLAPS', + 'OVERLAY', + 'OVERRIDING', + 'OWNED', + 'OWNER', + 'PARALLEL', + 'PARSER', + 'PARTIAL', + 
'PARTITION', + 'PASSING', + 'PASSWORD', + 'PLACING', + 'PLANS', + 'POLICY', + 'POSITION', + 'PRECEDING', + 'PRECISION', + 'PREPARE', + 'PREPARED', + 'PRESERVE', + 'PRIMARY', + 'PRIOR', + 'PRIVILEGES', + 'PROCEDURAL', + 'PROCEDURE', + 'PROCEDURES', + 'PROGRAM', + 'PUBLICATION', + 'QUOTE', + 'RANGE', + 'READ', + 'REAL', + 'REASSIGN', + 'RECHECK', + 'RECURSIVE', + 'REF', + 'REFERENCES', + 'REFERENCING', + 'REFRESH', + 'REINDEX', + 'RELATIVE', + 'RELEASE', + 'RENAME', + 'REPEATABLE', + 'REPLACE', + 'REPLICA', + 'RESET', + 'RESTART', + 'RESTRICT', + 'RETURN', + 'RETURNING', + 'RETURNS', + 'REVOKE', + 'RIGHT', + 'ROLE', + 'ROLLBACK', + 'ROLLUP', + 'ROUTINE', + 'ROUTINES', + 'ROW', + 'ROWS', + 'RULE', + 'SAVEPOINT', + 'SCHEMA', + 'SCHEMAS', + 'SCROLL', + 'SEARCH', + 'SECOND', + 'SECURITY', + 'SELECT', + 'SEQUENCE', + 'SEQUENCES', + 'SERIALIZABLE', + 'SERVER', + 'SESSION', + 'SESSION_USER', + 'SET', + 'SETOF', + 'SETS', + 'SHARE', + 'SHOW', + 'SIMILAR', + 'SIMPLE', + 'SKIP', + 'SMALLINT', + 'SNAPSHOT', + 'SOME', + 'SQL', + 'STABLE', + 'STANDALONE', + 'START', + 'STATEMENT', + 'STATISTICS', + 'STDIN', + 'STDOUT', + 'STORAGE', + 'STORED', + 'STRICT', + 'STRIP', + 'SUBSCRIPTION', + 'SUBSTRING', + 'SUPPORT', + 'SYMMETRIC', + 'SYSID', + 'SYSTEM', + 'TABLE', + 'TABLES', + 'TABLESAMPLE', + 'TABLESPACE', + 'TEMP', + 'TEMPLATE', + 'TEMPORARY', + 'TEXT', + 'THEN', + 'TIES', + 'TIME', + 'TIMESTAMP', + 'TO', + 'TRAILING', + 'TRANSACTION', + 'TRANSFORM', + 'TREAT', + 'TRIGGER', + 'TRIM', + 'TRUE', + 'TRUNCATE', + 'TRUSTED', + 'TYPE', + 'TYPES', + 'UESCAPE', + 'UNBOUNDED', + 'UNCOMMITTED', + 'UNENCRYPTED', + 'UNION', + 'UNIQUE', + 'UNKNOWN', + 'UNLISTEN', + 'UNLOGGED', + 'UNTIL', + 'UPDATE', + 'USER', + 'USING', + 'VACUUM', + 'VALID', + 'VALIDATE', + 'VALIDATOR', + 'VALUE', + 'VALUES', + 'VARCHAR', + 'VARIADIC', + 'VARYING', + 'VERBOSE', + 'VERSION', + 'VIEW', + 'VIEWS', + 'VOLATILE', + 'WHEN', + 'WHERE', + 'WHITESPACE', + 'WINDOW', + 'WITH', + 'WITHIN', + 'WITHOUT', + 'WORK', + 'WRAPPER', + 'WRITE', + 'XML', + 'XMLATTRIBUTES', + 'XMLCONCAT', + 'XMLELEMENT', + 'XMLEXISTS', + 'XMLFOREST', + 'XMLNAMESPACES', + 'XMLPARSE', + 'XMLPI', + 'XMLROOT', + 'XMLSERIALIZE', + 'XMLTABLE', + 'YEAR', + 'YES', + 'ZONE', +) + +DATATYPES = ( + 'bigint', + 'bigserial', + 'bit', + 'bit varying', + 'bool', + 'boolean', + 'box', + 'bytea', + 'char', + 'character', + 'character varying', + 'cidr', + 'circle', + 'date', + 'decimal', + 'double precision', + 'float4', + 'float8', + 'inet', + 'int', + 'int2', + 'int4', + 'int8', + 'integer', + 'interval', + 'json', + 'jsonb', + 'line', + 'lseg', + 'macaddr', + 'macaddr8', + 'money', + 'numeric', + 'path', + 'pg_lsn', + 'pg_snapshot', + 'point', + 'polygon', + 'real', + 'serial', + 'serial2', + 'serial4', + 'serial8', + 'smallint', + 'smallserial', + 'text', + 'time', + 'timestamp', + 'timestamptz', + 'timetz', + 'tsquery', + 'tsvector', + 'txid_snapshot', + 'uuid', + 'varbit', + 'varchar', + 'with time zone', + 'without time zone', + 'xml', +) + +PSEUDO_TYPES = ( + 'any', + 'anyarray', + 'anycompatible', + 'anycompatiblearray', + 'anycompatiblemultirange', + 'anycompatiblenonarray', + 'anycompatiblerange', + 'anyelement', + 'anyenum', + 'anymultirange', + 'anynonarray', + 'anyrange', + 'cstring', + 'event_trigger', + 'fdw_handler', + 'index_am_handler', + 'internal', + 'language_handler', + 'pg_ddl_command', + 'record', + 'table_am_handler', + 'trigger', + 'tsm_handler', + 'unknown', + 'void', +) + +# Remove 'trigger' from types +PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - 
set(map(str.lower, KEYWORDS)))) + +PLPGSQL_KEYWORDS = ( + 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT', + 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE', + 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE', +) + +# Most of these keywords are from ExplainNode function +# in src/backend/commands/explain.c + +EXPLAIN_KEYWORDS = ( + 'Aggregate', + 'Append', + 'Bitmap Heap Scan', + 'Bitmap Index Scan', + 'BitmapAnd', + 'BitmapOr', + 'CTE Scan', + 'Custom Scan', + 'Delete', + 'Foreign Scan', + 'Function Scan', + 'Gather Merge', + 'Gather', + 'Group', + 'GroupAggregate', + 'Hash Join', + 'Hash', + 'HashAggregate', + 'Incremental Sort', + 'Index Only Scan', + 'Index Scan', + 'Insert', + 'Limit', + 'LockRows', + 'Materialize', + 'Memoize', + 'Merge Append', + 'Merge Join', + 'Merge', + 'MixedAggregate', + 'Named Tuplestore Scan', + 'Nested Loop', + 'ProjectSet', + 'Recursive Union', + 'Result', + 'Sample Scan', + 'Seq Scan', + 'SetOp', + 'Sort', + 'SubPlan', + 'Subquery Scan', + 'Table Function Scan', + 'Tid Range Scan', + 'Tid Scan', + 'Unique', + 'Update', + 'Values Scan', + 'WindowAgg', + 'WorkTable Scan', +) + + +if __name__ == '__main__': # pragma: no cover + import re + from urllib.request import urlopen + + from pygments.util import format_lines + + # One man's constant is another man's variable. + SOURCE_URL = 'https://github.com/postgres/postgres/raw/master' + KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h' + DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml' + + def update_myself(): + content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore') + data_file = list(content.splitlines()) + datatypes = parse_datatypes(data_file) + pseudos = parse_pseudos(data_file) + + content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore') + keywords = parse_keywords(content) + + update_consts(__file__, 'DATATYPES', datatypes) + update_consts(__file__, 'PSEUDO_TYPES', pseudos) + update_consts(__file__, 'KEYWORDS', keywords) + + def parse_keywords(f): + kw = [] + for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f): + kw.append(m.group(1).upper()) + + if not kw: + raise ValueError('no keyword found') + + kw.sort() + return kw + + def parse_datatypes(f): + dt = set() + for line in f: + if '<entry><type>' not in line: + continue + + # Parse a string such as + # time [ (<replaceable>p</replaceable>) ] [ without time zone ] + # into types "time" and "without time zone" + + # remove all the tags + line = re.sub("<replaceable>[^<]+</replaceable>", "", line) + line = re.sub("<[^>]+>", "", line) + + # Drop the parts containing braces + for tmp in [t for tmp in line.split('[') + for t in tmp.split(']') if "(" not in t]: + for t in tmp.split(','): + t = t.strip() + if not t: + continue + dt.add(" ".join(t.split())) + + dt = list(dt) + dt.sort() + return dt + + def parse_pseudos(f): + dt = [] + re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">') + re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>') + re_end = re.compile(r'\s*</table>
') + + f = iter(f) + for line in f: + if re_start.match(line) is not None: + break + else: + raise ValueError('pseudo datatypes table not found') + + for line in f: + m = re_entry.match(line) + if m is not None: + dt.append(m.group(1)) + + if re_end.match(line) is not None: + break + else: + raise ValueError('end of pseudo datatypes table not found') + + if not dt: + raise ValueError('pseudo datatypes not found') + + dt.sort() + return dt + + def update_consts(filename, constname, content): + with open(filename, encoding='utf-8') as f: + data = f.read() + + # Line to start/end inserting + re_match = re.compile(rf'^{constname}\s*=\s*\($.*?^\s*\)$', re.M | re.S) + m = re_match.search(data) + if not m: + raise ValueError(f'Could not find existing definition for {constname}') + + new_block = format_lines(constname, content) + data = data[:m.start()] + new_block + data[m.end():] + + with open(filename, 'w', encoding='utf-8', newline='\n') as f: + f.write(data) + + update_myself() diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/ada.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ada.py new file mode 100644 index 0000000000000000000000000000000000000000..cb286ca7c74a8cbf5a94220499f2b8f45dd94840 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ada.py @@ -0,0 +1,144 @@ +""" + pygments.lexers.ada + ~~~~~~~~~~~~~~~~~~~ + + Lexers for Ada family languages. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, words, using, this, \ + default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation +from pygments.lexers._ada_builtins import KEYWORD_LIST, BUILTIN_LIST + +__all__ = ['AdaLexer'] + + +class AdaLexer(RegexLexer): + """ + For Ada source code. 
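+
+    A hedged usage sketch (illustrative, not part of the upstream docstring):
+    the lexer is normally driven through the generic Pygments API, e.g.::
+
+        from pygments import highlight
+        from pygments.formatters import TerminalFormatter
+        from pygments.lexers.ada import AdaLexer
+
+        code = 'procedure Hello is begin null; end Hello;'
+        print(highlight(code, AdaLexer(), TerminalFormatter()))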
+ """ + + name = 'Ada' + aliases = ['ada', 'ada95', 'ada2005'] + filenames = ['*.adb', '*.ads', '*.ada'] + mimetypes = ['text/x-ada'] + url = 'https://www.adaic.org' + version_added = '1.3' + + flags = re.MULTILINE | re.IGNORECASE + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'--.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + (r'function|procedure|entry', Keyword.Declaration, 'subprogram'), + (r'(subtype|type)(\s+)(\w+)', + bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'), + (r'task|protected', Keyword.Declaration), + (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)), + (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'), + (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text, + Comment.Preproc)), + (r'(true|false|null)\b', Keyword.Constant), + # builtin types + (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type), + (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word), + (r'generic|private', Keyword.Declaration), + (r'package', Keyword.Declaration, 'package'), + (r'array\b', Keyword.Reserved, 'array_def'), + (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), + (r'(\w+)(\s*)(:)(\s*)(constant)', + bygroups(Name.Constant, Text, Punctuation, Text, + Keyword.Reserved)), + (r'<<\w+>>', Name.Label), + (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)', + bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)), + # keywords + (words(KEYWORD_LIST, prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (r'"[^"]*"', String), + include('attribute'), + include('numbers'), + (r"'[^']'", String.Character), + (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))), + (r"(<>|=>|:=|@|[\[\]]|[()|:;,.'])", Punctuation), + (r'[*<>+=/&-]', Operator), + (r'\n+', Text), + ], + 'numbers': [ + (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex), + (r'[0-9_]+\.[0-9_]*', Number.Float), + (r'[0-9_]+', Number.Integer), + ], + 'attribute': [ + (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)), + ], + 'subprogram': [ + (r'\(', Punctuation, ('#pop', 'formal_part')), + (r';', Punctuation, '#pop'), + (r'is\b', Keyword.Reserved, '#pop'), + (r'"[^"]+"|\w+', Name.Function), + include('root'), + ], + 'end': [ + ('(if|case|record|loop|select)', Keyword.Reserved), + (r'"[^"]+"|[\w.]+', Name.Function), + (r'\s+', Text), + (';', Punctuation, '#pop'), + ], + 'type_def': [ + (r';', Punctuation, '#pop'), + (r'\(', Punctuation, 'formal_part'), + (r'\[', Punctuation, 'formal_part'), + (r'with|and|use', Keyword.Reserved), + (r'array\b', Keyword.Reserved, ('#pop', 'array_def')), + (r'record\b', Keyword.Reserved, ('record_def')), + (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'), + include('root'), + ], + 'array_def': [ + (r';', Punctuation, '#pop'), + (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)), + include('root'), + ], + 'record_def': [ + (r'end record', Keyword.Reserved, '#pop'), + include('root'), + ], + 'import': [ + # TODO: use Name.Namespace if appropriate. This needs + # work to disinguish imports from aspects. 
+ (r'[\w.]+', Name, '#pop'), + default('#pop'), + ], + 'formal_part': [ + (r'\)', Punctuation, '#pop'), + (r'\]', Punctuation, '#pop'), + (r'\w+', Name.Variable), + (r',|:[^=]', Punctuation), + (r'(in|not|null|out|access)\b', Keyword.Reserved), + include('root'), + ], + 'package': [ + ('body', Keyword.Declaration), + (r'is\s+new|renames', Keyword.Reserved), + ('is', Keyword.Reserved, '#pop'), + (';', Punctuation, '#pop'), + (r'\(', Punctuation, 'package_instantiation'), + (r'([\w.]+)', Name.Class), + include('root'), + ], + 'package_instantiation': [ + (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)), + (r'[\w.\'"]', Text), + (r'\)', Punctuation, '#pop'), + include('root'), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/agile.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/agile.py new file mode 100644 index 0000000000000000000000000000000000000000..cb22a13bf795ed22be715452e0a9a18c8b3c9901 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/agile.py @@ -0,0 +1,25 @@ +""" + pygments.lexers.agile + ~~~~~~~~~~~~~~~~~~~~~ + + Just export lexer classes previously contained in this module. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +# ruff: noqa: F401 + +from pygments.lexers.lisp import SchemeLexer +from pygments.lexers.jvm import IokeLexer, ClojureLexer +from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \ + PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer +from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer +from pygments.lexers.perl import PerlLexer, Perl6Lexer +from pygments.lexers.d import CrocLexer, MiniDLexer +from pygments.lexers.iolang import IoLexer +from pygments.lexers.tcl import TclLexer +from pygments.lexers.factor import FactorLexer +from pygments.lexers.scripting import LuaLexer, MoonScriptLexer + +__all__ = [] diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/ampl.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ampl.py new file mode 100644 index 0000000000000000000000000000000000000000..bc4082b1b188f7f1e85bfa48c8f5de440973ab00 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ampl.py @@ -0,0 +1,87 @@ +""" + pygments.lexers.ampl + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for the AMPL language. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, bygroups, using, this, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace + +__all__ = ['AmplLexer'] + + +class AmplLexer(RegexLexer): + """ + For AMPL source code. 
+ """ + name = 'Ampl' + url = 'http://ampl.com/' + aliases = ['ampl'] + filenames = ['*.run'] + version_added = '2.2' + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Whitespace), + (r'#.*?\n', Comment.Single), + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (words(( + 'call', 'cd', 'close', 'commands', 'data', 'delete', 'display', + 'drop', 'end', 'environ', 'exit', 'expand', 'include', 'load', + 'model', 'objective', 'option', 'problem', 'purge', 'quit', + 'redeclare', 'reload', 'remove', 'reset', 'restore', 'shell', + 'show', 'solexpand', 'solution', 'solve', 'update', 'unload', + 'xref', 'coeff', 'coef', 'cover', 'obj', 'interval', 'default', + 'from', 'to', 'to_come', 'net_in', 'net_out', 'dimen', + 'dimension', 'check', 'complements', 'write', 'function', + 'pipe', 'format', 'if', 'then', 'else', 'in', 'while', 'repeat', + 'for'), suffix=r'\b'), Keyword.Reserved), + (r'(integer|binary|symbolic|ordered|circular|reversed|INOUT|IN|OUT|LOCAL)', + Keyword.Type), + (r'\".*?\"', String.Double), + (r'\'.*?\'', String.Single), + (r'[()\[\]{},;:]+', Punctuation), + (r'\b(\w+)(\.)(astatus|init0|init|lb0|lb1|lb2|lb|lrc|' + r'lslack|rc|relax|slack|sstatus|status|ub0|ub1|ub2|' + r'ub|urc|uslack|val)', + bygroups(Name.Variable, Punctuation, Keyword.Reserved)), + (r'(set|param|var|arc|minimize|maximize|subject to|s\.t\.|subj to|' + r'node|table|suffix|read table|write table)(\s+)(\w+)', + bygroups(Keyword.Declaration, Whitespace, Name.Variable)), + (r'(param)(\s*)(:)(\s*)(\w+)(\s*)(:)(\s*)((\w|\s)+)', + bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace, + Name.Variable, Whitespace, Punctuation, Whitespace, Name.Variable)), + (r'(let|fix|unfix)(\s*)((?:\{.*\})?)(\s*)(\w+)', + bygroups(Keyword.Declaration, Whitespace, using(this), Whitespace, + Name.Variable)), + (words(( + 'abs', 'acos', 'acosh', 'alias', 'asin', 'asinh', 'atan', 'atan2', + 'atanh', 'ceil', 'ctime', 'cos', 'exp', 'floor', 'log', 'log10', + 'max', 'min', 'precision', 'round', 'sin', 'sinh', 'sqrt', 'tan', + 'tanh', 'time', 'trunc', 'Beta', 'Cauchy', 'Exponential', 'Gamma', + 'Irand224', 'Normal', 'Normal01', 'Poisson', 'Uniform', 'Uniform01', + 'num', 'num0', 'ichar', 'char', 'length', 'substr', 'sprintf', + 'match', 'sub', 'gsub', 'print', 'printf', 'next', 'nextw', 'prev', + 'prevw', 'first', 'last', 'ord', 'ord0', 'card', 'arity', + 'indexarity'), prefix=r'\b', suffix=r'\b'), Name.Builtin), + (r'(\+|\-|\*|/|\*\*|=|<=|>=|==|\||\^|<|>|\!|\.\.|:=|\&|\!=|<<|>>)', + Operator), + (words(( + 'or', 'exists', 'forall', 'and', 'in', 'not', 'within', 'union', + 'diff', 'difference', 'symdiff', 'inter', 'intersect', + 'intersection', 'cross', 'setof', 'by', 'less', 'sum', 'prod', + 'product', 'div', 'mod'), suffix=r'\b'), + Keyword.Reserved), # Operator.Name but not enough emphasized with that + (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float), + (r'\d+([eE][+-]?\d+)?', Number.Integer), + (r'[+-]?Infinity', Number.Integer), + (r'(\w+|(\.(?!\.)))', Text) + ] + + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/c_like.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/c_like.py new file mode 100644 index 0000000000000000000000000000000000000000..1ba269e829b7b5789b5ef74d0c6d2251423e5019 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/c_like.py @@ -0,0 +1,738 @@ +""" + pygments.lexers.c_like + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for other C-like languages. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. 
+ :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \ + default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace + +from pygments.lexers.c_cpp import CLexer, CppLexer +from pygments.lexers import _mql_builtins + +__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer', + 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer', + 'OmgIdlLexer', 'PromelaLexer'] + + +class PikeLexer(CppLexer): + """ + For `Pike `_ source code. + """ + name = 'Pike' + aliases = ['pike'] + filenames = ['*.pike', '*.pmod'] + mimetypes = ['text/x-pike'] + version_added = '2.0' + + tokens = { + 'statements': [ + (words(( + 'catch', 'new', 'private', 'protected', 'public', 'gauge', + 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', + 'extends', 'from', 'this', 'super', 'constant', 'final', 'static', + 'import', 'use', 'extern', 'inline', 'proto', 'break', 'continue', + 'if', 'else', 'for', 'while', 'do', 'switch', 'case', 'as', 'in', + 'version', 'return', 'true', 'false', 'null', + '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__', + '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__', + '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__', + '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'), + Keyword), + (r'(bool|int|long|float|short|double|char|string|object|void|mapping|' + r'array|multiset|program|function|lambda|mixed|' + r'[a-z_][a-z0-9_]*_t)\b', + Keyword.Type), + (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'), + (r'[~!%^&*+=|?:<>/@-]', Operator), + inherit, + ], + 'classname': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + # template specification + (r'\s*(?=>)', Whitespace, '#pop'), + ], + } + + +class NesCLexer(CLexer): + """ + For `nesC `_ source code with preprocessor + directives. + """ + name = 'nesC' + aliases = ['nesc'] + filenames = ['*.nc'] + mimetypes = ['text/x-nescsrc'] + version_added = '2.0' + + tokens = { + 'statements': [ + (words(( + 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component', + 'components', 'configuration', 'event', 'extends', 'generic', + 'implementation', 'includes', 'interface', 'module', 'new', 'norace', + 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'), + Keyword), + (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t', + 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t', + 'nx_uint64_t'), suffix=r'\b'), + Keyword.Type), + inherit, + ], + } + + +class ClayLexer(RegexLexer): + """ + For Clay source. 
+ """ + name = 'Clay' + filenames = ['*.clay'] + aliases = ['clay'] + mimetypes = ['text/x-clay'] + url = 'http://claylabs.com/clay' + version_added = '2.0' + + tokens = { + 'root': [ + (r'\s+', Whitespace), + (r'//.*?$', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\b(public|private|import|as|record|variant|instance' + r'|define|overload|default|external|alias' + r'|rvalue|ref|forward|inline|noinline|forceinline' + r'|enum|var|and|or|not|if|else|goto|return|while' + r'|switch|case|break|continue|for|in|true|false|try|catch|throw' + r'|finally|onerror|staticassert|eval|when|newtype' + r'|__FILE__|__LINE__|__COLUMN__|__ARG__' + r')\b', Keyword), + (r'[~!%^&*+=|:<>/-]', Operator), + (r'[#(){}\[\],;.]', Punctuation), + (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'\d+[LlUu]*', Number.Integer), + (r'\b(true|false)\b', Name.Builtin), + (r'(?i)[a-z_?][\w?]*', Name), + (r'"""', String, 'tdqs'), + (r'"', String, 'dqs'), + ], + 'strings': [ + (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape), + (r'[^\\"]+', String), + ], + 'nl': [ + (r'\n', String), + ], + 'dqs': [ + (r'"', String, '#pop'), + include('strings'), + ], + 'tdqs': [ + (r'"""', String, '#pop'), + include('strings'), + include('nl'), + ], + } + + +class ECLexer(CLexer): + """ + For eC source code with preprocessor directives. + """ + name = 'eC' + aliases = ['ec'] + filenames = ['*.ec', '*.eh'] + mimetypes = ['text/x-echdr', 'text/x-ecsrc'] + url = 'https://ec-lang.org' + version_added = '1.5' + + tokens = { + 'statements': [ + (words(( + 'virtual', 'class', 'private', 'public', 'property', 'import', + 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get', + 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass', + '__on_register_module', 'namespace', 'using', 'typed_object', + 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers', + 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset', + 'class_default_property', 'property_category', 'class_data', + 'class_property', 'thisclass', 'dbtable', 'dbindex', + 'database_open', 'dbfield'), suffix=r'\b'), Keyword), + (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte', + 'unichar', 'int64'), suffix=r'\b'), + Keyword.Type), + (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'), + (r'(null|value|this)\b', Name.Builtin), + inherit, + ] + } + + +class ValaLexer(RegexLexer): + """ + For Vala source code with preprocessor directives. 
+ """ + name = 'Vala' + aliases = ['vala', 'vapi'] + filenames = ['*.vala', '*.vapi'] + mimetypes = ['text/x-vala'] + url = 'https://vala.dev' + version_added = '1.1' + + tokens = { + 'whitespace': [ + (r'^\s*#if\s+0', Comment.Preproc, 'if0'), + (r'\n', Whitespace), + (r'\s+', Whitespace), + (r'\\\n', Text), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + ], + 'statements': [ + (r'[L@]?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", + String.Char), + (r'(?s)""".*?"""', String), # verbatim strings + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex), + (r'0[0-7]+[Ll]?', Number.Oct), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])', + bygroups(Punctuation, Name.Decorator, Punctuation)), + # TODO: "correctly" parse complex code attributes + (r'(\[)(CCode|(?:Integer|Floating)Type)', + bygroups(Punctuation, Name.Decorator)), + (r'[()\[\],.]', Punctuation), + (words(( + 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue', + 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for', + 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params', + 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try', + 'typeof', 'while', 'yield'), suffix=r'\b'), + Keyword), + (words(( + 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern', + 'inline', 'internal', 'override', 'owned', 'private', 'protected', + 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned', + 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'), + Keyword.Declaration), + (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Whitespace), + 'namespace'), + (r'(class|errordomain|interface|struct)(\s+)', + bygroups(Keyword.Declaration, Whitespace), 'class'), + (r'(\.)([a-zA-Z_]\w*)', + bygroups(Operator, Name.Attribute)), + # void is an actual keyword, others are in glib-2.0.vapi + (words(( + 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16', + 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string', + 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', + 'ulong', 'unichar', 'ushort'), suffix=r'\b'), + Keyword.Type), + (r'(true|false|null)\b', Name.Builtin), + (r'[a-zA-Z_]\w*', Name), + ], + 'root': [ + include('whitespace'), + default('statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + ('[{}]', Punctuation), + (';', Punctuation, '#pop'), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + 'if0': [ + (r'^\s*#if.*?(?>>'} + + def get_tokens_unprocessed(self, text, stack=('root',)): + for index, token, value in CLexer.get_tokens_unprocessed(self, text, stack): + if token is Name: + if value in self.variable_qualifiers: + token = Keyword.Type + elif value in self.vector_types: + token = Keyword.Type + elif value in self.variables: + token = Name.Builtin + elif value in self.execution_confs: + token = Keyword.Pseudo + elif value in self.function_qualifiers: + token = Keyword.Reserved + elif value in self.functions: + token = Name.Function + yield index, token, value + + +class SwigLexer(CppLexer): + """ + For `SWIG `_ source code. 
+ """ + name = 'SWIG' + aliases = ['swig'] + filenames = ['*.swg', '*.i'] + mimetypes = ['text/swig'] + version_added = '2.0' + priority = 0.04 # Lower than C/C++ and Objective C/C++ + + tokens = { + 'root': [ + # Match it here so it won't be matched as a function in the rest of root + (r'\$\**\&?\w+', Name), + inherit + ], + 'statements': [ + # SWIG directives + (r'(%[a-z_][a-z0-9_]*)', Name.Function), + # Special variables + (r'\$\**\&?\w+', Name), + # Stringification / additional preprocessor directives + (r'##*[a-zA-Z_]\w*', Comment.Preproc), + inherit, + ], + } + + # This is a far from complete set of SWIG directives + swig_directives = { + # Most common directives + '%apply', '%define', '%director', '%enddef', '%exception', '%extend', + '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include', + '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma', + '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap', + # Less common directives + '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear', + '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum', + '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor', + '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor', + '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments', + '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv', + '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception', + '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar', + '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend', + '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall', + '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof', + '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', + '%warnfilter'} + + def analyse_text(text): + rv = 0 + # Search for SWIG directives, which are conventionally at the beginning of + # a line. The probability of them being within a line is low, so let another + # lexer win in this case. + matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M) + for m in matches: + if m in SwigLexer.swig_directives: + rv = 0.98 + break + else: + rv = 0.91 # Fraction higher than MatlabLexer + return rv + + +class MqlLexer(CppLexer): + """ + For `MQL4 `_ and + `MQL5 `_ source code. + """ + name = 'MQL' + aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5'] + filenames = ['*.mq4', '*.mq5', '*.mqh'] + mimetypes = ['text/x-mql'] + version_added = '2.0' + + tokens = { + 'statements': [ + (words(_mql_builtins.keywords, suffix=r'\b'), Keyword), + (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type), + (words(_mql_builtins.types, suffix=r'\b'), Name.Function), + (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant), + (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'), + Name.Constant), + inherit, + ], + } + + +class ArduinoLexer(CppLexer): + """ + For `Arduino(tm) `_ source. 
+ + This is an extension of the CppLexer, as the Arduino® Language is a superset + of C++ + """ + + name = 'Arduino' + aliases = ['arduino'] + filenames = ['*.ino'] + mimetypes = ['text/x-arduino'] + version_added = '2.1' + + # Language sketch main structure functions + structure = {'setup', 'loop'} + + # Language operators + operators = {'not', 'or', 'and', 'xor'} + + # Language 'variables' + variables = { + 'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL', + 'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET', + 'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH', + 'LOW', 'INPUT', 'OUTPUT', 'INPUT_PULLUP', 'LED_BUILTIN', 'true', 'false', + 'void', 'boolean', 'char', 'unsigned char', 'byte', 'int', 'unsigned int', + 'word', 'long', 'unsigned long', 'short', 'float', 'double', 'string', 'String', + 'array', 'static', 'volatile', 'const', 'boolean', 'byte', 'word', 'string', + 'String', 'array', 'int', 'float', 'private', 'char', 'virtual', 'operator', + 'sizeof', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int8_t', 'int16_t', + 'int32_t', 'int64_t', 'dynamic_cast', 'typedef', 'const_cast', 'const', + 'struct', 'static_cast', 'union', 'unsigned', 'long', 'volatile', 'static', + 'protected', 'bool', 'public', 'friend', 'auto', 'void', 'enum', 'extern', + 'class', 'short', 'reinterpret_cast', 'double', 'register', 'explicit', + 'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary', + 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short', + 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong', + 'atomic_llong', 'atomic_ullong', 'PROGMEM'} + + # Language shipped functions and class ( ) + functions = { + 'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer', + 'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall', + 'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient', + 'GSMScanner', 'FileSystem', 'Scheduler', 'GSMServer', 'YunClient', 'YunServer', + 'IPAddress', 'GSMClient', 'GSMModem', 'Keyboard', 'Ethernet', 'Console', + 'GSMBand', 'Esplora', 'Stepper', 'Process', 'WiFiUDP', 'GSM_SMS', 'Mailbox', + 'USBHost', 'Firmata', 'PImage', 'Client', 'Server', 'GSMPIN', 'FileIO', + 'Bridge', 'Serial', 'EEPROM', 'Stream', 'Mouse', 'Audio', 'Servo', 'File', + 'Task', 'GPRS', 'WiFi', 'Wire', 'TFT', 'GSM', 'SPI', 'SD', + 'runShellCommandAsynchronously', 'analogWriteResolution', + 'retrieveCallingNumber', 'printFirmwareVersion', 'analogReadResolution', + 'sendDigitalPortPair', 'noListenOnLocalhost', 'readJoystickButton', + 'setFirmwareVersion', 'readJoystickSwitch', 'scrollDisplayRight', + 'getVoiceCallStatus', 'scrollDisplayLeft', 'writeMicroseconds', + 'delayMicroseconds', 'beginTransmission', 'getSignalStrength', + 'runAsynchronously', 'getAsynchronously', 'listenOnLocalhost', + 'getCurrentCarrier', 'readAccelerometer', 'messageAvailable', + 'sendDigitalPorts', 'lineFollowConfig', 'countryNameWrite', 'runShellCommand', + 'readStringUntil', 'rewindDirectory', 'readTemperature', 'setClockDivider', + 'readLightSensor', 'endTransmission', 'analogReference', 'detachInterrupt', + 'countryNameRead', 'attachInterrupt', 'encryptionType', 'readBytesUntil', + 'robotNameWrite', 'readMicrophone', 'robotNameRead', 'cityNameWrite', + 'userNameWrite', 'readJoystickY', 'readJoystickX', 'mouseReleased', + 'openNextFile', 'scanNetworks', 'noInterrupts', 'digitalWrite', 'beginSpeaker', + 'mousePressed', 'isActionDone', 
'mouseDragged', 'displayLogos', 'noAutoscroll', + 'addParameter', 'remoteNumber', 'getModifiers', 'keyboardRead', 'userNameRead', + 'waitContinue', 'processInput', 'parseCommand', 'printVersion', 'readNetworks', + 'writeMessage', 'blinkVersion', 'cityNameRead', 'readMessage', 'setDataMode', + 'parsePacket', 'isListening', 'setBitOrder', 'beginPacket', 'isDirectory', + 'motorsWrite', 'drawCompass', 'digitalRead', 'clearScreen', 'serialEvent', + 'rightToLeft', 'setTextSize', 'leftToRight', 'requestFrom', 'keyReleased', + 'compassRead', 'analogWrite', 'interrupts', 'WiFiServer', 'disconnect', + 'playMelody', 'parseFloat', 'autoscroll', 'getPINUsed', 'setPINUsed', + 'setTimeout', 'sendAnalog', 'readSlider', 'analogRead', 'beginWrite', + 'createChar', 'motorsStop', 'keyPressed', 'tempoWrite', 'readButton', + 'subnetMask', 'debugPrint', 'macAddress', 'writeGreen', 'randomSeed', + 'attachGPRS', 'readString', 'sendString', 'remotePort', 'releaseAll', + 'mouseMoved', 'background', 'getXChange', 'getYChange', 'answerCall', + 'getResult', 'voiceCall', 'endPacket', 'constrain', 'getSocket', 'writeJSON', + 'getButton', 'available', 'connected', 'findUntil', 'readBytes', 'exitValue', + 'readGreen', 'writeBlue', 'startLoop', 'IPAddress', 'isPressed', 'sendSysex', + 'pauseMode', 'gatewayIP', 'setCursor', 'getOemKey', 'tuneWrite', 'noDisplay', + 'loadImage', 'switchPIN', 'onRequest', 'onReceive', 'changePIN', 'playFile', + 'noBuffer', 'parseInt', 'overflow', 'checkPIN', 'knobRead', 'beginTFT', + 'bitClear', 'updateIR', 'bitWrite', 'position', 'writeRGB', 'highByte', + 'writeRed', 'setSpeed', 'readBlue', 'noStroke', 'remoteIP', 'transfer', + 'shutdown', 'hangCall', 'beginSMS', 'endWrite', 'attached', 'maintain', + 'noCursor', 'checkReg', 'checkPUK', 'shiftOut', 'isValid', 'shiftIn', 'pulseIn', + 'connect', 'println', 'localIP', 'pinMode', 'getIMEI', 'display', 'noBlink', + 'process', 'getBand', 'running', 'beginSD', 'drawBMP', 'lowByte', 'setBand', + 'release', 'bitRead', 'prepare', 'pointTo', 'readRed', 'setMode', 'noFill', + 'remove', 'listen', 'stroke', 'detach', 'attach', 'noTone', 'exists', 'buffer', + 'height', 'bitSet', 'circle', 'config', 'cursor', 'random', 'IRread', 'setDNS', + 'endSMS', 'getKey', 'micros', 'millis', 'begin', 'print', 'write', 'ready', + 'flush', 'width', 'isPIN', 'blink', 'clear', 'press', 'mkdir', 'rmdir', 'close', + 'point', 'yield', 'image', 'BSSID', 'click', 'delay', 'read', 'text', 'move', + 'peek', 'beep', 'rect', 'line', 'open', 'seek', 'fill', 'size', 'turn', 'stop', + 'home', 'find', 'step', 'tone', 'sqrt', 'RSSI', 'SSID', 'end', 'bit', 'tan', + 'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put', + 'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit', + 'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase', + 'isHexadecimalDigit'} + + # do not highlight + suppress_highlight = { + 'namespace', 'template', 'mutable', 'using', 'asm', 'typeid', + 'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept', + 'static_assert', 'thread_local', 'restrict'} + + def get_tokens_unprocessed(self, text, stack=('root',)): + for index, token, value in CppLexer.get_tokens_unprocessed(self, text, stack): + if value in self.structure: + yield index, Name.Builtin, value + elif value in self.operators: + yield index, Operator, value + elif value in self.variables: + yield index, Keyword.Reserved, value + elif value in self.suppress_highlight: + yield index, Name, value + elif value in self.functions: + yield index, 
Name.Function, value + else: + yield index, token, value + + +class CharmciLexer(CppLexer): + """ + For `Charm++ `_ interface files (.ci). + """ + + name = 'Charmci' + aliases = ['charmci'] + filenames = ['*.ci'] + version_added = '2.4' + + mimetypes = [] + + tokens = { + 'keywords': [ + (r'(module)(\s+)', bygroups(Keyword, Text), 'classname'), + (words(('mainmodule', 'mainchare', 'chare', 'array', 'group', + 'nodegroup', 'message', 'conditional')), Keyword), + (words(('entry', 'aggregate', 'threaded', 'sync', 'exclusive', + 'nokeep', 'notrace', 'immediate', 'expedited', 'inline', + 'local', 'python', 'accel', 'readwrite', 'writeonly', + 'accelblock', 'memcritical', 'packed', 'varsize', + 'initproc', 'initnode', 'initcall', 'stacksize', + 'createhere', 'createhome', 'reductiontarget', 'iget', + 'nocopy', 'mutable', 'migratable', 'readonly')), Keyword), + inherit, + ], + } + + +class OmgIdlLexer(CLexer): + """ + Lexer for Object Management Group Interface Definition Language. + """ + + name = 'OMG Interface Definition Language' + url = 'https://www.omg.org/spec/IDL/About-IDL/' + aliases = ['omg-idl'] + filenames = ['*.idl', '*.pidl'] + mimetypes = [] + version_added = '2.9' + + scoped_name = r'((::)?\w+)+' + + tokens = { + 'values': [ + (words(('true', 'false'), prefix=r'(?i)', suffix=r'\b'), Number), + (r'([Ll]?)(")', bygroups(String.Affix, String.Double), 'string'), + (r'([Ll]?)(\')(\\[^\']+)(\')', + bygroups(String.Affix, String.Char, String.Escape, String.Char)), + (r'([Ll]?)(\')(\\\')(\')', + bygroups(String.Affix, String.Char, String.Escape, String.Char)), + (r'([Ll]?)(\'.\')', bygroups(String.Affix, String.Char)), + (r'[+-]?\d+(\.\d*)?[Ee][+-]?\d+', Number.Float), + (r'[+-]?(\d+\.\d*)|(\d*\.\d+)([Ee][+-]?\d+)?', Number.Float), + (r'(?i)[+-]?0x[0-9a-f]+', Number.Hex), + (r'[+-]?[1-9]\d*', Number.Integer), + (r'[+-]?0[0-7]*', Number.Oct), + (r'[\+\-\*\/%^&\|~]', Operator), + (words(('<<', '>>')), Operator), + (scoped_name, Name), + (r'[{};:,<>\[\]]', Punctuation), + ], + 'annotation_params': [ + include('whitespace'), + (r'\(', Punctuation, '#push'), + include('values'), + (r'=', Punctuation), + (r'\)', Punctuation, '#pop'), + ], + 'annotation_params_maybe': [ + (r'\(', Punctuation, 'annotation_params'), + include('whitespace'), + default('#pop'), + ], + 'annotation_appl': [ + (r'@' + scoped_name, Name.Decorator, 'annotation_params_maybe'), + ], + 'enum': [ + include('whitespace'), + (r'[{,]', Punctuation), + (r'\w+', Name.Constant), + include('annotation_appl'), + (r'\}', Punctuation, '#pop'), + ], + 'root': [ + include('whitespace'), + (words(( + 'typedef', 'const', + 'in', 'out', 'inout', 'local', + ), prefix=r'(?i)', suffix=r'\b'), Keyword.Declaration), + (words(( + 'void', 'any', 'native', 'bitfield', + 'unsigned', 'boolean', 'char', 'wchar', 'octet', 'short', 'long', + 'int8', 'uint8', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64', + 'float', 'double', 'fixed', + 'sequence', 'string', 'wstring', 'map', + ), prefix=r'(?i)', suffix=r'\b'), Keyword.Type), + (words(( + '@annotation', 'struct', 'union', 'bitset', 'interface', + 'exception', 'valuetype', 'eventtype', 'component', + ), prefix=r'(?i)', suffix=r'(\s+)(\w+)'), bygroups(Keyword, Whitespace, Name.Class)), + (words(( + 'abstract', 'alias', 'attribute', 'case', 'connector', + 'consumes', 'context', 'custom', 'default', 'emits', 'factory', + 'finder', 'getraises', 'home', 'import', 'manages', 'mirrorport', + 'multiple', 'Object', 'oneway', 'primarykey', 'private', 'port', + 'porttype', 'provides', 'public', 
'publishes', 'raises', + 'readonly', 'setraises', 'supports', 'switch', 'truncatable', + 'typeid', 'typename', 'typeprefix', 'uses', 'ValueBase', + ), prefix=r'(?i)', suffix=r'\b'), Keyword), + (r'(?i)(enum|bitmask)(\s+)(\w+)', + bygroups(Keyword, Whitespace, Name.Class), 'enum'), + (r'(?i)(module)(\s+)(\w+)', + bygroups(Keyword.Namespace, Whitespace, Name.Namespace)), + (r'(\w+)(\s*)(=)', bygroups(Name.Constant, Whitespace, Operator)), + (r'[\(\)]', Punctuation), + include('values'), + include('annotation_appl'), + ], + } + + +class PromelaLexer(CLexer): + """ + For the Promela language used with SPIN. + """ + + name = 'Promela' + aliases = ['promela'] + filenames = ['*.pml', '*.prom', '*.prm', '*.promela', '*.pr', '*.pm'] + mimetypes = ['text/x-promela'] + url = 'https://spinroot.com/spin/whatispin.html' + version_added = '2.18' + + # Promela's language reference: + # https://spinroot.com/spin/Man/promela.html + # Promela's grammar definition: + # https://spinroot.com/spin/Man/grammar.html + + tokens = { + 'statements': [ + (r'(\[\]|<>|/\\|\\/)|(U|W|V)\b', Operator), # LTL Operators + (r'@', Punctuation), #remoterefs + (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)), + inherit + ], + 'types': [ + # Predefined (data types) + (words(( + 'bit', 'bool', 'byte', 'pid', 'short', 'int', 'unsigned'), + suffix=r'\b'), + Keyword.Type), + ], + 'keywords': [ + # ControlFlow + (words(( + 'atomic', 'break', 'd_step', 'do', 'od', 'for', 'in', 'goto', + 'if', 'fi', 'unless'), suffix=r'\b'), + Keyword), + # BasicStatements + (words(( + 'assert', 'get_priority', 'printf', 'printm', 'set_priority'), + suffix=r'\b'), + Name.Function), + # Embedded C Code + (words(( + 'c_code', 'c_decl', 'c_expr', 'c_state', 'c_track'), + suffix=r'\b'), + Keyword), + # Predefined (local/global variables) + (words(( + '_', '_last', '_nr_pr', '_pid', '_priority', 'else', 'np_', + 'STDIN'), suffix=r'\b'), + Name.Builtin), + # Predefined (functions) + (words(( + 'empty', 'enabled', 'eval', 'full', 'len', 'nempty', 'nfull', + 'pc_value'), suffix=r'\b'), + Name.Function), + # Predefined (operators) + (r'run\b', Operator.Word), + # Declarators + (words(( + 'active', 'chan', 'D_proctype', 'hidden', 'init', 'local', + 'mtype', 'never', 'notrace', 'proctype', 'show', 'trace', + 'typedef', 'xr', 'xs'), suffix=r'\b'), + Keyword.Declaration), + # Declarators (suffixes) + (words(( + 'priority', 'provided'), suffix=r'\b'), + Keyword), + # MetaTerms (declarators) + (words(( + 'inline', 'ltl', 'select'), suffix=r'\b'), + Keyword.Declaration), + # MetaTerms (keywords) + (r'skip\b', Keyword), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/console.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/console.py new file mode 100644 index 0000000000000000000000000000000000000000..d66cbaa957d564cbdb48c4a8c7c8272100866168 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/console.py @@ -0,0 +1,114 @@ +""" + pygments.lexers.console + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for misc console output. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups +from pygments.token import Generic, Comment, String, Text, Keyword, Name, \ + Punctuation, Number, Whitespace + +__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer'] + + +class VCTreeStatusLexer(RegexLexer): + """ + For colorizing output of version control status commands, like "hg + status" or "svn status". 
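+
+    A hedged illustration (not from the upstream docstring): the kind of
+    input this lexer is meant to colorize looks like::
+
+        M       pygments/lexers/console.py
+        A  +    pygments/lexers/new_lexer.py
+        D       pygments/lexers/old_lexer.py
+        ?       scratch.txt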
+ """ + name = 'VCTreeStatus' + aliases = ['vctreestatus'] + filenames = [] + mimetypes = [] + url = "" + version_added = '2.0' + + tokens = { + 'root': [ + (r'^A \+ C\s+', Generic.Error), + (r'^A\s+\+?\s+', String), + (r'^M\s+', Generic.Inserted), + (r'^C\s+', Generic.Error), + (r'^D\s+', Generic.Deleted), + (r'^[?!]\s+', Comment.Preproc), + (r' >\s+.*\n', Comment.Preproc), + (r'\S+', Text), + (r'\s+', Whitespace), + ] + } + + +class PyPyLogLexer(RegexLexer): + """ + Lexer for PyPy log files. + """ + name = "PyPy Log" + aliases = ["pypylog", "pypy"] + filenames = ["*.pypylog"] + mimetypes = ['application/x-pypylog'] + url = 'pypy.org' + version_added = '1.5' + + tokens = { + "root": [ + (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"), + (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"), + include("extra-stuff"), + ], + "jit-log": [ + (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"), + (r"^\+\d+: ", Comment), + (r"--end of the loop--", Comment), + (r"[ifp]\d+", Name), + (r"ptr\d+", Name), + (r"(\()(\w+(?:\.\w+)?)(\))", + bygroups(Punctuation, Name.Builtin, Punctuation)), + (r"[\[\]=,()]", Punctuation), + (r"(\d+\.\d+|inf|-inf)", Number.Float), + (r"-?\d+", Number.Integer), + (r"'.*'", String), + (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name), + (r"<.*?>+", Name.Builtin), + (r"(label|debug_merge_point|jump|finish)", Name.Class), + (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|" + r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|" + r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|" + r"int_is_true|" + r"uint_floordiv|uint_ge|uint_lt|" + r"float_add|float_sub|float_mul|float_truediv|float_neg|" + r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|" + r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|" + r"cast_int_to_float|cast_float_to_int|" + r"force_token|quasiimmut_field|same_as|virtual_ref_finish|" + r"virtual_ref|mark_opaque_ptr|" + r"call_may_force|call_assembler|call_loopinvariant|" + r"call_release_gil|call_pure|call|" + r"new_with_vtable|new_array|newstr|newunicode|new|" + r"arraylen_gc|" + r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|" + r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|" + r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|" + r"getfield_raw|setfield_gc|setfield_raw|" + r"strgetitem|strsetitem|strlen|copystrcontent|" + r"unicodegetitem|unicodesetitem|unicodelen|" + r"guard_true|guard_false|guard_value|guard_isnull|" + r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|" + r"guard_not_forced|guard_no_exception|guard_not_invalidated)", + Name.Builtin), + include("extra-stuff"), + ], + "jit-backend-counts": [ + (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"), + (r":", Punctuation), + (r"\d+", Number), + include("extra-stuff"), + ], + "extra-stuff": [ + (r"\s+", Whitespace), + (r"#.*?$", Comment), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/ecl.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ecl.py new file mode 100644 index 0000000000000000000000000000000000000000..f1ee3bf176df8c912b8d2bd61c507051e2ce683b --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/ecl.py @@ -0,0 +1,144 @@ +""" + pygments.lexers.ecl + ~~~~~~~~~~~~~~~~~~~ + + Lexers for the ECL language. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, words +from pygments.token import Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace + +__all__ = ['ECLLexer'] + + +class ECLLexer(RegexLexer): + """ + Lexer for the declarative big-data ECL language. + """ + + name = 'ECL' + url = 'https://hpccsystems.com/training/documentation/ecl-language-reference/html' + aliases = ['ecl'] + filenames = ['*.ecl'] + mimetypes = ['application/x-ecl'] + version_added = '1.5' + + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'root': [ + include('whitespace'), + include('statements'), + ], + 'whitespace': [ + (r'\s+', Whitespace), + (r'\/\/.*', Comment.Single), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), + ], + 'statements': [ + include('types'), + include('keywords'), + include('functions'), + include('hash'), + (r'"', String, 'string'), + (r'\'', String, 'string'), + (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float), + (r'0x[0-9a-f]+[lu]*', Number.Hex), + (r'0[0-7]+[lu]*', Number.Oct), + (r'\d+[lu]*', Number.Integer), + (r'[~!%^&*+=|?:<>/-]+', Operator), + (r'[{}()\[\],.;]', Punctuation), + (r'[a-z_]\w*', Name), + ], + 'hash': [ + (r'^#.*$', Comment.Preproc), + ], + 'types': [ + (r'(RECORD|END)\D', Keyword.Declaration), + (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|' + r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|' + r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)', + bygroups(Keyword.Type, Whitespace)), + ], + 'keywords': [ + (words(( + 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL', + 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT', + 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED', + 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT', + 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS', + 'WAIT', 'WHEN'), suffix=r'\b'), + Keyword.Reserved), + # These are classed differently, check later + (words(( + 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', + 'BETWEEN', 'CASE', 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', + 'ENDC++', 'ENDMACRO', 'EXCEPT', 'EXCLUSIVE', 'EXPIRE', 'EXPORT', + 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL', 'FUNCTION', + 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', + 'JOINED', 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', + 'LOCALE', 'LOOKUP', 'MACRO', 'MANY', 'MAXCOUNT', 'MAXLENGTH', + 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE', 'NOROOT', + 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', + 'OVERWRITE', 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', + 'PIPE', 'QUOTE', 'RELATIONSHIP', 'REPEAT', 'RETURN', 'RIGHT', + 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW', 'SKIP', + 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', + 'TRANSFORM', 'TRIM', 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', + 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD', 'WITHIN', 'XML', 'XPATH', + '__COMPRESSED__'), suffix=r'\b'), + Keyword.Reserved), + ], + 'functions': [ + (words(( + 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', + 'ATAN2', 'AVE', 'CASE', 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', + 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS', 'COSH', 'COUNT', + 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE', + 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', + 'ERROR', 'EVALUATE', 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', + 'EXP', 
'FAILCODE', 'FAILMESSAGE', 'FETCH', 'FROMUNICODE', + 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32', + 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', + 'INTFORMAT', 'ISVALID', 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', + 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP', 'MAP', 'MATCHED', + 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE', 'MAX', + 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', + 'PARSE', 'PIPE', 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', + 'RANDOM', 'RANGE', 'RANK', 'RANKED', 'REALFORMAT', 'RECORDOF', + 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED', 'ROLLUP', + 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', + 'SINH', 'SIZEOF', 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', + 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH', 'THISNODE', 'TOPN', + 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP', + 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', + 'XMLENCODE', 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'), + Name.Function), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\'', String, '#pop'), + (r'[^"\']+', String), + ], + } + + def analyse_text(text): + """This is very difficult to guess relative to other business languages. + -> in conjunction with BEGIN/END seems relatively rare though.""" + result = 0 + + if '->' in text: + result += 0.01 + if 'BEGIN' in text: + result += 0.01 + if 'END' in text: + result += 0.01 + + return result diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/futhark.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/futhark.py new file mode 100644 index 0000000000000000000000000000000000000000..dd2efe83e709f962a9b8580d5f7324a75083b005 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/futhark.py @@ -0,0 +1,105 @@ +""" + pygments.lexers.futhark + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Futhark language + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from pygments.lexer import RegexLexer, bygroups +from pygments.token import Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace +from pygments import unistring as uni + +__all__ = ['FutharkLexer'] + + +class FutharkLexer(RegexLexer): + """ + A Futhark lexer + """ + name = 'Futhark' + url = 'https://futhark-lang.org/' + aliases = ['futhark'] + filenames = ['*.fut'] + mimetypes = ['text/x-futhark'] + version_added = '2.8' + + num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64') + + other_types = ('bool', ) + + reserved = ('if', 'then', 'else', 'def', 'let', 'loop', 'in', 'with', + 'type', 'type~', 'type^', + 'val', 'entry', 'for', 'while', 'do', 'case', 'match', + 'include', 'import', 'module', 'open', 'local', 'assert', '_') + + ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK', + 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE', + 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN', + 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL') + + num_postfix = r'({})?'.format('|'.join(num_types)) + + identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*' + + # opstart_re = '+\-\*/%=\!><\|&\^' + + tokens = { + 'root': [ + (r'--(.*?)$', Comment.Single), + (r'\s+', Whitespace), + (r'\(\)', Punctuation), + (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved), + (r'\b({})(?!\')\b'.format('|'.join(num_types + other_types)), Keyword.Type), + + # Identifiers + (r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc), + (rf'[#!]?({identifier_re}\.)*{identifier_re}', Name), + + (r'\\', Operator), + (r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator), + (r'[][(),:;`{}?.\'~^]', Punctuation), + + # Numbers + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix, + Number.Float), + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*' + r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float), + (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float), + (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?' + num_postfix, Number.Float), + (r'0[bB]_*[01](_*[01])*' + num_postfix, Number.Bin), + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*' + num_postfix, Number.Hex), + (r'\d(_*\d)*' + num_postfix, Number.Integer), + + # Character/String Literals + (r"'", String.Char, 'character'), + (r'"', String, 'string'), + # Special + (r'\[[a-zA-Z_\d]*\]', Keyword.Type), + (r'\(\)', Name.Builtin), + ], + 'character': [ + # Allows multi-chars, incorrectly. + (r"[^\\']'", String.Char, '#pop'), + (r"\\", String.Escape, 'escape'), + ("'", String.Char, '#pop'), + ], + 'string': [ + (r'[^\\"]+', String), + (r"\\", String.Escape, 'escape'), + ('"', String, '#pop'), + ], + + 'escape': [ + (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), + (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'), + ('|'.join(ascii), String.Escape, '#pop'), + (r'o[0-7]+', String.Escape, '#pop'), + (r'x[\da-fA-F]+', String.Escape, '#pop'), + (r'\d+', String.Escape, '#pop'), + (r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/graphics.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/graphics.py new file mode 100644 index 0000000000000000000000000000000000000000..400be4fd8950b130454b9b43c039415fdc166309 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/graphics.py @@ -0,0 +1,794 @@ +""" + pygments.lexers.graphics + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for computer graphics and plotting related languages. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. 
+ :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, words, include, bygroups, using, \ + this, default +from pygments.token import Text, Comment, Operator, Keyword, Name, \ + Number, Punctuation, String, Whitespace + +__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer', + 'PovrayLexer', 'HLSLShaderLexer'] + + +class GLShaderLexer(RegexLexer): + """ + GLSL (OpenGL Shader) lexer. + """ + name = 'GLSL' + aliases = ['glsl'] + filenames = ['*.vert', '*.frag', '*.geo'] + mimetypes = ['text/x-glslsrc'] + url = 'https://www.khronos.org/api/opengl' + version_added = '1.1' + + tokens = { + 'root': [ + (r'#(?:.*\\\n)*.*$', Comment.Preproc), + (r'//.*$', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', + Operator), + (r'[?:]', Operator), # quick hack for ternary + (r'\bdefined\b', Operator), + (r'[;{}(),\[\]]', Punctuation), + # FIXME when e is present, no decimal point needed + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), + (r'0[xX][0-9a-fA-F]*', Number.Hex), + (r'0[0-7]*', Number.Oct), + (r'[1-9][0-9]*', Number.Integer), + (words(( + # Storage qualifiers + 'attribute', 'const', 'uniform', 'varying', + 'buffer', 'shared', 'in', 'out', + # Layout qualifiers + 'layout', + # Interpolation qualifiers + 'flat', 'smooth', 'noperspective', + # Auxiliary qualifiers + 'centroid', 'sample', 'patch', + # Parameter qualifiers. Some double as Storage qualifiers + 'inout', + # Precision qualifiers + 'lowp', 'mediump', 'highp', 'precision', + # Invariance qualifiers + 'invariant', + # Precise qualifiers + 'precise', + # Memory qualifiers + 'coherent', 'volatile', 'restrict', 'readonly', 'writeonly', + # Statements + 'break', 'continue', 'do', 'for', 'while', 'switch', + 'case', 'default', 'if', 'else', 'subroutine', + 'discard', 'return', 'struct'), + prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + # Boolean values + 'true', 'false'), + prefix=r'\b', suffix=r'\b'), + Keyword.Constant), + (words(( + # Miscellaneous types + 'void', 'atomic_uint', + # Floating-point scalars and vectors + 'float', 'vec2', 'vec3', 'vec4', + 'double', 'dvec2', 'dvec3', 'dvec4', + # Integer scalars and vectors + 'int', 'ivec2', 'ivec3', 'ivec4', + 'uint', 'uvec2', 'uvec3', 'uvec4', + # Boolean scalars and vectors + 'bool', 'bvec2', 'bvec3', 'bvec4', + # Matrices + 'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', 'dmat4', + 'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4', + 'mat3x2', 'mat3x3', 'mat3x4', 'dmat3x2', 'dmat3x3', + 'dmat3x4', 'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', 'dmat4x4', + # Floating-point samplers + 'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube', + 'sampler1DArray', 'sampler2DArray', 'samplerCubeArray', + 'sampler2DRect', 'samplerBuffer', + 'sampler2DMS', 'sampler2DMSArray', + # Shadow samplers + 'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow', + 'sampler1DArrayShadow', 'sampler2DArrayShadow', + 'samplerCubeArrayShadow', 'sampler2DRectShadow', + # Signed integer samplers + 'isampler1D', 'isampler2D', 'isampler3D', 'isamplerCube', + 'isampler1DArray', 'isampler2DArray', 'isamplerCubeArray', + 'isampler2DRect', 'isamplerBuffer', + 'isampler2DMS', 'isampler2DMSArray', + # Unsigned integer samplers + 'usampler1D', 'usampler2D', 'usampler3D', 'usamplerCube', + 'usampler1DArray', 'usampler2DArray', 'usamplerCubeArray', + 'usampler2DRect', 'usamplerBuffer', + 'usampler2DMS', 
'usampler2DMSArray', + # Floating-point image types + 'image1D', 'image2D', 'image3D', 'imageCube', + 'image1DArray', 'image2DArray', 'imageCubeArray', + 'image2DRect', 'imageBuffer', + 'image2DMS', 'image2DMSArray', + # Signed integer image types + 'iimage1D', 'iimage2D', 'iimage3D', 'iimageCube', + 'iimage1DArray', 'iimage2DArray', 'iimageCubeArray', + 'iimage2DRect', 'iimageBuffer', + 'iimage2DMS', 'iimage2DMSArray', + # Unsigned integer image types + 'uimage1D', 'uimage2D', 'uimage3D', 'uimageCube', + 'uimage1DArray', 'uimage2DArray', 'uimageCubeArray', + 'uimage2DRect', 'uimageBuffer', + 'uimage2DMS', 'uimage2DMSArray'), + prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (words(( + # Reserved for future use. + 'common', 'partition', 'active', 'asm', 'class', + 'union', 'enum', 'typedef', 'template', 'this', + 'resource', 'goto', 'inline', 'noinline', 'public', + 'static', 'extern', 'external', 'interface', 'long', + 'short', 'half', 'fixed', 'unsigned', 'superp', 'input', + 'output', 'hvec2', 'hvec3', 'hvec4', 'fvec2', 'fvec3', + 'fvec4', 'sampler3DRect', 'filter', 'sizeof', 'cast', + 'namespace', 'using'), + prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # All names beginning with "gl_" are reserved. + (r'gl_\w*', Name.Builtin), + (r'[a-zA-Z_]\w*', Name), + (r'\.', Punctuation), + (r'\s+', Whitespace), + ], + } + + +class HLSLShaderLexer(RegexLexer): + """ + HLSL (Microsoft Direct3D Shader) lexer. + """ + name = 'HLSL' + aliases = ['hlsl'] + filenames = ['*.hlsl', '*.hlsli'] + mimetypes = ['text/x-hlsl'] + url = 'https://learn.microsoft.com/en-us/windows/win32/direct3dhlsl/dx-graphics-hlsl' + version_added = '2.3' + + tokens = { + 'root': [ + (r'#(?:.*\\\n)*.*$', Comment.Preproc), + (r'//.*$', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', + Operator), + (r'[?:]', Operator), # quick hack for ternary + (r'\bdefined\b', Operator), + (r'[;{}(),.\[\]]', Punctuation), + # FIXME when e is present, no decimal point needed + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?f?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?f?', Number.Float), + (r'0[xX][0-9a-fA-F]*', Number.Hex), + (r'0[0-7]*', Number.Oct), + (r'[1-9][0-9]*', Number.Integer), + (r'"', String, 'string'), + (words(( + 'asm','asm_fragment','break','case','cbuffer','centroid','class', + 'column_major','compile','compile_fragment','const','continue', + 'default','discard','do','else','export','extern','for','fxgroup', + 'globallycoherent','groupshared','if','in','inline','inout', + 'interface','line','lineadj','linear','namespace','nointerpolation', + 'noperspective','NULL','out','packoffset','pass','pixelfragment', + 'point','precise','return','register','row_major','sample', + 'sampler','shared','stateblock','stateblock_state','static', + 'struct','switch','tbuffer','technique','technique10', + 'technique11','texture','typedef','triangle','triangleadj', + 'uniform','vertexfragment','volatile','while'), + prefix=r'\b', suffix=r'\b'), + Keyword), + (words(('true','false'), prefix=r'\b', suffix=r'\b'), + Keyword.Constant), + (words(( + 'auto','catch','char','const_cast','delete','dynamic_cast','enum', + 'explicit','friend','goto','long','mutable','new','operator', + 'private','protected','public','reinterpret_cast','short','signed', + 'sizeof','static_cast','template','this','throw','try','typename', + 'union','unsigned','using','virtual'), + prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (words(( + 
'dword','matrix','snorm','string','unorm','unsigned','void','vector', + 'BlendState','Buffer','ByteAddressBuffer','ComputeShader', + 'DepthStencilState','DepthStencilView','DomainShader', + 'GeometryShader','HullShader','InputPatch','LineStream', + 'OutputPatch','PixelShader','PointStream','RasterizerState', + 'RenderTargetView','RasterizerOrderedBuffer', + 'RasterizerOrderedByteAddressBuffer', + 'RasterizerOrderedStructuredBuffer','RasterizerOrderedTexture1D', + 'RasterizerOrderedTexture1DArray','RasterizerOrderedTexture2D', + 'RasterizerOrderedTexture2DArray','RasterizerOrderedTexture3D', + 'RWBuffer','RWByteAddressBuffer','RWStructuredBuffer', + 'RWTexture1D','RWTexture1DArray','RWTexture2D','RWTexture2DArray', + 'RWTexture3D','SamplerState','SamplerComparisonState', + 'StructuredBuffer','Texture1D','Texture1DArray','Texture2D', + 'Texture2DArray','Texture2DMS','Texture2DMSArray','Texture3D', + 'TextureCube','TextureCubeArray','TriangleStream','VertexShader'), + prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (words(( + 'bool','double','float','int','half','min16float','min10float', + 'min16int','min12int','min16uint','uint'), + prefix=r'\b', suffix=r'([1-4](x[1-4])?)?\b'), + Keyword.Type), # vector and matrix types + (words(( + 'abort','abs','acos','all','AllMemoryBarrier', + 'AllMemoryBarrierWithGroupSync','any','AppendStructuredBuffer', + 'asdouble','asfloat','asin','asint','asuint','asuint','atan', + 'atan2','ceil','CheckAccessFullyMapped','clamp','clip', + 'CompileShader','ConsumeStructuredBuffer','cos','cosh','countbits', + 'cross','D3DCOLORtoUBYTE4','ddx','ddx_coarse','ddx_fine','ddy', + 'ddy_coarse','ddy_fine','degrees','determinant', + 'DeviceMemoryBarrier','DeviceMemoryBarrierWithGroupSync','distance', + 'dot','dst','errorf','EvaluateAttributeAtCentroid', + 'EvaluateAttributeAtSample','EvaluateAttributeSnapped','exp', + 'exp2','f16tof32','f32tof16','faceforward','firstbithigh', + 'firstbitlow','floor','fma','fmod','frac','frexp','fwidth', + 'GetRenderTargetSampleCount','GetRenderTargetSamplePosition', + 'GlobalOrderedCountIncrement','GroupMemoryBarrier', + 'GroupMemoryBarrierWithGroupSync','InterlockedAdd','InterlockedAnd', + 'InterlockedCompareExchange','InterlockedCompareStore', + 'InterlockedExchange','InterlockedMax','InterlockedMin', + 'InterlockedOr','InterlockedXor','isfinite','isinf','isnan', + 'ldexp','length','lerp','lit','log','log10','log2','mad','max', + 'min','modf','msad4','mul','noise','normalize','pow','printf', + 'Process2DQuadTessFactorsAvg','Process2DQuadTessFactorsMax', + 'Process2DQuadTessFactorsMin','ProcessIsolineTessFactors', + 'ProcessQuadTessFactorsAvg','ProcessQuadTessFactorsMax', + 'ProcessQuadTessFactorsMin','ProcessTriTessFactorsAvg', + 'ProcessTriTessFactorsMax','ProcessTriTessFactorsMin', + 'QuadReadLaneAt','QuadSwapX','QuadSwapY','radians','rcp', + 'reflect','refract','reversebits','round','rsqrt','saturate', + 'sign','sin','sincos','sinh','smoothstep','sqrt','step','tan', + 'tanh','tex1D','tex1D','tex1Dbias','tex1Dgrad','tex1Dlod', + 'tex1Dproj','tex2D','tex2D','tex2Dbias','tex2Dgrad','tex2Dlod', + 'tex2Dproj','tex3D','tex3D','tex3Dbias','tex3Dgrad','tex3Dlod', + 'tex3Dproj','texCUBE','texCUBE','texCUBEbias','texCUBEgrad', + 'texCUBElod','texCUBEproj','transpose','trunc','WaveAllBitAnd', + 'WaveAllMax','WaveAllMin','WaveAllBitOr','WaveAllBitXor', + 'WaveAllEqual','WaveAllProduct','WaveAllSum','WaveAllTrue', + 'WaveAnyTrue','WaveBallot','WaveGetLaneCount','WaveGetLaneIndex', + 'WaveGetOrderedIndex','WaveIsHelperLane','WaveOnce', + 
'WavePrefixProduct','WavePrefixSum','WaveReadFirstLane', + 'WaveReadLaneAt'), + prefix=r'\b', suffix=r'\b'), + Name.Builtin), # built-in functions + (words(( + 'SV_ClipDistance','SV_ClipDistance0','SV_ClipDistance1', + 'SV_Culldistance','SV_CullDistance0','SV_CullDistance1', + 'SV_Coverage','SV_Depth','SV_DepthGreaterEqual', + 'SV_DepthLessEqual','SV_DispatchThreadID','SV_DomainLocation', + 'SV_GroupID','SV_GroupIndex','SV_GroupThreadID','SV_GSInstanceID', + 'SV_InnerCoverage','SV_InsideTessFactor','SV_InstanceID', + 'SV_IsFrontFace','SV_OutputControlPointID','SV_Position', + 'SV_PrimitiveID','SV_RenderTargetArrayIndex','SV_SampleIndex', + 'SV_StencilRef','SV_TessFactor','SV_VertexID', + 'SV_ViewportArrayIndex'), + prefix=r'\b', suffix=r'\b'), + Name.Decorator), # system-value semantics + (r'\bSV_Target[0-7]?\b', Name.Decorator), + (words(( + 'allow_uav_condition','branch','call','domain','earlydepthstencil', + 'fastopt','flatten','forcecase','instance','loop','maxtessfactor', + 'numthreads','outputcontrolpoints','outputtopology','partitioning', + 'patchconstantfunc','unroll'), + prefix=r'\b', suffix=r'\b'), + Name.Decorator), # attributes + (r'[a-zA-Z_]\w*', Name), + (r'\\$', Comment.Preproc), # backslash at end of line -- usually macro continuation + (r'\s+', Whitespace), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' + r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + } + + +class PostScriptLexer(RegexLexer): + """ + Lexer for PostScript files. + """ + name = 'PostScript' + url = 'https://en.wikipedia.org/wiki/PostScript' + aliases = ['postscript', 'postscr'] + filenames = ['*.ps', '*.eps'] + mimetypes = ['application/postscript'] + version_added = '1.4' + + delimiter = r'()<>\[\]{}/%\s' + delimiter_end = rf'(?=[{delimiter}])' + + valid_name_chars = rf'[^{delimiter}]' + valid_name = rf"{valid_name_chars}+{delimiter_end}" + + tokens = { + 'root': [ + # All comment types + (r'^%!.+$', Comment.Preproc), + (r'%%.*$', Comment.Special), + (r'(^%.*\n){2,}', Comment.Multiline), + (r'%.*$', Comment.Single), + + # String literals are awkward; enter separate state. + (r'\(', String, 'stringliteral'), + + (r'[{}<>\[\]]', Punctuation), + + # Numbers + (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex), + # Slight abuse: use Oct to signify any explicit base system + (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)' + r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct), + (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?' + + delimiter_end, Number.Float), + (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer), + + # References + (rf'\/{valid_name}', Name.Variable), + + # Names + (valid_name, Name.Function), # Anything else is executed + + # These keywords taken from + # + # Is there an authoritative list anywhere that doesn't involve + # trawling documentation? 
+ + (r'(false|true)' + delimiter_end, Keyword.Constant), + + # Conditionals / flow control + (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)' + + delimiter_end, Keyword.Reserved), + + (words(( + 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin', + 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat', + 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix', + 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix', + 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end', + 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get', + 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv', + 'idtransform', 'index', 'invertmatrix', 'itransform', 'length', + 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto', + 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print', + 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat', + 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run', + 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray', + 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix', + 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt', + 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror', + 'transform', 'translate', 'truncate', 'typecheck', 'undefined', + 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end), + Name.Builtin), + + (r'\s+', Whitespace), + ], + + 'stringliteral': [ + (r'[^()\\]+', String), + (r'\\', String.Escape, 'escape'), + (r'\(', String, '#push'), + (r'\)', String, '#pop'), + ], + + 'escape': [ + (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'), + default('#pop'), + ], + } + + +class AsymptoteLexer(RegexLexer): + """ + For Asymptote source code. + """ + name = 'Asymptote' + url = 'http://asymptote.sf.net/' + aliases = ['asymptote', 'asy'] + filenames = ['*.asy'] + mimetypes = ['text/x-asymptote'] + version_added = '1.2' + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+' + + tokens = { + 'whitespace': [ + (r'\n', Whitespace), + (r'\s+', Whitespace), + (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment), + ], + 'statements': [ + # simple string (TeX friendly) + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), + # C style string (with character escapes) + (r"'", String, 'string'), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex), + (r'0[0-7]+[Ll]?', Number.Oct), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'[()\[\],.]', Punctuation), + (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)), + (r'(and|controls|tension|atleast|curl|if|else|while|for|do|' + r'return|break|continue|struct|typedef|new|access|import|' + r'unravel|from|include|quote|static|public|private|restricted|' + r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword), + # Since an asy-type-name can be also an asy-function-name, + # in the following we test if the string " [a-zA-Z]" follows + # the Keyword.Type. + # Of course it is not perfect ! 
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|' + r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|' + r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|' + r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|' + r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|' + r'path3|pen|picture|point|position|projection|real|revolution|' + r'scaleT|scientific|segment|side|slice|splitface|string|surface|' + r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|' + r'transformation|tree|triangle|trilinear|triple|vector|' + r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type), + # Now the asy-type-name which are not asy-function-name + # except yours ! + # Perhaps useless + (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|' + r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|' + r'picture|position|real|revolution|slice|splitface|ticksgridT|' + r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type), + (r'[a-zA-Z_]\w*:(?!:)', Name.Label), + (r'[a-zA-Z_]\w*', Name), + ], + 'root': [ + include('whitespace'), + # functions + (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')(\{)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation), + 'function'), + # function declarations + (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')(;)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation)), + default('statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + ('[{}]', Punctuation), + (';', Punctuation, '#pop'), + ], + 'function': [ + include('whitespace'), + include('statements'), + (';', Punctuation), + (r'\{', Punctuation, '#push'), + (r'\}', Punctuation, '#pop'), + ], + 'string': [ + (r"'", String, '#pop'), + (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'\n', String), + (r"[^\\'\n]+", String), # all other characters + (r'\\\n', String), + (r'\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + } + + def get_tokens_unprocessed(self, text): + from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME + for index, token, value in \ + RegexLexer.get_tokens_unprocessed(self, text): + if token is Name and value in ASYFUNCNAME: + token = Name.Function + elif token is Name and value in ASYVARNAME: + token = Name.Variable + yield index, token, value + + +def _shortened(word): + dpos = word.find('$') + return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b' + for i in range(len(word), dpos, -1)) + + +def _shortened_many(*words): + return '|'.join(map(_shortened, words)) + + +class GnuplotLexer(RegexLexer): + """ + For Gnuplot plotting scripts. 
+ """ + + name = 'Gnuplot' + url = 'http://gnuplot.info/' + aliases = ['gnuplot'] + filenames = ['*.plot', '*.plt'] + mimetypes = ['text/x-gnuplot'] + version_added = '0.11' + + tokens = { + 'root': [ + include('whitespace'), + (_shortened('bi$nd'), Keyword, 'bind'), + (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'), + (_shortened('f$it'), Keyword, 'fit'), + (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'), + (r'else\b', Keyword), + (_shortened('pa$use'), Keyword, 'pause'), + (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'), + (_shortened('sa$ve'), Keyword, 'save'), + (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')), + (_shortened_many('sh$ow', 'uns$et'), + Keyword, ('noargs', 'optionarg')), + (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear', + 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int', + 'pwd$', 're$read', 'res$et', 'scr$eendump', + 'she$ll', 'sy$stem', 'up$date'), + Keyword, 'genericargs'), + (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump', + 'she$ll', 'test$'), + Keyword, 'noargs'), + (r'([a-zA-Z_]\w*)(\s*)(=)', + bygroups(Name.Variable, Whitespace, Operator), 'genericargs'), + (r'([a-zA-Z_]\w*)(\s*)(\()(.*?)(\))(\s*)(=)', + bygroups(Name.Function, Whitespace, Punctuation, + Text, Punctuation, Whitespace, Operator), 'genericargs'), + (r'@[a-zA-Z_]\w*', Name.Constant), # macros + (r';', Keyword), + ], + 'comment': [ + (r'[^\\\n]+', Comment), + (r'\\\n', Comment), + (r'\\', Comment), + # don't add the newline to the Comment token + default('#pop'), + ], + 'whitespace': [ + ('#', Comment, 'comment'), + (r'[ \t\v\f]+', Whitespace), + ], + 'noargs': [ + include('whitespace'), + # semicolon and newline end the argument list + (r';', Punctuation, '#pop'), + (r'\n', Whitespace, '#pop'), + ], + 'dqstring': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + (r'\n', Whitespace, '#pop'), # newline ends the string too + ], + 'sqstring': [ + (r"''", String), # escaped single quote + (r"'", String, '#pop'), + (r"[^\\'\n]+", String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # normal backslash + (r'\n', Whitespace, '#pop'), # newline ends the string too + ], + 'genericargs': [ + include('noargs'), + (r'"', String, 'dqstring'), + (r"'", String, 'sqstring'), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), + (r'(\d+\.\d*|\.\d+)', Number.Float), + (r'-?\d+', Number.Integer), + ('[,.~!%^&*+=|?:<>/-]', Operator), + (r'[{}()\[\]]', Punctuation), + (r'(eq|ne)\b', Operator.Word), + (r'([a-zA-Z_]\w*)(\s*)(\()', + bygroups(Name.Function, Text, Punctuation)), + (r'[a-zA-Z_]\w*', Name), + (r'@[a-zA-Z_]\w*', Name.Constant), # macros + (r'(\\)(\n)', bygroups(Text, Whitespace)), + ], + 'optionarg': [ + include('whitespace'), + (_shortened_many( + "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der", + "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta", + "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign", + "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid", + "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle", + "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale", + "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin", + "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot", + "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", 
"myt$ics", + "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics", + "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput", + "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot", + "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze", + "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs", + "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le", + "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta", + "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel", + "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs", + "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs", + "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs", + "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs", + "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs", + "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs", + "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs", + "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange", + "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange", + "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis", + "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'), + ], + 'bind': [ + ('!', Keyword, '#pop'), + (_shortened('all$windows'), Name.Builtin), + include('genericargs'), + ], + 'quit': [ + (r'gnuplot\b', Keyword), + include('noargs'), + ], + 'fit': [ + (r'via\b', Name.Builtin), + include('plot'), + ], + 'if': [ + (r'\)', Punctuation, '#pop'), + include('genericargs'), + ], + 'pause': [ + (r'(mouse|any|button1|button2|button3)\b', Name.Builtin), + (_shortened('key$press'), Name.Builtin), + include('genericargs'), + ], + 'plot': [ + (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex', + 'mat$rix', 's$mooth', 'thru$', 't$itle', + 'not$itle', 'u$sing', 'w$ith'), + Name.Builtin), + include('genericargs'), + ], + 'save': [ + (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'), + Name.Builtin), + include('genericargs'), + ], + } + + +class PovrayLexer(RegexLexer): + """ + For Persistence of Vision Raytracer files. 
+ """ + name = 'POVRay' + url = 'http://www.povray.org/' + aliases = ['pov'] + filenames = ['*.pov', '*.inc'] + mimetypes = ['text/x-povray'] + version_added = '0.11' + + tokens = { + 'root': [ + (r'/\*[\w\W]*?\*/', Comment.Multiline), + (r'//.*$', Comment.Single), + (r'(?s)"(?:\\.|[^"\\])+"', String.Double), + (words(( + 'break', 'case', 'debug', 'declare', 'default', 'define', 'else', + 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef', + 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render', + 'statistics', 'switch', 'undef', 'version', 'warning', 'while', + 'write'), prefix=r'#', suffix=r'\b'), + Comment.Preproc), + (words(( + 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout', + 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle', + 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma', + 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation', + 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples', + 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size', + 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map', + 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock', + 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat', + 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh', + 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping', + 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction', + 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity', + 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent', + 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false', + 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor', + 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif', + 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold', + 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex', + 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate', + 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard', + 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like', + 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map', + 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level', + 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar', + 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves', + 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open', + 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3', + 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment', + 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm', + 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color', + 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow', + 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red', + 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt', + 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave', + 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere', + 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral', + 'spiral1', 
'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str', + 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys', + 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3', + 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold', + 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit', + 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type', + 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps', + 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength', + 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light', + 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width', + 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference', + 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe', + 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism', + 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid', + 'text', 'torus', 'triangle', 'union'), suffix=r'\b'), + Name.Builtin), + (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo), + (r'[a-zA-Z_]\w*', Name), + (r'[0-9]*\.[0-9]+', Number.Float), + (r'[0-9]+', Number.Integer), + (r'[\[\](){}<>;,]', Punctuation), + (r'[-+*/=.|&]|<=|>=|!=', Operator), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), + (r'\s+', Whitespace), + ] + } + + def analyse_text(text): + """POVRAY is similar to JSON/C, but the combination of camera and + light_source is probably not very likely elsewhere. HLSL or GLSL + are similar (GLSL even has #version), but they miss #declare, and + light_source/camera are not keywords anywhere else -- it's fair + to assume though that any POVRAY scene must have a camera and + lightsource.""" + result = 0 + if '#version' in text: + result += 0.05 + if '#declare' in text: + result += 0.05 + if 'camera' in text: + result += 0.05 + if 'light_source' in text: + result += 0.1 + + return result diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/hdl.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/hdl.py new file mode 100644 index 0000000000000000000000000000000000000000..d22b66ff6a035d5512aa32997dc66f7788eb0ab2 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/hdl.py @@ -0,0 +1,466 @@ +""" + pygments.lexers.hdl + ~~~~~~~~~~~~~~~~~~~ + + Lexers for hardware descriptor languages. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, include, using, this, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace + +__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer'] + + +class VerilogLexer(RegexLexer): + """ + For verilog source code with preprocessor directives. 
+ """ + name = 'verilog' + aliases = ['verilog', 'v'] + filenames = ['*.v'] + mimetypes = ['text/x-verilog'] + url = 'https://en.wikipedia.org/wiki/Verilog' + version_added = '1.4' + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' + + tokens = { + 'root': [ + (r'^\s*`define', Comment.Preproc, 'macro'), + (r'\s+', Whitespace), + (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation + (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'[{}#@]', Punctuation), + (r'L?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex), + (r'([0-9]+)|(\'b)[01]+', Number.Bin), + (r'([0-9]+)|(\'d)[0-9]+', Number.Integer), + (r'([0-9]+)|(\'o)[0-7]+', Number.Oct), + (r'\'[01xz]', Number), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'[()\[\],.;\']', Punctuation), + (r'`[a-zA-Z_]\w*', Name.Constant), + + (r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text)), + (r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text), + 'import'), + + (words(( + 'always', 'always_comb', 'always_ff', 'always_latch', 'and', + 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1', + 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign', + 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase', + 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive', + 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for', + 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0', + 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large', + 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge', + 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed', + 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1', + 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return', + 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed', + 'small', 'specify', 'specparam', 'strength', 'string', 'strong0', + 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1', + 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait', + 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'), + Keyword), + + (words(( + 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype', + 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected', + 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate', + 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames', + 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames', + 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive', + 'undef'), prefix=r'`', suffix=r'\b'), + Comment.Preproc), + + (words(( + 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose', + 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite', + 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log', + 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale', + 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset', + 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope', + 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb', + 
'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'), + prefix=r'\$', suffix=r'\b'), + Name.Builtin), + + (words(( + 'byte', 'shortint', 'int', 'longint', 'integer', 'time', + 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand', + 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor' + 'shortreal', 'real', 'realtime'), suffix=r'\b'), + Keyword.Type), + (r'[a-zA-Z_]\w*:(?!:)', Name.Label), + (r'\$?[a-zA-Z_]\w*', Name), + (r'\\(\S+)', Name), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation + (r'\\', String), # stray backslash + ], + 'macro': [ + (r'[^/\n]+', Comment.Preproc), + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'//.*?\n', Comment.Single, '#pop'), + (r'/', Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Whitespace, '#pop'), + ], + 'import': [ + (r'[\w:]+\*?', Name.Namespace, '#pop') + ] + } + + def analyse_text(text): + """Verilog code will use one of reg/wire/assign for sure, and that + is not common elsewhere.""" + result = 0 + if 'reg' in text: + result += 0.1 + if 'wire' in text: + result += 0.1 + if 'assign' in text: + result += 0.1 + + return result + + +class SystemVerilogLexer(RegexLexer): + """ + Extends verilog lexer to recognise all SystemVerilog keywords from IEEE + 1800-2009 standard. + """ + name = 'systemverilog' + aliases = ['systemverilog', 'sv'] + filenames = ['*.sv', '*.svh'] + mimetypes = ['text/x-systemverilog'] + url = 'https://en.wikipedia.org/wiki/SystemVerilog' + version_added = '1.5' + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' + + tokens = { + 'root': [ + (r'^(\s*)(`define)', bygroups(Whitespace, Comment.Preproc), 'macro'), + (r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace)), + (r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace), 'import'), + + (r'\s+', Whitespace), + (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation + (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'[{}#@]', Punctuation), + (r'L?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + + (r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*', + Number.Bin), + (r'([1-9][_0-9]*)?\s*\'[sS]?[oO]\s*[xXzZ?0-7][_xXzZ?0-7]*', + Number.Oct), + (r'([1-9][_0-9]*)?\s*\'[sS]?[dD]\s*[xXzZ?0-9][_xXzZ?0-9]*', + Number.Integer), + (r'([1-9][_0-9]*)?\s*\'[sS]?[hH]\s*[xXzZ?0-9a-fA-F][_xXzZ?0-9a-fA-F]*', + Number.Hex), + + (r'\'[01xXzZ]', Number), + (r'[0-9][_0-9]*', Number.Integer), + + (r'[~!%^&*+=|?:<>/-]', Operator), + (words(('inside', 'dist'), suffix=r'\b'), Operator.Word), + + (r'[()\[\],.;\'$]', Punctuation), + (r'`[a-zA-Z_]\w*', Name.Constant), + + (words(( + 'accept_on', 'alias', 'always', 'always_comb', 'always_ff', + 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic', + 'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf', + 'bufif0', 'bufif1', 'case', 'casex', 'casez', 'cell', + 'checker', 'clocking', 'cmos', 'config', + 'constraint', 'context', 'continue', 'cover', 'covergroup', + 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design', + 'disable', 'do', 'edge', 'else', 'end', 'endcase', + 
'endchecker', 'endclocking', 'endconfig', 'endfunction', + 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage', + 'endprimitive', 'endprogram', 'endproperty', 'endsequence', + 'endspecify', 'endtable', 'endtask', 'enum', 'eventually', + 'expect', 'export', 'extern', 'final', 'first_match', + 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function', + 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff', + 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'implements', 'import', + 'incdir', 'include', 'initial', 'inout', 'input', + 'instance', 'interconnect', 'interface', 'intersect', 'join', + 'join_any', 'join_none', 'large', 'let', 'liblist', 'library', + 'local', 'localparam', 'macromodule', 'matches', + 'medium', 'modport', 'module', 'nand', 'negedge', 'nettype', 'new', 'nexttime', + 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null', + 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge', + 'primitive', 'priority', 'program', 'property', 'protected', 'pull0', + 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect', + 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase', + 'randsequence', 'rcmos', 'ref', + 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos', + 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually', + 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence', + 'showcancelled', 'small', 'soft', 'solve', + 'specify', 'specparam', 'static', 'strong', 'strong0', + 'strong1', 'struct', 'super', 'sync_accept_on', + 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout', + 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', + 'typedef', 'union', 'unique', 'unique0', 'until', + 'until_with', 'untyped', 'use', 'vectored', + 'virtual', 'wait', 'wait_order', 'weak', 'weak0', + 'weak1', 'while', 'wildcard', 'with', 'within', + 'xnor', 'xor'), + suffix=r'\b'), + Keyword), + + (r'(class)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword.Declaration, Whitespace, Name.Class)), + (r'(extends)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword.Declaration, Whitespace, Name.Class)), + (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?', + bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace, Name.Class)), + + (words(( + # Variable types + 'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer', + 'logic', 'longint', 'real', 'realtime', 'reg', 'shortint', + 'shortreal', 'signed', 'string', 'time', 'type', 'unsigned', + 'var', 'void', + # Net types + 'supply0', 'supply1', 'tri', 'triand', 'trior', 'trireg', + 'tri0', 'tri1', 'uwire', 'wand', 'wire', 'wor'), + suffix=r'\b'), + Keyword.Type), + + (words(( + '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine', + '`default_nettype', '`define', '`else', '`elsif', '`end_keywords', + '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include', + '`line', '`nounconnected_drive', '`pragma', '`resetall', + '`timescale', '`unconnected_drive', '`undef', '`undefineall'), + suffix=r'\b'), + Comment.Preproc), + + (words(( + # Simulation control tasks (20.2) + '$exit', '$finish', '$stop', + # Simulation time functions (20.3) + '$realtime', '$stime', '$time', + # Timescale tasks (20.4) + '$printtimescale', '$timeformat', + # Conversion functions + '$bitstoreal', '$bitstoshortreal', '$cast', '$itor', + '$realtobits', '$rtoi', '$shortrealtobits', '$signed', + '$unsigned', + # Data query functions (20.6) + '$bits', '$isunbounded', '$typename', + # Array query functions (20.7) + '$dimensions', '$high', '$increment', '$left', '$low', 
'$right', + '$size', '$unpacked_dimensions', + # Math functions (20.8) + '$acos', '$acosh', '$asin', '$asinh', '$atan', '$atan2', + '$atanh', '$ceil', '$clog2', '$cos', '$cosh', '$exp', '$floor', + '$hypot', '$ln', '$log10', '$pow', '$sin', '$sinh', '$sqrt', + '$tan', '$tanh', + # Bit vector system functions (20.9) + '$countbits', '$countones', '$isunknown', '$onehot', '$onehot0', + # Severity tasks (20.10) + '$info', '$error', '$fatal', '$warning', + # Assertion control tasks (20.12) + '$assertcontrol', '$assertfailoff', '$assertfailon', + '$assertkill', '$assertnonvacuouson', '$assertoff', '$asserton', + '$assertpassoff', '$assertpasson', '$assertvacuousoff', + # Sampled value system functions (20.13) + '$changed', '$changed_gclk', '$changing_gclk', '$falling_gclk', + '$fell', '$fell_gclk', '$future_gclk', '$past', '$past_gclk', + '$rising_gclk', '$rose', '$rose_gclk', '$sampled', '$stable', + '$stable_gclk', '$steady_gclk', + # Coverage control functions (20.14) + '$coverage_control', '$coverage_get', '$coverage_get_max', + '$coverage_merge', '$coverage_save', '$get_coverage', + '$load_coverage_db', '$set_coverage_db_name', + # Probabilistic distribution functions (20.15) + '$dist_chi_square', '$dist_erlang', '$dist_exponential', + '$dist_normal', '$dist_poisson', '$dist_t', '$dist_uniform', + '$random', + # Stochastic analysis tasks and functions (20.16) + '$q_add', '$q_exam', '$q_full', '$q_initialize', '$q_remove', + # PLA modeling tasks (20.17) + '$async$and$array', '$async$and$plane', '$async$nand$array', + '$async$nand$plane', '$async$nor$array', '$async$nor$plane', + '$async$or$array', '$async$or$plane', '$sync$and$array', + '$sync$and$plane', '$sync$nand$array', '$sync$nand$plane', + '$sync$nor$array', '$sync$nor$plane', '$sync$or$array', + '$sync$or$plane', + # Miscellaneous tasks and functions (20.18) + '$system', + # Display tasks (21.2) + '$display', '$displayb', '$displayh', '$displayo', '$monitor', + '$monitorb', '$monitorh', '$monitoro', '$monitoroff', + '$monitoron', '$strobe', '$strobeb', '$strobeh', '$strobeo', + '$write', '$writeb', '$writeh', '$writeo', + # File I/O tasks and functions (21.3) + '$fclose', '$fdisplay', '$fdisplayb', '$fdisplayh', + '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', '$fgets', + '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', + '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', + '$fstrobeh', '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', + '$fwriteh', '$fwriteo', '$rewind', '$sformat', '$sformatf', + '$sscanf', '$swrite', '$swriteb', '$swriteh', '$swriteo', + '$ungetc', + # Memory load tasks (21.4) + '$readmemb', '$readmemh', + # Memory dump tasks (21.5) + '$writememb', '$writememh', + # Command line input (21.6) + '$test$plusargs', '$value$plusargs', + # VCD tasks (21.7) + '$dumpall', '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', + '$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush', + '$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars', + ), suffix=r'\b'), + Name.Builtin), + + (r'[a-zA-Z_]\w*:(?!:)', Name.Label), + (r'\$?[a-zA-Z_]\w*', Name), + (r'\\(\S+)', Name), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation + (r'\\', String), # stray backslash + ], + 'macro': [ + (r'[^/\n]+', Comment.Preproc), + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'//.*?$', Comment.Single, '#pop'), + (r'/', 
Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Whitespace, '#pop'), + ], + 'import': [ + (r'[\w:]+\*?', Name.Namespace, '#pop') + ] + } + + +class VhdlLexer(RegexLexer): + """ + For VHDL source code. + """ + name = 'vhdl' + aliases = ['vhdl'] + filenames = ['*.vhdl', '*.vhd'] + mimetypes = ['text/x-vhdl'] + url = 'https://en.wikipedia.org/wiki/VHDL' + version_added = '1.5' + flags = re.MULTILINE | re.IGNORECASE + + tokens = { + 'root': [ + (r'\s+', Whitespace), + (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation + (r'--.*?$', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r"'[a-z_]\w*", Name.Attribute), + (r'[()\[\],.;\']', Punctuation), + (r'"[^\n\\"]*"', String), + + (r'(library)(\s+)([a-z_]\w*)', + bygroups(Keyword, Whitespace, Name.Namespace)), + (r'(use)(\s+)(entity)', bygroups(Keyword, Whitespace, Keyword)), + (r'(use)(\s+)([a-z_][\w.]*\.)(all)', + bygroups(Keyword, Whitespace, Name.Namespace, Keyword)), + (r'(use)(\s+)([a-z_][\w.]*)', + bygroups(Keyword, Whitespace, Name.Namespace)), + (r'(std|ieee)(\.[a-z_]\w*)', + bygroups(Name.Namespace, Name.Namespace)), + (words(('std', 'ieee', 'work'), suffix=r'\b'), + Name.Namespace), + (r'(entity|component)(\s+)([a-z_]\w*)', + bygroups(Keyword, Whitespace, Name.Class)), + (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)' + r'(of)(\s+)([a-z_]\w*)(\s+)(is)', + bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword, Whitespace, + Name.Class, Whitespace, Keyword)), + (r'([a-z_]\w*)(:)(\s+)(process|for)', + bygroups(Name.Class, Operator, Whitespace, Keyword)), + (r'(end)(\s+)', bygroups(using(this), Whitespace), 'endblock'), + + include('types'), + include('keywords'), + include('numbers'), + + (r'[a-z_]\w*', Name), + ], + 'endblock': [ + include('keywords'), + (r'[a-z_]\w*', Name.Class), + (r'\s+', Whitespace), + (r';', Punctuation, '#pop'), + ], + 'types': [ + (words(( + 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time', + 'delay_length', 'natural', 'positive', 'string', 'bit_vector', + 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector', + 'std_logic', 'std_logic_vector', 'signed', 'unsigned'), suffix=r'\b'), + Keyword.Type), + ], + 'keywords': [ + (words(( + 'abs', 'access', 'after', 'alias', 'all', 'and', + 'architecture', 'array', 'assert', 'attribute', 'begin', 'block', + 'body', 'buffer', 'bus', 'case', 'component', 'configuration', + 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end', + 'entity', 'exit', 'file', 'for', 'function', 'generate', + 'generic', 'group', 'guarded', 'if', 'impure', 'in', + 'inertial', 'inout', 'is', 'label', 'library', 'linkage', + 'literal', 'loop', 'map', 'mod', 'nand', 'new', + 'next', 'nor', 'not', 'null', 'of', 'on', + 'open', 'or', 'others', 'out', 'package', 'port', + 'postponed', 'procedure', 'process', 'pure', 'range', 'record', + 'register', 'reject', 'rem', 'return', 'rol', 'ror', 'select', + 'severity', 'signal', 'shared', 'sla', 'sll', 'sra', + 'srl', 'subtype', 'then', 'to', 'transport', 'type', + 'units', 'until', 'use', 'variable', 'wait', 'when', + 'while', 'with', 'xnor', 'xor'), suffix=r'\b'), + Keyword), + ], + 'numbers': [ + (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer), + (r'\d+', Number.Integer), + (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float), + (r'X"[0-9a-f_]+"', Number.Hex), + (r'O"[0-7_]+"', Number.Oct), + (r'B"[01_]+"', Number.Bin), + ], + } diff --git 
a/temp_venv/lib/python3.13/site-packages/pygments/lexers/mime.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/mime.py new file mode 100644 index 0000000000000000000000000000000000000000..45f27f428955061b67cf3d7501a63c75672214c8 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/mime.py @@ -0,0 +1,210 @@ +""" + pygments.lexers.mime + ~~~~~~~~~~~~~~~~~~~~ + + Lexer for Multipurpose Internet Mail Extensions (MIME) data. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include +from pygments.lexers import get_lexer_for_mimetype +from pygments.token import Text, Name, String, Operator, Comment, Other +from pygments.util import get_int_opt, ClassNotFound + +__all__ = ["MIMELexer"] + + +class MIMELexer(RegexLexer): + """ + Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is + designed to process nested multipart data. + + It assumes that the given data contains both header and body (and is + split at an empty line). If no valid header is found, then the entire data + will be treated as body. + + Additional options accepted: + + `MIME-max-level` + Max recursion level for nested MIME structure. Any negative number + would treated as unlimited. (default: -1) + + `Content-Type` + Treat the data as a specific content type. Useful when header is + missing, or this lexer would try to parse from header. (default: + `text/plain`) + + `Multipart-Boundary` + Set the default multipart boundary delimiter. This option is only used + when `Content-Type` is `multipart` and header is missing. This lexer + would try to parse from header by default. (default: None) + + `Content-Transfer-Encoding` + Treat the data as a specific encoding. Or this lexer would try to parse + from header by default. 
(default: None) + """ + + name = "MIME" + aliases = ["mime"] + mimetypes = ["multipart/mixed", + "multipart/related", + "multipart/alternative"] + url = 'https://en.wikipedia.org/wiki/MIME' + version_added = '2.5' + + def __init__(self, **options): + super().__init__(**options) + self.boundary = options.get("Multipart-Boundary") + self.content_transfer_encoding = options.get("Content_Transfer_Encoding") + self.content_type = options.get("Content_Type", "text/plain") + self.max_nested_level = get_int_opt(options, "MIME-max-level", -1) + + def get_header_tokens(self, match): + field = match.group(1) + + if field.lower() in self.attention_headers: + yield match.start(1), Name.Tag, field + ":" + yield match.start(2), Text.Whitespace, match.group(2) + + pos = match.end(2) + body = match.group(3) + for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())): + yield pos + i, t, v + + else: + yield match.start(), Comment, match.group() + + def get_body_tokens(self, match): + pos_body_start = match.start() + entire_body = match.group() + + # skip first newline + if entire_body[0] == '\n': + yield pos_body_start, Text.Whitespace, '\n' + pos_body_start = pos_body_start + 1 + entire_body = entire_body[1:] + + # if it is not a multipart + if not self.content_type.startswith("multipart") or not self.boundary: + for i, t, v in self.get_bodypart_tokens(entire_body): + yield pos_body_start + i, t, v + return + + # find boundary + bdry_pattern = rf"^--{re.escape(self.boundary)}(--)?\n" + bdry_matcher = re.compile(bdry_pattern, re.MULTILINE) + + # some data has prefix text before first boundary + m = bdry_matcher.search(entire_body) + if m: + pos_part_start = pos_body_start + m.end() + pos_iter_start = lpos_end = m.end() + yield pos_body_start, Text, entire_body[:m.start()] + yield pos_body_start + lpos_end, String.Delimiter, m.group() + else: + pos_part_start = pos_body_start + pos_iter_start = 0 + + # process tokens of each body part + for m in bdry_matcher.finditer(entire_body, pos_iter_start): + # bodypart + lpos_start = pos_part_start - pos_body_start + lpos_end = m.start() + part = entire_body[lpos_start:lpos_end] + for i, t, v in self.get_bodypart_tokens(part): + yield pos_part_start + i, t, v + + # boundary + yield pos_body_start + lpos_end, String.Delimiter, m.group() + pos_part_start = pos_body_start + m.end() + + # some data has suffix text after last boundary + lpos_start = pos_part_start - pos_body_start + if lpos_start != len(entire_body): + yield pos_part_start, Text, entire_body[lpos_start:] + + def get_bodypart_tokens(self, text): + # return if: + # * no content + # * no content type specific + # * content encoding is not readable + # * max recurrsion exceed + if not text.strip() or not self.content_type: + return [(0, Other, text)] + + cte = self.content_transfer_encoding + if cte and cte not in {"8bit", "7bit", "quoted-printable"}: + return [(0, Other, text)] + + if self.max_nested_level == 0: + return [(0, Other, text)] + + # get lexer + try: + lexer = get_lexer_for_mimetype(self.content_type) + except ClassNotFound: + return [(0, Other, text)] + + if isinstance(lexer, type(self)): + lexer.max_nested_level = self.max_nested_level - 1 + + return lexer.get_tokens_unprocessed(text) + + def store_content_type(self, match): + self.content_type = match.group(1) + + prefix_len = match.start(1) - match.start(0) + yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len] + yield match.start(1), Name.Label, match.group(2) + yield match.end(2), String.Delimiter, '/' + yield 
match.start(3), Name.Label, match.group(3) + + def get_content_type_subtokens(self, match): + yield match.start(1), Text, match.group(1) + yield match.start(2), Text.Whitespace, match.group(2) + yield match.start(3), Name.Attribute, match.group(3) + yield match.start(4), Operator, match.group(4) + yield match.start(5), String, match.group(5) + + if match.group(3).lower() == "boundary": + boundary = match.group(5).strip() + if boundary[0] == '"' and boundary[-1] == '"': + boundary = boundary[1:-1] + self.boundary = boundary + + def store_content_transfer_encoding(self, match): + self.content_transfer_encoding = match.group(0).lower() + yield match.start(0), Name.Constant, match.group(0) + + attention_headers = {"content-type", "content-transfer-encoding"} + + tokens = { + "root": [ + (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens), + (r"^$[\s\S]+", get_body_tokens), + ], + "header": [ + # folding + (r"\n[ \t]", Text.Whitespace), + (r"\n(?![ \t])", Text.Whitespace, "#pop"), + ], + "content-type": [ + include("header"), + ( + r"^\s*((multipart|application|audio|font|image|model|text|video" + r"|message)/([\w-]+))", + store_content_type, + ), + (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))', + get_content_type_subtokens), + (r';[ \t]*\n(?![ \t])', Text, '#pop'), + ], + "content-transfer-encoding": [ + include("header"), + (r"([\w-]+)", store_content_transfer_encoding), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/pygments/lexers/soong.py b/temp_venv/lib/python3.13/site-packages/pygments/lexers/soong.py new file mode 100644 index 0000000000000000000000000000000000000000..bbf204dd22543f06c65849d54613c3a573dda899 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/pygments/lexers/soong.py @@ -0,0 +1,78 @@ +""" + pygments.lexers.soong + ~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Soong (Android.bp Blueprint) files. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from pygments.lexer import RegexLexer, bygroups, include +from pygments.token import Comment, Name, Number, Operator, Punctuation, \ + String, Whitespace + +__all__ = ['SoongLexer'] + +class SoongLexer(RegexLexer): + name = 'Soong' + version_added = '2.18' + url = 'https://source.android.com/docs/setup/reference/androidbp' + aliases = ['androidbp', 'bp', 'soong'] + filenames = ['Android.bp'] + + tokens = { + 'root': [ + # A variable assignment + (r'(\w*)(\s*)(\+?=)(\s*)', + bygroups(Name.Variable, Whitespace, Operator, Whitespace), + 'assign-rhs'), + + # A top-level module + (r'(\w*)(\s*)(\{)', + bygroups(Name.Function, Whitespace, Punctuation), + 'in-rule'), + + # Everything else + include('comments'), + (r'\s+', Whitespace), # newlines okay + ], + 'assign-rhs': [ + include('expr'), + (r'\n', Whitespace, '#pop'), + ], + 'in-list': [ + include('expr'), + include('comments'), + (r'\s+', Whitespace), # newlines okay in a list + (r',', Punctuation), + (r'\]', Punctuation, '#pop'), + ], + 'in-map': [ + # A map key + (r'(\w+)(:)(\s*)', bygroups(Name, Punctuation, Whitespace)), + + include('expr'), + include('comments'), + (r'\s+', Whitespace), # newlines okay in a map + (r',', Punctuation), + (r'\}', Punctuation, '#pop'), + ], + 'in-rule': [ + # Just re-use map syntax + include('in-map'), + ], + 'comments': [ + (r'//.*', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + ], + 'expr': [ + (r'(true|false)\b', Name.Builtin), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + (r'".*?"', String), + (r'\{', Punctuation, 'in-map'), + (r'\[', Punctuation, 'in-list'), + (r'\w+', Name), + ], + } diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/__init__.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bd062a4d9314fba3b56465818873fe2657f7a4c Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/__init__.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/core.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/core.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a24512d1ec3d58119f7bd902e2e174b827033722 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/core.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/formatting.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/formatting.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36c6831b19bacbfbadc378df872409b377b22e3b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/formatting.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/serializing.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/serializing.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90bf090ff1e52d0dbaeec9499e333210789097b0 Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/serializing.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/utils.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/utils.cpython-313.pyc new file mode 100644 
index 0000000000000000000000000000000000000000..88c6f33027ece777fb1e66680a60633488be639b Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/utils.cpython-313.pyc differ diff --git a/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/version.cpython-313.pyc b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/version.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5e46a614d5ae91e225d29e0d272a927d12e9a2d Binary files /dev/null and b/temp_venv/lib/python3.13/site-packages/stack_data/__pycache__/version.cpython-313.pyc differ
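Beyond the byte-compiled caches above, the substantive additions in this diff are the vendored Pygments lexer modules (futhark.py, graphics.py, hdl.py, mime.py, soong.py). A minimal smoke test for them is sketched below, using only the public Pygments API (highlight, get_lexer_by_name, HtmlFormatter); the sample snippets and the "frontier" boundary string are invented for illustration, and the sketch assumes it runs under the temp_venv interpreter so these vendored modules are the ones imported.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

# Tiny, made-up snippets keyed by aliases declared in the lexers above.
samples = {
    "glsl": "void main() { gl_FragColor = vec4(1.0); }",
    "verilog": "module top; reg clk; always #5 clk = ~clk; endmodule",
    "vhdl": "entity top is end entity;",
    "soong": 'cc_binary {\n    name: "hello",\n    srcs: ["hello.cpp"],\n}',
    "futhark": "def main (xs: []i32): i32 = reduce (+) 0 xs",
}

formatter = HtmlFormatter()
for alias, source in samples.items():
    lexer = get_lexer_by_name(alias)   # resolves via each lexer class's `aliases`
    print(alias, len(highlight(source, lexer, formatter)))

# MIMELexer reads its configuration from lexer options (see its __init__ above);
# get_lexer_by_name forwards keyword options to the lexer constructor.
mime_lexer = get_lexer_by_name("mime", **{"Multipart-Boundary": "frontier"})

Lexing does not require the input to be semantically valid, so throwaway snippets like these are enough to confirm that each lexer loads and tokenizes without error.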

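Several of these lexers also define analyse_text heuristics (PovrayLexer scores hits on camera, light_source, #declare and #version; VerilogLexer scores reg, wire and assign), which feed Pygments' content-based lexer guessing. A hedged illustration follows; it assumes no other lexer happens to outscore these two on the contrived snippets, so the result is a best guess rather than a guarantee.

from pygments.lexers import guess_lexer

pov_snippet = """
#declare Radius = 1;
camera { location <0, 0, -5> look_at <0, 0, 0> }
light_source { <10, 10, -10> color rgb 1 }
"""

verilog_snippet = """
module blink(input clk, output reg led);
  wire rst;
  assign rst = 1'b0;
endmodule
"""

# guess_lexer ranks lexers by their analyse_text scores; with the heuristics
# shown above, these snippets should lean toward POVRay and Verilog.
print(guess_lexer(pov_snippet).name)
print(guess_lexer(verilog_snippet).name)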