Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
- .gitattributes +1 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/distro.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/six.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansi.py +102 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/winterm.py +169 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__init__.py +83 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py +17 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/cmdline.py +663 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/console.py +70 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/filter.py +71 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/filters/__init__.py +937 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py +94 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexer.py +879 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__init__.py +341 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/_mapping.py +580 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/python.py +1188 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/modeline.py +43 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/plugin.py +69 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/regexopt.py +91 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/scanner.py +104 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/sphinxext.py +155 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/style.py +197 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/token.py +212 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/unistring.py +153 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/util.py +308 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__init__.py +328 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/actions.py +207 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/common.py +424 -0
.gitattributes
CHANGED
@@ -60,3 +60,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 scripts/temp.txt filter=lfs diff=lfs merge=lfs -text
 scripts/blacklisted_urls.txt filter=lfs diff=lfs merge=lfs -text
 scripts/unique_source_documents.json filter=lfs diff=lfs merge=lfs -text
+scripts/new_unique_source_documents_with_url.json filter=lfs diff=lfs merge=lfs -text

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (2.92 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/distro.cpython-310.pyc
ADDED
Binary file (38.2 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/six.cpython-310.pyc
ADDED
Binary file (27.6 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc
ADDED
Binary file (7.91 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansi.py
ADDED
@@ -0,0 +1,102 @@
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

CSI = '\033['
OSC = '\033]'
BEL = '\a'


def code_to_chars(code):
    return CSI + str(code) + 'm'

def set_title(title):
    return OSC + '2;' + title + BEL

def clear_screen(mode=2):
    return CSI + str(mode) + 'J'

def clear_line(mode=2):
    return CSI + str(mode) + 'K'


class AnsiCodes(object):
    def __init__(self):
        # the subclasses declare class attributes which are numbers.
        # Upon instantiation we define instance attributes, which are the same
        # as the class attributes but wrapped with the ANSI escape sequence
        for name in dir(self):
            if not name.startswith('_'):
                value = getattr(self, name)
                setattr(self, name, code_to_chars(value))


class AnsiCursor(object):
    def UP(self, n=1):
        return CSI + str(n) + 'A'
    def DOWN(self, n=1):
        return CSI + str(n) + 'B'
    def FORWARD(self, n=1):
        return CSI + str(n) + 'C'
    def BACK(self, n=1):
        return CSI + str(n) + 'D'
    def POS(self, x=1, y=1):
        return CSI + str(y) + ';' + str(x) + 'H'


class AnsiFore(AnsiCodes):
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 90
    LIGHTRED_EX = 91
    LIGHTGREEN_EX = 92
    LIGHTYELLOW_EX = 93
    LIGHTBLUE_EX = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX = 96
    LIGHTWHITE_EX = 97


class AnsiBack(AnsiCodes):
    BLACK = 40
    RED = 41
    GREEN = 42
    YELLOW = 43
    BLUE = 44
    MAGENTA = 45
    CYAN = 46
    WHITE = 47
    RESET = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 100
    LIGHTRED_EX = 101
    LIGHTGREEN_EX = 102
    LIGHTYELLOW_EX = 103
    LIGHTBLUE_EX = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX = 106
    LIGHTWHITE_EX = 107


class AnsiStyle(AnsiCodes):
    BRIGHT = 1
    DIM = 2
    NORMAL = 22
    RESET_ALL = 0

Fore = AnsiFore()
Back = AnsiBack()
Style = AnsiStyle()
Cursor = AnsiCursor()

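Because `Fore`, `Back`, `Style` and `Cursor` are instantiated at module import time, every attribute on them is already a ready-to-print escape string rather than a bare number. A minimal usage sketch against the vendored module above (assuming a terminal that interprets ANSI codes)::

    from pip._vendor.colorama.ansi import Fore, Back, Style, Cursor

    # Each constant is just a string such as '\x1b[31m', so plain
    # concatenation builds the colored output.
    print(Fore.RED + Back.YELLOW + 'warning' + Style.RESET_ALL)

    # Cursor helpers return movement sequences rather than printing:
    print(repr(Cursor.UP(2)))  # '\x1b[2A'
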
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/winterm.py
ADDED
@@ -0,0 +1,169 @@
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32


# from wincon.h
class WinColor(object):
    BLACK = 0
    BLUE = 1
    GREEN = 2
    CYAN = 3
    RED = 4
    MAGENTA = 5
    YELLOW = 6
    GREY = 7

# from wincon.h
class WinStyle(object):
    NORMAL = 0x00  # dim text, dim background
    BRIGHT = 0x08  # bright text, dim background
    BRIGHT_BACKGROUND = 0x80  # dim text, bright background

class WinTerm(object):

    def __init__(self):
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style
        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
        # and BRIGHT is overwritten by Style codes.
        self._light = 0

    def get_attrs(self):
        return self._fore + self._back * 16 + (self._style | self._light)

    def set_attrs(self, value):
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)

    def reset_all(self, on_stderr=None):
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)
        self._light = 0

    def fore(self, fore=None, light=False, on_stderr=False):
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        # Emulate LIGHT_EX with BRIGHT Style
        if light:
            self._light |= WinStyle.BRIGHT
        else:
            self._light &= ~WinStyle.BRIGHT
        self.set_console(on_stderr=on_stderr)

    def back(self, back=None, light=False, on_stderr=False):
        if back is None:
            back = self._default_back
        self._back = back
        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
        if light:
            self._light |= WinStyle.BRIGHT_BACKGROUND
        else:
            self._light &= ~WinStyle.BRIGHT_BACKGROUND
        self.set_console(on_stderr=on_stderr)

    def style(self, style=None, on_stderr=False):
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)

    def set_console(self, attrs=None, on_stderr=False):
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleTextAttribute(handle, attrs)

    def get_position(self, handle):
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position

    def set_cursor_position(self, position=None, on_stderr=False):
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleCursorPosition(handle, position)

    def cursor_adjust(self, x, y, on_stderr=False):
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        position = self.get_position(handle)
        adjusted_position = (position.Y + y, position.X + x)
        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)

    def erase_screen(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the screen.
        # 1 should clear from the cursor to the beginning of the screen.
        # 2 should clear the entire screen, and move cursor to (1,1)
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
        # get number of character cells before current cursor position
        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = cells_in_screen - cells_before_cursor
        elif mode == 1:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_before_cursor
        elif mode == 2:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_in_screen
        else:
            # invalid mode
            return
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
        if mode == 2:
            # put the cursor where needed
            win32.SetConsoleCursorPosition(handle, (1, 1))

    def erase_line(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the line.
        # 1 should clear from the cursor to the beginning of the line.
        # 2 should clear the entire line.
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        elif mode == 1:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwCursorPosition.X
        elif mode == 2:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwSize.X
        else:
            # invalid mode
            return
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)

    def set_title(self, title):
        win32.SetConsoleTitle(title)

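`get_attrs` and `set_attrs` pack the console state into the single attribute word the Win32 API expects: foreground color in the low three bits, background color shifted left by four (hence the `* 16`), and the two brightness flags OR'd on top. A sketch of just that packing math, reusing the constants above (this assumes the sibling `win32` module imported at the top ships in the same vendored package, as it does in upstream colorama, where it degrades gracefully off Windows)::

    from pip._vendor.colorama.winterm import WinColor, WinStyle

    # WinTerm.get_attrs() by hand:
    fore, back, style = WinColor.RED, WinColor.BLUE, WinStyle.BRIGHT
    attrs = fore + back * 16 + style
    assert attrs == 0x1C  # 4 + 1*16 + 8

    # ...and WinTerm.set_attrs() unpacking the same word:
    assert attrs & 7 == WinColor.RED
    assert (attrs >> 4) & 7 == WinColor.BLUE
    assert attrs & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) == WinStyle.BRIGHT
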
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__init__.py
ADDED
@@ -0,0 +1,83 @@
"""
    Pygments
    ~~~~~~~~

    Pygments is a syntax highlighting package written in Python.

    It is a generic syntax highlighter for general use in all kinds of software
    such as forum systems, wikis or other applications that need to prettify
    source code. Highlights are:

    * a wide range of common languages and markup formats is supported
    * special attention is paid to details, increasing quality by a fair amount
    * support for new languages and formats are added easily
    * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
      formats that PIL supports, and ANSI sequences
    * it is usable as a command-line tool and as a library
    * ... and it highlights even Brainfuck!

    The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.

    .. _Pygments master branch:
       https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
from io import StringIO, BytesIO

__version__ = '2.11.2'
__docformat__ = 'restructuredtext'

__all__ = ['lex', 'format', 'highlight']


def lex(code, lexer):
    """
    Lex ``code`` with ``lexer`` and return an iterable of tokens.
    """
    try:
        return lexer.get_tokens(code)
    except TypeError as err:
        if (isinstance(err.args[0], str) and
            ('unbound method get_tokens' in err.args[0] or
             'missing 1 required positional argument' in err.args[0])):
            raise TypeError('lex() argument must be a lexer instance, '
                            'not a class')
        raise


def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
    """
    Format a tokenlist ``tokens`` with the formatter ``formatter``.

    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    try:
        if not outfile:
            realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
            formatter.format(tokens, realoutfile)
            return realoutfile.getvalue()
        else:
            formatter.format(tokens, outfile)
    except TypeError as err:
        if (isinstance(err.args[0], str) and
            ('unbound method format' in err.args[0] or
             'missing 1 required positional argument' in err.args[0])):
            raise TypeError('format() argument must be a formatter instance, '
                            'not a class')
        raise


def highlight(code, lexer, formatter, outfile=None):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.

    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    return format(lex(code, lexer), formatter, outfile)

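The three public helpers compose: `highlight()` is literally `format(lex(...))`. A short sketch of the library API, using a lexer and formatter that ship with stock Pygments (the `pip._vendor.` prefix matches the vendored layout added by this commit; with a normal Pygments install it would be dropped)::

    from pip._vendor.pygments import highlight
    from pip._vendor.pygments.lexers import PythonLexer
    from pip._vendor.pygments.formatters import HtmlFormatter

    # Note the instances: passing the classes themselves trips the
    # TypeError guard in lex()/format() above.
    html = highlight('print("hello")\n', PythonLexer(), HtmlFormatter())
    print(html)  # <div class="highlight"><pre>...</pre></div>
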
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py
ADDED
@@ -0,0 +1,17 @@
"""
    pygments.__main__
    ~~~~~~~~~~~~~~~~~

    Main entry point for ``python -m pygments``.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import sys
from pip._vendor.pygments.cmdline import main

try:
    sys.exit(main(sys.argv))
except KeyboardInterrupt:
    sys.exit(1)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-310.pyc
ADDED
Binary file (2.66 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-310.pyc
ADDED
Binary file (3.01 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-310.pyc
ADDED
Binary file (24.4 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-310.pyc
ADDED
Binary file (1.2 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-310.pyc
ADDED
Binary file (2.96 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-310.pyc
ADDED
Binary file (4.55 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-310.pyc
ADDED
Binary file (31.2 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/cmdline.py
ADDED
@@ -0,0 +1,663 @@
"""
    pygments.cmdline
    ~~~~~~~~~~~~~~~~

    Command line interface.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import os
import sys
import shutil
import argparse
from textwrap import dedent

from pip._vendor.pygments import __version__, highlight
from pip._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \
    guess_decode, guess_decode_from_terminal, terminal_encoding, \
    UnclosingTextIOWrapper
from pip._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
    load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
from pip._vendor.pygments.lexers.special import TextLexer
from pip._vendor.pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \
    load_formatter_from_file, get_formatter_for_filename, find_formatter_class
from pip._vendor.pygments.formatters.terminal import TerminalFormatter
from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter
from pip._vendor.pygments.filters import get_all_filters, find_filter_class
from pip._vendor.pygments.styles import get_all_styles, get_style_by_name


def _parse_options(o_strs):
    opts = {}
    if not o_strs:
        return opts
    for o_str in o_strs:
        if not o_str.strip():
            continue
        o_args = o_str.split(',')
        for o_arg in o_args:
            o_arg = o_arg.strip()
            try:
                o_key, o_val = o_arg.split('=', 1)
                o_key = o_key.strip()
                o_val = o_val.strip()
            except ValueError:
                opts[o_arg] = True
            else:
                opts[o_key] = o_val
    return opts


def _parse_filters(f_strs):
    filters = []
    if not f_strs:
        return filters
    for f_str in f_strs:
        if ':' in f_str:
            fname, fopts = f_str.split(':', 1)
            filters.append((fname, _parse_options([fopts])))
        else:
            filters.append((f_str, {}))
    return filters


def _print_help(what, name):
    try:
        if what == 'lexer':
            cls = get_lexer_by_name(name)
            print("Help on the %s lexer:" % cls.name)
            print(dedent(cls.__doc__))
        elif what == 'formatter':
            cls = find_formatter_class(name)
            print("Help on the %s formatter:" % cls.name)
            print(dedent(cls.__doc__))
        elif what == 'filter':
            cls = find_filter_class(name)
            print("Help on the %s filter:" % name)
            print(dedent(cls.__doc__))
        return 0
    except (AttributeError, ValueError):
        print("%s not found!" % what, file=sys.stderr)
        return 1


def _print_list(what):
    if what == 'lexer':
        print()
        print("Lexers:")
        print("~~~~~~~")

        info = []
        for fullname, names, exts, _ in get_all_lexers():
            tup = (', '.join(names)+':', fullname,
                   exts and '(filenames ' + ', '.join(exts) + ')' or '')
            info.append(tup)
        info.sort()
        for i in info:
            print(('* %s\n    %s %s') % i)

    elif what == 'formatter':
        print()
        print("Formatters:")
        print("~~~~~~~~~~~")

        info = []
        for cls in get_all_formatters():
            doc = docstring_headline(cls)
            tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
                   '(filenames ' + ', '.join(cls.filenames) + ')' or '')
            info.append(tup)
        info.sort()
        for i in info:
            print(('* %s\n    %s %s') % i)

    elif what == 'filter':
        print()
        print("Filters:")
        print("~~~~~~~~")

        for name in get_all_filters():
            cls = find_filter_class(name)
            print("* " + name + ':')
            print("    %s" % docstring_headline(cls))

    elif what == 'style':
        print()
        print("Styles:")
        print("~~~~~~~")

        for name in get_all_styles():
            cls = get_style_by_name(name)
            print("* " + name + ':')
            print("    %s" % docstring_headline(cls))


def _print_list_as_json(requested_items):
    import json
    result = {}
    if 'lexer' in requested_items:
        info = {}
        for fullname, names, filenames, mimetypes in get_all_lexers():
            info[fullname] = {
                'aliases': names,
                'filenames': filenames,
                'mimetypes': mimetypes
            }
        result['lexers'] = info

    if 'formatter' in requested_items:
        info = {}
        for cls in get_all_formatters():
            doc = docstring_headline(cls)
            info[cls.name] = {
                'aliases': cls.aliases,
                'filenames': cls.filenames,
                'doc': doc
            }
        result['formatters'] = info

    if 'filter' in requested_items:
        info = {}
        for name in get_all_filters():
            cls = find_filter_class(name)
            info[name] = {
                'doc': docstring_headline(cls)
            }
        result['filters'] = info

    if 'style' in requested_items:
        info = {}
        for name in get_all_styles():
            cls = get_style_by_name(name)
            info[name] = {
                'doc': docstring_headline(cls)
            }
        result['styles'] = info

    json.dump(result, sys.stdout)

def main_inner(parser, argns):
    if argns.help:
        parser.print_help()
        return 0

    if argns.V:
        print('Pygments version %s, (c) 2006-2021 by Georg Brandl, Matthäus '
              'Chajdas and contributors.' % __version__)
        return 0

    def is_only_option(opt):
        return not any(v for (k, v) in vars(argns).items() if k != opt)

    # handle ``pygmentize -L``
    if argns.L is not None:
        arg_set = set()
        for k, v in vars(argns).items():
            if v:
                arg_set.add(k)

        arg_set.discard('L')
        arg_set.discard('json')

        if arg_set:
            parser.print_help(sys.stderr)
            return 2

        # print version
        if not argns.json:
            main(['', '-V'])
        allowed_types = {'lexer', 'formatter', 'filter', 'style'}
        largs = [arg.rstrip('s') for arg in argns.L]
        if any(arg not in allowed_types for arg in largs):
            parser.print_help(sys.stderr)
            return 0
        if not largs:
            largs = allowed_types
        if not argns.json:
            for arg in largs:
                _print_list(arg)
        else:
            _print_list_as_json(largs)
        return 0

    # handle ``pygmentize -H``
    if argns.H:
        if not is_only_option('H'):
            parser.print_help(sys.stderr)
            return 2
        what, name = argns.H
        if what not in ('lexer', 'formatter', 'filter'):
            parser.print_help(sys.stderr)
            return 2
        return _print_help(what, name)

    # parse -O options
    parsed_opts = _parse_options(argns.O or [])

    # parse -P options
    for p_opt in argns.P or []:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value

    # encodings
    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))

    # handle ``pygmentize -N``
    if argns.N:
        lexer = find_lexer_class_for_filename(argns.N)
        if lexer is None:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -C``
    if argns.C:
        inp = sys.stdin.buffer.read()
        try:
            lexer = guess_lexer(inp, inencoding=inencoding)
        except ClassNotFound:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = argns.S
    a_opt = argns.a
    if S_opt is not None:
        f_opt = argns.f
        if not f_opt:
            parser.print_help(sys.stderr)
            return 2
        if argns.l or argns.INPUTFILE:
            parser.print_help(sys.stderr)
            return 2

        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1

        print(fmter.get_style_defs(a_opt or ''))
        return 0

    # if no -S is given, -a is not allowed
    if argns.a is not None:
        parser.print_help(sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(argns.F or [])

    # -x: allow custom (eXternal) lexers and formatters
    allow_custom_lexer_formatter = bool(argns.x)

    # select lexer
    lexer = None

    # given by name?
    lexername = argns.l
    if lexername:
        # custom lexer, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in lexername:
            try:
                filename = None
                name = None
                if ':' in lexername:
                    filename, name = lexername.rsplit(':', 1)

                    if '.py' in name:
                        # This can happen on Windows: If the lexername is
                        # C:\lexer.py -- return to normal load path in that case
                        name = None

                if filename and name:
                    lexer = load_lexer_from_file(filename, name,
                                                 **parsed_opts)
                else:
                    lexer = load_lexer_from_file(lexername, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                lexer = get_lexer_by_name(lexername, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    # read input code
    code = None

    if argns.INPUTFILE:
        if argns.s:
            print('Error: -s option not usable when input file specified',
                  file=sys.stderr)
            return 2

        infn = argns.INPUTFILE
        try:
            with open(infn, 'rb') as infp:
                code = infp.read()
        except Exception as err:
            print('Error: cannot read infile:', err, file=sys.stderr)
            return 1
        if not inencoding:
            code, inencoding = guess_decode(code)

        # do we have to guess the lexer?
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if argns.g:
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print('Error:', err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print('Error:', err, file=sys.stderr)
                return 1

    elif not argns.s:  # treat stdin as full file (-s support is later)
        # read code from terminal, always in binary mode since we want to
        # decode ourselves and be tolerant with it
        code = sys.stdin.buffer.read()  # use .buffer to get a binary stream
        if not inencoding:
            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
            # else the lexer will do the decoding
        if not lexer:
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)

    else:  # -s option needs a lexer with -l
        if not lexer:
            print('Error: when using -s a lexer has to be selected with -l',
                  file=sys.stderr)
            return 2

    # process filters
    for fname, fopts in F_opts:
        try:
            lexer.add_filter(fname, **fopts)
        except ClassNotFound as err:
            print('Error:', err, file=sys.stderr)
            return 1

    # select formatter
    outfn = argns.o
    fmter = argns.f
    if fmter:
        # custom formatter, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in fmter:
            try:
                filename = None
                name = None
                if ':' in fmter:
                    # Same logic as above for custom lexer
                    filename, name = fmter.rsplit(':', 1)

                    if '.py' in name:
                        name = None

                if filename and name:
                    fmter = load_formatter_from_file(filename, name,
                                                     **parsed_opts)
                else:
                    fmter = load_formatter_from_file(fmter, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                fmter = get_formatter_by_name(fmter, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    if outfn:
        if not fmter:
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, 'wb')
        except Exception as err:
            print('Error: cannot open outfile:', err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            if '256' in os.environ.get('TERM', ''):
                fmter = Terminal256Formatter(**parsed_opts)
            else:
                fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout.buffer

    # determine output encoding if not explicitly selected
    if not outencoding:
        if outfn:
            # output file? use lexer encoding for now (can still be None)
            fmter.encoding = inencoding
        else:
            # else use terminal encoding
            fmter.encoding = terminal_encoding(sys.stdout)

    # provide coloring under Windows, if possible
    if not outfn and sys.platform in ('win32', 'cygwin') and \
       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
        # unfortunately colorama doesn't support binary streams on Py3
        outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
        fmter.encoding = None
        try:
            import pip._vendor.colorama.initialise as colorama_initialise
        except ImportError:
            pass
        else:
            outfile = colorama_initialise.wrap_stream(
                outfile, convert=None, strip=None, autoreset=False, wrap=True)

    # When using the LaTeX formatter and the option `escapeinside` is
    # specified, we need a special lexer which collects escaped text
    # before running the chosen language lexer.
    escapeinside = parsed_opts.get('escapeinside', '')
    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
        left = escapeinside[0]
        right = escapeinside[1]
        lexer = LatexEmbeddedLexer(left, right, lexer)

    # ... and do it!
    if not argns.s:
        # process whole input as per normal...
        try:
            highlight(code, lexer, fmter, outfile)
        finally:
            if outfn:
                outfile.close()
        return 0
    else:
        # line by line processing of stdin (eg: for 'tail -f')...
        try:
            while 1:
                line = sys.stdin.buffer.readline()
                if not line:
                    break
                if not inencoding:
                    line = guess_decode_from_terminal(line, sys.stdin)[0]
                highlight(line, lexer, fmter, outfile)
                if hasattr(outfile, 'flush'):
                    outfile.flush()
            return 0
        except KeyboardInterrupt:  # pragma: no cover
            return 0
        finally:
            if outfn:
                outfile.close()


class HelpFormatter(argparse.HelpFormatter):
    def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
        if width is None:
            try:
                width = shutil.get_terminal_size().columns - 2
            except Exception:
                pass
        argparse.HelpFormatter.__init__(self, prog, indent_increment,
                                        max_help_position, width)


def main(args=sys.argv):
    """
    Main command line entry point.
    """
    desc = "Highlight an input file and write the result to an output file."
    parser = argparse.ArgumentParser(description=desc, add_help=False,
                                     formatter_class=HelpFormatter)

    operation = parser.add_argument_group('Main operation')
    lexersel = operation.add_mutually_exclusive_group()
    lexersel.add_argument(
        '-l', metavar='LEXER',
        help='Specify the lexer to use. (Query names with -L.) If not '
        'given and -g is not present, the lexer is guessed from the filename.')
    lexersel.add_argument(
        '-g', action='store_true',
        help='Guess the lexer from the file contents, or pass through '
        'as plain text if nothing can be guessed.')
    operation.add_argument(
        '-F', metavar='FILTER[:options]', action='append',
        help='Add a filter to the token stream. (Query names with -L.) '
        'Filter options are given after a colon if necessary.')
    operation.add_argument(
        '-f', metavar='FORMATTER',
        help='Specify the formatter to use. (Query names with -L.) '
        'If not given, the formatter is guessed from the output filename, '
        'and defaults to the terminal formatter if the output is to the '
        'terminal or an unknown file extension.')
    operation.add_argument(
        '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
        help='Give options to the lexer and formatter as a comma-separated '
        'list of key-value pairs. '
        'Example: `-O bg=light,python=cool`.')
    operation.add_argument(
        '-P', metavar='OPTION=value', action='append',
        help='Give a single option to the lexer and formatter - with this '
        'you can pass options whose value contains commas and equal signs. '
        'Example: `-P "heading=Pygments, the Python highlighter"`.')
    operation.add_argument(
        '-o', metavar='OUTPUTFILE',
        help='Where to write the output. Defaults to standard output.')

    operation.add_argument(
        'INPUTFILE', nargs='?',
        help='Where to read the input. Defaults to standard input.')

    flags = parser.add_argument_group('Operation flags')
    flags.add_argument(
        '-v', action='store_true',
        help='Print a detailed traceback on unhandled exceptions, which '
        'is useful for debugging and bug reports.')
    flags.add_argument(
        '-s', action='store_true',
        help='Process lines one at a time until EOF, rather than waiting to '
        'process the entire file. This only works for stdin, only for lexers '
        'with no line-spanning constructs, and is intended for streaming '
        'input such as you get from `tail -f`. '
        'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
    flags.add_argument(
        '-x', action='store_true',
        help='Allow custom lexers and formatters to be loaded from a .py file '
        'relative to the current working directory. For example, '
        '`-l ./customlexer.py -x`. By default, this option expects a file '
        'with a class named CustomLexer or CustomFormatter; you can also '
        'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
        'Users should be very careful not to use this option with untrusted '
        'files, because it will import and run them.')
    flags.add_argument('--json', help='Output as JSON. This can '
        'be only used in conjunction with -L.',
        default=False,
        action='store_true')

    special_modes_group = parser.add_argument_group(
        'Special modes - do not do any highlighting')
    special_modes = special_modes_group.add_mutually_exclusive_group()
    special_modes.add_argument(
        '-S', metavar='STYLE -f formatter',
        help='Print style definitions for STYLE for a formatter '
        'given with -f. The argument given by -a is formatter '
        'dependent.')
    special_modes.add_argument(
        '-L', nargs='*', metavar='WHAT',
        help='List lexers, formatters, styles or filters -- '
        'give additional arguments for the thing(s) you want to list '
        '(e.g. "styles"), or omit them to list everything.')
    special_modes.add_argument(
        '-N', metavar='FILENAME',
        help='Guess and print out a lexer name based solely on the given '
        'filename. Does not take input or highlight anything. If no specific '
        'lexer can be determined, "text" is printed.')
    special_modes.add_argument(
        '-C', action='store_true',
        help='Like -N, but print out a lexer name based solely on '
        'a given content from standard input.')
    special_modes.add_argument(
        '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
        help='Print detailed help for the object <name> of type <type>, '
        'where <type> is one of "lexer", "formatter" or "filter".')
    special_modes.add_argument(
        '-V', action='store_true',
        help='Print the package version.')
    special_modes.add_argument(
        '-h', '--help', action='store_true',
        help='Print this help.')
    special_modes_group.add_argument(
        '-a', metavar='ARG',
        help='Formatter-specific additional argument for the -S (print '
        'style sheet) mode.')

    argns = parser.parse_args(args[1:])

    try:
        return main_inner(parser, argns)
    except Exception:
        if argns.v:
            print(file=sys.stderr)
            print('*' * 65, file=sys.stderr)
            print('An unhandled exception occurred while highlighting.',
                  file=sys.stderr)
            print('Please report the whole traceback to the issue tracker at',
                  file=sys.stderr)
            print('<https://github.com/pygments/pygments/issues>.',
                  file=sys.stderr)
            print('*' * 65, file=sys.stderr)
            print(file=sys.stderr)
            raise
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += '\n   (f%s)' % info[-2].split('\n')[0].strip()[1:]
        print(file=sys.stderr)
        print('*** Error while highlighting:', file=sys.stderr)
        print(msg, file=sys.stderr)
        print('*** If this is a bug you want to report, please rerun with -v.',
              file=sys.stderr)
        return 1

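Since `main()` takes the argv list directly and returns an exit code, the command-line behaviour can be exercised in-process; a sketch, assuming the rest of the vendored package (styles, formatters) is importable::

    from pip._vendor.pygments.cmdline import main

    # argv[0] is skipped by parse_args(args[1:]), so any program name
    # works. This mirrors `pygmentize -S default -f html`: the S_opt
    # branch of main_inner() prints CSS for the "default" style and
    # returns 0.
    exit_code = main(['pygmentize', '-S', 'default', '-f', 'html'])
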
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/console.py
ADDED
@@ -0,0 +1,70 @@
"""
    pygments.console
    ~~~~~~~~~~~~~~~~

    Format colored console output.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

esc = "\x1b["

codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"

codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"

dark_colors = ["black", "red", "green", "yellow", "blue",
               "magenta", "cyan", "gray"]
light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
                "brightmagenta", "brightcyan", "white"]

x = 30
for d, l in zip(dark_colors, light_colors):
    codes[d] = esc + "%im" % x
    codes[l] = esc + "%im" % (60 + x)
    x += 1

del d, l, x

codes["white"] = codes["bold"]


def reset_color():
    return codes["reset"]


def colorize(color_key, text):
    return codes[color_key] + text + codes["reset"]


def ansiformat(attr, text):
    """
    Format ``text`` with a color and/or some attributes::

        color       normal color
        *color*     bold color
        _color_     underlined color
        +color+     blinking color
    """
    result = []
    if attr[:1] == attr[-1:] == '+':
        result.append(codes['blink'])
        attr = attr[1:-1]
    if attr[:1] == attr[-1:] == '*':
        result.append(codes['bold'])
        attr = attr[1:-1]
    if attr[:1] == attr[-1:] == '_':
        result.append(codes['underline'])
        attr = attr[1:-1]
    result.append(codes[attr])
    result.append(text)
    result.append(codes['reset'])
    return ''.join(result)

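`colorize` wraps text in a single color code plus a reset, while `ansiformat` strips paired markers off the attribute string; the markers nest in the order they are checked, `+...+` (blink), then `*...*` (bold), then `_..._` (underline). A quick sketch::

    from pip._vendor.pygments.console import ansiformat, colorize

    print(colorize('green', 'ok'))         # green text, then reset
    print(ansiformat('*red*', 'FAILED'))   # bold red
    print(ansiformat('*_cyan_*', 'note'))  # bold underlined cyan
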
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/filter.py
ADDED
@@ -0,0 +1,71 @@
"""
    pygments.filter
    ~~~~~~~~~~~~~~~

    Module that implements the default filter.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""


def apply_filters(stream, filters, lexer=None):
    """
    Use this method to apply an iterable of filters to
    a stream. If lexer is given it's forwarded to the
    filter, otherwise the filter receives `None`.
    """
    def _apply(filter_, stream):
        yield from filter_.filter(lexer, stream)
    for filter_ in filters:
        stream = _apply(filter_, stream)
    return stream


def simplefilter(f):
    """
    Decorator that converts a function into a filter::

        @simplefilter
        def lowercase(self, lexer, stream, options):
            for ttype, value in stream:
                yield ttype, value.lower()
    """
    return type(f.__name__, (FunctionFilter,), {
        '__module__': getattr(f, '__module__'),
        '__doc__': f.__doc__,
        'function': f,
    })


class Filter:
    """
    Default filter. Subclass this class or use the `simplefilter`
    decorator to create own filters.
    """

    def __init__(self, **options):
        self.options = options

    def filter(self, lexer, stream):
        raise NotImplementedError()


class FunctionFilter(Filter):
    """
    Abstract class used by `simplefilter` to create simple
    function filters on the fly. The `simplefilter` decorator
    automatically creates subclasses of this class for
    functions passed to it.
    """
    function = None

    def __init__(self, **options):
        if not hasattr(self, 'function'):
            raise TypeError('%r used without bound function' %
                            self.__class__.__name__)
        Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        # pylint: disable=not-callable
        yield from self.function(lexer, stream, self.options)

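Putting `simplefilter` and `apply_filters` together: the decorator manufactures a `FunctionFilter` subclass, which still has to be instantiated before use. A sketch with a hand-rolled token stream (the `(ttype, value)` pairs below are stand-ins for real lexer output)::

    from pip._vendor.pygments.filter import simplefilter, apply_filters

    @simplefilter
    def lowercase(self, lexer, stream, options):
        # `options` is the dict passed when the filter is instantiated.
        for ttype, value in stream:
            yield ttype, value.lower()

    tokens = [('Keyword', 'DEF'), ('Name', 'Foo')]
    # Note the call: lowercase() creates the filter instance.
    print(list(apply_filters(tokens, [lowercase()])))
    # [('Keyword', 'def'), ('Name', 'foo')]
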
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/filters/__init__.py
ADDED
@@ -0,0 +1,937 @@
"""
    pygments.filters
    ~~~~~~~~~~~~~~~~

    Module containing filter lookup functions and default
    filters.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pip._vendor.pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
    string_to_tokentype
from pip._vendor.pygments.filter import Filter
from pip._vendor.pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
    get_choice_opt, ClassNotFound, OptionError
from pip._vendor.pygments.plugin import find_plugin_filters


def find_filter_class(filtername):
    """Lookup a filter by name. Return None if not found."""
    if filtername in FILTERS:
        return FILTERS[filtername]
    for name, cls in find_plugin_filters():
        if name == filtername:
            return cls
    return None


def get_filter_by_name(filtername, **options):
    """Return an instantiated filter.

    Options are passed to the filter initializer if wanted.
    Raise a ClassNotFound if not found.
    """
    cls = find_filter_class(filtername)
    if cls:
        return cls(**options)
    else:
        raise ClassNotFound('filter %r not found' % filtername)


def get_all_filters():
    """Return a generator of all filter names."""
    yield from FILTERS
    for name, _ in find_plugin_filters():
        yield name


def _replace_special(ttype, value, regex, specialttype,
                     replacefunc=lambda x: x):
    last = 0
    for match in regex.finditer(value):
        start, end = match.start(), match.end()
        if start != last:
            yield ttype, value[last:start]
        yield specialttype, replacefunc(value[start:end])
        last = end
    if last != len(value):
        yield ttype, value[last:]


class CodeTagFilter(Filter):
    """Highlight special code tags in comments and docstrings.

    Options accepted:

    `codetags` : list of strings
        A list of strings that are flagged as code tags. The default is to
        highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        tags = get_list_opt(options, 'codetags',
                            ['XXX', 'TODO', 'BUG', 'NOTE'])
        self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
            re.escape(tag) for tag in tags if tag
        ]))

    def filter(self, lexer, stream):
        regex = self.tag_re
        for ttype, value in stream:
            if ttype in String.Doc or \
               ttype in Comment and \
               ttype not in Comment.Preproc:
                yield from _replace_special(ttype, value, regex, Comment.Special)
            else:
                yield ttype, value


class SymbolFilter(Filter):
    """Convert mathematical symbols such as \\<longrightarrow> in Isabelle
    or \\longrightarrow in LaTeX into Unicode characters.

    This is mostly useful for HTML or console output when you want to
    approximate the source rendering you'd see in an IDE.

    Options accepted:

    `lang` : string
        The symbol language. Must be one of ``'isabelle'`` or
        ``'latex'``. The default is ``'isabelle'``.
    """

    latex_symbols = {
        '\\alpha' : '\U000003b1',
        '\\beta' : '\U000003b2',
        '\\gamma' : '\U000003b3',
        '\\delta' : '\U000003b4',
        '\\varepsilon' : '\U000003b5',
        '\\zeta' : '\U000003b6',
        '\\eta' : '\U000003b7',
        '\\vartheta' : '\U000003b8',
        '\\iota' : '\U000003b9',
        '\\kappa' : '\U000003ba',
        '\\lambda' : '\U000003bb',
        '\\mu' : '\U000003bc',
        '\\nu' : '\U000003bd',
        '\\xi' : '\U000003be',
        '\\pi' : '\U000003c0',
        '\\varrho' : '\U000003c1',
        '\\sigma' : '\U000003c3',
        '\\tau' : '\U000003c4',
        '\\upsilon' : '\U000003c5',
        '\\varphi' : '\U000003c6',
        '\\chi' : '\U000003c7',
        '\\psi' : '\U000003c8',
        '\\omega' : '\U000003c9',
        '\\Gamma' : '\U00000393',
        '\\Delta' : '\U00000394',
        '\\Theta' : '\U00000398',
        '\\Lambda' : '\U0000039b',
        '\\Xi' : '\U0000039e',
        '\\Pi' : '\U000003a0',
        '\\Sigma' : '\U000003a3',
        '\\Upsilon' : '\U000003a5',
        '\\Phi' : '\U000003a6',
        '\\Psi' : '\U000003a8',
        '\\Omega' : '\U000003a9',
        '\\leftarrow' : '\U00002190',
        '\\longleftarrow' : '\U000027f5',
        '\\rightarrow' : '\U00002192',
        '\\longrightarrow' : '\U000027f6',
        '\\Leftarrow' : '\U000021d0',
        '\\Longleftarrow' : '\U000027f8',
        '\\Rightarrow' : '\U000021d2',
        '\\Longrightarrow' : '\U000027f9',
        '\\leftrightarrow' : '\U00002194',
        '\\longleftrightarrow' : '\U000027f7',
        '\\Leftrightarrow' : '\U000021d4',
        '\\Longleftrightarrow' : '\U000027fa',
        '\\mapsto' : '\U000021a6',
        '\\longmapsto' : '\U000027fc',
        '\\relbar' : '\U00002500',
        '\\Relbar' : '\U00002550',
        '\\hookleftarrow' : '\U000021a9',
        '\\hookrightarrow' : '\U000021aa',
        '\\leftharpoondown' : '\U000021bd',
        '\\rightharpoondown' : '\U000021c1',
        '\\leftharpoonup' : '\U000021bc',
        '\\rightharpoonup' : '\U000021c0',
        '\\rightleftharpoons' : '\U000021cc',
        '\\leadsto' : '\U0000219d',
        '\\downharpoonleft' : '\U000021c3',
        '\\downharpoonright' : '\U000021c2',
        '\\upharpoonleft' : '\U000021bf',
        '\\upharpoonright' : '\U000021be',
        '\\restriction' : '\U000021be',
        '\\uparrow' : '\U00002191',
        '\\Uparrow' : '\U000021d1',
        '\\downarrow' : '\U00002193',
        '\\Downarrow' : '\U000021d3',
        '\\updownarrow' : '\U00002195',
        '\\Updownarrow' : '\U000021d5',
        '\\langle' : '\U000027e8',
        '\\rangle' : '\U000027e9',
        '\\lceil' : '\U00002308',
        '\\rceil' : '\U00002309',
        '\\lfloor' : '\U0000230a',
        '\\rfloor' : '\U0000230b',
        '\\flqq' : '\U000000ab',
        '\\frqq' : '\U000000bb',
        '\\bot' : '\U000022a5',
        '\\top' : '\U000022a4',
        '\\wedge' : '\U00002227',
        '\\bigwedge' : '\U000022c0',
        '\\vee' : '\U00002228',
        '\\bigvee' : '\U000022c1',
        '\\forall' : '\U00002200',
        '\\exists' : '\U00002203',
        '\\nexists' : '\U00002204',
        '\\neg' : '\U000000ac',
        '\\Box' : '\U000025a1',
        '\\Diamond' : '\U000025c7',
        '\\vdash' : '\U000022a2',
        '\\models' : '\U000022a8',
        '\\dashv' : '\U000022a3',
        '\\surd' : '\U0000221a',
        '\\le' : '\U00002264',
        '\\ge' : '\U00002265',
        '\\ll' : '\U0000226a',
        '\\gg' : '\U0000226b',
        '\\lesssim' : '\U00002272',
        '\\gtrsim' : '\U00002273',
        '\\lessapprox' : '\U00002a85',
        '\\gtrapprox' : '\U00002a86',
        '\\in' : '\U00002208',
        '\\notin' : '\U00002209',
        '\\subset' : '\U00002282',
        '\\supset' : '\U00002283',
        '\\subseteq' : '\U00002286',
        '\\supseteq' : '\U00002287',
        '\\sqsubset' : '\U0000228f',
        '\\sqsupset' : '\U00002290',
        '\\sqsubseteq' : '\U00002291',
        '\\sqsupseteq' : '\U00002292',
        '\\cap' : '\U00002229',
        '\\bigcap' : '\U000022c2',
        '\\cup' : '\U0000222a',
        '\\bigcup' : '\U000022c3',
        '\\sqcup' : '\U00002294',
        '\\bigsqcup' : '\U00002a06',
        '\\sqcap' : '\U00002293',
        '\\Bigsqcap' : '\U00002a05',
        '\\setminus' : '\U00002216',
        '\\propto' : '\U0000221d',
        '\\uplus' : '\U0000228e',
        '\\bigplus' : '\U00002a04',
        '\\sim' : '\U0000223c',
        '\\doteq' : '\U00002250',
        '\\simeq' : '\U00002243',
        '\\approx' : '\U00002248',
        '\\asymp' : '\U0000224d',
        '\\cong' : '\U00002245',
        '\\equiv' : '\U00002261',
        '\\Join' : '\U000022c8',
        '\\bowtie' : '\U00002a1d',
        '\\prec' : '\U0000227a',
        '\\succ' : '\U0000227b',
        '\\preceq' : '\U0000227c',
        '\\succeq' : '\U0000227d',
        '\\parallel' : '\U00002225',
        '\\mid' : '\U000000a6',
        '\\pm' : '\U000000b1',
        '\\mp' : '\U00002213',
        '\\times' : '\U000000d7',
        '\\div' : '\U000000f7',
        '\\cdot' : '\U000022c5',
        '\\star' : '\U000022c6',
        '\\circ' : '\U00002218',
        '\\dagger' : '\U00002020',
        '\\ddagger' : '\U00002021',
        '\\lhd' : '\U000022b2',
        '\\rhd' : '\U000022b3',
        '\\unlhd' : '\U000022b4',
        '\\unrhd' : '\U000022b5',
        '\\triangleleft' : '\U000025c3',
        '\\triangleright' : '\U000025b9',
        '\\triangle' : '\U000025b3',
        '\\triangleq' : '\U0000225c',
        '\\oplus' : '\U00002295',
        '\\bigoplus' : '\U00002a01',
        '\\otimes' : '\U00002297',
        '\\bigotimes' : '\U00002a02',
        '\\odot' : '\U00002299',
        '\\bigodot' : '\U00002a00',
        '\\ominus' : '\U00002296',
        '\\oslash' : '\U00002298',
        '\\dots' : '\U00002026',
        '\\cdots' : '\U000022ef',
        '\\sum' : '\U00002211',
        '\\prod' : '\U0000220f',
        '\\coprod' : '\U00002210',
        '\\infty' : '\U0000221e',
        '\\int' : '\U0000222b',
        '\\oint' : '\U0000222e',
        '\\clubsuit' : '\U00002663',
        '\\diamondsuit' : '\U00002662',
        '\\heartsuit' : '\U00002661',
        '\\spadesuit' : '\U00002660',
        '\\aleph' : '\U00002135',
        '\\emptyset' : '\U00002205',
        '\\nabla' : '\U00002207',
        '\\partial' : '\U00002202',
        '\\flat' : '\U0000266d',
        '\\natural' : '\U0000266e',
        '\\sharp' : '\U0000266f',
        '\\angle' : '\U00002220',
        '\\copyright' : '\U000000a9',
        '\\textregistered' : '\U000000ae',
        '\\textonequarter' : '\U000000bc',
        '\\textonehalf' : '\U000000bd',
        '\\textthreequarters' : '\U000000be',
        '\\textordfeminine' : '\U000000aa',
        '\\textordmasculine' : '\U000000ba',
        '\\euro' : '\U000020ac',
        '\\pounds' : '\U000000a3',
        '\\yen' : '\U000000a5',
        '\\textcent' : '\U000000a2',
        '\\textcurrency' : '\U000000a4',
        '\\textdegree' : '\U000000b0',
    }

    isabelle_symbols = {
        '\\<zero>' : '\U0001d7ec',
        '\\<one>' : '\U0001d7ed',
        '\\<two>' : '\U0001d7ee',
        '\\<three>' : '\U0001d7ef',
        '\\<four>' : '\U0001d7f0',
        '\\<five>' : '\U0001d7f1',
        '\\<six>' : '\U0001d7f2',
        '\\<seven>' : '\U0001d7f3',
        '\\<eight>' : '\U0001d7f4',
        '\\<nine>' : '\U0001d7f5',
        '\\<A>' : '\U0001d49c',
        '\\<B>' : '\U0000212c',
        '\\<C>' : '\U0001d49e',
        '\\<D>' : '\U0001d49f',
        '\\<E>' : '\U00002130',
        '\\<F>' : '\U00002131',
        '\\<G>' : '\U0001d4a2',
        '\\<H>' : '\U0000210b',
        '\\<I>' : '\U00002110',
        '\\<J>' : '\U0001d4a5',
        '\\<K>' : '\U0001d4a6',
        '\\<L>' : '\U00002112',
        '\\<M>' : '\U00002133',
        '\\<N>' : '\U0001d4a9',
        '\\<O>' : '\U0001d4aa',
        '\\<P>' : '\U0001d4ab',
        '\\<Q>' : '\U0001d4ac',
        '\\<R>' : '\U0000211b',
        '\\<S>' : '\U0001d4ae',
        '\\<T>' : '\U0001d4af',
        '\\<U>' : '\U0001d4b0',
        '\\<V>' : '\U0001d4b1',
        '\\<W>' : '\U0001d4b2',
        '\\<X>' : '\U0001d4b3',
        '\\<Y>' : '\U0001d4b4',
        '\\<Z>' : '\U0001d4b5',
        '\\<a>' : '\U0001d5ba',
        '\\<b>' : '\U0001d5bb',
        '\\<c>' : '\U0001d5bc',
        '\\<d>' : '\U0001d5bd',
        '\\<e>' : '\U0001d5be',
        '\\<f>' : '\U0001d5bf',
        '\\<g>' : '\U0001d5c0',
        '\\<h>' : '\U0001d5c1',
        '\\<i>' : '\U0001d5c2',
        '\\<j>' : '\U0001d5c3',
        '\\<k>' : '\U0001d5c4',
        '\\<l>' : '\U0001d5c5',
        '\\<m>' : '\U0001d5c6',
        '\\<n>' : '\U0001d5c7',
        '\\<o>' : '\U0001d5c8',
        '\\<p>' : '\U0001d5c9',
        '\\<q>' : '\U0001d5ca',
        '\\<r>' : '\U0001d5cb',
        '\\<s>' : '\U0001d5cc',
        '\\<t>' : '\U0001d5cd',
        '\\<u>' : '\U0001d5ce',
        '\\<v>' : '\U0001d5cf',
        '\\<w>' : '\U0001d5d0',
        '\\<x>' : '\U0001d5d1',
        '\\<y>' : '\U0001d5d2',
        '\\<z>' : '\U0001d5d3',
        '\\<AA>' : '\U0001d504',
        '\\<BB>' : '\U0001d505',
        '\\<CC>' : '\U0000212d',
        '\\<DD>' : '\U0001d507',
        '\\<EE>' : '\U0001d508',
        '\\<FF>' : '\U0001d509',
        '\\<GG>' : '\U0001d50a',
        '\\<HH>' : '\U0000210c',
        '\\<II>' : '\U00002111',
        '\\<JJ>' : '\U0001d50d',
        '\\<KK>' : '\U0001d50e',
        '\\<LL>' : '\U0001d50f',
        '\\<MM>' : '\U0001d510',
        '\\<NN>' : '\U0001d511',
        '\\<OO>' : '\U0001d512',
        '\\<PP>' : '\U0001d513',
        '\\<QQ>' : '\U0001d514',
        '\\<RR>' : '\U0000211c',
        '\\<SS>' : '\U0001d516',
        '\\<TT>' : '\U0001d517',
        '\\<UU>' : '\U0001d518',
        '\\<VV>' : '\U0001d519',
        '\\<WW>' : '\U0001d51a',
        '\\<XX>' : '\U0001d51b',
        '\\<YY>' : '\U0001d51c',
        '\\<ZZ>' : '\U00002128',
        '\\<aa>' : '\U0001d51e',
        '\\<bb>' : '\U0001d51f',
        '\\<cc>' : '\U0001d520',
        '\\<dd>' : '\U0001d521',
        '\\<ee>' : '\U0001d522',
        '\\<ff>' : '\U0001d523',
        '\\<gg>' : '\U0001d524',
        '\\<hh>' : '\U0001d525',
        '\\<ii>' : '\U0001d526',
        '\\<jj>' : '\U0001d527',
        '\\<kk>' : '\U0001d528',
        '\\<ll>' : '\U0001d529',
        '\\<mm>' : '\U0001d52a',
        '\\<nn>' : '\U0001d52b',
        '\\<oo>' : '\U0001d52c',
        '\\<pp>' : '\U0001d52d',
        '\\<qq>' : '\U0001d52e',
        '\\<rr>' : '\U0001d52f',
        '\\<ss>' : '\U0001d530',
        '\\<tt>' : '\U0001d531',
        '\\<uu>' : '\U0001d532',
        '\\<vv>' : '\U0001d533',
        '\\<ww>' : '\U0001d534',
        '\\<xx>' : '\U0001d535',
        '\\<yy>' : '\U0001d536',
        '\\<zz>' : '\U0001d537',
        '\\<alpha>' : '\U000003b1',
        '\\<beta>' : '\U000003b2',
        '\\<gamma>' : '\U000003b3',
        '\\<delta>' : '\U000003b4',
        '\\<epsilon>' : '\U000003b5',
        '\\<zeta>' : '\U000003b6',
        '\\<eta>' : '\U000003b7',
        '\\<theta>' : '\U000003b8',
        '\\<iota>' : '\U000003b9',
        '\\<kappa>' : '\U000003ba',
        '\\<lambda>' : '\U000003bb',
        '\\<mu>' : '\U000003bc',
        '\\<nu>' : '\U000003bd',
        '\\<xi>' : '\U000003be',
        '\\<pi>' : '\U000003c0',
        '\\<rho>' : '\U000003c1',
        '\\<sigma>' : '\U000003c3',
        '\\<tau>' : '\U000003c4',
        '\\<upsilon>' : '\U000003c5',
        '\\<phi>' : '\U000003c6',
        '\\<chi>' : '\U000003c7',
        '\\<psi>' : '\U000003c8',
        '\\<omega>' : '\U000003c9',
        '\\<Gamma>' : '\U00000393',
        '\\<Delta>' : '\U00000394',
        '\\<Theta>' : '\U00000398',
        '\\<Lambda>' : '\U0000039b',
        '\\<Xi>' : '\U0000039e',
        '\\<Pi>' : '\U000003a0',
        '\\<Sigma>' : '\U000003a3',
        '\\<Upsilon>' : '\U000003a5',
        '\\<Phi>' : '\U000003a6',
        '\\<Psi>' : '\U000003a8',
        '\\<Omega>' : '\U000003a9',
        '\\<bool>' : '\U0001d539',
        '\\<complex>' : '\U00002102',
        '\\<nat>' : '\U00002115',
        '\\<rat>' : '\U0000211a',
        '\\<real>' : '\U0000211d',
        '\\<int>' : '\U00002124',
        '\\<leftarrow>' : '\U00002190',
        '\\<longleftarrow>' : '\U000027f5',
        '\\<rightarrow>' : '\U00002192',
        '\\<longrightarrow>' : '\U000027f6',
        '\\<Leftarrow>' : '\U000021d0',
        '\\<Longleftarrow>' : '\U000027f8',
        '\\<Rightarrow>' : '\U000021d2',
        '\\<Longrightarrow>' : '\U000027f9',
        '\\<leftrightarrow>' : '\U00002194',
        '\\<longleftrightarrow>' : '\U000027f7',
        '\\<Leftrightarrow>' : '\U000021d4',
        '\\<Longleftrightarrow>' : '\U000027fa',
        '\\<mapsto>' : '\U000021a6',
        '\\<longmapsto>' : '\U000027fc',
        '\\<midarrow>' : '\U00002500',
        '\\<Midarrow>' : '\U00002550',
        '\\<hookleftarrow>' : '\U000021a9',
        '\\<hookrightarrow>' : '\U000021aa',
        '\\<leftharpoondown>' : '\U000021bd',
        '\\<rightharpoondown>' : '\U000021c1',
        '\\<leftharpoonup>' : '\U000021bc',
        '\\<rightharpoonup>' : '\U000021c0',
        '\\<rightleftharpoons>' : '\U000021cc',
        '\\<leadsto>' : '\U0000219d',
        '\\<downharpoonleft>' : '\U000021c3',
        '\\<downharpoonright>' : '\U000021c2',
        '\\<upharpoonleft>' : '\U000021bf',
        '\\<upharpoonright>' : '\U000021be',
        '\\<restriction>' : '\U000021be',
        '\\<Colon>' : '\U00002237',
        '\\<up>' : '\U00002191',
        '\\<Up>' : '\U000021d1',
        '\\<down>' : '\U00002193',
        '\\<Down>' : '\U000021d3',
        '\\<updown>' : '\U00002195',
        '\\<Updown>' : '\U000021d5',
        '\\<langle>' : '\U000027e8',
        '\\<rangle>' : '\U000027e9',
        '\\<lceil>' : '\U00002308',
        '\\<rceil>' : '\U00002309',
        '\\<lfloor>' : '\U0000230a',
        '\\<rfloor>' : '\U0000230b',
        '\\<lparr>' : '\U00002987',
        '\\<rparr>' : '\U00002988',
        '\\<lbrakk>' : '\U000027e6',
        '\\<rbrakk>' : '\U000027e7',
        '\\<lbrace>' : '\U00002983',
        '\\<rbrace>' : '\U00002984',
        '\\<guillemotleft>' : '\U000000ab',
        '\\<guillemotright>' : '\U000000bb',
        '\\<bottom>' : '\U000022a5',
        '\\<top>' : '\U000022a4',
        '\\<and>' : '\U00002227',
        '\\<And>' : '\U000022c0',
        '\\<or>' : '\U00002228',
        '\\<Or>' : '\U000022c1',
        '\\<forall>' : '\U00002200',
        '\\<exists>' : '\U00002203',
        '\\<nexists>' : '\U00002204',
        '\\<not>' : '\U000000ac',
        '\\<box>' : '\U000025a1',
        '\\<diamond>' : '\U000025c7',
        '\\<turnstile>' : '\U000022a2',
        '\\<Turnstile>' : '\U000022a8',
        '\\<tturnstile>' : '\U000022a9',
        '\\<TTurnstile>' : '\U000022ab',
        '\\<stileturn>' : '\U000022a3',
        '\\<surd>' : '\U0000221a',
        '\\<le>' : '\U00002264',
        '\\<ge>' : '\U00002265',
        '\\<lless>' : '\U0000226a',
        '\\<ggreater>' : '\U0000226b',
        '\\<lesssim>' : '\U00002272',
        '\\<greatersim>' : '\U00002273',
        '\\<lessapprox>' : '\U00002a85',
        '\\<greaterapprox>' : '\U00002a86',
        '\\<in>' : '\U00002208',
        '\\<notin>' : '\U00002209',
        '\\<subset>' : '\U00002282',
        '\\<supset>' : '\U00002283',
        '\\<subseteq>' : '\U00002286',
        '\\<supseteq>' : '\U00002287',
        '\\<sqsubset>' : '\U0000228f',
        '\\<sqsupset>' : '\U00002290',
        '\\<sqsubseteq>' : '\U00002291',
        '\\<sqsupseteq>' : '\U00002292',
        '\\<inter>' : '\U00002229',
        '\\<Inter>' : '\U000022c2',
        '\\<union>' : '\U0000222a',
        '\\<Union>' : '\U000022c3',
        '\\<squnion>' : '\U00002294',
        '\\<Squnion>' : '\U00002a06',
        '\\<sqinter>' : '\U00002293',
        '\\<Sqinter>' : '\U00002a05',
        '\\<setminus>' : '\U00002216',
        '\\<propto>' : '\U0000221d',
        '\\<uplus>' : '\U0000228e',
        '\\<Uplus>' : '\U00002a04',
        '\\<noteq>' : '\U00002260',
        '\\<sim>' : '\U0000223c',
        '\\<doteq>' : '\U00002250',
        '\\<simeq>' : '\U00002243',
        '\\<approx>' : '\U00002248',
        '\\<asymp>' : '\U0000224d',
        '\\<cong>' : '\U00002245',
        '\\<smile>' : '\U00002323',
        '\\<equiv>' : '\U00002261',
        '\\<frown>' : '\U00002322',
        '\\<Join>' : '\U000022c8',
        '\\<bowtie>' : '\U00002a1d',
        '\\<prec>' : '\U0000227a',
        '\\<succ>' : '\U0000227b',
        '\\<preceq>' : '\U0000227c',
        '\\<succeq>' : '\U0000227d',
        '\\<parallel>' : '\U00002225',
        '\\<bar>' : '\U000000a6',
        '\\<plusminus>' : '\U000000b1',
        '\\<minusplus>' : '\U00002213',
        '\\<times>' : '\U000000d7',
        '\\<div>' : '\U000000f7',
        '\\<cdot>' : '\U000022c5',
        '\\<star>' : '\U000022c6',
        '\\<bullet>' : '\U00002219',
        '\\<circ>' : '\U00002218',
        '\\<dagger>' : '\U00002020',
        '\\<ddagger>' : '\U00002021',
        '\\<lhd>' : '\U000022b2',
        '\\<rhd>' : '\U000022b3',
        '\\<unlhd>' : '\U000022b4',
        '\\<unrhd>' : '\U000022b5',
        '\\<triangleleft>' : '\U000025c3',
        '\\<triangleright>' : '\U000025b9',
        '\\<triangle>' : '\U000025b3',
        '\\<triangleq>' : '\U0000225c',
        '\\<oplus>' : '\U00002295',
        '\\<Oplus>' : '\U00002a01',
        '\\<otimes>' : '\U00002297',
        '\\<Otimes>' : '\U00002a02',
        '\\<odot>' : '\U00002299',
        '\\<Odot>' : '\U00002a00',
        '\\<ominus>' : '\U00002296',
        '\\<oslash>' : '\U00002298',
        '\\<dots>' : '\U00002026',
        '\\<cdots>' : '\U000022ef',
        '\\<Sum>' : '\U00002211',
        '\\<Prod>' : '\U0000220f',
        '\\<Coprod>' : '\U00002210',
        '\\<infinity>' : '\U0000221e',
        '\\<integral>' : '\U0000222b',
        '\\<ointegral>' : '\U0000222e',
        '\\<clubsuit>' : '\U00002663',
        '\\<diamondsuit>' : '\U00002662',
        '\\<heartsuit>' : '\U00002661',
        '\\<spadesuit>' : '\U00002660',
        '\\<aleph>' : '\U00002135',
        '\\<emptyset>' : '\U00002205',
        '\\<nabla>' : '\U00002207',
        '\\<partial>' : '\U00002202',
        '\\<flat>' : '\U0000266d',
        '\\<natural>' : '\U0000266e',
        '\\<sharp>' : '\U0000266f',
        '\\<angle>' : '\U00002220',
        '\\<copyright>' : '\U000000a9',
        '\\<registered>' : '\U000000ae',
        '\\<hyphen>' : '\U000000ad',
        '\\<inverse>' : '\U000000af',
        '\\<onequarter>' : '\U000000bc',
        '\\<onehalf>' : '\U000000bd',
        '\\<threequarters>' : '\U000000be',
        '\\<ordfeminine>' : '\U000000aa',
        '\\<ordmasculine>' : '\U000000ba',
        '\\<section>' : '\U000000a7',
        '\\<paragraph>' : '\U000000b6',
        '\\<exclamdown>' : '\U000000a1',
        '\\<questiondown>' : '\U000000bf',
        '\\<euro>' : '\U000020ac',
        '\\<pounds>' : '\U000000a3',
        '\\<yen>' : '\U000000a5',
        '\\<cent>' : '\U000000a2',
        '\\<currency>' : '\U000000a4',
        '\\<degree>' : '\U000000b0',
        '\\<amalg>' : '\U00002a3f',
        '\\<mho>' : '\U00002127',
        '\\<lozenge>' : '\U000025ca',
        '\\<wp>' : '\U00002118',
        '\\<wrong>' : '\U00002240',
        '\\<struct>' : '\U000022c4',
        '\\<acute>' : '\U000000b4',
        '\\<index>' : '\U00000131',
        '\\<dieresis>' : '\U000000a8',
        '\\<cedilla>' : '\U000000b8',
        '\\<hungarumlaut>' : '\U000002dd',
        '\\<some>' : '\U000003f5',
        '\\<newline>' : '\U000023ce',
        '\\<open>' : '\U00002039',
        '\\<close>' : '\U0000203a',
        '\\<here>' : '\U00002302',
        '\\<^sub>' : '\U000021e9',
        '\\<^sup>' : '\U000021e7',
        '\\<^bold>' : '\U00002759',
        '\\<^bsub>' : '\U000021d8',
        '\\<^esub>' : '\U000021d9',
        '\\<^bsup>' : '\U000021d7',
        '\\<^esup>' : '\U000021d6',
    }

    lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}

    def __init__(self, **options):
        Filter.__init__(self, **options)
        lang = get_choice_opt(options, 'lang',
                              ['isabelle', 'latex'], 'isabelle')
        self.symbols = self.lang_map[lang]

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if value in self.symbols:
                yield ttype, self.symbols[value]
            else:
                yield ttype, value


class KeywordCaseFilter(Filter):
    """Convert keywords to lowercase or uppercase or capitalize them, which
    means first letter uppercase, rest lowercase.

    This can be useful e.g. if you highlight Pascal code and want to adapt the
    code to your styleguide.

    Options accepted:

    `case` : string
        The casing to convert keywords to. Must be one of ``'lower'``,
        ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        case = get_choice_opt(options, 'case',
                              ['lower', 'upper', 'capitalize'], 'lower')
        self.convert = getattr(str, case)

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype in Keyword:
                yield ttype, self.convert(value)
            else:
                yield ttype, value


class NameHighlightFilter(Filter):
    """Highlight a normal Name (and Name.*) token with a different token type.

    Example::

        filter = NameHighlightFilter(
            names=['foo', 'bar', 'baz'],
            tokentype=Name.Function,
        )

    This would highlight the names "foo", "bar" and "baz"
    as functions. `Name.Function` is the default token type.

    Options accepted:

    `names` : list of strings
        A list of names that should be given the different token type.
        There is no default.
    `tokentype` : TokenType or string
        A token type or a string containing a token type name that is
        used for highlighting the strings in `names`. The default is
        `Name.Function`.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.names = set(get_list_opt(options, 'names', []))
        tokentype = options.get('tokentype')
        if tokentype:
            self.tokentype = string_to_tokentype(tokentype)
        else:
            self.tokentype = Name.Function

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype in Name and value in self.names:
                yield self.tokentype, value
            else:
                yield ttype, value


class ErrorToken(Exception):
    pass


class RaiseOnErrorTokenFilter(Filter):
    """Raise an exception when the lexer generates an error token.

    Options accepted:

    `excclass` : Exception class
        The exception class to raise.
        The default is `pygments.filters.ErrorToken`.

    .. versionadded:: 0.8
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.exception = options.get('excclass', ErrorToken)
        try:
            # issubclass() will raise TypeError if first argument is not a class
            if not issubclass(self.exception, Exception):
                raise TypeError
        except TypeError:
            raise OptionError('excclass option is not an exception class')

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Error:
                raise self.exception(value)
            yield ttype, value


class VisibleWhitespaceFilter(Filter):
    """Convert tabs, newlines and/or spaces to visible characters.

    Options accepted:

    `spaces` : string or bool
        If this is a one-character string, spaces will be replaced by this string.
        If it is another true value, spaces will be replaced by ``·`` (unicode
        MIDDLE DOT). If it is a false value, spaces will not be replaced. The
        default is ``False``.
    `tabs` : string or bool
        The same as for `spaces`, but the default replacement character is ``»``
        (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
        is ``False``. Note: this will not work if the `tabsize` option for the
        lexer is nonzero, as tabs will already have been expanded then.
    `tabsize` : int
        If tabs are to be replaced by this filter (see the `tabs` option), this
        is the total number of characters that a tab should be expanded to.
        The default is ``8``.
    `newlines` : string or bool
        The same as for `spaces`, but the default replacement character is ``¶``
        (unicode PILCROW SIGN). The default value is ``False``.
    `wstokentype` : bool
        If true, give whitespace the special `Whitespace` token type. This allows
        styling the visible whitespace differently (e.g. greyed out), but it can
        disrupt background colors. The default is ``True``.

    .. versionadded:: 0.8
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        for name, default in [('spaces', '·'),
                              ('tabs', '»'),
                              ('newlines', '¶')]:
            opt = options.get(name, False)
            if isinstance(opt, str) and len(opt) == 1:
                setattr(self, name, opt)
            else:
                setattr(self, name, (opt and default or ''))
        tabsize = get_int_opt(options, 'tabsize', 8)
        if self.tabs:
            self.tabs += ' ' * (tabsize - 1)
        if self.newlines:
            self.newlines += '\n'
        self.wstt = get_bool_opt(options, 'wstokentype', True)

    def filter(self, lexer, stream):
        if self.wstt:
            spaces = self.spaces or ' '
            tabs = self.tabs or '\t'
            newlines = self.newlines or '\n'
            regex = re.compile(r'\s')

            def replacefunc(wschar):
                if wschar == ' ':
                    return spaces
                elif wschar == '\t':
                    return tabs
                elif wschar == '\n':
                    return newlines
                return wschar

            for ttype, value in stream:
                yield from _replace_special(ttype, value, regex, Whitespace,
                                            replacefunc)
        else:
            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
            # simpler processing
            for ttype, value in stream:
                if spaces:
                    value = value.replace(' ', spaces)
                if tabs:
                    value = value.replace('\t', tabs)
                if newlines:
                    value = value.replace('\n', newlines)
                yield ttype, value


class GobbleFilter(Filter):
    """Gobbles source code lines (eats initial characters).

    This filter drops the first ``n`` characters off every line of code. This
    may be useful when the source code fed to the lexer is indented by a fixed
    amount of space that isn't desired in the output.

    Options accepted:

    `n` : int
        The number of characters to gobble.

    .. versionadded:: 1.2
    """
    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.n = get_int_opt(options, 'n', 0)

    def gobble(self, value, left):
        if left < len(value):
            return value[left:], 0
        else:
            return '', left - len(value)

    def filter(self, lexer, stream):
        n = self.n
        left = n  # How many characters left to gobble.
        for ttype, value in stream:
            # Remove ``left`` tokens from first line, ``n`` from all others.
            parts = value.split('\n')
            (parts[0], left) = self.gobble(parts[0], left)
            for i in range(1, len(parts)):
                (parts[i], left) = self.gobble(parts[i], n)
            value = '\n'.join(parts)

            if value != '':
                yield ttype, value


class TokenMergeFilter(Filter):
    """Merges consecutive tokens with the same token type in the output
    stream of a lexer.

    .. versionadded:: 1.2
    """
    def __init__(self, **options):
        Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        current_type = None
        current_value = None
        for ttype, value in stream:
            if ttype is current_type:
                current_value += value
            else:
                if current_type is not None:
                    yield current_type, current_value
                current_type = ttype
                current_value = value
        if current_type is not None:
            yield current_type, current_value


FILTERS = {
    'codetagify': CodeTagFilter,
    'keywordcase': KeywordCaseFilter,
    'highlight': NameHighlightFilter,
    'raiseonerror': RaiseOnErrorTokenFilter,
    'whitespace': VisibleWhitespaceFilter,
    'gobble': GobbleFilter,
    'tokenmerge': TokenMergeFilter,
    'symbols': SymbolFilter,
}
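A usage sketch, assuming the vendored `PythonLexer` is importable from `pip._vendor.pygments.lexers.python`: any name registered in the `FILTERS` dict above can be instantiated through `get_filter_by_name` and attached to a lexer.

from pip._vendor.pygments.lexers.python import PythonLexer
from pip._vendor.pygments.filters import get_filter_by_name

lexer = PythonLexer()
# 'keywordcase' is one of the registered names; uppercase every Keyword token.
lexer.add_filter(get_filter_by_name('keywordcase', case='upper'))
for ttype, value in lexer.get_tokens('def foo(): pass\n'):
    print(ttype, repr(value))

Passing the string directly, `lexer.add_filter('keywordcase', case='upper')`, is equivalent, since `Lexer.add_filter` resolves names through `get_filter_by_name` itself.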
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py
ADDED
@@ -0,0 +1,94 @@
"""
    pygments.formatter
    ~~~~~~~~~~~~~~~~~~

    Base formatter class.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import codecs

from pip._vendor.pygments.util import get_bool_opt
from pip._vendor.pygments.styles import get_style_by_name

__all__ = ['Formatter']


def _lookup_style(style):
    if isinstance(style, str):
        return get_style_by_name(style)
    return style


class Formatter:
    """
    Converts a token stream to text.

    Options accepted:

    ``style``
        The style to use, can be a string or a Style subclass
        (default: "default"). Not used by e.g. the
        TerminalFormatter.
    ``full``
        Tells the formatter to output a "full" document, i.e.
        a complete self-contained document. This doesn't have
        any effect for some formatters (default: false).
    ``title``
        If ``full`` is true, the title that should be used to
        caption the document (default: '').
    ``encoding``
        If given, must be an encoding name. This will be used to
        convert the Unicode token strings to byte strings in the
        output. If it is "" or None, Unicode strings will be written
        to the output file, which most file-like objects do not
        support (default: None).
    ``outencoding``
        Overrides ``encoding`` if given.
    """

    #: Name of the formatter
    name = None

    #: Shortcuts for the formatter
    aliases = []

    #: fn match rules
    filenames = []

    #: If True, this formatter outputs Unicode strings when no encoding
    #: option is given.
    unicodeoutput = True

    def __init__(self, **options):
        self.style = _lookup_style(options.get('style', 'default'))
        self.full = get_bool_opt(options, 'full', False)
        self.title = options.get('title', '')
        self.encoding = options.get('encoding', None) or None
        if self.encoding in ('guess', 'chardet'):
            # can happen for e.g. pygmentize -O encoding=guess
            self.encoding = 'utf-8'
        self.encoding = options.get('outencoding') or self.encoding
        self.options = options

    def get_style_defs(self, arg=''):
        """
        Return the style definitions for the current style as a string.

        ``arg`` is an additional argument whose meaning depends on the
        formatter used. Note that ``arg`` can also be a list or tuple
        for some formatters like the html formatter.
        """
        return ''

    def format(self, tokensource, outfile):
        """
        Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
        tuples and write it into ``outfile``.
        """
        if self.encoding:
            # wrap the outfile in a StreamWriter
            outfile = codecs.lookup(self.encoding)[3](outfile)
        return self.format_unencoded(tokensource, outfile)
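A subclassing sketch, not part of the vendored sources: `Formatter.format` handles the optional output encoding and delegates the real work to a `format_unencoded` method that concrete formatters must supply. The class below is a hypothetical minimal formatter showing that contract.

import sys

from pip._vendor.pygments.formatter import Formatter

class PlainFormatter(Formatter):
    """Hypothetical formatter that writes token text with no markup."""
    name = 'Plain'
    aliases = ['plain-example']

    def format_unencoded(self, tokensource, outfile):
        # Emit each token's text verbatim; the styling info in ttype is ignored.
        for ttype, value in tokensource:
            outfile.write(value)

# Usage: PlainFormatter().format(lexer.get_tokens(code), sys.stdout)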
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexer.py
ADDED
@@ -0,0 +1,879 @@
1 |
+
"""
|
2 |
+
pygments.lexer
|
3 |
+
~~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
Base lexer classes.
|
6 |
+
|
7 |
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
|
8 |
+
:license: BSD, see LICENSE for details.
|
9 |
+
"""
|
10 |
+
|
11 |
+
import re
|
12 |
+
import sys
|
13 |
+
import time
|
14 |
+
|
15 |
+
from pip._vendor.pygments.filter import apply_filters, Filter
|
16 |
+
from pip._vendor.pygments.filters import get_filter_by_name
|
17 |
+
from pip._vendor.pygments.token import Error, Text, Other, _TokenType
|
18 |
+
from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
|
19 |
+
make_analysator, Future, guess_decode
|
20 |
+
from pip._vendor.pygments.regexopt import regex_opt
|
21 |
+
|
22 |
+
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
|
23 |
+
'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
|
24 |
+
'default', 'words']
|
25 |
+
|
26 |
+
|
27 |
+
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
|
28 |
+
(b'\xff\xfe\0\0', 'utf-32'),
|
29 |
+
(b'\0\0\xfe\xff', 'utf-32be'),
|
30 |
+
(b'\xff\xfe', 'utf-16'),
|
31 |
+
(b'\xfe\xff', 'utf-16be')]
|
32 |
+
|
33 |
+
_default_analyse = staticmethod(lambda x: 0.0)
|
34 |
+
|
35 |
+
|
36 |
+
class LexerMeta(type):
|
37 |
+
"""
|
38 |
+
This metaclass automagically converts ``analyse_text`` methods into
|
39 |
+
static methods which always return float values.
|
40 |
+
"""
|
41 |
+
|
42 |
+
def __new__(mcs, name, bases, d):
|
43 |
+
if 'analyse_text' in d:
|
44 |
+
d['analyse_text'] = make_analysator(d['analyse_text'])
|
45 |
+
return type.__new__(mcs, name, bases, d)
|
46 |
+
|
47 |
+
|
48 |
+
class Lexer(metaclass=LexerMeta):
|
49 |
+
"""
|
50 |
+
Lexer for a specific language.
|
51 |
+
|
52 |
+
Basic options recognized:
|
53 |
+
``stripnl``
|
54 |
+
Strip leading and trailing newlines from the input (default: True).
|
55 |
+
``stripall``
|
56 |
+
Strip all leading and trailing whitespace from the input
|
57 |
+
(default: False).
|
58 |
+
``ensurenl``
|
59 |
+
Make sure that the input ends with a newline (default: True). This
|
60 |
+
is required for some lexers that consume input linewise.
|
61 |
+
|
62 |
+
.. versionadded:: 1.3
|
63 |
+
|
64 |
+
``tabsize``
|
65 |
+
If given and greater than 0, expand tabs in the input (default: 0).
|
66 |
+
``encoding``
|
67 |
+
If given, must be an encoding name. This encoding will be used to
|
68 |
+
convert the input string to Unicode, if it is not already a Unicode
|
69 |
+
string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
|
70 |
+
Latin1 detection. Can also be ``'chardet'`` to use the chardet
|
71 |
+
library, if it is installed.
|
72 |
+
``inencoding``
|
73 |
+
Overrides the ``encoding`` if given.
|
74 |
+
"""
|
75 |
+
|
76 |
+
#: Name of the lexer
|
77 |
+
name = None
|
78 |
+
|
79 |
+
#: Shortcuts for the lexer
|
80 |
+
aliases = []
|
81 |
+
|
82 |
+
#: File name globs
|
83 |
+
filenames = []
|
84 |
+
|
85 |
+
#: Secondary file name globs
|
86 |
+
alias_filenames = []
|
87 |
+
|
88 |
+
#: MIME types
|
89 |
+
mimetypes = []
|
90 |
+
|
91 |
+
#: Priority, should multiple lexers match and no content is provided
|
92 |
+
priority = 0
|
93 |
+
|
94 |
+
def __init__(self, **options):
|
95 |
+
self.options = options
|
96 |
+
self.stripnl = get_bool_opt(options, 'stripnl', True)
|
97 |
+
self.stripall = get_bool_opt(options, 'stripall', False)
|
98 |
+
self.ensurenl = get_bool_opt(options, 'ensurenl', True)
|
99 |
+
self.tabsize = get_int_opt(options, 'tabsize', 0)
|
100 |
+
self.encoding = options.get('encoding', 'guess')
|
101 |
+
self.encoding = options.get('inencoding') or self.encoding
|
102 |
+
self.filters = []
|
103 |
+
for filter_ in get_list_opt(options, 'filters', ()):
|
104 |
+
self.add_filter(filter_)
|
105 |
+
|
106 |
+
def __repr__(self):
|
107 |
+
if self.options:
|
108 |
+
return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
|
109 |
+
self.options)
|
110 |
+
else:
|
111 |
+
return '<pygments.lexers.%s>' % self.__class__.__name__
|
112 |
+
|
113 |
+
def add_filter(self, filter_, **options):
|
114 |
+
"""
|
115 |
+
Add a new stream filter to this lexer.
|
116 |
+
"""
|
117 |
+
if not isinstance(filter_, Filter):
|
118 |
+
filter_ = get_filter_by_name(filter_, **options)
|
119 |
+
self.filters.append(filter_)
|
120 |
+
|
121 |
+
def analyse_text(text):
|
122 |
+
"""
|
123 |
+
Has to return a float between ``0`` and ``1`` that indicates
|
124 |
+
if a lexer wants to highlight this text. Used by ``guess_lexer``.
|
125 |
+
If this method returns ``0`` it won't highlight it in any case, if
|
126 |
+
it returns ``1`` highlighting with this lexer is guaranteed.
|
127 |
+
|
128 |
+
The `LexerMeta` metaclass automatically wraps this function so
|
129 |
+
that it works like a static method (no ``self`` or ``cls``
|
130 |
+
parameter) and the return value is automatically converted to
|
131 |
+
`float`. If the return value is an object that is boolean `False`
|
132 |
+
it's the same as if the return values was ``0.0``.
|
133 |
+
"""
|
134 |
+
|
135 |
+
def get_tokens(self, text, unfiltered=False):
|
136 |
+
"""
|
137 |
+
Return an iterable of (tokentype, value) pairs generated from
|
138 |
+
`text`. If `unfiltered` is set to `True`, the filtering mechanism
|
139 |
+
is bypassed even if filters are defined.
|
140 |
+
|
141 |
+
Also preprocess the text, i.e. expand tabs and strip it if
|
142 |
+
wanted and applies registered filters.
|
143 |
+
"""
|
144 |
+
if not isinstance(text, str):
|
145 |
+
if self.encoding == 'guess':
|
146 |
+
text, _ = guess_decode(text)
|
147 |
+
elif self.encoding == 'chardet':
|
148 |
+
try:
|
149 |
+
from pip._vendor import chardet
|
150 |
+
except ImportError as e:
|
151 |
+
raise ImportError('To enable chardet encoding guessing, '
|
152 |
+
'please install the chardet library '
|
153 |
+
'from http://chardet.feedparser.org/') from e
|
154 |
+
# check for BOM first
|
155 |
+
decoded = None
|
156 |
+
for bom, encoding in _encoding_map:
|
157 |
+
if text.startswith(bom):
|
158 |
+
decoded = text[len(bom):].decode(encoding, 'replace')
|
159 |
+
break
|
160 |
+
# no BOM found, so use chardet
|
161 |
+
if decoded is None:
|
162 |
+
enc = chardet.detect(text[:1024]) # Guess using first 1KB
|
163 |
+
decoded = text.decode(enc.get('encoding') or 'utf-8',
|
164 |
+
'replace')
|
165 |
+
text = decoded
|
166 |
+
else:
|
167 |
+
text = text.decode(self.encoding)
|
168 |
+
if text.startswith('\ufeff'):
|
169 |
+
text = text[len('\ufeff'):]
|
170 |
+
else:
|
171 |
+
if text.startswith('\ufeff'):
|
172 |
+
text = text[len('\ufeff'):]
|
173 |
+
|
174 |
+
# text now *is* a unicode string
|
175 |
+
text = text.replace('\r\n', '\n')
|
176 |
+
text = text.replace('\r', '\n')
|
177 |
+
if self.stripall:
|
178 |
+
text = text.strip()
|
179 |
+
elif self.stripnl:
|
180 |
+
text = text.strip('\n')
|
181 |
+
if self.tabsize > 0:
|
182 |
+
text = text.expandtabs(self.tabsize)
|
183 |
+
if self.ensurenl and not text.endswith('\n'):
|
184 |
+
text += '\n'
|
185 |
+
|
186 |
+
def streamer():
|
187 |
+
for _, t, v in self.get_tokens_unprocessed(text):
|
188 |
+
yield t, v
|
189 |
+
stream = streamer()
|
190 |
+
if not unfiltered:
|
191 |
+
stream = apply_filters(stream, self.filters, self)
|
192 |
+
return stream
|
193 |
+
|
194 |
+
def get_tokens_unprocessed(self, text):
|
195 |
+
"""
|
196 |
+
Return an iterable of (index, tokentype, value) pairs where "index"
|
197 |
+
is the starting position of the token within the input text.
|
198 |
+
|
199 |
+
In subclasses, implement this method as a generator to
|
200 |
+
maximize effectiveness.
|
201 |
+
"""
|
202 |
+
raise NotImplementedError
|
203 |
+
|
204 |
+
|
205 |
+
class DelegatingLexer(Lexer):
|
206 |
+
"""
|
207 |
+
This lexer takes two lexer as arguments. A root lexer and
|
208 |
+
a language lexer. First everything is scanned using the language
|
209 |
+
lexer, afterwards all ``Other`` tokens are lexed using the root
|
210 |
+
lexer.
|
211 |
+
|
212 |
+
The lexers from the ``template`` lexer package use this base lexer.
|
213 |
+
"""
|
214 |
+
|
215 |
+
def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
|
216 |
+
self.root_lexer = _root_lexer(**options)
|
217 |
+
self.language_lexer = _language_lexer(**options)
|
218 |
+
self.needle = _needle
|
219 |
+
Lexer.__init__(self, **options)
|
220 |
+
|
221 |
+
def get_tokens_unprocessed(self, text):
|
222 |
+
buffered = ''
|
223 |
+
insertions = []
|
224 |
+
lng_buffer = []
|
225 |
+
for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
|
226 |
+
if t is self.needle:
|
227 |
+
if lng_buffer:
|
228 |
+
insertions.append((len(buffered), lng_buffer))
|
229 |
+
lng_buffer = []
|
230 |
+
buffered += v
|
231 |
+
else:
|
232 |
+
lng_buffer.append((i, t, v))
|
233 |
+
if lng_buffer:
|
234 |
+
insertions.append((len(buffered), lng_buffer))
|
235 |
+
return do_insertions(insertions,
|
236 |
+
self.root_lexer.get_tokens_unprocessed(buffered))
|
237 |
+
|
238 |
+
|
239 |
+
# ------------------------------------------------------------------------------
|
240 |
+
# RegexLexer and ExtendedRegexLexer
|
241 |
+
#
|
242 |
+
|
243 |
+
|
244 |
+
class include(str): # pylint: disable=invalid-name
|
245 |
+
"""
|
246 |
+
Indicates that a state should include rules from another state.
|
247 |
+
"""
|
248 |
+
pass
|
249 |
+
|
250 |
+
|
251 |
+
class _inherit:
|
252 |
+
"""
|
253 |
+
Indicates the a state should inherit from its superclass.
|
254 |
+
"""
|
255 |
+
def __repr__(self):
|
256 |
+
return 'inherit'
|
257 |
+
|
258 |
+
inherit = _inherit() # pylint: disable=invalid-name
|
259 |
+
|
260 |
+
|
261 |
+
class combined(tuple): # pylint: disable=invalid-name
|
262 |
+
"""
|
263 |
+
Indicates a state combined from multiple states.
|
264 |
+
"""
|
265 |
+
|
266 |
+
def __new__(cls, *args):
|
267 |
+
return tuple.__new__(cls, args)
|
268 |
+
|
269 |
+
def __init__(self, *args):
|
270 |
+
# tuple.__init__ doesn't do anything
|
271 |
+
pass
|
272 |
+
|
273 |
+
|
274 |
+
class _PseudoMatch:
|
275 |
+
"""
|
276 |
+
A pseudo match object constructed from a string.
|
277 |
+
"""
|
278 |
+
|
279 |
+
def __init__(self, start, text):
|
280 |
+
self._text = text
|
281 |
+
self._start = start
|
282 |
+
|
283 |
+
def start(self, arg=None):
|
284 |
+
return self._start
|
285 |
+
|
286 |
+
def end(self, arg=None):
|
287 |
+
return self._start + len(self._text)
|
288 |
+
|
289 |
+
def group(self, arg=None):
|
290 |
+
if arg:
|
291 |
+
raise IndexError('No such group')
|
292 |
+
return self._text
|
293 |
+
|
294 |
+
def groups(self):
|
295 |
+
return (self._text,)
|
296 |
+
|
297 |
+
def groupdict(self):
|
298 |
+
return {}
|
299 |
+
|
300 |
+
|
301 |
+
def bygroups(*args):
|
302 |
+
"""
|
303 |
+
Callback that yields multiple actions for each group in the match.
|
304 |
+
"""
|
305 |
+
def callback(lexer, match, ctx=None):
|
306 |
+
for i, action in enumerate(args):
|
307 |
+
if action is None:
|
308 |
+
continue
|
309 |
+
elif type(action) is _TokenType:
|
310 |
+
data = match.group(i + 1)
|
311 |
+
if data:
|
312 |
+
yield match.start(i + 1), action, data
|
313 |
+
else:
|
314 |
+
data = match.group(i + 1)
|
315 |
+
if data is not None:
|
316 |
+
if ctx:
|
317 |
+
ctx.pos = match.start(i + 1)
|
318 |
+
for item in action(lexer,
|
319 |
+
_PseudoMatch(match.start(i + 1), data), ctx):
|
320 |
+
if item:
|
321 |
+
yield item
|
322 |
+
if ctx:
|
323 |
+
ctx.pos = match.end()
|
324 |
+
return callback
|
325 |
+
|
326 |
+
|
class _This:
    """
    Special singleton used for indicating the caller class.
    Used by ``using``.
    """

this = _This()


def using(_other, **kwargs):
    """
    Callback that processes the match with a different lexer.

    The keyword arguments are forwarded to the lexer, except `state` which
    is handled separately.

    `state` specifies the state that the new lexer will start in, and can
    be an enumerable such as ('root', 'inline', 'string') or a simple
    string which is assumed to be on top of the root state.

    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    gt_kwargs = {}
    if 'state' in kwargs:
        s = kwargs.pop('state')
        if isinstance(s, (list, tuple)):
            gt_kwargs['stack'] = s
        else:
            gt_kwargs['stack'] = ('root', s)

    if _other is this:
        def callback(lexer, match, ctx=None):
            # if keyword arguments are given the callback
            # function has to create a new lexer instance
            if kwargs:
                # XXX: cache that somehow
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                lx = lexer
            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    else:
        def callback(lexer, match, ctx=None):
            # XXX: cache that somehow
            kwargs.update(lexer.options)
            lx = _other(**kwargs)

            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    return callback

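A sketch of how `using` appears in rules (illustrative; `HtmlLexer` stands in for any other lexer class, and 'interp' for any state of the current lexer):

from pip._vendor.pygments.lexer import using, this
from pip._vendor.pygments.lexers.html import HtmlLexer

# Hypothetical rules: hand the matched text to another lexer, or back to the
# current lexer (via the ``this`` singleton) starting in a named state.
embedded_rules = [
    (r'<html>.*?</html>', using(HtmlLexer)),
    (r'\{\{.*?\}\}', using(this, state='interp')),
]
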
class default:
    """
    Indicates a state or state action (e.g. #pop) to apply.
    For example default('#pop') is equivalent to ('', Token, '#pop')
    Note that state tuples may be used as well.

    .. versionadded:: 2.0
    """
    def __init__(self, state):
        self.state = state


class words(Future):
    """
    Indicates a list of literal words that is transformed into an optimized
    regex that matches any of the words.

    .. versionadded:: 2.0
    """
    def __init__(self, words, prefix='', suffix=''):
        self.words = words
        self.prefix = prefix
        self.suffix = suffix

    def get(self):
        return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)

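A sketch combining both helpers in a state definition (keyword list and state contents are illustrative):

from pip._vendor.pygments.lexer import words, default
from pip._vendor.pygments.token import Keyword

# Hypothetical state: ``words`` compiles one optimized regex for the keyword
# list, and ``default('#pop')`` leaves the state when nothing else matches.
example_state = [
    (words(('if', 'else', 'while'), prefix=r'\b', suffix=r'\b'), Keyword),
    default('#pop'),
]
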
class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """

    def _process_regex(cls, regex, rflags, state):
        """Preprocess the regular expression component of a token definition."""
        if isinstance(regex, Future):
            regex = regex.get()
        return re.compile(regex, rflags).match

    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            'token type must be simple type or callable, not %r' % (token,)
        return token

    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            # an existing state
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                return new_state
            elif new_state[:5] == '#pop:':
                return -int(new_state[5:])
            else:
                assert False, 'unknown new state %r' % new_state
        elif isinstance(new_state, combined):
            # combine a new state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, 'circular state ref %r' % istate
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, 'unknown new state def %r' % new_state

    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert type(state) is str, "wrong state name %r" % state
        assert state[0] != '#', "invalid state name %r" % state
        if state in processed:
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference
                assert tdef != state, "circular state reference %r" % state
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue
            if isinstance(tdef, _inherit):
                # should be processed already, but may not in the case of:
                # 1. the state has no counterpart in any parent
                # 2. the state includes more than one 'inherit'
                continue
            if isinstance(tdef, default):
                new_state = cls._process_new_state(tdef.state, unprocessed, processed)
                tokens.append((re.compile('').match, None, new_state))
                continue

            assert type(tdef) is tuple, "wrong rule def %r" % tdef

            try:
                rex = cls._process_regex(tdef[0], rflags, state)
            except Exception as err:
                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                 (tdef[0], state, cls, err)) from err

            token = cls._process_token(tdef[1])

            if len(tdef) == 2:
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)

            tokens.append((rex, token, new_state))
        return tokens

    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in list(tokendefs):
            cls._process_state(tokendefs, processed, state)
        return processed

    def get_tokendefs(cls):
        """
        Merge tokens from superclasses in MRO order, returning a single tokendef
        dictionary.

        Any state that is not defined by a subclass will be inherited
        automatically. States that *are* defined by subclasses will, by
        default, override that state in the superclass. If a subclass wishes to
        inherit definitions from a superclass, it can use the special value
        "inherit", which will cause the superclass' state definition to be
        included at that point in the state.
        """
        tokens = {}
        inheritable = {}
        for c in cls.__mro__:
            toks = c.__dict__.get('tokens', {})

            for state, items in toks.items():
                curitems = tokens.get(state)
                if curitems is None:
                    # N.b. because this is assigned by reference, sufficiently
                    # deep hierarchies are processed incrementally (e.g. for
                    # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
                    # will not see any inherits in B).
                    tokens[state] = items
                    try:
                        inherit_ndx = items.index(inherit)
                    except ValueError:
                        continue
                    inheritable[state] = inherit_ndx
                    continue

                inherit_ndx = inheritable.pop(state, None)
                if inherit_ndx is None:
                    continue

                # Replace the "inherit" value with the items
                curitems[inherit_ndx:inherit_ndx+1] = items
                try:
                    # N.b. this is the index in items (that is, the superclass
                    # copy), so offset required when storing below.
                    new_inh_ndx = items.index(inherit)
                except ValueError:
                    pass
                else:
                    inheritable[state] = inherit_ndx + new_inh_ndx

        return tokens

    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        if '_tokens' not in cls.__dict__:
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())

        return type.__call__(cls, *args, **kwds)

+
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
|
583 |
+
"""
|
584 |
+
Base for simple stateful regular expression-based lexers.
|
585 |
+
Simplifies the lexing process so that you need only
|
586 |
+
provide a list of states and regular expressions.
|
587 |
+
"""
|
588 |
+
|
589 |
+
#: Flags for compiling the regular expressions.
|
590 |
+
#: Defaults to MULTILINE.
|
591 |
+
flags = re.MULTILINE
|
592 |
+
|
593 |
+
#: At all time there is a stack of states. Initially, the stack contains
|
594 |
+
#: a single state 'root'. The top of the stack is called "the current state".
|
595 |
+
#:
|
596 |
+
#: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
|
597 |
+
#:
|
598 |
+
#: ``new_state`` can be omitted to signify no state transition.
|
599 |
+
#: If ``new_state`` is a string, it is pushed on the stack. This ensure
|
600 |
+
#: the new current state is ``new_state``.
|
601 |
+
#: If ``new_state`` is a tuple of strings, all of those strings are pushed
|
602 |
+
#: on the stack and the current state will be the last element of the list.
|
603 |
+
#: ``new_state`` can also be ``combined('state1', 'state2', ...)``
|
604 |
+
#: to signify a new, anonymous state combined from the rules of two
|
605 |
+
#: or more existing ones.
|
606 |
+
#: Furthermore, it can be '#pop' to signify going back one step in
|
607 |
+
#: the state stack, or '#push' to push the current state on the stack
|
608 |
+
#: again. Note that if you push while in a combined state, the combined
|
609 |
+
#: state itself is pushed, and not only the state in which the rule is
|
610 |
+
#: defined.
|
611 |
+
#:
|
612 |
+
#: The tuple can also be replaced with ``include('state')``, in which
|
613 |
+
#: case the rules from the state named by the string are included in the
|
614 |
+
#: current one.
|
615 |
+
tokens = {}
|
616 |
+
|
617 |
+
def get_tokens_unprocessed(self, text, stack=('root',)):
|
618 |
+
"""
|
619 |
+
Split ``text`` into (tokentype, text) pairs.
|
620 |
+
|
621 |
+
``stack`` is the inital stack (default: ``['root']``)
|
622 |
+
"""
|
623 |
+
pos = 0
|
624 |
+
tokendefs = self._tokens
|
625 |
+
statestack = list(stack)
|
626 |
+
statetokens = tokendefs[statestack[-1]]
|
627 |
+
while 1:
|
628 |
+
for rexmatch, action, new_state in statetokens:
|
629 |
+
m = rexmatch(text, pos)
|
630 |
+
if m:
|
631 |
+
if action is not None:
|
632 |
+
if type(action) is _TokenType:
|
633 |
+
yield pos, action, m.group()
|
634 |
+
else:
|
635 |
+
yield from action(self, m)
|
636 |
+
pos = m.end()
|
637 |
+
if new_state is not None:
|
638 |
+
# state transition
|
639 |
+
if isinstance(new_state, tuple):
|
640 |
+
for state in new_state:
|
641 |
+
if state == '#pop':
|
642 |
+
if len(statestack) > 1:
|
643 |
+
statestack.pop()
|
644 |
+
elif state == '#push':
|
645 |
+
statestack.append(statestack[-1])
|
646 |
+
else:
|
647 |
+
statestack.append(state)
|
648 |
+
elif isinstance(new_state, int):
|
649 |
+
# pop, but keep at least one state on the stack
|
650 |
+
# (random code leading to unexpected pops should
|
651 |
+
# not allow exceptions)
|
652 |
+
if abs(new_state) >= len(statestack):
|
653 |
+
del statestack[1:]
|
654 |
+
else:
|
655 |
+
del statestack[new_state:]
|
656 |
+
elif new_state == '#push':
|
657 |
+
statestack.append(statestack[-1])
|
658 |
+
else:
|
659 |
+
assert False, "wrong state def: %r" % new_state
|
660 |
+
statetokens = tokendefs[statestack[-1]]
|
661 |
+
break
|
662 |
+
else:
|
663 |
+
# We are here only if all state tokens have been considered
|
664 |
+
# and there was not a match on any of them.
|
665 |
+
try:
|
666 |
+
if text[pos] == '\n':
|
667 |
+
# at EOL, reset state to "root"
|
668 |
+
statestack = ['root']
|
669 |
+
statetokens = tokendefs['root']
|
670 |
+
yield pos, Text, '\n'
|
671 |
+
pos += 1
|
672 |
+
continue
|
673 |
+
yield pos, Error, text[pos]
|
674 |
+
pos += 1
|
675 |
+
except IndexError:
|
676 |
+
break
|
677 |
+
|
678 |
+
|
679 |
+
class LexerContext:
|
680 |
+
"""
|
681 |
+
A helper object that holds lexer position data.
|
682 |
+
"""
|
683 |
+
|
684 |
+
def __init__(self, text, pos, stack=None, end=None):
|
685 |
+
self.text = text
|
686 |
+
self.pos = pos
|
687 |
+
self.end = end or len(text) # end=0 not supported ;-)
|
688 |
+
self.stack = stack or ['root']
|
689 |
+
|
690 |
+
def __repr__(self):
|
691 |
+
return 'LexerContext(%r, %r, %r)' % (
|
692 |
+
self.text, self.pos, self.stack)
|
693 |
+
|
694 |
+
|
695 |
+
class ExtendedRegexLexer(RegexLexer):
|
696 |
+
"""
|
697 |
+
A RegexLexer that uses a context object to store its state.
|
698 |
+
"""
|
699 |
+
|
700 |
+
def get_tokens_unprocessed(self, text=None, context=None):
|
701 |
+
"""
|
702 |
+
Split ``text`` into (tokentype, text) pairs.
|
703 |
+
If ``context`` is given, use this lexer context instead.
|
704 |
+
"""
|
705 |
+
tokendefs = self._tokens
|
706 |
+
if not context:
|
707 |
+
ctx = LexerContext(text, 0)
|
708 |
+
statetokens = tokendefs['root']
|
709 |
+
else:
|
710 |
+
ctx = context
|
711 |
+
statetokens = tokendefs[ctx.stack[-1]]
|
712 |
+
text = ctx.text
|
713 |
+
while 1:
|
714 |
+
for rexmatch, action, new_state in statetokens:
|
715 |
+
m = rexmatch(text, ctx.pos, ctx.end)
|
716 |
+
if m:
|
717 |
+
if action is not None:
|
718 |
+
if type(action) is _TokenType:
|
719 |
+
yield ctx.pos, action, m.group()
|
720 |
+
ctx.pos = m.end()
|
721 |
+
else:
|
722 |
+
yield from action(self, m, ctx)
|
723 |
+
if not new_state:
|
724 |
+
# altered the state stack?
|
725 |
+
statetokens = tokendefs[ctx.stack[-1]]
|
726 |
+
# CAUTION: callback must set ctx.pos!
|
727 |
+
if new_state is not None:
|
728 |
+
# state transition
|
729 |
+
if isinstance(new_state, tuple):
|
730 |
+
for state in new_state:
|
731 |
+
if state == '#pop':
|
732 |
+
if len(ctx.stack) > 1:
|
733 |
+
ctx.stack.pop()
|
734 |
+
elif state == '#push':
|
735 |
+
ctx.stack.append(ctx.stack[-1])
|
736 |
+
else:
|
737 |
+
ctx.stack.append(state)
|
738 |
+
elif isinstance(new_state, int):
|
739 |
+
# see RegexLexer for why this check is made
|
740 |
+
if abs(new_state) >= len(ctx.stack):
|
741 |
+
del ctx.state[1:]
|
742 |
+
else:
|
743 |
+
del ctx.stack[new_state:]
|
744 |
+
elif new_state == '#push':
|
745 |
+
ctx.stack.append(ctx.stack[-1])
|
746 |
+
else:
|
747 |
+
assert False, "wrong state def: %r" % new_state
|
748 |
+
statetokens = tokendefs[ctx.stack[-1]]
|
749 |
+
break
|
750 |
+
else:
|
751 |
+
try:
|
752 |
+
if ctx.pos >= ctx.end:
|
753 |
+
break
|
754 |
+
if text[ctx.pos] == '\n':
|
755 |
+
# at EOL, reset state to "root"
|
756 |
+
ctx.stack = ['root']
|
757 |
+
statetokens = tokendefs['root']
|
758 |
+
yield ctx.pos, Text, '\n'
|
759 |
+
ctx.pos += 1
|
760 |
+
continue
|
761 |
+
yield ctx.pos, Error, text[ctx.pos]
|
762 |
+
ctx.pos += 1
|
763 |
+
except IndexError:
|
764 |
+
break
|
765 |
+
|
766 |
+
|
def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream.

    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions
        yield from tokens
        return

    realpos = None
    insleft = True

    # iterate over the token stream where we want to insert
    # the tokens from the insertion list.
    for i, t, v in tokens:
        # first iteration. store the position of first item
        if realpos is None:
            realpos = i
        oldi = 0
        while insleft and i + len(v) >= index:
            tmpval = v[oldi:index - i]
            if tmpval:
                yield realpos, t, tmpval
                realpos += len(tmpval)
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break  # not strictly necessary
        if oldi < len(v):
            yield realpos, t, v[oldi:]
            realpos += len(v) - oldi

    # leftover tokens
    while insleft:
        # no normal tokens, set realpos to zero
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break  # not strictly necessary

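A sketch of the intended use of `do_insertions` (values illustrative): a console-style lexer splices prompt tokens into the stream produced for the code that follows them.

from pip._vendor.pygments.lexer import do_insertions
from pip._vendor.pygments.token import Generic, Text

# Hypothetical call: insert the prompt tokens at position 0; the code token's
# offsets are shifted past the inserted text.
code_tokens = [(0, Text, 'print(1)\n')]
insertions = [(0, [(0, Generic.Prompt, '>>> ')])]
merged = list(do_insertions(insertions, iter(code_tokens)))
# [(0, Token.Generic.Prompt, '>>> '), (4, Token.Text, 'print(1)\n')]
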
class ProfilingRegexLexerMeta(RegexLexerMeta):
    """Metaclass for ProfilingRegexLexer, collects regex timing info."""

    def _process_regex(cls, regex, rflags, state):
        if isinstance(regex, words):
            rex = regex_opt(regex.words, prefix=regex.prefix,
                            suffix=regex.suffix)
        else:
            rex = regex
        compiled = re.compile(rex, rflags)

        def match_func(text, pos, endpos=sys.maxsize):
            info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
            t0 = time.time()
            res = compiled.match(text, pos, endpos)
            t1 = time.time()
            info[0] += 1
            info[1] += t1 - t0
            return res
        return match_func


class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
    """Drop-in replacement for RegexLexer that does profiling of its regexes."""

    _prof_data = []
    _prof_sort_index = 4  # defaults to time per call

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # this needs to be a stack, since using(this) will produce nested calls
        self.__class__._prof_data.append({})
        yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
        rawdata = self.__class__._prof_data.pop()
        data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
                        n, 1000 * t, 1000 * t / n)
                       for ((s, r), (n, t)) in rawdata.items()),
                      key=lambda x: x[self._prof_sort_index],
                      reverse=True)
        sum_total = sum(x[3] for x in data)

        print()
        print('Profiling result for %s lexing %d chars in %.3f ms' %
              (self.__class__.__name__, len(text), sum_total))
        print('=' * 110)
        print('%-20s %-64s ncalls  tottime  percall' % ('state', 'regex'))
        print('-' * 110)
        for d in data:
            print('%-20s %-65s %5d %8.4f %8.4f' % d)
        print('=' * 110)
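A usage sketch for the profiler (class names illustrative): because ProfilingRegexLexerMeta subclasses RegexLexerMeta, it can usually be mixed into an existing RegexLexer subclass without a metaclass conflict.

from pip._vendor.pygments.lexer import ProfilingRegexLexer
from pip._vendor.pygments.lexers.python import PythonLexer

# Hypothetical profiling variant: reuses PythonLexer's token table and prints
# the per-regex timing table once lexing finishes.
class ProfilingPythonLexer(PythonLexer, ProfilingRegexLexer):
    pass

# list(ProfilingPythonLexer().get_tokens('x = 1\n'))
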
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__init__.py
ADDED
@@ -0,0 +1,341 @@
"""
    pygments.lexers
    ~~~~~~~~~~~~~~~

    Pygments lexers.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
import sys
import types
import fnmatch
from os.path import basename

from pip._vendor.pygments.lexers._mapping import LEXERS
from pip._vendor.pygments.modeline import get_filetype_from_buffer
from pip._vendor.pygments.plugin import find_plugin_lexers
from pip._vendor.pygments.util import ClassNotFound, guess_decode

COMPAT = {
    'Python3Lexer': 'PythonLexer',
    'Python3TracebackLexer': 'PythonTracebackLexer',
}

__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
           'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)

_lexer_cache = {}
_pattern_cache = {}


def _fn_matches(fn, glob):
    """Return whether the supplied file name fn matches pattern filename."""
    if glob not in _pattern_cache:
        pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
        return pattern.match(fn)
    return _pattern_cache[glob].match(fn)


def _load_lexers(module_name):
    """Load a lexer (and all others in the module too)."""
    mod = __import__(module_name, None, None, ['__all__'])
    for lexer_name in mod.__all__:
        cls = getattr(mod, lexer_name)
        _lexer_cache[cls.name] = cls


def get_all_lexers():
    """Return a generator of tuples in the form ``(name, aliases,
    filenames, mimetypes)`` of all known lexers.
    """
    for item in LEXERS.values():
        yield item[1:]
    for lexer in find_plugin_lexers():
        yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes


def find_lexer_class(name):
    """Lookup a lexer class by name.

    Return None if not found.
    """
    if name in _lexer_cache:
        return _lexer_cache[name]
    # lookup builtin lexers
    for module_name, lname, aliases, _, _ in LEXERS.values():
        if name == lname:
            _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if cls.name == name:
            return cls


def find_lexer_class_by_name(_alias):
    """Lookup a lexer class by alias.

    Like `get_lexer_by_name`, but does not instantiate the class.

    .. versionadded:: 2.2
    """
    if not _alias:
        raise ClassNotFound('no lexer for alias %r found' % _alias)
    # lookup builtin lexers
    for module_name, name, aliases, _, _ in LEXERS.values():
        if _alias.lower() in aliases:
            if name not in _lexer_cache:
                _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if _alias.lower() in cls.aliases:
            return cls
    raise ClassNotFound('no lexer for alias %r found' % _alias)


def get_lexer_by_name(_alias, **options):
    """Get a lexer by an alias.

    Raises ClassNotFound if not found.
    """
    if not _alias:
        raise ClassNotFound('no lexer for alias %r found' % _alias)

    # lookup builtin lexers
    for module_name, name, aliases, _, _ in LEXERS.values():
        if _alias.lower() in aliases:
            if name not in _lexer_cache:
                _load_lexers(module_name)
            return _lexer_cache[name](**options)
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if _alias.lower() in cls.aliases:
            return cls(**options)
    raise ClassNotFound('no lexer for alias %r found' % _alias)

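The functions above are the main lookup entry points; a short usage sketch (alias and option are illustrative):

from pip._vendor.pygments.lexers import (get_lexer_by_name,
                                         find_lexer_class_by_name)

lexer = get_lexer_by_name('python', stripall=True)  # instance, options applied
cls = find_lexer_class_by_name('python')            # same lookup, class only
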
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
    """Load a lexer from a file.

    This method expects a file located relative to the current working
    directory, which contains a Lexer class. By default, it expects the
    Lexer to be named CustomLexer; you can specify your own class name
    as the second argument to this function.

    Users should be very careful with the input, because this method
    is equivalent to running eval on the input file.

    Raises ClassNotFound if there are any problems importing the Lexer.

    .. versionadded:: 2.2
    """
    try:
        # This empty dict will contain the namespace for the exec'd file
        custom_namespace = {}
        with open(filename, 'rb') as f:
            exec(f.read(), custom_namespace)
        # Retrieve the class `lexername` from that namespace
        if lexername not in custom_namespace:
            raise ClassNotFound('no valid %s class found in %s' %
                                (lexername, filename))
        lexer_class = custom_namespace[lexername]
        # And finally instantiate it with the options
        return lexer_class(**options)
    except OSError as err:
        raise ClassNotFound('cannot read %s: %s' % (filename, err))
    except ClassNotFound:
        raise
    except Exception as err:
        raise ClassNotFound('error when loading custom lexer: %s' % err)


def find_lexer_class_for_filename(_fn, code=None):
    """Get a lexer for a filename.

    If multiple lexers match the filename pattern, use ``analyse_text()`` to
    figure out which one is more appropriate.

    Returns None if not found.
    """
    matches = []
    fn = basename(_fn)
    for modname, name, _, filenames, _ in LEXERS.values():
        for filename in filenames:
            if _fn_matches(fn, filename):
                if name not in _lexer_cache:
                    _load_lexers(modname)
                matches.append((_lexer_cache[name], filename))
    for cls in find_plugin_lexers():
        for filename in cls.filenames:
            if _fn_matches(fn, filename):
                matches.append((cls, filename))

    if isinstance(code, bytes):
        # decode it, since all analyse_text functions expect unicode
        code = guess_decode(code)

    def get_rating(info):
        cls, filename = info
        # explicit patterns get a bonus
        bonus = '*' not in filename and 0.5 or 0
        # The class _always_ defines analyse_text because it's included in
        # the Lexer class. The default implementation returns None which
        # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
        # to find lexers which need it overridden.
        if code:
            return cls.analyse_text(code) + bonus, cls.__name__
        return cls.priority + bonus, cls.__name__

    if matches:
        matches.sort(key=get_rating)
        # print "Possible lexers, after sort:", matches
        return matches[-1][0]


def get_lexer_for_filename(_fn, code=None, **options):
    """Get a lexer for a filename.

    If multiple lexers match the filename pattern, use ``analyse_text()`` to
    figure out which one is more appropriate.

    Raises ClassNotFound if not found.
    """
    res = find_lexer_class_for_filename(_fn, code)
    if not res:
        raise ClassNotFound('no lexer for filename %r found' % _fn)
    return res(**options)


def get_lexer_for_mimetype(_mime, **options):
    """Get a lexer for a mimetype.

    Raises ClassNotFound if not found.
    """
    for modname, name, _, _, mimetypes in LEXERS.values():
        if _mime in mimetypes:
            if name not in _lexer_cache:
                _load_lexers(modname)
            return _lexer_cache[name](**options)
    for cls in find_plugin_lexers():
        if _mime in cls.mimetypes:
            return cls(**options)
    raise ClassNotFound('no lexer for mimetype %r found' % _mime)


def _iter_lexerclasses(plugins=True):
    """Return an iterator over all lexer classes."""
    for key in sorted(LEXERS):
        module_name, name = LEXERS[key][:2]
        if name not in _lexer_cache:
            _load_lexers(module_name)
        yield _lexer_cache[name]
    if plugins:
        yield from find_plugin_lexers()


def guess_lexer_for_filename(_fn, _text, **options):
    """
    Look up all lexers that handle the filename as a primary pattern
    (``filenames``) or a secondary one (``alias_filenames``). Then run a
    text analysis for those lexers and choose the best result.

    usage::

        >>> from pygments.lexers import guess_lexer_for_filename
        >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
        <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
        >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
        <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
        >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
        <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
    """
    fn = basename(_fn)
    primary = {}
    matching_lexers = set()
    for lexer in _iter_lexerclasses():
        for filename in lexer.filenames:
            if _fn_matches(fn, filename):
                matching_lexers.add(lexer)
                primary[lexer] = True
        for filename in lexer.alias_filenames:
            if _fn_matches(fn, filename):
                matching_lexers.add(lexer)
                primary[lexer] = False
    if not matching_lexers:
        raise ClassNotFound('no lexer for filename %r found' % fn)
    if len(matching_lexers) == 1:
        return matching_lexers.pop()(**options)
    result = []
    for lexer in matching_lexers:
        rv = lexer.analyse_text(_text)
        if rv == 1.0:
            return lexer(**options)
        result.append((rv, lexer))

    def type_sort(t):
        # sort by:
        # - analyse score
        # - is primary filename pattern?
        # - priority
        # - last resort: class name
        return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
    result.sort(key=type_sort)

    return result[-1][1](**options)


def guess_lexer(_text, **options):
    """Guess a lexer by strong distinctions in the text (e.g., shebang)."""

    if not isinstance(_text, str):
        inencoding = options.get('inencoding', options.get('encoding'))
        if inencoding:
            _text = _text.decode(inencoding or 'utf8')
        else:
            _text, _ = guess_decode(_text)

    # try to get a vim modeline first
    ft = get_filetype_from_buffer(_text)

    if ft is not None:
        try:
            return get_lexer_by_name(ft, **options)
        except ClassNotFound:
            pass

    best_lexer = [0.0, None]
    for lexer in _iter_lexerclasses():
        rv = lexer.analyse_text(_text)
        if rv == 1.0:
            return lexer(**options)
        if rv > best_lexer[0]:
            best_lexer[:] = (rv, lexer)
    if not best_lexer[0] or best_lexer[1] is None:
        raise ClassNotFound('no lexer matching the text found')
    return best_lexer[1](**options)

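A usage sketch for `guess_lexer` (input text illustrative): a strong signal such as a shebang should short-circuit the `analyse_text()` scan.

from pip._vendor.pygments.lexers import guess_lexer

lexer = guess_lexer('#!/usr/bin/env python\nprint(1)\n')
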
class _automodule(types.ModuleType):
    """Automatically import lexers."""

    def __getattr__(self, name):
        info = LEXERS.get(name)
        if info:
            _load_lexers(info[0])
            cls = _lexer_cache[info[1]]
            setattr(self, name, cls)
            return cls
        if name in COMPAT:
            return getattr(self, COMPAT[name])
        raise AttributeError(name)


oldmod = sys.modules[__name__]
newmod = _automodule(__name__)
newmod.__dict__.update(oldmod.__dict__)
sys.modules[__name__] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
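The module swap above makes every lexer class importable lazily from the package; a sketch of the effect:

from pip._vendor.pygments import lexers

# First attribute access triggers _automodule.__getattr__, which imports the
# defining module via LEXERS and caches the class on the module object.
cls = lexers.PythonLexer
legacy = lexers.Python3Lexer  # resolved through the COMPAT alias table
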
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (9.19 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-310.pyc
ADDED
Binary file (58.1 kB).

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-310.pyc
ADDED
Binary file (29.4 kB).
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/_mapping.py
ADDED
@@ -0,0 +1,580 @@
"""
    pygments.lexers._mapping
    ~~~~~~~~~~~~~~~~~~~~~~~~

    Lexer mapping definitions. This file is generated by itself. Every time
    you change something in a builtin lexer definition, run this script from
    the lexers folder to update it.

    Do not alter the LEXERS dictionary by hand.

    :copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

LEXERS = {
    'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
    'AMDGPULexer': ('pip._vendor.pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
    'APLLexer': ('pip._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
    'AbnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
    'ActionScript3Lexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
    'ActionScriptLexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
    'AdaLexer': ('pip._vendor.pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
    'AdlLexer': ('pip._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
    'AgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
    'AheuiLexer': ('pip._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
    'AlloyLexer': ('pip._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
    'AmbientTalkLexer': ('pip._vendor.pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
    'AmplLexer': ('pip._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
    'Angular2HtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
    'Angular2Lexer': ('pip._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
    'AntlrActionScriptLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
    'AntlrCSharpLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
    'AntlrCppLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
    'AntlrJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
    'AntlrLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
    'AntlrObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
    'AntlrPerlLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
    'AntlrPythonLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
    'AntlrRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
    'ApacheConfLexer': ('pip._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
    'AppleScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
    'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
    'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
    'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
    'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
    'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
    'AugeasLexer': ('pip._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
    'AutoItLexer': ('pip._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
    'AutohotkeyLexer': ('pip._vendor.pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
    'AwkLexer': ('pip._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
    'BBCBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
    'BBCodeLexer': ('pip._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
    'BCLexer': ('pip._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
    'BSTLexer': ('pip._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
    'BareLexer': ('pip._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
    'BaseMakefileLexer': ('pip._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
    'BashLexer': ('pip._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
    'BashSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
    'BatchLexer': ('pip._vendor.pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
    'BddLexer': ('pip._vendor.pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
    'BefungeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
    'BibTeXLexer': ('pip._vendor.pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
    'BlitzBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
    'BlitzMaxLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
    'BnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
    'BoaLexer': ('pip._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()),
    'BooLexer': ('pip._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
    'BoogieLexer': ('pip._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
    'BrainfuckLexer': ('pip._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
    'BugsLexer': ('pip._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
    'CAmkESLexer': ('pip._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
    'CLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc', '*.x[bp]m'), ('text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap')),
    'CMakeLexer': ('pip._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
    'CObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
    'CPSALexer': ('pip._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
    'CSharpAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
    'CSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#', 'cs'), ('*.cs',), ('text/x-csharp',)),
    'Ca65Lexer': ('pip._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
    'CadlLexer': ('pip._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
    'CapDLLexer': ('pip._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
    'CapnProtoLexer': ('pip._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
    'CbmBasicV2Lexer': ('pip._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
    'CddlLexer': ('pip._vendor.pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
    'CeylonLexer': ('pip._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
    'Cfengine3Lexer': ('pip._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
    'ChaiscriptLexer': ('pip._vendor.pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
    'ChapelLexer': ('pip._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
    'CharmciLexer': ('pip._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
    'CheetahHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
    'CheetahJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
    'CheetahLexer': ('pip._vendor.pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
    'CheetahXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
    'CirruLexer': ('pip._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
    'ClayLexer': ('pip._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
    'CleanLexer': ('pip._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
    'ClojureLexer': ('pip._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
    'ClojureScriptLexer': ('pip._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
    'CobolFreeformatLexer': ('pip._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
    'CobolLexer': ('pip._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
    'CoffeeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
    'ColdfusionCFCLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
    'ColdfusionHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
    'ColdfusionLexer': ('pip._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
    'CommonLispLexer': ('pip._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
    'ComponentPascalLexer': ('pip._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
    'CoqLexer': ('pip._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
    'CppLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
    'CppObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
    'CrmshLexer': ('pip._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
    'CrocLexer': ('pip._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
    'CryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
    'CrystalLexer': ('pip._vendor.pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
    'CsoundDocumentLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
    'CsoundOrchestraLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
    'CsoundScoreLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
    'CssDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
    'CssErbLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
    'CssGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
    'CssLexer': ('pip._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
    'CssPhpLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
    'CssSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
    'CudaLexer': ('pip._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
    'CypherLexer': ('pip._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
    'CythonLexer': ('pip._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
    'DLexer': ('pip._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
    'DObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
    'DarcsPatchLexer': ('pip._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
    'DartLexer': ('pip._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
    'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
    'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
    'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
    'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
    'DgLexer': ('pip._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
    'DiffLexer': ('pip._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
    'DjangoLexer': ('pip._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
    'DockerLexer': ('pip._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
    'DtdLexer': ('pip._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
    'DuelLexer': ('pip._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
    'DylanConsoleLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
    'DylanLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
    'DylanLidLexer': ('pip._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
    'ECLLexer': ('pip._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
    'ECLexer': ('pip._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
    'EarlGreyLexer': ('pip._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
    'EasytrieveLexer': ('pip._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
    'EbnfLexer': ('pip._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
    'EiffelLexer': ('pip._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
    'ElixirConsoleLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
    'ElixirLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
    'ElmLexer': ('pip._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
    'ElpiLexer': ('pip._vendor.pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)),
    'EmacsLispLexer': ('pip._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
    'EmailLexer': ('pip._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
    'ErbLexer': ('pip._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
    'ErlangLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
    'ErlangShellLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
    'EvoqueHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
    'EvoqueLexer': ('pip._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
    'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
    'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
    'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
    'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
    'FStarLexer': ('pip._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
    'FactorLexer': ('pip._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
    'FancyLexer': ('pip._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
    'FantomLexer': ('pip._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
    'FelixLexer': ('pip._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
    'FennelLexer': ('pip._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
    'FishShellLexer': ('pip._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
    'FlatlineLexer': ('pip._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
    'FloScriptLexer': ('pip._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
    'ForthLexer': ('pip._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
    'FortranFixedLexer': ('pip._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
    'FortranLexer': ('pip._vendor.pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
    'FoxProLexer': ('pip._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
    'FreeFemLexer': ('pip._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
    'FutharkLexer': ('pip._vendor.pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
    'GAPLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
    'GDScriptLexer': ('pip._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
    'GLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
    'GSQLLexer': ('pip._vendor.pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
    'GasLexer': ('pip._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
    'GcodeLexer': ('pip._vendor.pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
    'GenshiLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
    'GenshiTextLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
    'GettextLexer': ('pip._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
    'GherkinLexer': ('pip._vendor.pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
    'GnuplotLexer': ('pip._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
    'GoLexer': ('pip._vendor.pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
    'GoloLexer': ('pip._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
    'GoodDataCLLexer': ('pip._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
    'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
|
193 |
+
'GosuTemplateLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
|
194 |
+
'GraphvizLexer': ('pip._vendor.pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
|
195 |
+
'GroffLexer': ('pip._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')),
|
196 |
+
'GroovyLexer': ('pip._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
|
197 |
+
'HLSLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
|
198 |
+
'HamlLexer': ('pip._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
|
199 |
+
'HandlebarsHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
|
200 |
+
'HandlebarsLexer': ('pip._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
|
201 |
+
'HaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
|
202 |
+
'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
|
203 |
+
'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
|
204 |
+
'HsailLexer': ('pip._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
|
205 |
+
'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
|
206 |
+
'HtmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
|
207 |
+
'HtmlGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
|
208 |
+
'HtmlLexer': ('pip._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
|
209 |
+
'HtmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
|
210 |
+
'HtmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
|
211 |
+
'HttpLexer': ('pip._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
|
212 |
+
'HxmlLexer': ('pip._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
|
213 |
+
'HyLexer': ('pip._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
|
214 |
+
'HybrisLexer': ('pip._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
|
215 |
+
'IDLLexer': ('pip._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
|
216 |
+
'IconLexer': ('pip._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
|
217 |
+
'IdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
|
218 |
+
'IgorLexer': ('pip._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
|
219 |
+
'Inform6Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
|
220 |
+
'Inform6TemplateLexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
|
221 |
+
'Inform7Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
|
222 |
+
'IniLexer': ('pip._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig', '*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ('text/x-ini', 'text/inf')),
|
223 |
+
'IoLexer': ('pip._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
|
224 |
+
'IokeLexer': ('pip._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
|
225 |
+
'IrcLogsLexer': ('pip._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
|
226 |
+
'IsabelleLexer': ('pip._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
|
227 |
+
'JLexer': ('pip._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
|
228 |
+
'JSLTLexer': ('pip._vendor.pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
|
229 |
+
'JagsLexer': ('pip._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
|
230 |
+
'JasminLexer': ('pip._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
|
231 |
+
'JavaLexer': ('pip._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
|
232 |
+
'JavascriptDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
|
233 |
+
'JavascriptErbLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
|
234 |
+
'JavascriptGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
|
235 |
+
'JavascriptLexer': ('pip._vendor.pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
|
236 |
+
'JavascriptPhpLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
|
237 |
+
'JavascriptSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
|
238 |
+
'JclLexer': ('pip._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
|
239 |
+
'JsgfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
|
240 |
+
'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()),
|
241 |
+
'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
|
242 |
+
'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
|
243 |
+
'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
|
244 |
+
'JuliaConsoleLexer': ('pip._vendor.pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
|
245 |
+
'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
|
246 |
+
'JuttleLexer': ('pip._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
|
247 |
+
'KalLexer': ('pip._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
|
248 |
+
'KconfigLexer': ('pip._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
|
249 |
+
'KernelLogLexer': ('pip._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
|
250 |
+
'KokaLexer': ('pip._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
|
251 |
+
'KotlinLexer': ('pip._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
|
252 |
+
'KuinLexer': ('pip._vendor.pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
|
253 |
+
'LSLLexer': ('pip._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
|
254 |
+
'LassoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
|
255 |
+
'LassoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
|
256 |
+
'LassoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
|
257 |
+
'LassoLexer': ('pip._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
|
258 |
+
'LassoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
|
259 |
+
'LeanLexer': ('pip._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
|
260 |
+
'LessCssLexer': ('pip._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
|
261 |
+
'LighttpdConfLexer': ('pip._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
|
262 |
+
'LilyPondLexer': ('pip._vendor.pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
|
263 |
+
'LimboLexer': ('pip._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
|
264 |
+
'LiquidLexer': ('pip._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
|
265 |
+
'LiterateAgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
|
266 |
+
'LiterateCryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
|
267 |
+
'LiterateHaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
|
268 |
+
'LiterateIdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
|
269 |
+
'LiveScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
|
270 |
+
'LlvmLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
|
271 |
+
'LlvmMirBodyLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
|
272 |
+
'LlvmMirLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
|
273 |
+
'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
|
274 |
+
'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
|
275 |
+
'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
|
276 |
+
'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
|
277 |
+
'MOOCodeLexer': ('pip._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
|
278 |
+
'MSDOSSessionLexer': ('pip._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
|
279 |
+
'MakefileLexer': ('pip._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
|
280 |
+
'MakoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
|
281 |
+
'MakoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
|
282 |
+
'MakoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
|
283 |
+
'MakoLexer': ('pip._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
|
284 |
+
'MakoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
|
285 |
+
'MaqlLexer': ('pip._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
|
286 |
+
'MarkdownLexer': ('pip._vendor.pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
|
287 |
+
'MaskLexer': ('pip._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
|
288 |
+
'MasonLexer': ('pip._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
|
289 |
+
'MathematicaLexer': ('pip._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
|
290 |
+
'MatlabLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
|
291 |
+
'MatlabSessionLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
|
292 |
+
'MaximaLexer': ('pip._vendor.pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()),
|
293 |
+
'MesonLexer': ('pip._vendor.pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
|
294 |
+
'MiniDLexer': ('pip._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
|
295 |
+
'MiniScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
|
296 |
+
'ModelicaLexer': ('pip._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
|
297 |
+
'Modula2Lexer': ('pip._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
|
298 |
+
'MoinWikiLexer': ('pip._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
|
299 |
+
'MonkeyLexer': ('pip._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
|
300 |
+
'MonteLexer': ('pip._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
|
301 |
+
'MoonScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
|
302 |
+
'MoselLexer': ('pip._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
|
303 |
+
'MozPreprocCssLexer': ('pip._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
|
304 |
+
'MozPreprocHashLexer': ('pip._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
|
305 |
+
'MozPreprocJavascriptLexer': ('pip._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
|
306 |
+
'MozPreprocPercentLexer': ('pip._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
|
307 |
+
'MozPreprocXulLexer': ('pip._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
|
308 |
+
'MqlLexer': ('pip._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
|
309 |
+
'MscgenLexer': ('pip._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
|
310 |
+
'MuPADLexer': ('pip._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
|
311 |
+
'MxmlLexer': ('pip._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
|
312 |
+
'MySqlLexer': ('pip._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
|
313 |
+
'MyghtyCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
|
314 |
+
'MyghtyHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
|
315 |
+
'MyghtyJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
|
316 |
+
'MyghtyLexer': ('pip._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
|
317 |
+
'MyghtyXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
|
318 |
+
'NCLLexer': ('pip._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
|
319 |
+
'NSISLexer': ('pip._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
|
320 |
+
'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
|
321 |
+
'NasmObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
|
322 |
+
'NemerleLexer': ('pip._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
|
323 |
+
'NesCLexer': ('pip._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
|
324 |
+
'NestedTextLexer': ('pip._vendor.pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
|
325 |
+
'NewLispLexer': ('pip._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
|
326 |
+
'NewspeakLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
|
327 |
+
'NginxConfLexer': ('pip._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
|
328 |
+
'NimrodLexer': ('pip._vendor.pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
|
329 |
+
'NitLexer': ('pip._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
|
330 |
+
'NixLexer': ('pip._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
|
331 |
+
'NodeConsoleLexer': ('pip._vendor.pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
|
332 |
+
'NotmuchLexer': ('pip._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
|
333 |
+
'NuSMVLexer': ('pip._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
|
334 |
+
'NumPyLexer': ('pip._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
|
335 |
+
'ObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
|
336 |
+
'ObjectiveCLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
|
337 |
+
'ObjectiveCppLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
|
338 |
+
'ObjectiveJLexer': ('pip._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
|
339 |
+
'OcamlLexer': ('pip._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
|
340 |
+
'OctaveLexer': ('pip._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
|
341 |
+
'OdinLexer': ('pip._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
|
342 |
+
'OmgIdlLexer': ('pip._vendor.pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
|
343 |
+
'OocLexer': ('pip._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
|
344 |
+
'OpaLexer': ('pip._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
|
345 |
+
'OpenEdgeLexer': ('pip._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
|
346 |
+
'OutputLexer': ('pip._vendor.pygments.lexers.special', 'Text output', ('output',), (), ()),
|
347 |
+
'PacmanConfLexer': ('pip._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
|
348 |
+
'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
|
349 |
+
'ParaSailLexer': ('pip._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
|
350 |
+
'PawnLexer': ('pip._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
|
351 |
+
'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
|
352 |
+
'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
|
353 |
+
'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
|
354 |
+
'PhpLexer': ('pip._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
|
355 |
+
'PigLexer': ('pip._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
|
356 |
+
'PikeLexer': ('pip._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
|
357 |
+
'PkgConfigLexer': ('pip._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
|
358 |
+
'PlPgsqlLexer': ('pip._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
|
359 |
+
'PointlessLexer': ('pip._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
|
360 |
+
'PonyLexer': ('pip._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
|
361 |
+
'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
|
362 |
+
'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
|
363 |
+
'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
|
364 |
+
'PovrayLexer': ('pip._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
|
365 |
+
'PowerShellLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
|
366 |
+
'PowerShellSessionLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
|
367 |
+
'PraatLexer': ('pip._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
|
368 |
+
'ProcfileLexer': ('pip._vendor.pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
|
369 |
+
'PrologLexer': ('pip._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
|
370 |
+
'PromQLLexer': ('pip._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
|
371 |
+
'PropertiesLexer': ('pip._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
|
372 |
+
'ProtoBufLexer': ('pip._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
|
373 |
+
'PsyshConsoleLexer': ('pip._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
|
374 |
+
'PugLexer': ('pip._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
|
375 |
+
'PuppetLexer': ('pip._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
|
376 |
+
'PyPyLogLexer': ('pip._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
|
377 |
+
'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
|
378 |
+
'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
|
379 |
+
'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
|
380 |
+
'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
|
381 |
+
'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
|
382 |
+
'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
|
383 |
+
'QVToLexer': ('pip._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
|
384 |
+
'QmlLexer': ('pip._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
|
385 |
+
'RConsoleLexer': ('pip._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
|
386 |
+
'RNCCompactLexer': ('pip._vendor.pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
|
387 |
+
'RPMSpecLexer': ('pip._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
|
388 |
+
'RacketLexer': ('pip._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
|
389 |
+
'RagelCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
|
390 |
+
'RagelCppLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
|
391 |
+
'RagelDLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
|
392 |
+
'RagelEmbeddedLexer': ('pip._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
|
393 |
+
'RagelJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
|
394 |
+
'RagelLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
|
395 |
+
'RagelObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
|
396 |
+
'RagelRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
|
397 |
+
'RawTokenLexer': ('pip._vendor.pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
|
398 |
+
'RdLexer': ('pip._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
|
399 |
+
'ReasonLexer': ('pip._vendor.pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
|
400 |
+
'RebolLexer': ('pip._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
|
401 |
+
'RedLexer': ('pip._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
|
402 |
+
'RedcodeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
|
403 |
+
'RegeditLexer': ('pip._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
|
404 |
+
'ResourceLexer': ('pip._vendor.pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
|
405 |
+
'RexxLexer': ('pip._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
|
406 |
+
'RhtmlLexer': ('pip._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
|
407 |
+
'RideLexer': ('pip._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
|
408 |
+
'RitaLexer': ('pip._vendor.pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)),
|
409 |
+
'RoboconfGraphLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
|
410 |
+
'RoboconfInstancesLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
|
411 |
+
'RobotFrameworkLexer': ('pip._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
|
412 |
+
'RqlLexer': ('pip._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
|
413 |
+
'RslLexer': ('pip._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
|
414 |
+
'RstLexer': ('pip._vendor.pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
|
415 |
+
'RtsLexer': ('pip._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
|
416 |
+
'RubyConsoleLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
|
417 |
+
'RubyLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')),
|
418 |
+
'RustLexer': ('pip._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
|
419 |
+
'SASLexer': ('pip._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
|
420 |
+
'SLexer': ('pip._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
|
421 |
+
'SMLLexer': ('pip._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
|
422 |
+
'SarlLexer': ('pip._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
|
423 |
+
'SassLexer': ('pip._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
|
424 |
+
'SaviLexer': ('pip._vendor.pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
|
425 |
+
'ScalaLexer': ('pip._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
|
426 |
+
'ScamlLexer': ('pip._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
|
427 |
+
'ScdocLexer': ('pip._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
|
428 |
+
'SchemeLexer': ('pip._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
|
429 |
+
'ScilabLexer': ('pip._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
|
430 |
+
'ScssLexer': ('pip._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
|
431 |
+
'SedLexer': ('pip._vendor.pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)),
|
432 |
+
'ShExCLexer': ('pip._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
|
433 |
+
'ShenLexer': ('pip._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
|
434 |
+
'SieveLexer': ('pip._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
|
435 |
+
'SilverLexer': ('pip._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
|
436 |
+
'SingularityLexer': ('pip._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
|
437 |
+
'SlashLexer': ('pip._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
|
438 |
+
'SlimLexer': ('pip._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
|
439 |
+
'SlurmBashLexer': ('pip._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
|
440 |
+
'SmaliLexer': ('pip._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
|
441 |
+
'SmalltalkLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
|
442 |
+
'SmartGameFormatLexer': ('pip._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
|
443 |
+
'SmartyLexer': ('pip._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
|
444 |
+
'SmithyLexer': ('pip._vendor.pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
|
445 |
+
'SnobolLexer': ('pip._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
|
446 |
+
'SnowballLexer': ('pip._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
|
447 |
+
'SolidityLexer': ('pip._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
|
448 |
+
'SophiaLexer': ('pip._vendor.pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
|
449 |
+
'SourcePawnLexer': ('pip._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
|
450 |
+
'SourcesListLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
|
451 |
+
'SparqlLexer': ('pip._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
|
452 |
+
'SpiceLexer': ('pip._vendor.pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)),
|
453 |
+
'SqlLexer': ('pip._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
|
454 |
+
'SqliteConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
|
455 |
+
'SquidConfLexer': ('pip._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
|
456 |
+
'SrcinfoLexer': ('pip._vendor.pygments.lexers.srcinfo', 'Srcinfo', ('srcinfo',), ('.SRCINFO',), ()),
|
457 |
+
'SspLexer': ('pip._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
|
458 |
+
'StanLexer': ('pip._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
|
459 |
+
'StataLexer': ('pip._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
|
460 |
+
'SuperColliderLexer': ('pip._vendor.pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
|
461 |
+
'SwiftLexer': ('pip._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
|
462 |
+
'SwigLexer': ('pip._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
|
463 |
+
'SystemVerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
|
464 |
+
'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
|
465 |
+
'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
|
466 |
+
'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()),
|
467 |
+
'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
|
468 |
+
'TasmLexer': ('pip._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
|
469 |
+
'TclLexer': ('pip._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
|
470 |
+
'TcshLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
|
471 |
+
'TcshSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
|
472 |
+
'TeaTemplateLexer': ('pip._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
|
473 |
+
'TealLexer': ('pip._vendor.pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
|
474 |
+
'TeraTermLexer': ('pip._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
|
475 |
+
'TermcapLexer': ('pip._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
|
476 |
+
'TerminfoLexer': ('pip._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
|
477 |
+
'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
|
478 |
+
'TexLexer': ('pip._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
|
479 |
+
'TextLexer': ('pip._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
|
480 |
+
'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
|
481 |
+
'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
|
482 |
+
'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
|
483 |
+
'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
|
484 |
+
'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
|
485 |
+
'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
|
486 |
+
'TurtleLexer': ('pip._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
|
487 |
+
'TwigHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
|
488 |
+
'TwigLexer': ('pip._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
|
489 |
+
'TypeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
|
490 |
+
'TypoScriptCssDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
|
491 |
+
'TypoScriptHtmlDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
|
492 |
+
'TypoScriptLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
|
493 |
+
'UcodeLexer': ('pip._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
|
494 |
+
'UniconLexer': ('pip._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
|
495 |
+
'UrbiscriptLexer': ('pip._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
|
496 |
+
'UsdLexer': ('pip._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
|
497 |
+
'VBScriptLexer': ('pip._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
|
498 |
+
'VCLLexer': ('pip._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
|
499 |
+
'VCLSnippetLexer': ('pip._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
|
500 |
+
'VCTreeStatusLexer': ('pip._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
|
501 |
+
'VGLLexer': ('pip._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
|
502 |
+
'ValaLexer': ('pip._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
|
503 |
+
'VbNetAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
|
504 |
+
'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
|
505 |
+
'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
|
506 |
+
'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
|
507 |
+
'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
|
508 |
+
'VerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
|
509 |
+
'VhdlLexer': ('pip._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
|
510 |
+
'VimLexer': ('pip._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
|
511 |
+
'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
|
512 |
+
'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
|
513 |
+
'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
|
514 |
+
'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
|
515 |
+
'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
|
516 |
+
'XQueryLexer': ('pip._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
|
517 |
+
'XmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
|
518 |
+
'XmlErbLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
|
519 |
+
'XmlLexer': ('pip._vendor.pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
|
520 |
+
'XmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
|
521 |
+
'XmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
|
522 |
+
'XorgLexer': ('pip._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
|
523 |
+
'XsltLexer': ('pip._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
|
524 |
+
'XtendLexer': ('pip._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
|
525 |
+
'XtlangLexer': ('pip._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
|
526 |
+
'YamlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
|
527 |
+
'YamlLexer': ('pip._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
|
528 |
+
'YangLexer': ('pip._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
|
529 |
+
'ZeekLexer': ('pip._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
|
530 |
+
'ZephirLexer': ('pip._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
|
531 |
+
'ZigLexer': ('pip._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
|
532 |
+
'apdlexer': ('pip._vendor.pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
|
533 |
+
}
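For context: each LEXERS value is a (module, display name, aliases, filenames, mimetypes) tuple, and pygments resolves an alias to a class by importing the owning module only on first use. A minimal sketch of that lazy lookup, assuming the LEXERS dict above is in scope (find_lexer_by_alias is an illustrative helper, not part of the vendored file):

import importlib

def find_lexer_by_alias(alias):
    # Scan the metadata tuples; import the owning module on demand.
    for classname, (module, _name, aliases, _files, _mimes) in LEXERS.items():
        if alias in aliases:
            return getattr(importlib.import_module(module), classname)
    raise ValueError('no lexer found for alias %r' % alias)

# e.g. find_lexer_by_alias('python') resolves PythonLexer without
# importing any of the other lexer modules.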

if __name__ == '__main__':  # pragma: no cover
    import sys
    import os

    # lookup lexers
    found_lexers = []
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
    for root, dirs, files in os.walk('.'):
        for filename in files:
            if filename.endswith('.py') and not filename.startswith('_'):
                module_name = 'pygments.lexers%s.%s' % (
                    root[1:].replace('/', '.'), filename[:-3])
                print(module_name)
                module = __import__(module_name, None, None, [''])
                for lexer_name in module.__all__:
                    lexer = getattr(module, lexer_name)
                    found_lexers.append(
                        '%r: %r' % (lexer_name,
                                    (module_name,
                                     lexer.name,
                                     tuple(lexer.aliases),
                                     tuple(lexer.filenames),
                                     tuple(lexer.mimetypes))))
    # sort them to make the diff minimal
    found_lexers.sort()

    # extract useful sourcecode from this file
    with open(__file__) as fp:
        content = fp.read()
    # replace CRLF with LF for Windows.
    #
    # Note that, originally, contributors should keep the EOL style of the
    # master repository, for example by using some kind of automatic EOL
    # management, like `EolExtension
    # <https://www.mercurial-scm.org/wiki/EolExtension>`.
    content = content.replace("\r\n", "\n")
    header = content[:content.find('LEXERS = {')]
    footer = content[content.find("if __name__ == '__main__':"):]

    # write new file
    with open(__file__, 'w') as fp:
        fp.write(header)
        fp.write('LEXERS = {\n    %s,\n}\n\n' % ',\n    '.join(found_lexers))
        fp.write(footer)

    print('=== %d lexers processed.' % len(found_lexers))

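The __main__ block above is what keeps this table generated rather than hand-edited: it relies on repr() of the metadata tuple being valid Python source, so the '%r: %r' lines can be spliced back between the kept header and footer. A small round-trip sketch of that trick (values borrowed from the table above; illustrative only):

entry = ('pip._vendor.pygments.lexers.python', 'Python',
         ('python', 'py'), ('*.py',), ('text/x-python',))
line = '%r: %r' % ('PythonLexer', entry)
# repr() output is itself parseable source, so the rewritten file round-trips:
assert eval(line.split(': ', 1)[1]) == entry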
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/lexers/python.py
ADDED
@@ -0,0 +1,1188 @@
"""
    pygments.lexers.python
    ~~~~~~~~~~~~~~~~~~~~~~

    Lexers for Python and related languages.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
import keyword

from pip._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
    default, words, combined, do_insertions, this
from pip._vendor.pygments.util import get_bool_opt, shebang_matches
from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Generic, Other, Error
from pip._vendor.pygments import unistring as uni

__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
           'Python2Lexer', 'Python2TracebackLexer',
           'CythonLexer', 'DgLexer', 'NumPyLexer']

line_re = re.compile('.*?\n')


class PythonLexer(RegexLexer):
    """
    For `Python <http://www.python.org>`_ source code (version 3.x).

    .. versionadded:: 0.10

    .. versionchanged:: 2.5
       This is now the default ``PythonLexer``. It is still available as the
       alias ``Python3Lexer``.
    """

    name = 'Python'
    aliases = ['python', 'py', 'sage', 'python3', 'py3']
    filenames = [
        '*.py',
        '*.pyw',
        # Jython
        '*.jy',
        # Sage
        '*.sage',
        # SCons
        '*.sc',
        'SConstruct',
        'SConscript',
        # Skylark/Starlark (used by Bazel, Buck, and Pants)
        '*.bzl',
        'BUCK',
        'BUILD',
        'BUILD.bazel',
        'WORKSPACE',
        # Twisted Application infrastructure
        '*.tac',
    ]
    mimetypes = ['text/x-python', 'application/x-python',
                 'text/x-python3', 'application/x-python3']

    flags = re.MULTILINE | re.UNICODE

    uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)

    def innerstring_rules(ttype):
        return [
            # the old style '%s' % (...) string formatting (still valid in Py3)
            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
             '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
            # the new style '{}'.format(...) string formatting
            (r'\{'
             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
             r'(\![sra])?'                       # conversion
             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
             r'\}', String.Interpol),

            # backslashes, quotes and formatting signs must be parsed one at a time
            (r'[^\\\'"%{\n]+', ttype),
            (r'[\'"\\]', ttype),
            # unhandled string formatting sign
            (r'%|(\{{1,2})', ttype)
            # newlines are an error (use "nl" state)
        ]

    def fstring_rules(ttype):
        return [
            # Assuming that a '}' is the closing brace after format specifier.
            # Sadly, this means that we won't detect syntax error. But it's
            # more important to parse correct syntax correctly, than to
            # highlight invalid syntax.
            (r'\}', String.Interpol),
            (r'\{', String.Interpol, 'expr-inside-fstring'),
            # backslashes, quotes and formatting signs must be parsed one at a time
            (r'[^\\\'"{}\n]+', ttype),
            (r'[\'"\\]', ttype),
            # newlines are an error (use "nl" state)
        ]

    tokens = {
        'root': [
            (r'\n', Text),
            (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
             bygroups(Text, String.Affix, String.Doc)),
            (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
             bygroups(Text, String.Affix, String.Doc)),
            (r'\A#!.+$', Comment.Hashbang),
            (r'#.*$', Comment.Single),
            (r'\\\n', Text),
            (r'\\', Text),
            include('keywords'),
            include('soft-keywords'),
            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
             'fromimport'),
            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
             'import'),
            include('expr'),
        ],
        'expr': [
            # raw f-strings
            ('(?i)(rf|fr)(""")',
             bygroups(String.Affix, String.Double),
             combined('rfstringescape', 'tdqf')),
            ("(?i)(rf|fr)(''')",
             bygroups(String.Affix, String.Single),
             combined('rfstringescape', 'tsqf')),
            ('(?i)(rf|fr)(")',
             bygroups(String.Affix, String.Double),
             combined('rfstringescape', 'dqf')),
            ("(?i)(rf|fr)(')",
             bygroups(String.Affix, String.Single),
             combined('rfstringescape', 'sqf')),
            # non-raw f-strings
            ('([fF])(""")', bygroups(String.Affix, String.Double),
             combined('fstringescape', 'tdqf')),
            ("([fF])(''')", bygroups(String.Affix, String.Single),
             combined('fstringescape', 'tsqf')),
            ('([fF])(")', bygroups(String.Affix, String.Double),
             combined('fstringescape', 'dqf')),
            ("([fF])(')", bygroups(String.Affix, String.Single),
             combined('fstringescape', 'sqf')),
            # raw strings
            ('(?i)(rb|br|r)(""")',
             bygroups(String.Affix, String.Double), 'tdqs'),
            ("(?i)(rb|br|r)(''')",
             bygroups(String.Affix, String.Single), 'tsqs'),
            ('(?i)(rb|br|r)(")',
             bygroups(String.Affix, String.Double), 'dqs'),
            ("(?i)(rb|br|r)(')",
             bygroups(String.Affix, String.Single), 'sqs'),
            # non-raw strings
            ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
             combined('stringescape', 'tdqs')),
            ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
             combined('stringescape', 'tsqs')),
            ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
             combined('stringescape', 'dqs')),
            ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
             combined('stringescape', 'sqs')),
            (r'[^\S\n]+', Text),
            include('numbers'),
            (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
            (r'[]{}:(),;[]', Punctuation),
            (r'(in|is|and|or|not)\b', Operator.Word),
            include('expr-keywords'),
            include('builtins'),
            include('magicfuncs'),
            include('magicvars'),
            include('name'),
        ],
        'expr-inside-fstring': [
            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
            # without format specifier
            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
             r'(\![sraf])?'     # conversion
             r'\}', String.Interpol, '#pop'),
            # with format specifier
            # we'll catch the remaining '}' in the outer scope
            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
             r'(\![sraf])?'     # conversion
             r':', String.Interpol, '#pop'),
            (r'\s+', Text),  # allow new lines
            include('expr'),
        ],
        'expr-inside-fstring-inner': [
            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
            (r'[])}]', Punctuation, '#pop'),
            (r'\s+', Text),  # allow new lines
            include('expr'),
        ],
        'expr-keywords': [
            # Based on https://docs.python.org/3/reference/expressions.html
            (words((
                'async for', 'await', 'else', 'for', 'if', 'lambda',
                'yield', 'yield from'), suffix=r'\b'),
             Keyword),
            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
        ],
        'keywords': [
            (words((
                'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
                'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
                'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
                'yield from', 'as', 'with'), suffix=r'\b'),
             Keyword),
            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
        ],
        'soft-keywords': [
            # `match`, `case` and `_` soft keywords
            (r'(^[ \t]*)'              # at beginning of line + possible indentation
             r'(match|case)\b'         # a possible keyword
             r'(?![ \t]*(?:'           # not followed by...
             r'[:,;=^&|@~)\]}]|(?:' +  # characters and keywords that mean this isn't
             r'|'.join(keyword.kwlist) + r')\b))',  # pattern matching
             bygroups(Text, Keyword), 'soft-keywords-inner'),
        ],
        'soft-keywords-inner': [
            # optional `_` keyword
            (r'(\s+)([^\n_]*)(_\b)', bygroups(Text, using(this), Keyword)),
            default('#pop')
        ],
        'builtins': [
            (words((
                '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
                'breakpoint', 'bytes', 'chr', 'classmethod', 'compile', 'complex',
                'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
                'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
                'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
                'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
                'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
                'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
                'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
                'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Builtin),
            (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
            (words((
                'ArithmeticError', 'AssertionError', 'AttributeError',
                'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
                'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
                'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
                'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
                'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
                'NotImplementedError', 'OSError', 'OverflowError',
                'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
                'RuntimeError', 'RuntimeWarning', 'StopIteration',
                'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
                'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
                'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
                'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
                'Warning', 'WindowsError', 'ZeroDivisionError',
                # new builtin exceptions from PEP 3151
                'BlockingIOError', 'ChildProcessError', 'ConnectionError',
                'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
                'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
                'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
                'PermissionError', 'ProcessLookupError', 'TimeoutError',
                # others new in Python 3
                'StopAsyncIteration', 'ModuleNotFoundError', 'RecursionError'),
                prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Exception),
        ],
        'magicfuncs': [
            (words((
                '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
                '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
                '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
                '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
                '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
                '__ge__', '__get__', '__getattr__', '__getattribute__',
                '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
                '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
                '__imul__', '__index__', '__init__', '__instancecheck__',
                '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
                '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
                '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
                '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
                '__new__', '__next__', '__or__', '__pos__', '__pow__',
                '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
                '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
                '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
                '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
                '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
                '__sub__', '__subclasscheck__', '__truediv__',
                '__xor__'), suffix=r'\b'),
             Name.Function.Magic),
        ],
        'magicvars': [
            (words((
                '__annotations__', '__bases__', '__class__', '__closure__',
                '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
                '__func__', '__globals__', '__kwdefaults__', '__module__',
                '__mro__', '__name__', '__objclass__', '__qualname__',
                '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
             Name.Variable.Magic),
        ],
        'numbers': [
            (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
             r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
            (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
            (r'0[oO](?:_?[0-7])+', Number.Oct),
            (r'0[bB](?:_?[01])+', Number.Bin),
            (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
            (r'\d(?:_?\d)*', Number.Integer),
        ],
        'name': [
            (r'@' + uni_name, Name.Decorator),
            (r'@', Operator),  # new matrix multiplication operator
            (uni_name, Name),
        ],
        'funcname': [
            include('magicfuncs'),
            (uni_name, Name.Function, '#pop'),
            default('#pop'),
        ],
        'classname': [
            (uni_name, Name.Class, '#pop'),
        ],
        'import': [
            (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
            (r'\.', Name.Namespace),
            (uni_name, Name.Namespace),
            (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
            default('#pop')  # all else: go back
        ],
        'fromimport': [
            (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
            (r'\.', Name.Namespace),
            # if None occurs here, it's "raise x from None", since None can
            # never be a module name
            (r'None\b', Name.Builtin.Pseudo, '#pop'),
            (uni_name, Name.Namespace),
            default('#pop'),
        ],
        'rfstringescape': [
            (r'\{\{', String.Escape),
            (r'\}\}', String.Escape),
        ],
        'fstringescape': [
            include('rfstringescape'),
            include('stringescape'),
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'fstrings-single': fstring_rules(String.Single),
        'fstrings-double': fstring_rules(String.Double),
        'strings-single': innerstring_rules(String.Single),
        'strings-double': innerstring_rules(String.Double),
        'dqf': [
            (r'"', String.Double, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
            include('fstrings-double')
        ],
        'sqf': [
            (r"'", String.Single, '#pop'),
            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
            include('fstrings-single')
        ],
        'dqs': [
            (r'"', String.Double, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
            include('strings-double')
        ],
        'sqs': [
            (r"'", String.Single, '#pop'),
            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
            include('strings-single')
        ],
        'tdqf': [
            (r'"""', String.Double, '#pop'),
            include('fstrings-double'),
            (r'\n', String.Double)
        ],
        'tsqf': [
            (r"'''", String.Single, '#pop'),
            include('fstrings-single'),
            (r'\n', String.Single)
        ],
        'tdqs': [
            (r'"""', String.Double, '#pop'),
            include('strings-double'),
            (r'\n', String.Double)
        ],
        'tsqs': [
            (r"'''", String.Single, '#pop'),
            include('strings-single'),
            (r'\n', String.Single)
        ],
    }

    def analyse_text(text):
        return shebang_matches(text, r'pythonw?(3(\.\d)?)?') or \
            'import ' in text[:1000]


Python3Lexer = PythonLexer

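For orientation, a minimal usage sketch, not taken from the diff: it assumes the stand-alone pygments distribution is installed (this vendored copy under pip._vendor exposes the same lexer API but is intended for pip's internal use).

# Usage sketch (assumption: stand-alone pygments installed).
from pygments.lexers.python import PythonLexer

code = 'def greet(name):\n    return f"hello {name}"\n'
for index, token, value in PythonLexer().get_tokens_unprocessed(code):
    # index is the character offset, token the token type, value the text
    print(index, token, repr(value))
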
class Python2Lexer(RegexLexer):
    """
    For `Python 2.x <http://www.python.org>`_ source code.

    .. versionchanged:: 2.5
       This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
       refers to the Python 3 variant. File name patterns like ``*.py`` have
       been moved to Python 3 as well.
    """

    name = 'Python 2.x'
    aliases = ['python2', 'py2']
    filenames = []  # now taken over by PythonLexer (3.x)
    mimetypes = ['text/x-python2', 'application/x-python2']

    def innerstring_rules(ttype):
        return [
            # the old style '%s' % (...) string formatting
            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
             '[hlL]?[E-GXc-giorsux%]', String.Interpol),
            # backslashes, quotes and formatting signs must be parsed one at a time
            (r'[^\\\'"%\n]+', ttype),
            (r'[\'"\\]', ttype),
            # unhandled string formatting sign
            (r'%', ttype),
            # newlines are an error (use "nl" state)
        ]

    tokens = {
        'root': [
            (r'\n', Text),
            (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
             bygroups(Text, String.Affix, String.Doc)),
            (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
             bygroups(Text, String.Affix, String.Doc)),
            (r'[^\S\n]+', Text),
            (r'\A#!.+$', Comment.Hashbang),
            (r'#.*$', Comment.Single),
            (r'[]{}:(),;[]', Punctuation),
            (r'\\\n', Text),
            (r'\\', Text),
            (r'(in|is|and|or|not)\b', Operator.Word),
            (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
            include('keywords'),
            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
             'fromimport'),
            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
             'import'),
            include('builtins'),
            include('magicfuncs'),
            include('magicvars'),
            include('backtick'),
            ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
             bygroups(String.Affix, String.Double), 'tdqs'),
            ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
             bygroups(String.Affix, String.Single), 'tsqs'),
            ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
             bygroups(String.Affix, String.Double), 'dqs'),
            ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
             bygroups(String.Affix, String.Single), 'sqs'),
            ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
             combined('stringescape', 'tdqs')),
            ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
             combined('stringescape', 'tsqs')),
            ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
             combined('stringescape', 'dqs')),
            ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
             combined('stringescape', 'sqs')),
            include('name'),
            include('numbers'),
        ],
        'keywords': [
            (words((
                'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
                'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
                'print', 'raise', 'return', 'try', 'while', 'yield',
                'yield from', 'as', 'with'), suffix=r'\b'),
             Keyword),
        ],
        'builtins': [
            (words((
                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
                'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
                'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
                'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
                'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
                'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
                'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
                'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
                'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
                'unichr', 'unicode', 'vars', 'xrange', 'zip'),
                prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Builtin),
            (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls'
             r')\b', Name.Builtin.Pseudo),
            (words((
                'ArithmeticError', 'AssertionError', 'AttributeError',
                'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
                'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
                'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
                'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
                'MemoryError', 'NameError',
                'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
                'PendingDeprecationWarning', 'ReferenceError',
                'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
                'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
                'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
                'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
                'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
                'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Exception),
        ],
        'magicfuncs': [
            (words((
                '__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',
                '__complex__', '__contains__', '__del__', '__delattr__', '__delete__',
                '__delitem__', '__delslice__', '__div__', '__divmod__', '__enter__',
                '__eq__', '__exit__', '__float__', '__floordiv__', '__ge__', '__get__',
                '__getattr__', '__getattribute__', '__getitem__', '__getslice__', '__gt__',
                '__hash__', '__hex__', '__iadd__', '__iand__', '__idiv__', '__ifloordiv__',
                '__ilshift__', '__imod__', '__imul__', '__index__', '__init__',
                '__instancecheck__', '__int__', '__invert__', '__iop__', '__ior__',
                '__ipow__', '__irshift__', '__isub__', '__iter__', '__itruediv__',
                '__ixor__', '__le__', '__len__', '__long__', '__lshift__', '__lt__',
                '__missing__', '__mod__', '__mul__', '__ne__', '__neg__', '__new__',
                '__nonzero__', '__oct__', '__op__', '__or__', '__pos__', '__pow__',
                '__radd__', '__rand__', '__rcmp__', '__rdiv__', '__rdivmod__', '__repr__',
                '__reversed__', '__rfloordiv__', '__rlshift__', '__rmod__', '__rmul__',
                '__rop__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__',
                '__rtruediv__', '__rxor__', '__set__', '__setattr__', '__setitem__',
                '__setslice__', '__str__', '__sub__', '__subclasscheck__', '__truediv__',
                '__unicode__', '__xor__'), suffix=r'\b'),
             Name.Function.Magic),
        ],
        'magicvars': [
            (words((
                '__bases__', '__class__', '__closure__', '__code__', '__defaults__',
                '__dict__', '__doc__', '__file__', '__func__', '__globals__',
                '__metaclass__', '__module__', '__mro__', '__name__', '__self__',
                '__slots__', '__weakref__'),
                suffix=r'\b'),
             Name.Variable.Magic),
        ],
        'numbers': [
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
            (r'0[0-7]+j?', Number.Oct),
            (r'0[bB][01]+', Number.Bin),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            (r'\d+L', Number.Integer.Long),
            (r'\d+j?', Number.Integer)
        ],
        'backtick': [
            ('`.*?`', String.Backtick),
        ],
        'name': [
            (r'@[\w.]+', Name.Decorator),
            (r'[a-zA-Z_]\w*', Name),
        ],
        'funcname': [
            include('magicfuncs'),
            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
            default('#pop'),
        ],
        'classname': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'(?:[ \t]|\\\n)+', Text),
            (r'as\b', Keyword.Namespace),
            (r',', Operator),
            (r'[a-zA-Z_][\w.]*', Name.Namespace),
            default('#pop')  # all else: go back
        ],
        'fromimport': [
            (r'(?:[ \t]|\\\n)+', Text),
            (r'import\b', Keyword.Namespace, '#pop'),
            # if None occurs here, it's "raise x from None", since None can
            # never be a module name
            (r'None\b', Name.Builtin.Pseudo, '#pop'),
            # sadly, in "raise x from y" y will be highlighted as namespace too
            (r'[a-zA-Z_.][\w.]*', Name.Namespace),
            # anything else here also means "raise x from y" and is therefore
            # not an error
            default('#pop'),
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'strings-single': innerstring_rules(String.Single),
        'strings-double': innerstring_rules(String.Double),
        'dqs': [
            (r'"', String.Double, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
            include('strings-double')
        ],
        'sqs': [
            (r"'", String.Single, '#pop'),
            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
            include('strings-single')
        ],
        'tdqs': [
            (r'"""', String.Double, '#pop'),
            include('strings-double'),
            (r'\n', String.Double)
        ],
        'tsqs': [
            (r"'''", String.Single, '#pop'),
            include('strings-single'),
            (r'\n', String.Single)
        ],
    }

    def analyse_text(text):
        return shebang_matches(text, r'pythonw?2(\.\d)?')

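Since Python2Lexer now owns no filename patterns, it is reached through its aliases. A sketch, again assuming the stand-alone pygments package:

# Sketch: Python 2 highlighting must be requested explicitly by alias.
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('py2')   # or 'python2'
print(lexer.name)                  # -> 'Python 2.x'
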
class PythonConsoleLexer(Lexer):
    """
    For Python console output or doctests, such as:

    .. sourcecode:: pycon

        >>> a = 'foo'
        >>> print a
        foo
        >>> 1 / 0
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ZeroDivisionError: integer division or modulo by zero

    Additional options:

    `python3`
        Use Python 3 lexer for code. Default is ``True``.

        .. versionadded:: 1.0
        .. versionchanged:: 2.5
           Now defaults to ``True``.
    """
    name = 'Python console session'
    aliases = ['pycon']
    mimetypes = ['text/x-python-doctest']

    def __init__(self, **options):
        self.python3 = get_bool_opt(options, 'python3', True)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        if self.python3:
            pylexer = PythonLexer(**self.options)
            tblexer = PythonTracebackLexer(**self.options)
        else:
            pylexer = Python2Lexer(**self.options)
            tblexer = Python2TracebackLexer(**self.options)

        curcode = ''
        insertions = []
        curtb = ''
        tbindex = 0
        tb = 0
        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith('>>> ') or line.startswith('... '):
                tb = 0
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:4])]))
                curcode += line[4:]
            elif line.rstrip() == '...' and not tb:
                # only a new >>> prompt can end an exception block
                # otherwise an ellipsis in place of the traceback frames
                # will be mishandled
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, '...')]))
                curcode += line[3:]
            else:
                if curcode:
                    yield from do_insertions(
                        insertions, pylexer.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                if (line.startswith('Traceback (most recent call last):') or
                        re.match('  File "[^"]+", line \\d+\\n$', line)):
                    tb = 1
                    curtb = line
                    tbindex = match.start()
                elif line == 'KeyboardInterrupt\n':
                    yield match.start(), Name.Class, line
                elif tb:
                    curtb += line
                    if not (line.startswith(' ') or line.strip() == '...'):
                        tb = 0
                        for i, t, v in tblexer.get_tokens_unprocessed(curtb):
                            yield tbindex+i, t, v
                        curtb = ''
                else:
                    yield match.start(), Generic.Output, line
        if curcode:
            yield from do_insertions(insertions,
                                     pylexer.get_tokens_unprocessed(curcode))
        if curtb:
            for i, t, v in tblexer.get_tokens_unprocessed(curtb):
                yield tbindex+i, t, v

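A sketch of the console lexer in action (assumes the stand-alone pygments package): it peels off the '>>> '/'... ' prompts as Generic.Prompt, lexes the remaining code with the Python lexer, and hands traceback blocks to the matching traceback lexer.

# Sketch: tokenizing a doctest-style session with the 'pycon' alias.
from pygments.lexers import get_lexer_by_name

session = (
    '>>> 1 / 0\n'
    'Traceback (most recent call last):\n'
    '  File "<stdin>", line 1, in <module>\n'
    'ZeroDivisionError: division by zero\n'
)
lexer = get_lexer_by_name('pycon')   # python3=True is the default
for index, token, value in lexer.get_tokens_unprocessed(session):
    print(index, token, repr(value))
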
class PythonTracebackLexer(RegexLexer):
    """
    For Python 3.x tracebacks, with support for chained exceptions.

    .. versionadded:: 1.0

    .. versionchanged:: 2.5
       This is now the default ``PythonTracebackLexer``. It is still available
       as the alias ``Python3TracebackLexer``.
    """

    name = 'Python Traceback'
    aliases = ['pytb', 'py3tb']
    filenames = ['*.pytb', '*.py3tb']
    mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']

    tokens = {
        'root': [
            (r'\n', Text),
            (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
            (r'^During handling of the above exception, another '
             r'exception occurred:\n\n', Generic.Traceback),
            (r'^The above exception was the direct cause of the '
             r'following exception:\n\n', Generic.Traceback),
            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
            (r'^.*\n', Other),
        ],
        'intb': [
            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
             bygroups(Text, Name.Builtin, Text, Number, Text)),
            (r'^(    )(.+)(\n)',
             bygroups(Text, using(PythonLexer), Text), 'markers'),
            (r'^([ \t]*)(\.\.\.)(\n)',
             bygroups(Text, Comment, Text)),  # for doctests...
            (r'^([^:]+)(: )(.+)(\n)',
             bygroups(Generic.Error, Text, Name, Text), '#pop'),
            (r'^([a-zA-Z_][\w.]*)(:?\n)',
             bygroups(Generic.Error, Text), '#pop')
        ],
        'markers': [
            # Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
            # error locations in Python 3.11+, or single-caret markers
            # for syntax errors before that.
            (r'^( {4,})([~^]+)(\n)',
             bygroups(Text, Punctuation.Marker, Text),
             '#pop'),
            default('#pop'),
        ],
    }


Python3TracebackLexer = PythonTracebackLexer

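One common use is colorizing a traceback captured at runtime. A sketch under the same assumption (stand-alone pygments installed):

# Sketch: highlighting a captured traceback with the 'pytb' alias.
import traceback

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

try:
    {}['missing']                      # deliberately raise KeyError
except KeyError:
    tb_text = traceback.format_exc()

print(highlight(tb_text, get_lexer_by_name('pytb'), TerminalFormatter()))
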
class Python2TracebackLexer(RegexLexer):
    """
    For Python tracebacks.

    .. versionadded:: 0.7

    .. versionchanged:: 2.5
       This class has been renamed from ``PythonTracebackLexer``.
       ``PythonTracebackLexer`` now refers to the Python 3 variant.
    """

    name = 'Python 2.x Traceback'
    aliases = ['py2tb']
    filenames = ['*.py2tb']
    mimetypes = ['text/x-python2-traceback']

    tokens = {
        'root': [
            # Cover both (most recent call last) and (innermost last)
            # The optional ^C allows us to catch keyboard interrupt signals.
            (r'^(\^C)?(Traceback.*\n)',
             bygroups(Text, Generic.Traceback), 'intb'),
            # SyntaxError starts with this.
            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
            (r'^.*\n', Other),
        ],
        'intb': [
            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
             bygroups(Text, Name.Builtin, Text, Number, Text)),
            (r'^(    )(.+)(\n)',
             bygroups(Text, using(Python2Lexer), Text), 'marker'),
            (r'^([ \t]*)(\.\.\.)(\n)',
             bygroups(Text, Comment, Text)),  # for doctests...
            (r'^([^:]+)(: )(.+)(\n)',
             bygroups(Generic.Error, Text, Name, Text), '#pop'),
            (r'^([a-zA-Z_]\w*)(:?\n)',
             bygroups(Generic.Error, Text), '#pop')
        ],
        'marker': [
            # For syntax errors.
            (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
            default('#pop'),
        ],
    }

class CythonLexer(RegexLexer):
    """
    For Pyrex and `Cython <http://cython.org>`_ source code.

    .. versionadded:: 1.1
    """

    name = 'Cython'
    aliases = ['cython', 'pyx', 'pyrex']
    filenames = ['*.pyx', '*.pxd', '*.pxi']
    mimetypes = ['text/x-cython', 'application/x-cython']

    tokens = {
        'root': [
            (r'\n', Text),
            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
            (r'[^\S\n]+', Text),
            (r'#.*$', Comment),
            (r'[]{}:(),;[]', Punctuation),
            (r'\\\n', Text),
            (r'\\', Text),
            (r'(in|is|and|or|not)\b', Operator.Word),
            (r'(<)([a-zA-Z0-9.?]+)(>)',
             bygroups(Punctuation, Keyword.Type, Punctuation)),
            (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
            (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
             bygroups(Keyword, Number.Integer, Operator, Name, Operator,
                      Name, Punctuation)),
            include('keywords'),
            (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
            (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
            # (should actually start a block with only cdefs)
            (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
            (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
            (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
            (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
            include('builtins'),
            include('backtick'),
            ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
            ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
            ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
            ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
            ('[uU]?"""', String, combined('stringescape', 'tdqs')),
            ("[uU]?'''", String, combined('stringescape', 'tsqs')),
            ('[uU]?"', String, combined('stringescape', 'dqs')),
            ("[uU]?'", String, combined('stringescape', 'sqs')),
            include('name'),
            include('numbers'),
        ],
        'keywords': [
            (words((
                'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
                'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
                'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
                'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
             Keyword),
            (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
        ],
        'builtins': [
            (words((
                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
                'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
                'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
                'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
                'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
                'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
                'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
                'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
                'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
                'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
                'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Builtin),
            (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
             r')\b', Name.Builtin.Pseudo),
            (words((
                'ArithmeticError', 'AssertionError', 'AttributeError',
                'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
                'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
                'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
                'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
                'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
                'OSError', 'OverflowError', 'OverflowWarning',
                'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
                'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
                'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
                'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
                'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
                'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
                'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
             Name.Exception),
        ],
        'numbers': [
            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
            (r'0\d+', Number.Oct),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            (r'\d+L', Number.Integer.Long),
            (r'\d+', Number.Integer)
        ],
        'backtick': [
            ('`.*?`', String.Backtick),
        ],
        'name': [
            (r'@\w+', Name.Decorator),
            (r'[a-zA-Z_]\w*', Name),
        ],
        'funcname': [
            (r'[a-zA-Z_]\w*', Name.Function, '#pop')
        ],
        'cdef': [
            (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
            (r'(struct|enum|union|class)\b', Keyword),
            (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
             bygroups(Name.Function, Text), '#pop'),
            (r'([a-zA-Z_]\w*)(\s*)(,)',
             bygroups(Name.Function, Text, Punctuation)),
            (r'from\b', Keyword, '#pop'),
            (r'as\b', Keyword),
            (r':', Punctuation, '#pop'),
            (r'(?=["\'])', Text, '#pop'),
            (r'[a-zA-Z_]\w*', Keyword.Type),
            (r'.', Text),
        ],
        'classname': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
            (r'[a-zA-Z_][\w.]*', Name.Namespace),
            (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
            default('#pop')  # all else: go back
        ],
        'fromimport': [
            (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
            (r'[a-zA-Z_.][\w.]*', Name.Namespace),
            # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
            default('#pop'),
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'strings': [
            (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
             '[hlL]?[E-GXc-giorsux%]', String.Interpol),
            (r'[^\\\'"%\n]+', String),
            # quotes, percents and backslashes must be parsed one at a time
            (r'[\'"\\]', String),
            # unhandled string formatting sign
            (r'%', String)
            # newlines are an error (use "nl" state)
        ],
        'nl': [
            (r'\n', String)
        ],
        'dqs': [
            (r'"', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),  # included here again for raw strings
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            (r"\\\\|\\'|\\\n", String.Escape),  # included here again for raw strings
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            include('strings'),
            include('nl')
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            include('strings'),
            include('nl')
        ],
    }

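Because CythonLexer registers the *.pyx/*.pxd/*.pxi patterns, the ordinary filename lookup resolves to it. A sketch (stand-alone pygments assumed):

# Sketch: filename-based lexer resolution picks CythonLexer for .pyx files.
from pygments.lexers import get_lexer_for_filename

lexer = get_lexer_for_filename('example.pyx')
print(lexer.name)   # -> 'Cython'
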
class DgLexer(RegexLexer):
    """
    Lexer for `dg <http://pyos.github.com/dg>`_,
    a functional and object-oriented programming language
    running on the CPython 3 VM.

    .. versionadded:: 1.6
    """
    name = 'dg'
    aliases = ['dg']
    filenames = ['*.dg']
    mimetypes = ['text/x-dg']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#.*?$', Comment.Single),

            (r'(?i)0b[01]+', Number.Bin),
            (r'(?i)0o[0-7]+', Number.Oct),
            (r'(?i)0x[0-9a-f]+', Number.Hex),
            (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
            (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
            (r'(?i)[+-]?[0-9]+j?', Number.Integer),

            (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
            (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
            (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
            (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),

            (r"`\w+'*`", Operator),
            (r'\b(and|in|is|or|where)\b', Operator.Word),
            (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),

            (words((
                'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
                'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
                'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str',
                'super', 'tuple', 'tuple\'', 'type'),
                prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
             Name.Builtin),
            (words((
                '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
                'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
                'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst',
                'getattr', 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init',
                'input', 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len',
                'locals', 'map', 'max', 'min', 'next', 'oct', 'open', 'ord', 'pow',
                'print', 'repr', 'reversed', 'round', 'setattr', 'scanl1?', 'snd',
                'sorted', 'sum', 'tail', 'take', 'takewhile', 'vars', 'zip'),
                prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
             Name.Builtin),
            (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
             Name.Builtin.Pseudo),

            (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
             Name.Exception),
            (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
             r"SystemExit)(?!['\w])", Name.Exception),

            (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
             r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),

            (r"[A-Z_]+'*(?!['\w])", Name),
            (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
            (r"\w+'*", Name),

            (r'[()]', Punctuation),
            (r'.', Error),
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'string': [
            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
             '[hlL]?[E-GXc-giorsux%]', String.Interpol),
            (r'[^\\\'"%\n]+', String),
            # quotes, percents and backslashes must be parsed one at a time
            (r'[\'"\\]', String),
            # unhandled string formatting sign
            (r'%', String),
            (r'\n', String)
        ],
        'dqs': [
            (r'"', String, '#pop')
        ],
        'sqs': [
            (r"'", String, '#pop')
        ],
        'tdqs': [
            (r'"""', String, '#pop')
        ],
        'tsqs': [
            (r"'''", String, '#pop')
        ],
    }

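The dg lexer is looked up like any other. A tiny sketch (stand-alone pygments assumed; the snippet is only meant to exercise tokenization):

# Sketch: tokenizing a dg snippet via the 'dg' alias.
from pygments.lexers import get_lexer_by_name

dg_code = "print 'hello world'\n"
for token, value in get_lexer_by_name('dg').get_tokens(dg_code):
    print(token, repr(value))
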
class NumPyLexer(PythonLexer):
    """
    A Python lexer recognizing Numerical Python builtins.

    .. versionadded:: 0.10
    """

    name = 'NumPy'
    aliases = ['numpy']

    # override the mimetypes to not inherit them from python
    mimetypes = []
    filenames = []

    EXTRA_KEYWORDS = {
        'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
        'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
        'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
        'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
        'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
        'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
        'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
        'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
        'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
        'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
        'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
        'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
        'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
        'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
        'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
        'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
        'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
        'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
        'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
        'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
        'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
        'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
        'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
        'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
        'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
        'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
        'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
        'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
        'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
        'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
        'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
        'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
        'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
        'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
        'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
        'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
        'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
        'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
        'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
        'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
        'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
        'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
        'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
        'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
        'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
        'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
        'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
        'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
        'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
        'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
        'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
        'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
        'set_numeric_ops', 'set_printoptions', 'set_string_function',
        'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
        'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
        'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
        'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
        'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
        'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
        'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
        'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
        'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
        'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
        'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
    }

    def get_tokens_unprocessed(self, text):
        for index, token, value in \
                PythonLexer.get_tokens_unprocessed(self, text):
            if token is Name and value in self.EXTRA_KEYWORDS:
                yield index, Keyword.Pseudo, value
            else:
                yield index, token, value

    def analyse_text(text):
        ltext = text[:1000]
        return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
                'import ' in ltext) \
            and ('import numpy' in ltext or 'from numpy import' in ltext)
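NumPyLexer simply re-tags known NumPy names from Name to Keyword.Pseudo while delegating everything else to PythonLexer. A sketch of that behavior (stand-alone pygments assumed):

# Sketch: names in EXTRA_KEYWORDS come back as Keyword.Pseudo.
from pygments.lexers import get_lexer_by_name
from pygments.token import Keyword

src = 'import numpy\nx = numpy.zeros(3)\n'
lexer = get_lexer_by_name('numpy')
print([v for t, v in lexer.get_tokens(src) if t is Keyword.Pseudo])
# expected: ['zeros']
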
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/modeline.py
ADDED
@@ -0,0 +1,43 @@
"""
    pygments.modeline
    ~~~~~~~~~~~~~~~~~

    A simple modeline parser (based on pymodeline).

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

__all__ = ['get_filetype_from_buffer']


modeline_re = re.compile(r'''
    (?: vi | vim | ex ) (?: [<=>]? \d* )? :
    .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)


def get_filetype_from_line(l):
    m = modeline_re.search(l)
    if m:
        return m.group(1)


def get_filetype_from_buffer(buf, max_lines=5):
    """
    Scan the buffer for modelines and return filetype if one is found.
    """
    lines = buf.splitlines()
    for l in lines[-1:-max_lines-1:-1]:
        ret = get_filetype_from_line(l)
        if ret:
            return ret
    for i in range(max_lines, -1, -1):
        if i < len(lines):
            ret = get_filetype_from_line(lines[i])
            if ret:
                return ret

    return None
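The parser scans the last (and then the first) max_lines lines of a buffer for a Vim-style modeline. A sketch (shown against the stand-alone pygments package; the vendored module is identical):

# Sketch: a trailing Vim modeline drives filetype detection.
from pygments.modeline import get_filetype_from_buffer

buf = '#!/bin/sh\necho hi\n# vim: set ft=sh :\n'
print(get_filetype_from_buffer(buf))   # -> 'sh'
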
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/plugin.py
ADDED
@@ -0,0 +1,69 @@
"""
    pygments.plugin
    ~~~~~~~~~~~~~~~

    Pygments setuptools plugin interface. The methods defined
    here also work if setuptools isn't installed but they just
    return nothing.

    lexer plugins::

        [pygments.lexers]
        yourlexer = yourmodule:YourLexer

    formatter plugins::

        [pygments.formatters]
        yourformatter = yourformatter:YourFormatter
        /.ext = yourformatter:YourFormatter

    As you can see, you can define extensions for the formatter
    with a leading slash.

    syntax plugins::

        [pygments.styles]
        yourstyle = yourstyle:YourStyle

    filter plugin::

        [pygments.filter]
        yourfilter = yourfilter:YourFilter


    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
STYLE_ENTRY_POINT = 'pygments.styles'
FILTER_ENTRY_POINT = 'pygments.filters'


def iter_entry_points(group_name):
    try:
        from pip._vendor import pkg_resources
    except (ImportError, OSError):
        return []

    return pkg_resources.iter_entry_points(group_name)


def find_plugin_lexers():
    for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
        yield entrypoint.load()


def find_plugin_formatters():
    for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT):
        yield entrypoint.name, entrypoint.load()


def find_plugin_styles():
    for entrypoint in iter_entry_points(STYLE_ENTRY_POINT):
        yield entrypoint.name, entrypoint.load()


def find_plugin_filters():
    for entrypoint in iter_entry_points(FILTER_ENTRY_POINT):
        yield entrypoint.name, entrypoint.load()
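A third-party package registers a lexer through the 'pygments.lexers' entry-point group that find_plugin_lexers() scans. A sketch with hypothetical package and class names (mypackage, MyLexer are placeholders, not from the diff):

# Sketch: setup.py of a hypothetical plugin package.
from setuptools import setup

setup(
    name='mypackage',                  # hypothetical
    entry_points={
        'pygments.lexers': [
            'mylexer = mypackage.lexers:MyLexer',   # hypothetical
        ],
    },
)
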
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/regexopt.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
    pygments.regexopt
    ~~~~~~~~~~~~~~~~~

    An algorithm that generates optimized regexes for matching long lists of
    literal strings.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
from re import escape
from os.path import commonprefix
from itertools import groupby
from operator import itemgetter

CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
FIRST_ELEMENT = itemgetter(0)


def make_charset(letters):
    return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'


def regex_opt_inner(strings, open_paren):
    """Return a regex that matches any string in the sorted list of strings."""
    close_paren = open_paren and ')' or ''
    # print strings, repr(open_paren)
    if not strings:
        # print '-> nothing left'
        return ''
    first = strings[0]
    if len(strings) == 1:
        # print '-> only 1 string'
        return open_paren + escape(first) + close_paren
    if not first:
        # print '-> first string empty'
        return open_paren + regex_opt_inner(strings[1:], '(?:') \
            + '?' + close_paren
    if len(first) == 1:
        # multiple one-char strings? make a charset
        oneletter = []
        rest = []
        for s in strings:
            if len(s) == 1:
                oneletter.append(s)
            else:
                rest.append(s)
        if len(oneletter) > 1:  # do we have more than one oneletter string?
            if rest:
                # print '-> 1-character + rest'
                return open_paren + regex_opt_inner(rest, '') + '|' \
                    + make_charset(oneletter) + close_paren
            # print '-> only 1-character'
            return open_paren + make_charset(oneletter) + close_paren
    prefix = commonprefix(strings)
    if prefix:
        plen = len(prefix)
        # we have a prefix for all strings
        # print '-> prefix:', prefix
        return open_paren + escape(prefix) \
            + regex_opt_inner([s[plen:] for s in strings], '(?:') \
            + close_paren
    # is there a suffix?
    strings_rev = [s[::-1] for s in strings]
    suffix = commonprefix(strings_rev)
    if suffix:
        slen = len(suffix)
        # print '-> suffix:', suffix[::-1]
        return open_paren \
            + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
            + escape(suffix[::-1]) + close_paren
    # recurse on common 1-string prefixes
    # print '-> last resort'
    return open_paren + \
        '|'.join(regex_opt_inner(list(group[1]), '')
                 for group in groupby(strings, lambda s: s[0] == first[0])) \
        + close_paren


def regex_opt(strings, prefix='', suffix=''):
    """Return a regex pattern string that matches any string in the given list.

    The strings to match must be literal strings, not regexes. They will be
    regex-escaped.

    *prefix* and *suffix* are pre- and appended to the final regex.
    """
    strings = sorted(strings)
    return prefix + regex_opt_inner(strings, '(') + suffix
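To illustrate the optimizer, a small sketch on an invented keyword list (the exact grouping in the output is an implementation detail):

import re
from pip._vendor.pygments.regexopt import regex_opt

pattern = regex_opt(['if', 'elif', 'else'], prefix=r'\b', suffix=r'\b')
print(pattern)                               # e.g. \b(el(?:if|se)|if)\b
print(bool(re.match(pattern, 'elif')))       # True
print(bool(re.match(pattern, 'elsewhere')))  # False -- the \b suffix rejects it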
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/scanner.py
ADDED
@@ -0,0 +1,104 @@
"""
    pygments.scanner
    ~~~~~~~~~~~~~~~~

    This library implements a regex based scanner. Some languages
    like Pascal are easy to parse but have some keywords that
    depend on the context. Because of this it's impossible to lex
    that just by using a regular expression lexer like the
    `RegexLexer`.

    Have a look at the `DelphiLexer` to get an idea of how to use
    this scanner.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
import re


class EndOfText(RuntimeError):
    """
    Raised if the end of text is reached and the user
    tried to call a match function.
    """


class Scanner:
    """
    Simple scanner

    All method patterns are regular expression strings (not
    compiled expressions!)
    """

    def __init__(self, text, flags=0):
        """
        :param text: The text which should be scanned
        :param flags: default regular expression flags
        """
        self.data = text
        self.data_length = len(text)
        self.start_pos = 0
        self.pos = 0
        self.flags = flags
        self.last = None
        self.match = None
        self._re_cache = {}

    def eos(self):
        """`True` if the scanner reached the end of text."""
        return self.pos >= self.data_length
    eos = property(eos, doc=eos.__doc__)

    def check(self, pattern):
        """
        Apply `pattern` on the current position and return
        the match object. (Doesn't touch pos). Use this for
        lookahead.
        """
        if self.eos:
            raise EndOfText()
        if pattern not in self._re_cache:
            self._re_cache[pattern] = re.compile(pattern, self.flags)
        return self._re_cache[pattern].match(self.data, self.pos)

    def test(self, pattern):
        """Apply a pattern on the current position and check
        if it matches. Doesn't touch pos.
        """
        return self.check(pattern) is not None

    def scan(self, pattern):
        """
        Scan the text for the given pattern and update pos/match
        and related fields. The return value is a boolean that
        indicates if the pattern matched. The matched value is
        stored on the instance as ``match``, the last value is
        stored as ``last``. ``start_pos`` is the position of the
        pointer before the pattern was matched, ``pos`` is the
        end position.
        """
        if self.eos:
            raise EndOfText()
        if pattern not in self._re_cache:
            self._re_cache[pattern] = re.compile(pattern, self.flags)
        self.last = self.match
        m = self._re_cache[pattern].match(self.data, self.pos)
        if m is None:
            return False
        self.start_pos = m.start()
        self.pos = m.end()
        self.match = m.group()
        return True

    def get_char(self):
        """Scan exactly one char."""
        self.scan('.')

    def __repr__(self):
        return '<%s %d/%d>' % (
            self.__class__.__name__,
            self.pos,
            self.data_length
        )
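A short driving example for the Scanner class (the input string is invented):

import re
from pip._vendor.pygments.scanner import Scanner

s = Scanner('BEGIN x END.', re.IGNORECASE)
s.scan(r'begin')           # consumes 'BEGIN'
print(s.match, s.pos)      # BEGIN 5
print(s.test(r'\s+x'))     # True -- lookahead only, pos unchanged
s.scan(r'\s+x\s+end\.')    # consume the rest
print(s.eos)               # True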
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/sphinxext.py
ADDED
@@ -0,0 +1,155 @@
"""
    pygments.sphinxext
    ~~~~~~~~~~~~~~~~~~

    Sphinx extension to generate automatic documentation of lexers,
    formatters and filters.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import sys

from docutils import nodes
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
from sphinx.util.nodes import nested_parse_with_titles


MODULEDOC = '''
.. module:: %s

%s
%s
'''

LEXERDOC = '''
.. class:: %s

    :Short names: %s
    :Filenames: %s
    :MIME types: %s

    %s

'''

FMTERDOC = '''
.. class:: %s

    :Short names: %s
    :Filenames: %s

    %s

'''

FILTERDOC = '''
.. class:: %s

    :Name: %s

    %s

'''


class PygmentsDoc(Directive):
    """
    A directive to collect all lexers/formatters/filters and generate
    autoclass directives for them.
    """
    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {}

    def run(self):
        self.filenames = set()
        if self.arguments[0] == 'lexers':
            out = self.document_lexers()
        elif self.arguments[0] == 'formatters':
            out = self.document_formatters()
        elif self.arguments[0] == 'filters':
            out = self.document_filters()
        else:
            raise Exception('invalid argument for "pygmentsdoc" directive')
        node = nodes.compound()
        vl = ViewList(out.split('\n'), source='')
        nested_parse_with_titles(self.state, vl, node)
        for fn in self.filenames:
            self.state.document.settings.record_dependencies.add(fn)
        return node.children

    def document_lexers(self):
        from pip._vendor.pygments.lexers._mapping import LEXERS
        out = []
        modules = {}
        moduledocstrings = {}
        for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
            module = data[0]
            mod = __import__(module, None, None, [classname])
            self.filenames.add(mod.__file__)
            cls = getattr(mod, classname)
            if not cls.__doc__:
                print("Warning: %s does not have a docstring." % classname)
            docstring = cls.__doc__
            if isinstance(docstring, bytes):
                docstring = docstring.decode('utf8')
            modules.setdefault(module, []).append((
                classname,
                ', '.join(data[2]) or 'None',
                # escape RST markup in the filename patterns
                ', '.join(data[3]).replace('*', '\\*').replace('_', '\\_') or 'None',
                ', '.join(data[4]) or 'None',
                docstring))
            if module not in moduledocstrings:
                moddoc = mod.__doc__
                if isinstance(moddoc, bytes):
                    moddoc = moddoc.decode('utf8')
                moduledocstrings[module] = moddoc

        for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
            if moduledocstrings[module] is None:
                raise Exception("Missing docstring for %s" % (module,))
            heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
            out.append(MODULEDOC % (module, heading, '-'*len(heading)))
            for data in lexers:
                out.append(LEXERDOC % data)

        return ''.join(out)

    def document_formatters(self):
        from pip._vendor.pygments.formatters import FORMATTERS

        out = []
        for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
            module = data[0]
            mod = __import__(module, None, None, [classname])
            self.filenames.add(mod.__file__)
            cls = getattr(mod, classname)
            docstring = cls.__doc__
            if isinstance(docstring, bytes):
                docstring = docstring.decode('utf8')
            heading = cls.__name__
            out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
                                   ', '.join(data[3]).replace('*', '\\*') or 'None',
                                   docstring))
        return ''.join(out)

    def document_filters(self):
        from pip._vendor.pygments.filters import FILTERS

        out = []
        for name, cls in FILTERS.items():
            self.filenames.add(sys.modules[cls.__module__].__file__)
            docstring = cls.__doc__
            if isinstance(docstring, bytes):
                docstring = docstring.decode('utf8')
            out.append(FILTERDOC % (cls.__name__, name, docstring))
        return ''.join(out)


def setup(app):
    app.add_directive('pygmentsdoc', PygmentsDoc)
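Wiring this up follows the usual Sphinx pattern; a hypothetical documentation project would enable the extension in conf.py:

# conf.py
extensions = ['pip._vendor.pygments.sphinxext']

and then invoke the directive from a reST page with `.. pygmentsdoc:: lexers`, where the single argument must be one of lexers, formatters or filters, matching the dispatch in run() above.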
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/style.py
ADDED
@@ -0,0 +1,197 @@
"""
    pygments.style
    ~~~~~~~~~~~~~~

    Basic style object.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pip._vendor.pygments.token import Token, STANDARD_TYPES

# Default mapping of ansixxx to RGB colors.
_ansimap = {
    # dark
    'ansiblack': '000000',
    'ansired': '7f0000',
    'ansigreen': '007f00',
    'ansiyellow': '7f7fe0',
    'ansiblue': '00007f',
    'ansimagenta': '7f007f',
    'ansicyan': '007f7f',
    'ansigray': 'e5e5e5',
    # normal
    'ansibrightblack': '555555',
    'ansibrightred': 'ff0000',
    'ansibrightgreen': '00ff00',
    'ansibrightyellow': 'ffff00',
    'ansibrightblue': '0000ff',
    'ansibrightmagenta': 'ff00ff',
    'ansibrightcyan': '00ffff',
    'ansiwhite': 'ffffff',
}
# mapping of deprecated #ansixxx colors to new color names
_deprecated_ansicolors = {
    # dark
    '#ansiblack': 'ansiblack',
    '#ansidarkred': 'ansired',
    '#ansidarkgreen': 'ansigreen',
    '#ansibrown': 'ansiyellow',
    '#ansidarkblue': 'ansiblue',
    '#ansipurple': 'ansimagenta',
    '#ansiteal': 'ansicyan',
    '#ansilightgray': 'ansigray',
    # normal
    '#ansidarkgray': 'ansibrightblack',
    '#ansired': 'ansibrightred',
    '#ansigreen': 'ansibrightgreen',
    '#ansiyellow': 'ansibrightyellow',
    '#ansiblue': 'ansibrightblue',
    '#ansifuchsia': 'ansibrightmagenta',
    '#ansiturquoise': 'ansibrightcyan',
    '#ansiwhite': 'ansiwhite',
}
ansicolors = set(_ansimap)


class StyleMeta(type):

    def __new__(mcs, name, bases, dct):
        obj = type.__new__(mcs, name, bases, dct)
        for token in STANDARD_TYPES:
            if token not in obj.styles:
                obj.styles[token] = ''

        def colorformat(text):
            if text in ansicolors:
                return text
            if text[0:1] == '#':
                col = text[1:]
                if len(col) == 6:
                    return col
                elif len(col) == 3:
                    return col[0] * 2 + col[1] * 2 + col[2] * 2
            elif text == '':
                return ''
            elif text.startswith('var') or text.startswith('calc'):
                return text
            assert False, "wrong color format %r" % text

        _styles = obj._styles = {}

        for ttype in obj.styles:
            for token in ttype.split():
                if token in _styles:
                    continue
                ndef = _styles.get(token.parent, None)
                styledefs = obj.styles.get(token, '').split()
                if not ndef or token is None:
                    ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
                elif 'noinherit' in styledefs and token is not Token:
                    ndef = _styles[Token][:]
                else:
                    ndef = ndef[:]
                _styles[token] = ndef
                for styledef in obj.styles.get(token, '').split():
                    if styledef == 'noinherit':
                        pass
                    elif styledef == 'bold':
                        ndef[1] = 1
                    elif styledef == 'nobold':
                        ndef[1] = 0
                    elif styledef == 'italic':
                        ndef[2] = 1
                    elif styledef == 'noitalic':
                        ndef[2] = 0
                    elif styledef == 'underline':
                        ndef[3] = 1
                    elif styledef == 'nounderline':
                        ndef[3] = 0
                    elif styledef[:3] == 'bg:':
                        ndef[4] = colorformat(styledef[3:])
                    elif styledef[:7] == 'border:':
                        ndef[5] = colorformat(styledef[7:])
                    elif styledef == 'roman':
                        ndef[6] = 1
                    elif styledef == 'sans':
                        ndef[7] = 1
                    elif styledef == 'mono':
                        ndef[8] = 1
                    else:
                        ndef[0] = colorformat(styledef)

        return obj

    def style_for_token(cls, token):
        t = cls._styles[token]
        ansicolor = bgansicolor = None
        color = t[0]
        if color in _deprecated_ansicolors:
            color = _deprecated_ansicolors[color]
        if color in ansicolors:
            ansicolor = color
            color = _ansimap[color]
        bgcolor = t[4]
        if bgcolor in _deprecated_ansicolors:
            bgcolor = _deprecated_ansicolors[bgcolor]
        if bgcolor in ansicolors:
            bgansicolor = bgcolor
            bgcolor = _ansimap[bgcolor]

        return {
            'color': color or None,
            'bold': bool(t[1]),
            'italic': bool(t[2]),
            'underline': bool(t[3]),
            'bgcolor': bgcolor or None,
            'border': t[5] or None,
            'roman': bool(t[6]) or None,
            'sans': bool(t[7]) or None,
            'mono': bool(t[8]) or None,
            'ansicolor': ansicolor,
            'bgansicolor': bgansicolor,
        }

    def list_styles(cls):
        return list(cls)

    def styles_token(cls, ttype):
        return ttype in cls._styles

    def __iter__(cls):
        for token in cls._styles:
            yield token, cls.style_for_token(token)

    def __len__(cls):
        return len(cls._styles)


class Style(metaclass=StyleMeta):

    #: overall background color (``None`` means transparent)
    background_color = '#ffffff'

    #: highlight background color
    highlight_color = '#ffffcc'

    #: line number font color
    line_number_color = 'inherit'

    #: line number background color
    line_number_background_color = 'transparent'

    #: special line number font color
    line_number_special_color = '#000000'

    #: special line number background color
    line_number_special_background_color = '#ffffc0'

    #: Style definitions for individual token types.
    styles = {}

    # Attribute for lexers defined within Pygments. If set
    # to True, the style is not shown in the style gallery
    # on the website. This is intended for language-specific
    # styles.
    web_style_gallery_exclude = False
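Since StyleMeta does all of its parsing at class-creation time, defining a style is just a matter of subclassing; a small sketch (MyStyle and its token choices are invented for illustration):

from pip._vendor.pygments.style import Style
from pip._vendor.pygments.token import Keyword, Comment

class MyStyle(Style):
    styles = {
        Keyword: 'bold #005f87',      # hex color plus a flag
        Comment: 'italic ansigreen',  # named ANSI color
    }

print(MyStyle.style_for_token(Keyword)['bold'])       # True
print(MyStyle.style_for_token(Keyword)['color'])      # 005f87
print(MyStyle.style_for_token(Comment)['ansicolor'])  # ansigreen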
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/token.py
ADDED
@@ -0,0 +1,212 @@
"""
    pygments.token
    ~~~~~~~~~~~~~~

    Basic token types and the standard tokens.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""


class _TokenType(tuple):
    parent = None

    def split(self):
        buf = []
        node = self
        while node is not None:
            buf.append(node)
            node = node.parent
        buf.reverse()
        return buf

    def __init__(self, *args):
        # no need to call super.__init__
        self.subtypes = set()

    def __contains__(self, val):
        return self is val or (
            type(val) is self.__class__ and
            val[:len(self)] == self
        )

    def __getattr__(self, val):
        if not val or not val[0].isupper():
            return tuple.__getattribute__(self, val)
        new = _TokenType(self + (val,))
        setattr(self, val, new)
        self.subtypes.add(new)
        new.parent = self
        return new

    def __repr__(self):
        return 'Token' + (self and '.' or '') + '.'.join(self)

    def __copy__(self):
        # These instances are supposed to be singletons
        return self

    def __deepcopy__(self, memo):
        # These instances are supposed to be singletons
        return self


Token = _TokenType()

# Special token types
Text = Token.Text
Whitespace = Text.Whitespace
Escape = Token.Escape
Error = Token.Error
# Text that doesn't belong to this lexer (e.g. HTML in PHP)
Other = Token.Other

# Common token types for source code
Keyword = Token.Keyword
Name = Token.Name
Literal = Token.Literal
String = Literal.String
Number = Literal.Number
Punctuation = Token.Punctuation
Operator = Token.Operator
Comment = Token.Comment

# Generic types for non-source code
Generic = Token.Generic

# String and some others are not direct children of Token.
# alias them:
Token.Token = Token
Token.String = String
Token.Number = Number


def is_token_subtype(ttype, other):
    """
    Return True if ``ttype`` is a subtype of ``other``.

    exists for backwards compatibility. use ``ttype in other`` now.
    """
    return ttype in other


def string_to_tokentype(s):
    """
    Convert a string into a token type::

        >>> string_to_tokentype('String.Double')
        Token.Literal.String.Double
        >>> string_to_tokentype('Token.Literal.Number')
        Token.Literal.Number
        >>> string_to_tokentype('')
        Token

    Tokens that are already tokens are returned unchanged:

        >>> string_to_tokentype(String)
        Token.Literal.String
    """
    if isinstance(s, _TokenType):
        return s
    if not s:
        return Token
    node = Token
    for item in s.split('.'):
        node = getattr(node, item)
    return node


# Map standard token types to short names, used in CSS class naming.
# If you add a new item, please be sure to run this file to perform
# a consistency check for duplicate values.
STANDARD_TYPES = {
    Token: '',

    Text: '',
    Whitespace: 'w',
    Escape: 'esc',
    Error: 'err',
    Other: 'x',

    Keyword: 'k',
    Keyword.Constant: 'kc',
    Keyword.Declaration: 'kd',
    Keyword.Namespace: 'kn',
    Keyword.Pseudo: 'kp',
    Keyword.Reserved: 'kr',
    Keyword.Type: 'kt',

    Name: 'n',
    Name.Attribute: 'na',
    Name.Builtin: 'nb',
    Name.Builtin.Pseudo: 'bp',
    Name.Class: 'nc',
    Name.Constant: 'no',
    Name.Decorator: 'nd',
    Name.Entity: 'ni',
    Name.Exception: 'ne',
    Name.Function: 'nf',
    Name.Function.Magic: 'fm',
    Name.Property: 'py',
    Name.Label: 'nl',
    Name.Namespace: 'nn',
    Name.Other: 'nx',
    Name.Tag: 'nt',
    Name.Variable: 'nv',
    Name.Variable.Class: 'vc',
    Name.Variable.Global: 'vg',
    Name.Variable.Instance: 'vi',
    Name.Variable.Magic: 'vm',

    Literal: 'l',
    Literal.Date: 'ld',

    String: 's',
    String.Affix: 'sa',
    String.Backtick: 'sb',
    String.Char: 'sc',
    String.Delimiter: 'dl',
    String.Doc: 'sd',
    String.Double: 's2',
    String.Escape: 'se',
    String.Heredoc: 'sh',
    String.Interpol: 'si',
    String.Other: 'sx',
    String.Regex: 'sr',
    String.Single: 's1',
    String.Symbol: 'ss',

    Number: 'm',
    Number.Bin: 'mb',
    Number.Float: 'mf',
    Number.Hex: 'mh',
    Number.Integer: 'mi',
    Number.Integer.Long: 'il',
    Number.Oct: 'mo',

    Operator: 'o',
    Operator.Word: 'ow',

    Punctuation: 'p',

    Comment: 'c',
    Comment.Hashbang: 'ch',
    Comment.Multiline: 'cm',
    Comment.Preproc: 'cp',
    Comment.PreprocFile: 'cpf',
    Comment.Single: 'c1',
    Comment.Special: 'cs',

    Generic: 'g',
    Generic.Deleted: 'gd',
    Generic.Emph: 'ge',
    Generic.Error: 'gr',
    Generic.Heading: 'gh',
    Generic.Inserted: 'gi',
    Generic.Output: 'go',
    Generic.Prompt: 'gp',
    Generic.Strong: 'gs',
    Generic.Subheading: 'gu',
    Generic.Traceback: 'gt',
}
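The __getattr__ trick above means token types spring into existence on first attribute access and are cached as singletons; a brief demonstration:

from pip._vendor.pygments.token import (
    Token, Name, String, STANDARD_TYPES, string_to_tokentype)

print(Name.Function)                      # Token.Name.Function
print(Name.Function in Name)              # True -- subtype containment
print(string_to_tokentype('String.Double') is String.Double)  # True
print(STANDARD_TYPES[Name.Function])      # nf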
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/unistring.py
ADDED
@@ -0,0 +1,153 @@
"""
    pygments.unistring
    ~~~~~~~~~~~~~~~~~~

    Strings of all Unicode characters of a certain category.
    Used for matching in Unicode-aware languages. Run to regenerate.

    Inspired by chartypes_create.py from the MoinMoin project.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
Cc = '\x00-\x1f\x7f-\x9f'
Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U000118 9f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2e0\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
Zl = '\u2028'
Zp = '\u2029'
Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
# Generated from unidata 11.0.0


def combine(*args):
    return ''.join(globals()[cat] for cat in args)


def allexcept(*args):
    newcats = cats[:]
    for arg in args:
        newcats.remove(arg)
    return ''.join(globals()[cat] for cat in newcats)


def _handle_runs(char_list):  # pragma: no cover
    buf = []
    for c in char_list:
        if len(c) == 1:
            if buf and buf[-1][1] == chr(ord(c) - 1):
                buf[-1] = (buf[-1][0], c)
            else:
                buf.append((c, c))
        else:
            buf.append((c, c))
    for a, b in buf:
        if a == b:
            yield a
        else:
            yield '%s-%s' % (a, b)


if __name__ == '__main__':  # pragma: no cover
    import unicodedata

    categories = {'xid_start': [], 'xid_continue': []}

    with open(__file__) as fp:
        content = fp.read()

    header = content[:content.find('Cc =')]
    footer = content[content.find("def combine("):]

    for code in range(0x110000):
        c = chr(code)
        cat = unicodedata.category(c)
        if ord(c) == 0xdc00:
            # Hack to avoid combining this combining character with the
            # preceding high surrogate, 0xdbff, when doing a repr.
            c = '\\' + c
        elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
            # Escape regex metachars.
            c = '\\' + c
        categories.setdefault(cat, []).append(c)
        # XID_START and XID_CONTINUE are special categories used for matching
        # identifiers in Python 3.
        if c.isidentifier():
            categories['xid_start'].append(c)
        if ('a' + c).isidentifier():
            categories['xid_continue'].append(c)

    with open(__file__, 'w') as fp:
        fp.write(header)

        for cat in sorted(categories):
            val = ''.join(_handle_runs(categories[cat]))
            fp.write('%s = %a\n\n' % (cat, val))

        cats = sorted(categories)
        cats.remove('xid_start')
        cats.remove('xid_continue')
        fp.write('cats = %r\n\n' % cats)

        fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))

        fp.write(footer)
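The category strings above are meant to be dropped into regular-expression character classes. A minimal usage sketch of combine and allexcept, assuming the module is importable under its vendored path pip._vendor.pygments.unistring (the patterns built here are invented for illustration, not ones Pygments itself defines):

import re
from pip._vendor.pygments import unistring as uni

# One character class covering every Unicode letter category.
letter_re = re.compile('[%s]+' % uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo'))
print(letter_re.findall('déjà vu 42'))  # ['déjà', 'vu']

# Everything except separator and control-character categories.
visible_re = re.compile('[%s]' % uni.allexcept('Zs', 'Zl', 'Zp', 'Cc'))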
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pygments/util.py
ADDED
@@ -0,0 +1,308 @@
"""
    pygments.util
    ~~~~~~~~~~~~~

    Utility functions.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
from io import TextIOWrapper


split_path_re = re.compile(r'[/\\ ]')
doctype_lookup_re = re.compile(r'''
    <!DOCTYPE\s+(
     [a-zA-Z_][a-zA-Z0-9]*
     (?: \s+      # optional in HTML5
     [a-zA-Z_][a-zA-Z0-9]*\s+
     "[^"]*")?
    )
    [^>]*>
''', re.DOTALL | re.MULTILINE | re.VERBOSE)
tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>',
                    re.UNICODE | re.IGNORECASE | re.DOTALL | re.MULTILINE)
xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)


class ClassNotFound(ValueError):
    """Raised if one of the lookup functions didn't find a matching class."""


class OptionError(Exception):
    pass


def get_choice_opt(options, optname, allowed, default=None, normcase=False):
    string = options.get(optname, default)
    if normcase:
        string = string.lower()
    if string not in allowed:
        raise OptionError('Value for option %s must be one of %s' %
                          (optname, ', '.join(map(str, allowed))))
    return string


def get_bool_opt(options, optname, default=None):
    string = options.get(optname, default)
    if isinstance(string, bool):
        return string
    elif isinstance(string, int):
        return bool(string)
    elif not isinstance(string, str):
        raise OptionError('Invalid type %r for option %s; use '
                          '1/0, yes/no, true/false, on/off' % (
                              string, optname))
    elif string.lower() in ('1', 'yes', 'true', 'on'):
        return True
    elif string.lower() in ('0', 'no', 'false', 'off'):
        return False
    else:
        raise OptionError('Invalid value %r for option %s; use '
                          '1/0, yes/no, true/false, on/off' % (
                              string, optname))


def get_int_opt(options, optname, default=None):
    string = options.get(optname, default)
    try:
        return int(string)
    except TypeError:
        raise OptionError('Invalid type %r for option %s; you '
                          'must give an integer value' % (
                              string, optname))
    except ValueError:
        raise OptionError('Invalid value %r for option %s; you '
                          'must give an integer value' % (
                              string, optname))


def get_list_opt(options, optname, default=None):
    val = options.get(optname, default)
    if isinstance(val, str):
        return val.split()
    elif isinstance(val, (list, tuple)):
        return list(val)
    else:
        raise OptionError('Invalid type %r for option %s; you '
                          'must give a list value' % (
                              val, optname))
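A quick sketch of how these option helpers normalize loosely-typed option values (the option dict and names below are made up for the example):

opts = {'linenos': 'yes', 'tabsize': '8', 'extensions': 'foo bar'}
print(get_bool_opt(opts, 'linenos', False))   # True
print(get_int_opt(opts, 'tabsize', 4))        # 8
print(get_list_opt(opts, 'extensions', []))   # ['foo', 'bar']
print(get_choice_opt(opts, 'mode', ['plain', 'fancy'], 'plain'))  # 'plain' (default)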
def docstring_headline(obj):
    if not obj.__doc__:
        return ''
    res = []
    for line in obj.__doc__.strip().splitlines():
        if line.strip():
            res.append(" " + line.strip())
        else:
            break
    return ''.join(res).lstrip()


def make_analysator(f):
    """Return a static text analyser function that returns float values."""
    def text_analyse(text):
        try:
            rv = f(text)
        except Exception:
            return 0.0
        if not rv:
            return 0.0
        try:
            return min(1.0, max(0.0, float(rv)))
        except (ValueError, TypeError):
            return 0.0
    text_analyse.__doc__ = f.__doc__
    return staticmethod(text_analyse)


def shebang_matches(text, regex):
    r"""Check if the given regular expression matches the last part of the
    shebang if one exists.

        >>> from pygments.util import shebang_matches
        >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
        True
        >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
        True
        >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
        False
        >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
        False
        >>> shebang_matches('#!/usr/bin/startsomethingwith python',
        ...                 r'python(2\.\d)?')
        True

    It also checks for common windows executable file extensions::

        >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
        True

    Parameters (``'-f'`` or ``'--foo'`` are ignored so ``'perl'`` does
    the same as ``'perl -e'``)

    Note that this method automatically searches the whole string (eg:
    the regular expression is wrapped in ``'^$'``)
    """
    index = text.find('\n')
    if index >= 0:
        first_line = text[:index].lower()
    else:
        first_line = text.lower()
    if first_line.startswith('#!'):
        try:
            found = [x for x in split_path_re.split(first_line[2:].strip())
                     if x and not x.startswith('-')][-1]
        except IndexError:
            return False
        regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
        if regex.search(found) is not None:
            return True
    return False


def doctype_matches(text, regex):
    """Check if the doctype matches a regular expression (if present).

    Note that this method only checks the first part of a DOCTYPE.
    eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
    """
    m = doctype_lookup_re.search(text)
    if m is None:
        return False
    doctype = m.group(1)
    return re.compile(regex, re.I).match(doctype.strip()) is not None


def html_doctype_matches(text):
    """Check if the file looks like it has a html doctype."""
    return doctype_matches(text, r'html')


_looks_like_xml_cache = {}


def looks_like_xml(text):
    """Check if a doctype exists or if we have some tags."""
    if xml_decl_re.match(text):
        return True
    key = hash(text)
    try:
        return _looks_like_xml_cache[key]
    except KeyError:
        m = doctype_lookup_re.search(text)
        if m is not None:
            return True
        rv = tag_re.search(text[:1000]) is not None
        _looks_like_xml_cache[key] = rv
        return rv


def surrogatepair(c):
    """Given a unicode character code with length greater than 16 bits,
    return the two 16 bit surrogate pair.
    """
    # From example D28 of:
    # http://www.unicode.org/book/ch03.pdf
    return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
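Worked example: for U+1F600, 0xd7c0 + (0x1f600 >> 10) gives 0xd83d and 0xdc00 + (0x1f600 & 0x3ff) gives 0xde00, which is exactly the UTF-16 encoding of that code point. A minimal check (illustrative, not part of the vendored file):

hi, lo = surrogatepair(0x1f600)
assert (hi, lo) == (0xd83d, 0xde00)
# The pair round-trips through UTF-16:
assert '\U0001f600'.encode('utf-16-be').hex() == 'd83dde00'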
def format_lines(var_name, seq, raw=False, indent_level=0):
    """Formats a sequence of strings for output."""
    lines = []
    base_indent = ' ' * indent_level * 4
    inner_indent = ' ' * (indent_level + 1) * 4
    lines.append(base_indent + var_name + ' = (')
    if raw:
        # These should be preformatted reprs of, say, tuples.
        for i in seq:
            lines.append(inner_indent + i + ',')
    else:
        for i in seq:
            # Force use of single quotes
            r = repr(i + '"')
            lines.append(inner_indent + r[:-2] + r[-1] + ',')
    lines.append(base_indent + ')')
    return '\n'.join(lines)
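For example (an illustrative call, not from the vendored file):

print(format_lines('filenames', ['*.py', '*.pyw']))
# filenames = (
#     '*.py',
#     '*.pyw',
# )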
def duplicates_removed(it, already_seen=()):
    """
    Returns a list with duplicates removed from the iterable `it`.

    Order is preserved.
    """
    lst = []
    seen = set()
    for i in it:
        if i in seen or i in already_seen:
            continue
        lst.append(i)
        seen.add(i)
    return lst


class Future:
    """Generic class to defer some work.

    Handled specially in RegexLexerMeta, to support regex string construction at
    first use.
    """
    def get(self):
        raise NotImplementedError


def guess_decode(text):
    """Decode *text* with guessed encoding.

    First try UTF-8; this should fail for non-UTF-8 encodings.
    Then try the preferred locale encoding.
    Fall back to latin-1, which always works.
    """
    try:
        text = text.decode('utf-8')
        return text, 'utf-8'
    except UnicodeDecodeError:
        try:
            import locale
            prefencoding = locale.getpreferredencoding()
            text = text.decode()
            return text, prefencoding
        except (UnicodeDecodeError, LookupError):
            text = text.decode('latin1')
            return text, 'latin1'
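Usage sketch: valid UTF-8 input is reported as such, while bytes that fail both the UTF-8 and locale attempts come back decoded as latin-1, which never fails (illustrative calls, not from the vendored file):

print(guess_decode('héllo'.encode('utf-8')))  # ('héllo', 'utf-8')
print(guess_decode(b'caf\xe9'))               # ('café', 'latin1') on a UTF-8 locale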
def guess_decode_from_terminal(text, term):
    """Decode *text* coming from terminal *term*.

    First try the terminal encoding, if given.
    Then try UTF-8.  Then try the preferred locale encoding.
    Fall back to latin-1, which always works.
    """
    if getattr(term, 'encoding', None):
        try:
            text = text.decode(term.encoding)
        except UnicodeDecodeError:
            pass
        else:
            return text, term.encoding
    return guess_decode(text)


def terminal_encoding(term):
    """Return our best guess of encoding for the given *term*."""
    if getattr(term, 'encoding', None):
        return term.encoding
    import locale
    return locale.getpreferredencoding()


class UnclosingTextIOWrapper(TextIOWrapper):
    # Don't close underlying buffer on destruction.
    def close(self):
        self.flush()
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__init__.py
ADDED
@@ -0,0 +1,328 @@
# module pyparsing.py
#
# Copyright (c) 2003-2021  Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__doc__ = """
pyparsing module - Classes and methods to define and execute parsing grammars
=============================================================================

The pyparsing module is an alternative approach to creating and
executing simple grammars, vs. the traditional lex/yacc approach, or the
use of regular expressions.  With pyparsing, you don't need to learn
a new syntax for defining grammars or matching expressions - the parsing
module provides a library of classes that you use to construct the
grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form
``"<salutation>, <addressee>!"``), built up using :class:`Word`,
:class:`Literal`, and :class:`And` elements
(the :meth:`'+'<ParserElement.__add__>` operators create :class:`And` expressions,
and the strings are auto-converted to :class:`Literal` expressions)::

    from pip._vendor.pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print(hello, "->", greet.parse_string(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the
self-explanatory class names, and the use of :class:`'+'<And>`,
:class:`'|'<MatchFirst>`, :class:`'^'<Or>` and :class:`'&'<Each>` operators.

The :class:`ParseResults` object returned from
:class:`ParserElement.parseString` can be
accessed as a nested list, a dictionary, or an object with named
attributes.

The pyparsing module handles some of the problems that are typically
vexing when writing text parsers:

  - extra or missing whitespace (the above program will also handle
    "Hello,World!", "Hello  ,  World  !", etc.)
  - quoted strings
  - embedded comments


Getting Started -
-----------------
Visit the classes :class:`ParserElement` and :class:`ParseResults` to
see the base classes that most other pyparsing
classes inherit from. Use the docstrings for examples of how to:

  - construct literal match expressions from :class:`Literal` and
    :class:`CaselessLiteral` classes
  - construct character word-group expressions using the :class:`Word`
    class
  - see how to create repetitive expressions using :class:`ZeroOrMore`
    and :class:`OneOrMore` classes
  - use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`,
    and :class:`'&'<Each>` operators to combine simple expressions into
    more complex ones
  - associate names with your parsed results using
    :class:`ParserElement.setResultsName`
  - access the parsed data, which is returned as a :class:`ParseResults`
    object
  - find some helpful expression short-cuts like :class:`delimitedList`
    and :class:`oneOf`
  - find more useful common expressions in the :class:`pyparsing_common`
    namespace class
"""
from typing import NamedTuple


class version_info(NamedTuple):
    major: int
    minor: int
    micro: int
    releaselevel: str
    serial: int

    @property
    def __version__(self):
        return "{}.{}.{}".format(self.major, self.minor, self.micro) + (
            "{}{}{}".format(
                "r" if self.releaselevel[0] == "c" else "",
                self.releaselevel[0],
                self.serial,
            ),
            "",
        )[self.releaselevel == "final"]

    def __str__(self):
        return "{} {} / {}".format(__name__, self.__version__, __version_time__)

    def __repr__(self):
        return "{}.{}({})".format(
            __name__,
            type(self).__name__,
            ", ".join("{}={!r}".format(*nv) for nv in zip(self._fields, self)),
        )


__version_info__ = version_info(3, 0, 7, "final", 0)
__version_time__ = "15 Jan 2022 04:10 UTC"
__version__ = __version_info__.__version__
__versionTime__ = __version_time__
__author__ = "Paul McGuire <[email protected]>"

from .util import *
from .exceptions import *
from .actions import *
from .core import __diag__, __compat__
from .results import *
from .core import *
from .core import _builtin_exprs as core_builtin_exprs
from .helpers import *
from .helpers import _builtin_exprs as helper_builtin_exprs

from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode
from .testing import pyparsing_test as testing
from .common import (
    pyparsing_common as common,
    _builtin_exprs as common_builtin_exprs,
)

# define backward compat synonyms
if "pyparsing_unicode" not in globals():
    pyparsing_unicode = unicode
if "pyparsing_common" not in globals():
    pyparsing_common = common
if "pyparsing_test" not in globals():
    pyparsing_test = testing

core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs
__all__ = [
|
163 |
+
"__version__",
|
164 |
+
"__version_time__",
|
165 |
+
"__author__",
|
166 |
+
"__compat__",
|
167 |
+
"__diag__",
|
168 |
+
"And",
|
169 |
+
"AtLineStart",
|
170 |
+
"AtStringStart",
|
171 |
+
"CaselessKeyword",
|
172 |
+
"CaselessLiteral",
|
173 |
+
"CharsNotIn",
|
174 |
+
"Combine",
|
175 |
+
"Dict",
|
176 |
+
"Each",
|
177 |
+
"Empty",
|
178 |
+
"FollowedBy",
|
179 |
+
"Forward",
|
180 |
+
"GoToColumn",
|
181 |
+
"Group",
|
182 |
+
"IndentedBlock",
|
183 |
+
"Keyword",
|
184 |
+
"LineEnd",
|
185 |
+
"LineStart",
|
186 |
+
"Literal",
|
187 |
+
"Located",
|
188 |
+
"PrecededBy",
|
189 |
+
"MatchFirst",
|
190 |
+
"NoMatch",
|
191 |
+
"NotAny",
|
192 |
+
"OneOrMore",
|
193 |
+
"OnlyOnce",
|
194 |
+
"OpAssoc",
|
195 |
+
"Opt",
|
196 |
+
"Optional",
|
197 |
+
"Or",
|
198 |
+
"ParseBaseException",
|
199 |
+
"ParseElementEnhance",
|
200 |
+
"ParseException",
|
201 |
+
"ParseExpression",
|
202 |
+
"ParseFatalException",
|
203 |
+
"ParseResults",
|
204 |
+
"ParseSyntaxException",
|
205 |
+
"ParserElement",
|
206 |
+
"PositionToken",
|
207 |
+
"QuotedString",
|
208 |
+
"RecursiveGrammarException",
|
209 |
+
"Regex",
|
210 |
+
"SkipTo",
|
211 |
+
"StringEnd",
|
212 |
+
"StringStart",
|
213 |
+
"Suppress",
|
214 |
+
"Token",
|
215 |
+
"TokenConverter",
|
216 |
+
"White",
|
217 |
+
"Word",
|
218 |
+
"WordEnd",
|
219 |
+
"WordStart",
|
220 |
+
"ZeroOrMore",
|
221 |
+
"Char",
|
222 |
+
"alphanums",
|
223 |
+
"alphas",
|
224 |
+
"alphas8bit",
|
225 |
+
"any_close_tag",
|
226 |
+
"any_open_tag",
|
227 |
+
"c_style_comment",
|
228 |
+
"col",
|
229 |
+
"common_html_entity",
|
230 |
+
"counted_array",
|
231 |
+
"cpp_style_comment",
|
232 |
+
"dbl_quoted_string",
|
233 |
+
"dbl_slash_comment",
|
234 |
+
"delimited_list",
|
235 |
+
"dict_of",
|
236 |
+
"empty",
|
237 |
+
"hexnums",
|
238 |
+
"html_comment",
|
239 |
+
"identchars",
|
240 |
+
"identbodychars",
|
241 |
+
"java_style_comment",
|
242 |
+
"line",
|
243 |
+
"line_end",
|
244 |
+
"line_start",
|
245 |
+
"lineno",
|
246 |
+
"make_html_tags",
|
247 |
+
"make_xml_tags",
|
248 |
+
"match_only_at_col",
|
249 |
+
"match_previous_expr",
|
250 |
+
"match_previous_literal",
|
251 |
+
"nested_expr",
|
252 |
+
"null_debug_action",
|
253 |
+
"nums",
|
254 |
+
"one_of",
|
255 |
+
"printables",
|
256 |
+
"punc8bit",
|
257 |
+
"python_style_comment",
|
258 |
+
"quoted_string",
|
259 |
+
"remove_quotes",
|
260 |
+
"replace_with",
|
261 |
+
"replace_html_entity",
|
262 |
+
"rest_of_line",
|
263 |
+
"sgl_quoted_string",
|
264 |
+
"srange",
|
265 |
+
"string_end",
|
266 |
+
"string_start",
|
267 |
+
"trace_parse_action",
|
268 |
+
"unicode_string",
|
269 |
+
"with_attribute",
|
270 |
+
"indentedBlock",
|
271 |
+
"original_text_for",
|
272 |
+
"ungroup",
|
273 |
+
"infix_notation",
|
274 |
+
"locatedExpr",
|
275 |
+
"with_class",
|
276 |
+
"CloseMatch",
|
277 |
+
"token_map",
|
278 |
+
"pyparsing_common",
|
279 |
+
"pyparsing_unicode",
|
280 |
+
"unicode_set",
|
281 |
+
"condition_as_parse_action",
|
282 |
+
"pyparsing_test",
|
283 |
+
# pre-PEP8 compatibility names
|
284 |
+
"__versionTime__",
|
285 |
+
"anyCloseTag",
|
286 |
+
"anyOpenTag",
|
287 |
+
"cStyleComment",
|
288 |
+
"commonHTMLEntity",
|
289 |
+
"countedArray",
|
290 |
+
"cppStyleComment",
|
291 |
+
"dblQuotedString",
|
292 |
+
"dblSlashComment",
|
293 |
+
"delimitedList",
|
294 |
+
"dictOf",
|
295 |
+
"htmlComment",
|
296 |
+
"javaStyleComment",
|
297 |
+
"lineEnd",
|
298 |
+
"lineStart",
|
299 |
+
"makeHTMLTags",
|
300 |
+
"makeXMLTags",
|
301 |
+
"matchOnlyAtCol",
|
302 |
+
"matchPreviousExpr",
|
303 |
+
"matchPreviousLiteral",
|
304 |
+
"nestedExpr",
|
305 |
+
"nullDebugAction",
|
306 |
+
"oneOf",
|
307 |
+
"opAssoc",
|
308 |
+
"pythonStyleComment",
|
309 |
+
"quotedString",
|
310 |
+
"removeQuotes",
|
311 |
+
"replaceHTMLEntity",
|
312 |
+
"replaceWith",
|
313 |
+
"restOfLine",
|
314 |
+
"sglQuotedString",
|
315 |
+
"stringEnd",
|
316 |
+
"stringStart",
|
317 |
+
"traceParseAction",
|
318 |
+
"unicodeString",
|
319 |
+
"withAttribute",
|
320 |
+
"indentedBlock",
|
321 |
+
"originalTextFor",
|
322 |
+
"infixNotation",
|
323 |
+
"locatedExpr",
|
324 |
+
"withClass",
|
325 |
+
"tokenMap",
|
326 |
+
"conditionAsParseAction",
|
327 |
+
"autoname_elements",
|
328 |
+
]
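
Note on the compat block above: the pre-PEP8 camelCase names are aliases for the same objects as the snake_case names, not re-implementations. A minimal sketch (illustrative only, assuming the vendored copy is importable as pip._vendor.pyparsing):

    from pip._vendor import pyparsing as pp

    # the backward-compat synonyms are the identical objects,
    # so identity checks hold for either spelling
    assert pp.delimited_list is pp.delimitedList
    assert pp.one_of is pp.oneOf
    assert pp.token_map is pp.tokenMap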
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (7.13 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-310.pyc
ADDED
Binary file (7.19 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-310.pyc
ADDED
Binary file (10.1 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc
ADDED
Binary file (175 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (9.08 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-310.pyc
ADDED
Binary file (34.8 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-310.pyc
ADDED
Binary file (24.8 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-310.pyc
ADDED
Binary file (12.1 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-310.pyc
ADDED
Binary file (9.82 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-310.pyc
ADDED
Binary file (8.61 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/actions.py
ADDED
@@ -0,0 +1,207 @@
# actions.py

from .exceptions import ParseException
from .util import col


class OnlyOnce:
    """
    Wrapper for parse actions, to ensure they are only called once.
    """

    def __init__(self, method_call):
        from .core import _trim_arity

        self.callable = _trim_arity(method_call)
        self.called = False

    def __call__(self, s, l, t):
        if not self.called:
            results = self.callable(s, l, t)
            self.called = True
            return results
        raise ParseException(s, l, "OnlyOnce obj called multiple times w/out reset")

    def reset(self):
        """
        Allow the associated parse action to be called once more.
        """

        self.called = False


def match_only_at_col(n):
    """
    Helper method for defining parse actions that require matching at
    a specific column in the input text.
    """

    def verify_col(strg, locn, toks):
        if col(locn, strg) != n:
            raise ParseException(strg, locn, "matched token not at column {}".format(n))

    return verify_col


def replace_with(repl_str):
    """
    Helper method for common parse actions that simply return
    a literal value. Especially useful when used with
    :class:`transform_string<ParserElement.transform_string>` ().

    Example::

        num = Word(nums).set_parse_action(lambda toks: int(toks[0]))
        na = one_of("N/A NA").set_parse_action(replace_with(math.nan))
        term = na | num

        OneOrMore(term).parse_string("324 234 N/A 234")  # -> [324, 234, nan, 234]
    """
    return lambda s, l, t: [repl_str]


def remove_quotes(s, l, t):
    """
    Helper parse action for removing quotation marks from parsed
    quoted strings.

    Example::

        # by default, quotation marks are included in parsed results
        quoted_string.parse_string("'Now is the Winter of our Discontent'")  # -> ["'Now is the Winter of our Discontent'"]

        # use remove_quotes to strip quotation marks from parsed results
        quoted_string.set_parse_action(remove_quotes)
        quoted_string.parse_string("'Now is the Winter of our Discontent'")  # -> ["Now is the Winter of our Discontent"]
    """
    return t[0][1:-1]


def with_attribute(*args, **attr_dict):
    """
    Helper to create a validating parse action to be used with start
    tags created with :class:`make_xml_tags` or
    :class:`make_html_tags`. Use ``with_attribute`` to qualify
    a starting tag with a required attribute value, to avoid false
    matches on common tags such as ``<TD>`` or ``<DIV>``.

    Call ``with_attribute`` with a series of attribute names and
    values. Specify the list of filter attributes names and values as:

    - keyword arguments, as in ``(align="right")``, or
    - as an explicit dict with ``**`` operator, when an attribute
      name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}``
    - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))``

    For attribute names with a namespace prefix, you must use the second
    form. Attribute names are matched insensitive to upper/lower case.

    If just testing for ``class`` (with or without a namespace), use
    :class:`with_class`.

    To verify that the attribute exists, but without specifying a value,
    pass ``with_attribute.ANY_VALUE`` as the value.

    Example::

        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>

        '''
        div,div_end = make_html_tags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().set_parse_action(with_attribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.search_string(html):
            print(grid_header.body)

        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().set_parse_action(with_attribute(type=with_attribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.search_string(html):
            print(div_header.body)

    prints::

        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    if args:
        attrs = args[:]
    else:
        attrs = attr_dict.items()
    attrs = [(k, v) for k, v in attrs]

    def pa(s, l, tokens):
        for attrName, attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s, l, "no matching attribute " + attrName)
            if attrValue != with_attribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(
                    s,
                    l,
                    "attribute {!r} has value {!r}, must be {!r}".format(
                        attrName, tokens[attrName], attrValue
                    ),
                )

    return pa


with_attribute.ANY_VALUE = object()


def with_class(classname, namespace=""):
    """
    Simplified version of :class:`with_attribute` when
    matching on a div class - made difficult because ``class`` is
    a reserved word in Python.

    Example::

        html = '''
            <div>
            Some text
            <div class="grid">1 4 0 1 0</div>
            <div class="graph">1,3 2,3 1,1</div>
            <div>this <div> has no class</div>
            </div>

        '''
        div,div_end = make_html_tags("div")
        div_grid = div().set_parse_action(with_class("grid"))

        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.search_string(html):
            print(grid_header.body)

        div_any_type = div().set_parse_action(with_class(withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.search_string(html):
            print(div_header.body)

    prints::

        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    classattr = "{}:class".format(namespace) if namespace else "class"
    return with_attribute(**{classattr: classname})


# pre-PEP8 compatibility symbols
replaceWith = replace_with
removeQuotes = remove_quotes
withAttribute = with_attribute
withClass = with_class
matchOnlyAtCol = match_only_at_col
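
A brief usage sketch of the actions defined in this file, adapted from the docstrings above (illustrative only, not part of the vendored sources; it assumes the vendored copy is importable as pip._vendor.pyparsing):

    import math
    from pip._vendor.pyparsing import OneOrMore, Word, nums, one_of, quoted_string
    from pip._vendor.pyparsing import remove_quotes, replace_with

    # replace_with: substitute a fixed value for whatever matched
    num = Word(nums).set_parse_action(lambda toks: int(toks[0]))
    na = one_of("N/A NA").set_parse_action(replace_with(math.nan))
    print(OneOrMore(na | num).parse_string("324 234 N/A 234"))  # -> [324, 234, nan, 234]

    # remove_quotes: strip the enclosing quotation marks from a quoted string
    qs = quoted_string.copy().set_parse_action(remove_quotes)
    print(qs.parse_string("'Now is the Winter of our Discontent'"))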
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pyparsing/common.py
ADDED
@@ -0,0 +1,424 @@
# common.py
from .core import *
from .helpers import delimited_list, any_open_tag, any_close_tag
from datetime import datetime


# some other useful expressions - using lower-case class name since we are really using this as a namespace
class pyparsing_common:
    """Here are some common low-level expressions that may be useful in
    jump-starting parser development:

    - numeric forms (:class:`integers<integer>`, :class:`reals<real>`,
      :class:`scientific notation<sci_real>`)
    - common :class:`programming identifiers<identifier>`
    - network addresses (:class:`MAC<mac_address>`,
      :class:`IPv4<ipv4_address>`, :class:`IPv6<ipv6_address>`)
    - ISO8601 :class:`dates<iso8601_date>` and
      :class:`datetime<iso8601_datetime>`
    - :class:`UUID<uuid>`
    - :class:`comma-separated list<comma_separated_list>`
    - :class:`url`

    Parse actions:

    - :class:`convertToInteger`
    - :class:`convertToFloat`
    - :class:`convertToDate`
    - :class:`convertToDatetime`
    - :class:`stripHTMLTags`
    - :class:`upcaseTokens`
    - :class:`downcaseTokens`

    Example::

        pyparsing_common.number.runTests('''
            # any int or real number, returned as the appropriate type
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.fnumber.runTests('''
            # any int or real number, returned as float
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.hex_integer.runTests('''
            # hex numbers
            100
            FF
            ''')

        pyparsing_common.fraction.runTests('''
            # fractions
            1/2
            -3/4
            ''')

        pyparsing_common.mixed_integer.runTests('''
            # mixed fractions
            1
            1/2
            -3/4
            1-3/4
            ''')

        import uuid
        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
        pyparsing_common.uuid.runTests('''
            # uuid
            12345678-1234-5678-1234-567812345678
            ''')

    prints::

        # any int or real number, returned as the appropriate type
        100
        [100]

        -100
        [-100]

        +100
        [100]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # any int or real number, returned as float
        100
        [100.0]

        -100
        [-100.0]

        +100
        [100.0]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # hex numbers
        100
        [256]

        FF
        [255]

        # fractions
        1/2
        [0.5]

        -3/4
        [-0.75]

        # mixed fractions
        1
        [1]

        1/2
        [0.5]

        -3/4
        [-0.75]

        1-3/4
        [1.75]

        # uuid
        12345678-1234-5678-1234-567812345678
        [UUID('12345678-1234-5678-1234-567812345678')]
    """

    convert_to_integer = token_map(int)
    """
    Parse action for converting parsed integers to Python int
    """

    convert_to_float = token_map(float)
    """
    Parse action for converting parsed numbers to Python float
    """

    integer = Word(nums).set_name("integer").set_parse_action(convert_to_integer)
    """expression that parses an unsigned integer, returns an int"""

    hex_integer = (
        Word(hexnums).set_name("hex integer").set_parse_action(token_map(int, 16))
    )
    """expression that parses a hexadecimal integer, returns an int"""

    signed_integer = (
        Regex(r"[+-]?\d+")
        .set_name("signed integer")
        .set_parse_action(convert_to_integer)
    )
    """expression that parses an integer with optional leading sign, returns an int"""

    fraction = (
        signed_integer().set_parse_action(convert_to_float)
        + "/"
        + signed_integer().set_parse_action(convert_to_float)
    ).set_name("fraction")
    """fractional expression of an integer divided by an integer, returns a float"""
    fraction.add_parse_action(lambda tt: tt[0] / tt[-1])

    mixed_integer = (
        fraction | signed_integer + Opt(Opt("-").suppress() + fraction)
    ).set_name("fraction or mixed integer-fraction")
    """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
    mixed_integer.add_parse_action(sum)

    real = (
        Regex(r"[+-]?(?:\d+\.\d*|\.\d+)")
        .set_name("real number")
        .set_parse_action(convert_to_float)
    )
    """expression that parses a floating point number and returns a float"""

    sci_real = (
        Regex(r"[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)")
        .set_name("real number with scientific notation")
        .set_parse_action(convert_to_float)
    )
    """expression that parses a floating point number with optional
    scientific notation and returns a float"""

    # streamlining this expression makes the docs nicer-looking
    number = (sci_real | real | signed_integer).setName("number").streamline()
    """any numeric expression, returns the corresponding Python type"""

    fnumber = (
        Regex(r"[+-]?\d+\.?\d*([eE][+-]?\d+)?")
        .set_name("fnumber")
        .set_parse_action(convert_to_float)
    )
    """any int or real number, returned as float"""

    identifier = Word(identchars, identbodychars).set_name("identifier")
    """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""

    ipv4_address = Regex(
        r"(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}"
    ).set_name("IPv4 address")
    "IPv4 address (``0.0.0.0 - 255.255.255.255``)"

    _ipv6_part = Regex(r"[0-9a-fA-F]{1,4}").set_name("hex_integer")
    _full_ipv6_address = (_ipv6_part + (":" + _ipv6_part) * 7).set_name(
        "full IPv6 address"
    )
    _short_ipv6_address = (
        Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6))
        + "::"
        + Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6))
    ).set_name("short IPv6 address")
    _short_ipv6_address.add_condition(
        lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8
    )
    _mixed_ipv6_address = ("::ffff:" + ipv4_address).set_name("mixed IPv6 address")
    ipv6_address = Combine(
        (_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).set_name(
            "IPv6 address"
        )
    ).set_name("IPv6 address")
    "IPv6 address (long, short, or mixed form)"

    mac_address = Regex(
        r"[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}"
    ).set_name("MAC address")
    "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"

    @staticmethod
    def convert_to_date(fmt: str = "%Y-%m-%d"):
        """
        Helper to create a parse action for converting parsed date string to Python datetime.date

        Params -
        - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``)

        Example::

            date_expr = pyparsing_common.iso8601_date.copy()
            date_expr.setParseAction(pyparsing_common.convertToDate())
            print(date_expr.parseString("1999-12-31"))

        prints::

            [datetime.date(1999, 12, 31)]
        """

        def cvt_fn(ss, ll, tt):
            try:
                return datetime.strptime(tt[0], fmt).date()
            except ValueError as ve:
                raise ParseException(ss, ll, str(ve))

        return cvt_fn

    @staticmethod
    def convert_to_datetime(fmt: str = "%Y-%m-%dT%H:%M:%S.%f"):
        """Helper to create a parse action for converting parsed
        datetime string to Python datetime.datetime

        Params -
        - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``)

        Example::

            dt_expr = pyparsing_common.iso8601_datetime.copy()
            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
            print(dt_expr.parseString("1999-12-31T23:59:59.999"))

        prints::

            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
        """

        def cvt_fn(s, l, t):
            try:
                return datetime.strptime(t[0], fmt)
            except ValueError as ve:
                raise ParseException(s, l, str(ve))

        return cvt_fn

    iso8601_date = Regex(
        r"(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?"
    ).set_name("ISO8601 date")
    "ISO8601 date (``yyyy-mm-dd``)"

    iso8601_datetime = Regex(
        r"(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?"
    ).set_name("ISO8601 datetime")
    "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``"

    uuid = Regex(r"[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}").set_name("UUID")
    "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)"

    _html_stripper = any_open_tag.suppress() | any_close_tag.suppress()

    @staticmethod
    def strip_html_tags(s: str, l: int, tokens: ParseResults):
        """Parse action to remove HTML tags from web page HTML source

        Example::

            # strip HTML links from normal text
            text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
            td, td_end = makeHTMLTags("TD")
            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
            print(table_text.parseString(text).body)

        Prints::

            More info at the pyparsing wiki page
        """
        return pyparsing_common._html_stripper.transform_string(tokens[0])

    _commasepitem = (
        Combine(
            OneOrMore(
                ~Literal(",")
                + ~LineEnd()
                + Word(printables, exclude_chars=",")
                + Opt(White(" \t") + ~FollowedBy(LineEnd() | ","))
            )
        )
        .streamline()
        .set_name("commaItem")
    )
    comma_separated_list = delimited_list(
        Opt(quoted_string.copy() | _commasepitem, default="")
    ).set_name("comma separated list")
    """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""

    upcase_tokens = staticmethod(token_map(lambda t: t.upper()))
    """Parse action to convert tokens to upper case."""

    downcase_tokens = staticmethod(token_map(lambda t: t.lower()))
    """Parse action to convert tokens to lower case."""

    # fmt: off
    url = Regex(
        # https://mathiasbynens.be/demo/url-regex
        # https://gist.github.com/dperini/729294
        r"^" +
        # protocol identifier (optional)
        # short syntax // still required
        r"(?:(?:(?P<scheme>https?|ftp):)?\/\/)" +
        # user:pass BasicAuth (optional)
        r"(?:(?P<auth>\S+(?::\S*)?)@)?" +
        r"(?P<host>" +
        # IP address exclusion
        # private & local networks
        r"(?!(?:10|127)(?:\.\d{1,3}){3})" +
        r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" +
        r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" +
        # IP address dotted notation octets
        # excludes loopback network 0.0.0.0
        # excludes reserved space >= 224.0.0.0
        # excludes network & broadcast addresses
        # (first & last IP address of each class)
        r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" +
        r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" +
        r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" +
        r"|" +
        # host & domain names, may end with dot
        # can be replaced by a shortest alternative
        # (?![-_])(?:[-\w\u00a1-\uffff]{0,63}[^-_]\.)+
        r"(?:" +
        r"(?:" +
        r"[a-z0-9\u00a1-\uffff]" +
        r"[a-z0-9\u00a1-\uffff_-]{0,62}" +
        r")?" +
        r"[a-z0-9\u00a1-\uffff]\." +
        r")+" +
        # TLD identifier name, may end with dot
        r"(?:[a-z\u00a1-\uffff]{2,}\.?)" +
        r")" +
        # port number (optional)
        r"(:(?P<port>\d{2,5}))?" +
        # resource path (optional)
        r"(?P<path>\/[^?# ]*)?" +
        # query string (optional)
        r"(\?(?P<query>[^#]*))?" +
        # fragment (optional)
        r"(#(?P<fragment>\S*))?" +
        r"$"
    ).set_name("url")
    # fmt: on

    # pre-PEP8 compatibility names
    convertToInteger = convert_to_integer
    convertToFloat = convert_to_float
    convertToDate = convert_to_date
    convertToDatetime = convert_to_datetime
    stripHTMLTags = strip_html_tags
    upcaseTokens = upcase_tokens
    downcaseTokens = downcase_tokens


_builtin_exprs = [
    v for v in vars(pyparsing_common).values() if isinstance(v, ParserElement)
]
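
A closing usage sketch of the pyparsing_common namespace defined above, adapted from its docstrings (illustrative only, not part of the vendored sources; it assumes the vendored copy is importable as pip._vendor.pyparsing):

    from pip._vendor.pyparsing import pyparsing_common as ppc

    print(ppc.number.parse_string("6.02e23"))    # -> [6.02e+23], returned as float
    print(ppc.hex_integer.parse_string("FF"))    # -> [255], returned as int

    # convert_to_date builds a parse action around datetime.strptime
    date_expr = ppc.iso8601_date.copy().set_parse_action(ppc.convert_to_date())
    print(date_expr.parse_string("1999-12-31"))  # -> [datetime.date(1999, 12, 31)]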