diff --git a/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/LICENSE.md b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..19b6b45242c16a1025465309eec2ca5009319de3 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..b28f6ecdd5b297640bc7edac00381de90447b34b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/METADATA @@ -0,0 +1,243 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.7 +Summary: Internationalized Domain Names in Applications (IDNA) +Author-email: Kim Davies +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst +Project-URL: Issue tracker, https://github.com/kjd/idna/issues +Project-URL: Source, https://github.com/kjd/idna + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalized Domain Names in +Applications (IDNA) protocol as specified in `RFC 5891 +`_. This is the latest version of +the protocol and is sometimes referred to as “IDNA 2008”. + +This library also provides support for Unicode Technical +Standard 46, `Unicode IDNA Compatibility Processing +`_. + +This acts as a suitable replacement for the “encodings.idna” +module that comes with the Python standard library, but which +only supports the older superseded IDNA specification (`RFC 3490 +`_). + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +This package is available for installation from PyPI: + +.. code-block:: bash + + $ python3 -m pip install idna + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a +domain name argument and perform a conversion to A-labels or U-labels +respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. 
code-block:: pycon
+
+    >>> import idna.codec
+    >>> print('домен.испытание'.encode('idna2008'))
+    b'xn--d1acufc.xn--80akhbyknj4f'
+    >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008'))
+    домен.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel('测试')
+    b'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the
+IDNA specification does not normalize input from different potential
+ways a user may input a domain name. This functionality, known as
+a “mapping”, is considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library provides one such mapping that was developed by the
+Unicode Consortium. Known as `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_, it provides for both a regular
+mapping for typical applications, as well as a transitional mapping to
+help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+    >>> idna.encode('Königsgäßchen', uts46=True)
+    b'xn--knigsgchen-b4a3dun'
+    >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+    königsgäßchen
+
+Transitional processing provides conversions to help transition from
+the older 2003 standard to the current standard. For example, in the
+original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was
+converted into two *LATIN SMALL LETTER S* (ss), whereas in the current
+IDNA specification this conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+    b'xn--knigsgsschen-lcb0w'
+
+Implementers should use transitional processing with caution, only in
+rare cases where conversion from legacy labels to current labels must be
+performed (i.e. IDNA implementations that pre-date 2008). For typical
+applications that just need to convert labels, transitional processing
+is unlikely to be beneficial and could produce unexpected incompatible
+results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification
+should raise an exception derived from the ``idna.IDNAError`` base
+class.
+
+More specific exceptions that may be generated are ``idna.IDNABidiError``,
+raised when the error reflects an illegal combination of left-to-right
+and right-to-left characters in a label; ``idna.InvalidCodepoint``, when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext``, when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance.
These tables are derived from computing against +eligibility criteria in the respective standards. These tables are +computed using the command-line script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility. There are +three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and + ``uts46data.py``, the pre-calculated lookup tables used for IDNA and + UTS 46 conversions. Implementers who wish to track this library against + a different Unicode version may use this tool to manually generate a + different version of the ``idnadata.py`` and ``uts46data.py`` files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix + B.1 of RFC 5892 and the pre-computed tables published by `IANA + `_. + +* ``idna-data U+0061``. Prints debugging output on the various + properties associated with an individual Unicode codepoint (in this + case, U+0061), that are used to assess the IDNA and UTS 46 status of a + codepoint. This is helpful in debugging or analysis. + +The tool accepts a number of arguments, described using ``idna-data +-h``. Most notably, the ``--version`` argument allows the specification +of the version of Unicode to be used in computing the table data. For +example, ``idna-data --version 9.0.0 make-libdata`` will generate +library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.5 and higher. + As this library serves as a low-level toolkit for a variety of + applications, many of which strive for broad compatibility with older + Python versions, there is no rush to remove older interpreter support. + Removing support for older versions should be well justified in that the + maintenance burden has become too high. + +* **Python 2**. Python 2 is supported by version 2.x of this library. + While active development of the version 2.x series has ended, notable + issues being corrected may be backported to 2.x. Use "idna<3" in your + requirements file if you need this library for a Python 2 application. + +* **Testing**. The library has a test suite based on each rule of the + IDNA specification, as well as tests that are provided as part of the + Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing + `_. + +* **Emoji**. It is an occasional request to support emoji domains in + this library. Encoding of symbols like emoji is expressly prohibited by + the technical standard IDNA 2008 and emoji domains are broadly phased + out across the domain industry due to associated security risks. For + now, applications that need to support these non-compliant labels + may wish to consider trying the encode/decode operation in this library + first, and then falling back to using `encodings.idna`. See `the Github + project `_ for more discussion. 
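+For illustration, that fallback pattern might be sketched as below (the
+helper name ``encode_permissive`` is illustrative and not part of this
+library's API):
+
+.. code-block:: python
+
+    import idna
+
+    def encode_permissive(domain: str) -> bytes:
+        """Prefer strict IDNA 2008; fall back to the standard library's
+        older IDNA 2003 behaviour for non-compliant labels."""
+        try:
+            return idna.encode(domain, uts46=True)
+        except idna.IDNAError:
+            # ``encodings.idna`` implements the superseded RFC 3490 rules
+            return domain.encode("idna")
+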
+ diff --git a/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..9e27c72fb533a64d54029a6014594d261e75836b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.7.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +idna-3.7.dist-info/METADATA,sha256=OixCk-dKLZkPy-MfviOmiPvwJ1O2K_8rqCrFjC_uxy4,9888 +idna-3.7.dist-info/RECORD,, +idna-3.7.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-310.pyc,, +idna/__pycache__/codec.cpython-310.pyc,, +idna/__pycache__/compat.cpython-310.pyc,, +idna/__pycache__/core.cpython-310.pyc,, +idna/__pycache__/idnadata.cpython-310.pyc,, +idna/__pycache__/intranges.cpython-310.pyc,, +idna/__pycache__/package_data.cpython-310.pyc,, +idna/__pycache__/uts46data.cpython-310.pyc,, +idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=lyhpoe2vulEaB_65xhXmoKgO-xUqFDvcwxu5hpNNO4E,12663 +idna/idnadata.py,sha256=dqRwytzkjIHMBa2R1lYvHDwACenZPt8eGVu1Y8UBE-E,78320 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=Tkt0KnIeyIlnHddOaz9WSkkislNgokJAuE-p5GorMqo,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503 diff --git a/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3b5e64b5e6c4a210201d1676a891fd57b15cda99 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/idna-3.7.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2b27b4b036e5f2ed93a1ea88cd7d7144eb5615d4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__init__.py @@ -0,0 +1,35 @@ +from sympy.crypto.crypto import (cycle_list, + encipher_shift, encipher_affine, encipher_substitution, + check_and_join, encipher_vigenere, decipher_vigenere, bifid5_square, + bifid6_square, encipher_hill, decipher_hill, + encipher_bifid5, encipher_bifid6, decipher_bifid5, + decipher_bifid6, encipher_kid_rsa, decipher_kid_rsa, + kid_rsa_private_key, kid_rsa_public_key, decipher_rsa, rsa_private_key, + rsa_public_key, encipher_rsa, lfsr_connection_polynomial, + lfsr_autocorrelation, lfsr_sequence, encode_morse, decode_morse, + elgamal_private_key, elgamal_public_key, decipher_elgamal, + encipher_elgamal, dh_private_key, dh_public_key, dh_shared_key, + padded_key, encipher_bifid, decipher_bifid, bifid_square, bifid5, + bifid6, bifid10, decipher_gm, encipher_gm, gm_public_key, + gm_private_key, bg_private_key, bg_public_key, encipher_bg, decipher_bg, + encipher_rot13, decipher_rot13, encipher_atbash, decipher_atbash, + encipher_railfence, decipher_railfence) + +__all__ = [ + 'cycle_list', 
'encipher_shift', 'encipher_affine', + 'encipher_substitution', 'check_and_join', 'encipher_vigenere', + 'decipher_vigenere', 'bifid5_square', 'bifid6_square', 'encipher_hill', + 'decipher_hill', 'encipher_bifid5', 'encipher_bifid6', 'decipher_bifid5', + 'decipher_bifid6', 'encipher_kid_rsa', 'decipher_kid_rsa', + 'kid_rsa_private_key', 'kid_rsa_public_key', 'decipher_rsa', + 'rsa_private_key', 'rsa_public_key', 'encipher_rsa', + 'lfsr_connection_polynomial', 'lfsr_autocorrelation', 'lfsr_sequence', + 'encode_morse', 'decode_morse', 'elgamal_private_key', + 'elgamal_public_key', 'decipher_elgamal', 'encipher_elgamal', + 'dh_private_key', 'dh_public_key', 'dh_shared_key', 'padded_key', + 'encipher_bifid', 'decipher_bifid', 'bifid_square', 'bifid5', 'bifid6', + 'bifid10', 'decipher_gm', 'encipher_gm', 'gm_public_key', + 'gm_private_key', 'bg_private_key', 'bg_public_key', 'encipher_bg', + 'decipher_bg', 'encipher_rot13', 'decipher_rot13', 'encipher_atbash', + 'decipher_atbash', 'encipher_railfence', 'decipher_railfence', +] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1d0942c7535729d482e607f24b0767ae2113ad3 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/crypto.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/crypto.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..efafd868a7f6110cd32b6bf0d34b3aa81a4399e6 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/__pycache__/crypto.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/crypto.py b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/crypto.py new file mode 100644 index 0000000000000000000000000000000000000000..c2d1f320b868cd2c3661afbb607a4924cf90aa62 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/crypto.py @@ -0,0 +1,3360 @@ +""" +This file contains some classical ciphers and routines +implementing a linear-feedback shift register (LFSR) +and the Diffie-Hellman key exchange. + +.. warning:: + + This module is intended for educational purposes only. Do not use the + functions in this module for real cryptographic applications. If you wish + to encrypt real data, we recommend using something like the `cryptography + `_ module. 
+ +""" + +from string import whitespace, ascii_uppercase as uppercase, printable +from functools import reduce +import warnings + +from itertools import cycle + +from sympy.core import Symbol +from sympy.core.numbers import igcdex, mod_inverse, igcd, Rational +from sympy.core.random import _randrange, _randint +from sympy.matrices import Matrix +from sympy.ntheory import isprime, primitive_root, factorint +from sympy.ntheory import totient as _euler +from sympy.ntheory import reduced_totient as _carmichael +from sympy.ntheory.generate import nextprime +from sympy.ntheory.modular import crt +from sympy.polys.domains import FF +from sympy.polys.polytools import gcd, Poly +from sympy.utilities.misc import as_int, filldedent, translate +from sympy.utilities.iterables import uniq, multiset + + +class NonInvertibleCipherWarning(RuntimeWarning): + """A warning raised if the cipher is not invertible.""" + def __init__(self, msg): + self.fullMessage = msg + + def __str__(self): + return '\n\t' + self.fullMessage + + def warn(self, stacklevel=3): + warnings.warn(self, stacklevel=stacklevel) + + +def AZ(s=None): + """Return the letters of ``s`` in uppercase. In case more than + one string is passed, each of them will be processed and a list + of upper case strings will be returned. + + Examples + ======== + + >>> from sympy.crypto.crypto import AZ + >>> AZ('Hello, world!') + 'HELLOWORLD' + >>> AZ('Hello, world!'.split()) + ['HELLO', 'WORLD'] + + See Also + ======== + + check_and_join + + """ + if not s: + return uppercase + t = isinstance(s, str) + if t: + s = [s] + rv = [check_and_join(i.upper().split(), uppercase, filter=True) + for i in s] + if t: + return rv[0] + return rv + +bifid5 = AZ().replace('J', '') +bifid6 = AZ() + '0123456789' +bifid10 = printable + + +def padded_key(key, symbols): + """Return a string of the distinct characters of ``symbols`` with + those of ``key`` appearing first. A ValueError is raised if + a) there are duplicate characters in ``symbols`` or + b) there are characters in ``key`` that are not in ``symbols``. + + Examples + ======== + + >>> from sympy.crypto.crypto import padded_key + >>> padded_key('PUPPY', 'OPQRSTUVWXY') + 'PUYOQRSTVWX' + >>> padded_key('RSA', 'ARTIST') + Traceback (most recent call last): + ... + ValueError: duplicate characters in symbols: T + + """ + syms = list(uniq(symbols)) + if len(syms) != len(symbols): + extra = ''.join(sorted({ + i for i in symbols if symbols.count(i) > 1})) + raise ValueError('duplicate characters in symbols: %s' % extra) + extra = set(key) - set(syms) + if extra: + raise ValueError( + 'characters in key but not symbols: %s' % ''.join( + sorted(extra))) + key0 = ''.join(list(uniq(key))) + # remove from syms characters in key0 + return key0 + translate(''.join(syms), None, key0) + + +def check_and_join(phrase, symbols=None, filter=None): + """ + Joins characters of ``phrase`` and if ``symbols`` is given, raises + an error if any character in ``phrase`` is not in ``symbols``. + + Parameters + ========== + + phrase + String or list of strings to be returned as a string. + + symbols + Iterable of characters allowed in ``phrase``. + + If ``symbols`` is ``None``, no checking is performed. 
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import check_and_join
+    >>> check_and_join('a phrase')
+    'a phrase'
+    >>> check_and_join('a phrase'.upper().split())
+    'APHRASE'
+    >>> check_and_join('a phrase!'.upper().split(), 'ARE', filter=True)
+    'ARAE'
+    >>> check_and_join('a phrase!'.upper().split(), 'ARE')
+    Traceback (most recent call last):
+    ...
+    ValueError: characters in phrase but not symbols: "!HPS"
+
+    """
+    rv = ''.join(''.join(phrase))
+    if symbols is not None:
+        symbols = check_and_join(symbols)
+        missing = ''.join(sorted(set(rv) - set(symbols)))
+        if missing:
+            if not filter:
+                raise ValueError(
+                    'characters in phrase but not symbols: "%s"' % missing)
+            rv = translate(rv, None, missing)
+    return rv
+
+
+def _prep(msg, key, alp, default=None):
+    if not alp:
+        if not default:
+            alp = AZ()
+            msg = AZ(msg)
+            key = AZ(key)
+        else:
+            alp = default
+    else:
+        alp = ''.join(alp)
+    key = check_and_join(key, alp, filter=True)
+    msg = check_and_join(msg, alp, filter=True)
+    return msg, key, alp
+
+
+def cycle_list(k, n):
+    """
+    Returns the elements of the list ``range(n)`` shifted to the
+    left by ``k`` (so the list starts with ``k`` (mod ``n``)).
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import cycle_list
+    >>> cycle_list(3, 10)
+    [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]
+
+    """
+    k = k % n
+    return list(range(k, n)) + list(range(k))
+
+
+######## shift cipher examples ############
+
+
+def encipher_shift(msg, key, symbols=None):
+    """
+    Performs shift cipher encryption on plaintext ``msg``, and returns
+    the ciphertext.
+
+    Parameters
+    ==========
+
+    key : int
+        The secret key.
+
+    msg : str
+        Plaintext of upper-case letters.
+
+    Returns
+    =======
+
+    str
+        Ciphertext of upper-case letters.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_shift, decipher_shift
+    >>> msg = "GONAVYBEATARMY"
+    >>> ct = encipher_shift(msg, 1); ct
+    'HPOBWZCFBUBSNZ'
+
+    To decipher the shifted text, change the sign of the key:
+
+    >>> encipher_shift(ct, -1)
+    'GONAVYBEATARMY'
+
+    There is also a convenience function that does this with the
+    original key:
+
+    >>> decipher_shift(ct, 1)
+    'GONAVYBEATARMY'
+
+    Notes
+    =====
+
+    ALGORITHM:
+
+        STEPS:
+            0. Number the letters of the alphabet from 0, ..., N - 1.
+            1. Compute from the string ``msg`` a list ``L1`` of
+               corresponding integers.
+            2. Compute from the list ``L1`` a new list ``L2``, given by
+               adding ``(k mod 26)`` to each element in ``L1``.
+            3. Compute from the list ``L2`` a string ``ct`` of
+               corresponding letters.
+
+    The shift cipher is also called the Caesar cipher, after
+    Julius Caesar, who, according to Suetonius, used it with a
+    shift of three to protect messages of military significance.
+    Caesar's nephew Augustus reportedly used a similar cipher, but
+    with a right shift of 1.
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Caesar_cipher
+    .. [2] https://mathworld.wolfram.com/CaesarsMethod.html
+
+    See Also
+    ========
+
+    decipher_shift
+
+    """
+    msg, _, A = _prep(msg, '', symbols)
+    shift = len(A) - key % len(A)
+    key = A[shift:] + A[:shift]
+    return translate(msg, key, A)
+
+
+def decipher_shift(msg, key, symbols=None):
+    """
+    Return the text by shifting the characters of ``msg`` to the
+    left by the amount given by ``key``.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_shift, decipher_shift
+    >>> msg = "GONAVYBEATARMY"
+    >>> ct = encipher_shift(msg, 1); ct
+    'HPOBWZCFBUBSNZ'
+
+    To decipher the shifted text, change the sign of the key:
+
+    >>> encipher_shift(ct, -1)
+    'GONAVYBEATARMY'
+
+    Or use this function with the original key:
+
+    >>> decipher_shift(ct, 1)
+    'GONAVYBEATARMY'
+
+    """
+    return encipher_shift(msg, -key, symbols)
+
+
+def encipher_rot13(msg, symbols=None):
+    """
+    Performs the ROT13 encryption on a given plaintext ``msg``.
+
+    Explanation
+    ===========
+
+    ROT13 is a substitution cipher which substitutes each letter
+    in the plaintext message for the letter furthest away from it
+    in the English alphabet.
+
+    Equivalently, it is just a Caesar (shift) cipher with a shift
+    key of 13 (the midway point of the alphabet).
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/ROT13
+
+    See Also
+    ========
+
+    decipher_rot13
+    encipher_shift
+
+    """
+    return encipher_shift(msg, 13, symbols)
+
+
+def decipher_rot13(msg, symbols=None):
+    """
+    Performs the ROT13 decryption on a given ciphertext ``msg``.
+
+    Explanation
+    ===========
+
+    ``decipher_rot13`` is equivalent to ``encipher_rot13``, as both
+    ``decipher_shift`` with a key of 13 and ``encipher_shift`` with a
+    key of 13 return the same results. Nonetheless, ``decipher_rot13``
+    has been explicitly defined here for consistency.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_rot13, decipher_rot13
+    >>> msg = 'GONAVYBEATARMY'
+    >>> ciphertext = encipher_rot13(msg); ciphertext
+    'TBANILORNGNEZL'
+    >>> decipher_rot13(ciphertext)
+    'GONAVYBEATARMY'
+    >>> encipher_rot13(msg) == decipher_rot13(msg)
+    True
+    >>> msg == decipher_rot13(ciphertext)
+    True
+
+    """
+    return decipher_shift(msg, 13, symbols)
+
+
+######## affine cipher examples ############
+
+
+def encipher_affine(msg, key, symbols=None, _inverse=False):
+    r"""
+    Performs the affine cipher encryption on plaintext ``msg``, and
+    returns the ciphertext.
+
+    Explanation
+    ===========
+
+    Encryption is based on the map `x \rightarrow ax+b` (mod `N`)
+    where ``N`` is the number of characters in the alphabet.
+    Decryption is based on the map `x \rightarrow cx+d` (mod `N`),
+    where `c = a^{-1}` (mod `N`) and `d = -a^{-1}b` (mod `N`).
+    In particular, for the map to be invertible, we need
+    `\mathrm{gcd}(a, N) = 1` and an error will be raised if this is
+    not true.
+
+    Parameters
+    ==========
+
+    msg : str
+        Characters that appear in ``symbols``.
+
+    a, b : int, int
+        A pair of integers, with ``gcd(a, N) = 1`` (the secret key).
+
+    symbols
+        String of characters (default = uppercase letters).
+
+        When no symbols are given, ``msg`` is converted to upper case
+        letters and all other characters are ignored.
+
+    Returns
+    =======
+
+    ct
+        String of characters (the ciphertext message)
+
+    Notes
+    =====
+
+    ALGORITHM:
+
+        STEPS:
+            0. Number the letters of the alphabet from 0, ..., N - 1.
+            1. Compute from the string ``msg`` a list ``L1`` of
+               corresponding integers.
+            2. Compute from the list ``L1`` a new list ``L2``, given by
+               replacing ``x`` by ``a*x + b (mod N)``, for each element
+               ``x`` in ``L1``.
+            3. Compute from the list ``L2`` a string ``ct`` of
+               corresponding letters.
+
+    This is a straightforward generalization of the shift cipher with
+    the added complexity of requiring 2 characters to be deciphered in
+    order to recover the key.
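+
+    As a quick numeric check of these two maps (a sketch, not part of
+    the function's interface; the default 26-letter alphabet is
+    assumed, so ``N = 26``):
+
+    >>> from sympy.core.numbers import mod_inverse
+    >>> a, b, N = 3, 1, 26
+    >>> enc = lambda x: (a*x + b) % N
+    >>> c = mod_inverse(a, N); d = -c*b % N
+    >>> all((c*enc(x) + d) % N == x for x in range(N))
+    True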
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Affine_cipher
+
+    See Also
+    ========
+
+    decipher_affine
+
+    """
+    msg, _, A = _prep(msg, '', symbols)
+    N = len(A)
+    a, b = key
+    assert gcd(a, N) == 1
+    if _inverse:
+        c = mod_inverse(a, N)
+        d = -b*c
+        a, b = c, d
+    B = ''.join([A[(a*i + b) % N] for i in range(N)])
+    return translate(msg, A, B)
+
+
+def decipher_affine(msg, key, symbols=None):
+    r"""
+    Return the deciphered text that was made from the mapping,
+    `x \rightarrow ax+b` (mod `N`), where ``N`` is the
+    number of characters in the alphabet. Deciphering is done by
+    reciphering with a new key: `x \rightarrow cx+d` (mod `N`),
+    where `c = a^{-1}` (mod `N`) and `d = -a^{-1}b` (mod `N`).
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_affine, decipher_affine
+    >>> msg = "GO NAVY BEAT ARMY"
+    >>> key = (3, 1)
+    >>> encipher_affine(msg, key)
+    'TROBMVENBGBALV'
+    >>> decipher_affine(_, key)
+    'GONAVYBEATARMY'
+
+    See Also
+    ========
+
+    encipher_affine
+
+    """
+    return encipher_affine(msg, key, symbols, _inverse=True)
+
+
+def encipher_atbash(msg, symbols=None):
+    r"""
+    Enciphers a given ``msg`` into its Atbash ciphertext and returns it.
+
+    Explanation
+    ===========
+
+    Atbash is a substitution cipher originally used to encrypt the Hebrew
+    alphabet. Atbash works on the principle of mapping each letter to its
+    counterpart at the opposite end of the alphabet (i.e. a maps to z,
+    b to y, etc.).
+
+    Atbash is functionally equivalent to the affine cipher with
+    ``a = 25`` and ``b = 25``.
+
+    See Also
+    ========
+
+    decipher_atbash
+
+    """
+    return encipher_affine(msg, (25, 25), symbols)
+
+
+def decipher_atbash(msg, symbols=None):
+    r"""
+    Deciphers a given ``msg`` using the Atbash cipher and returns it.
+
+    Explanation
+    ===========
+
+    ``decipher_atbash`` is functionally equivalent to ``encipher_atbash``.
+    However, it has still been added as a separate function to maintain
+    consistency.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_atbash, decipher_atbash
+    >>> msg = 'GONAVYBEATARMY'
+    >>> encipher_atbash(msg)
+    'TLMZEBYVZGZINB'
+    >>> decipher_atbash(msg)
+    'TLMZEBYVZGZINB'
+    >>> encipher_atbash(msg) == decipher_atbash(msg)
+    True
+    >>> msg == encipher_atbash(encipher_atbash(msg))
+    True
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Atbash
+
+    See Also
+    ========
+
+    encipher_atbash
+
+    """
+    return decipher_affine(msg, (25, 25), symbols)
+
+
+#################### substitution cipher ###########################
+
+
+def encipher_substitution(msg, old, new=None):
+    r"""
+    Returns the ciphertext obtained by replacing each character that
+    appears in ``old`` with the corresponding character in ``new``.
+    If ``old`` is a mapping, then ``new`` is ignored and the replacements
+    defined by ``old`` are used.
+
+    Explanation
+    ===========
+
+    This is more general than the affine cipher in that the key can
+    only be recovered by determining the mapping for each symbol.
+    In practice, though, once a few symbols are recognized the mappings
+    for other characters can be quickly guessed.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_substitution, AZ
+    >>> old = 'OEYAG'
+    >>> new = '034^6'
+    >>> msg = AZ("go navy!
beat army!") + >>> ct = encipher_substitution(msg, old, new); ct + '60N^V4B3^T^RM4' + + To decrypt a substitution, reverse the last two arguments: + + >>> encipher_substitution(ct, new, old) + 'GONAVYBEATARMY' + + In the special case where ``old`` and ``new`` are a permutation of + order 2 (representing a transposition of characters) their order + is immaterial: + + >>> old = 'NAVY' + >>> new = 'ANYV' + >>> encipher = lambda x: encipher_substitution(x, old, new) + >>> encipher('NAVY') + 'ANYV' + >>> encipher(_) + 'NAVY' + + The substitution cipher, in general, is a method + whereby "units" (not necessarily single characters) of plaintext + are replaced with ciphertext according to a regular system. + + >>> ords = dict(zip('abc', ['\\%i' % ord(i) for i in 'abc'])) + >>> print(encipher_substitution('abc', ords)) + \97\98\99 + + References + ========== + + .. [1] https://en.wikipedia.org/wiki/Substitution_cipher + + """ + return translate(msg, old, new) + + +###################################################################### +#################### Vigenere cipher examples ######################## +###################################################################### + +def encipher_vigenere(msg, key, symbols=None): + """ + Performs the Vigenere cipher encryption on plaintext ``msg``, and + returns the ciphertext. + + Examples + ======== + + >>> from sympy.crypto.crypto import encipher_vigenere, AZ + >>> key = "encrypt" + >>> msg = "meet me on monday" + >>> encipher_vigenere(msg, key) + 'QRGKKTHRZQEBPR' + + Section 1 of the Kryptos sculpture at the CIA headquarters + uses this cipher and also changes the order of the + alphabet [2]_. Here is the first line of that section of + the sculpture: + + >>> from sympy.crypto.crypto import decipher_vigenere, padded_key + >>> alp = padded_key('KRYPTOS', AZ()) + >>> key = 'PALIMPSEST' + >>> msg = 'EMUFPHZLRFAXYUSDJKZLDKRNSHGNFIVJ' + >>> decipher_vigenere(msg, key, alp) + 'BETWEENSUBTLESHADINGANDTHEABSENC' + + Explanation + =========== + + The Vigenere cipher is named after Blaise de Vigenere, a sixteenth + century diplomat and cryptographer, by a historical accident. + Vigenere actually invented a different and more complicated cipher. + The so-called *Vigenere cipher* was actually invented + by Giovan Batista Belaso in 1553. + + This cipher was used in the 1800's, for example, during the American + Civil War. The Confederacy used a brass cipher disk to implement the + Vigenere cipher (now on display in the NSA Museum in Fort + Meade) [1]_. + + The Vigenere cipher is a generalization of the shift cipher. + Whereas the shift cipher shifts each letter by the same amount + (that amount being the key of the shift cipher) the Vigenere + cipher shifts a letter by an amount determined by the key (which is + a word or phrase known only to the sender and receiver). + + For example, if the key was a single letter, such as "C", then the + so-called Vigenere cipher is actually a shift cipher with a + shift of `2` (since "C" is the 2nd letter of the alphabet, if + you start counting at `0`). If the key was a word with two + letters, such as "CA", then the so-called Vigenere cipher will + shift letters in even positions by `2` and letters in odd positions + are left alone (shifted by `0`, since "A" is the 0th letter, if + you start counting at `0`). 
+
+
+    ALGORITHM:
+
+        INPUT:
+
+            ``msg``: string of characters that appear in ``symbols``
+            (the plaintext)
+
+            ``key``: a string of characters that appear in ``symbols``
+            (the secret key)
+
+            ``symbols``: a string of letters defining the alphabet
+
+
+        OUTPUT:
+
+            ``ct``: string of characters (the ciphertext message)
+
+        STEPS:
+            0. Number the letters of the alphabet from 0, ..., N - 1.
+            1. Compute from the string ``key`` a list ``L1`` of
+               corresponding integers. Let ``n1 = len(L1)``.
+            2. Compute from the string ``msg`` a list ``L2`` of
+               corresponding integers. Let ``n2 = len(L2)``.
+            3. Break ``L2`` up sequentially into sublists of size
+               ``n1``; the last sublist may be smaller than ``n1``.
+            4. For each of these sublists ``L`` of ``L2``, compute a
+               new list ``C`` given by ``C[i] = L[i] + L1[i] (mod N)``
+               for each ``i``.
+            5. Assemble these lists ``C`` by concatenation into a new
+               list of length ``n2``.
+            6. Compute from the new list a string ``ct`` of
+               corresponding letters.
+
+    Once it is known that the key is, say, `n` characters long,
+    frequency analysis can be applied to every `n`-th letter of
+    the ciphertext to determine the plaintext. This method is
+    called *Kasiski examination* (although it was first discovered
+    by Babbage). If the key is as long as the message and is
+    composed of randomly selected characters -- a one-time pad -- the
+    message is theoretically unbreakable.
+
+    The cipher Vigenere actually discovered is an "auto-key" cipher
+    described as follows.
+
+    ALGORITHM:
+
+        INPUT:
+
+            ``key``: a string of letters (the secret key)
+
+            ``msg``: string of letters (the plaintext message)
+
+        OUTPUT:
+
+            ``ct``: string of upper-case letters (the ciphertext message)
+
+        STEPS:
+            0. Number the letters of the alphabet from 0, ..., N - 1.
+            1. Compute from the string ``msg`` a list ``L2`` of
+               corresponding integers. Let ``n2 = len(L2)``.
+            2. Let ``n1`` be the length of the key. Append to the
+               string ``key`` the first ``n2 - n1`` characters of
+               the plaintext message. Compute from this string (also of
+               length ``n2``) a list ``L1`` of integers corresponding
+               to the letter numbers in the first step.
+            3. Compute a new list ``C`` given by
+               ``C[i] = L1[i] + L2[i] (mod N)``.
+            4. Compute from the new list a string ``ct`` of letters
+               corresponding to the new integers.
+
+    To decipher the auto-key ciphertext, the key is used to decipher
+    the first ``n1`` characters and then those characters become the
+    key to decipher the next ``n1`` characters, etc...:
+
+    >>> m = AZ('go navy, beat army! yes you can'); m
+    'GONAVYBEATARMYYESYOUCAN'
+    >>> key = AZ('gold bug'); n1 = len(key); n2 = len(m)
+    >>> auto_key = key + m[:n2 - n1]; auto_key
+    'GOLDBUGGONAVYBEATARMYYE'
+    >>> ct = encipher_vigenere(m, auto_key); ct
+    'MCYDWSHKOGAMKZCELYFGAYR'
+    >>> n1 = len(key)
+    >>> pt = []
+    >>> while ct:
+    ...     part, ct = ct[:n1], ct[n1:]
+    ...     pt.append(decipher_vigenere(part, key))
+    ...     key = pt[-1]
+    ...
+    >>> ''.join(pt) == m
+    True
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Vigenere_cipher
+    ..
[2] https://web.archive.org/web/20071116100808/https://filebox.vt.edu/users/batman/kryptos.html
+       (short URL: https://goo.gl/ijr22d)
+
+    """
+    msg, key, A = _prep(msg, key, symbols)
+    map = {c: i for i, c in enumerate(A)}
+    key = [map[c] for c in key]
+    N = len(map)
+    k = len(key)
+    rv = []
+    for i, m in enumerate(msg):
+        rv.append(A[(map[m] + key[i % k]) % N])
+    rv = ''.join(rv)
+    return rv
+
+
+def decipher_vigenere(msg, key, symbols=None):
+    """
+    Decode using the Vigenere cipher.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import decipher_vigenere
+    >>> key = "encrypt"
+    >>> ct = "QRGK kt HRZQE BPR"
+    >>> decipher_vigenere(ct, key)
+    'MEETMEONMONDAY'
+
+    """
+    msg, key, A = _prep(msg, key, symbols)
+    map = {c: i for i, c in enumerate(A)}
+    N = len(A)   # normally, 26
+    K = [map[c] for c in key]
+    n = len(K)
+    C = [map[c] for c in msg]
+    rv = ''.join([A[(-K[i % n] + c) % N] for i, c in enumerate(C)])
+    return rv
+
+
+#################### Hill cipher ########################
+
+
+def encipher_hill(msg, key, symbols=None, pad="Q"):
+    r"""
+    Return the Hill cipher encryption of ``msg``.
+
+    Explanation
+    ===========
+
+    The Hill cipher [1]_, invented by Lester S. Hill in the 1920's [2]_,
+    was the first polygraphic cipher in which it was practical
+    (though barely) to operate on more than three symbols at once.
+    The following discussion assumes an elementary knowledge of
+    matrices.
+
+    First, each letter is encoded as a number starting with 0.
+    Suppose your message ``msg`` consists of `n` capital letters, with
+    no spaces. This may be regarded as an `n`-tuple `M` of elements of
+    `Z_{26}` (if the letters are those of the English alphabet). A key
+    in the Hill cipher is a `k \times k` matrix `K`, all of whose
+    entries are in `Z_{26}`, such that the matrix `K` is invertible
+    (i.e., the linear transformation
+    `K: Z_{N}^k \rightarrow Z_{N}^k` is one-to-one).
+
+
+    Parameters
+    ==========
+
+    msg
+        Plaintext message of `n` upper-case letters.
+
+    key
+        A `k \times k` invertible matrix `K`, all of whose entries are
+        in `Z_{26}` (or whatever number of symbols is being used).
+
+    pad
+        Character (default "Q") to use to make the length of the text
+        a multiple of ``k``.
+
+    Returns
+    =======
+
+    ct
+        Ciphertext of upper-case letters.
+
+    Notes
+    =====
+
+    ALGORITHM:
+
+        STEPS:
+            0. Number the letters of the alphabet from 0, ..., N - 1.
+            1. Compute from the string ``msg`` a list ``L`` of
+               corresponding integers. Let ``n = len(L)``.
+            2. Break the list ``L`` up into ``t = ceiling(n/k)``
+               sublists ``L_1``, ..., ``L_t`` of size ``k`` (with
+               the last list "padded" to ensure its size is
+               ``k``).
+            3. Compute the new lists ``C_1``, ..., ``C_t`` given by
+               ``C[i] = K*L_i`` (arithmetic is done mod N), for each
+               ``i``.
+            4. Concatenate these into a list ``C = C_1 + ... + C_t``.
+            5. Compute from ``C`` a string ``ct`` of corresponding
+               letters. This has length ``k*t``.
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Hill_cipher
+    .. [2] Lester S. Hill, Cryptography in an Algebraic Alphabet,
+       The American Mathematical Monthly Vol.36, June-July 1929,
+       pp.306-312.
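+
+    The core enciphering step is just a matrix-vector product reduced
+    mod 26. As an illustrative sketch with the key from the
+    ``decipher_hill`` examples, the first block 'ME' -> [12, 4] of
+    "meet me on monday" maps to [20, 4], i.e. 'UE', the start of
+    'UEQDUEODOCTCWQ':
+
+    >>> from sympy import Matrix
+    >>> K = Matrix([[1, 2], [3, 5]])
+    >>> (K*Matrix([12, 4])) % 26
+    Matrix([
+    [20],
+    [ 4]])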
+ + See Also + ======== + + decipher_hill + + """ + assert key.is_square + assert len(pad) == 1 + msg, pad, A = _prep(msg, pad, symbols) + map = {c: i for i, c in enumerate(A)} + P = [map[c] for c in msg] + N = len(A) + k = key.cols + n = len(P) + m, r = divmod(n, k) + if r: + P = P + [map[pad]]*(k - r) + m += 1 + rv = ''.join([A[c % N] for j in range(m) for c in + list(key*Matrix(k, 1, [P[i] + for i in range(k*j, k*(j + 1))]))]) + return rv + + +def decipher_hill(msg, key, symbols=None): + """ + Deciphering is the same as enciphering but using the inverse of the + key matrix. + + Examples + ======== + + >>> from sympy.crypto.crypto import encipher_hill, decipher_hill + >>> from sympy import Matrix + + >>> key = Matrix([[1, 2], [3, 5]]) + >>> encipher_hill("meet me on monday", key) + 'UEQDUEODOCTCWQ' + >>> decipher_hill(_, key) + 'MEETMEONMONDAY' + + When the length of the plaintext (stripped of invalid characters) + is not a multiple of the key dimension, extra characters will + appear at the end of the enciphered and deciphered text. In order to + decipher the text, those characters must be included in the text to + be deciphered. In the following, the key has a dimension of 4 but + the text is 2 short of being a multiple of 4 so two characters will + be added. + + >>> key = Matrix([[1, 1, 1, 2], [0, 1, 1, 0], + ... [2, 2, 3, 4], [1, 1, 0, 1]]) + >>> msg = "ST" + >>> encipher_hill(msg, key) + 'HJEB' + >>> decipher_hill(_, key) + 'STQQ' + >>> encipher_hill(msg, key, pad="Z") + 'ISPK' + >>> decipher_hill(_, key) + 'STZZ' + + If the last two characters of the ciphertext were ignored in + either case, the wrong plaintext would be recovered: + + >>> decipher_hill("HD", key) + 'ORMV' + >>> decipher_hill("IS", key) + 'UIKY' + + See Also + ======== + + encipher_hill + + """ + assert key.is_square + msg, _, A = _prep(msg, '', symbols) + map = {c: i for i, c in enumerate(A)} + C = [map[c] for c in msg] + N = len(A) + k = key.cols + n = len(C) + m, r = divmod(n, k) + if r: + C = C + [0]*(k - r) + m += 1 + key_inv = key.inv_mod(N) + rv = ''.join([A[p % N] for j in range(m) for p in + list(key_inv*Matrix( + k, 1, [C[i] for i in range(k*j, k*(j + 1))]))]) + return rv + + +#################### Bifid cipher ######################## + + +def encipher_bifid(msg, key, symbols=None): + r""" + Performs the Bifid cipher encryption on plaintext ``msg``, and + returns the ciphertext. + + This is the version of the Bifid cipher that uses an `n \times n` + Polybius square. + + Parameters + ========== + + msg + Plaintext string. + + key + Short string for key. + + Duplicate characters are ignored and then it is padded with the + characters in ``symbols`` that were not in the short key. + + symbols + `n \times n` characters defining the alphabet. + + (default is string.printable) + + Returns + ======= + + ciphertext + Ciphertext using Bifid5 cipher without spaces. + + See Also + ======== + + decipher_bifid, encipher_bifid5, encipher_bifid6 + + References + ========== + + .. [1] https://en.wikipedia.org/wiki/Bifid_cipher + + """ + msg, key, A = _prep(msg, key, symbols, bifid10) + long_key = ''.join(uniq(key)) or A + + n = len(A)**.5 + if n != int(n): + raise ValueError( + 'Length of alphabet (%s) is not a square number.' 
% len(A)) + N = int(n) + if len(long_key) < N**2: + long_key = list(long_key) + [x for x in A if x not in long_key] + + # the fractionalization + row_col = {ch: divmod(i, N) for i, ch in enumerate(long_key)} + r, c = zip(*[row_col[x] for x in msg]) + rc = r + c + ch = {i: ch for ch, i in row_col.items()} + rv = ''.join(ch[i] for i in zip(rc[::2], rc[1::2])) + return rv + + +def decipher_bifid(msg, key, symbols=None): + r""" + Performs the Bifid cipher decryption on ciphertext ``msg``, and + returns the plaintext. + + This is the version of the Bifid cipher that uses the `n \times n` + Polybius square. + + Parameters + ========== + + msg + Ciphertext string. + + key + Short string for key. + + Duplicate characters are ignored and then it is padded with the + characters in symbols that were not in the short key. + + symbols + `n \times n` characters defining the alphabet. + + (default=string.printable, a `10 \times 10` matrix) + + Returns + ======= + + deciphered + Deciphered text. + + Examples + ======== + + >>> from sympy.crypto.crypto import ( + ... encipher_bifid, decipher_bifid, AZ) + + Do an encryption using the bifid5 alphabet: + + >>> alp = AZ().replace('J', '') + >>> ct = AZ("meet me on monday!") + >>> key = AZ("gold bug") + >>> encipher_bifid(ct, key, alp) + 'IEILHHFSTSFQYE' + + When entering the text or ciphertext, spaces are ignored so it + can be formatted as desired. Re-entering the ciphertext from the + preceding, putting 4 characters per line and padding with an extra + J, does not cause problems for the deciphering: + + >>> decipher_bifid(''' + ... IEILH + ... HFSTS + ... FQYEJ''', key, alp) + 'MEETMEONMONDAY' + + When no alphabet is given, all 100 printable characters will be + used: + + >>> key = '' + >>> encipher_bifid('hello world!', key) + 'bmtwmg-bIo*w' + >>> decipher_bifid(_, key) + 'hello world!' + + If the key is changed, a different encryption is obtained: + + >>> key = 'gold bug' + >>> encipher_bifid('hello world!', 'gold_bug') + 'hg2sfuei7t}w' + + And if the key used to decrypt the message is not exact, the + original text will not be perfectly obtained: + + >>> decipher_bifid(_, 'gold pug') + 'heldo~wor6d!' + + """ + msg, _, A = _prep(msg, '', symbols, bifid10) + long_key = ''.join(uniq(key)) or A + + n = len(A)**.5 + if n != int(n): + raise ValueError( + 'Length of alphabet (%s) is not a square number.' % len(A)) + N = int(n) + if len(long_key) < N**2: + long_key = list(long_key) + [x for x in A if x not in long_key] + + # the reverse fractionalization + row_col = { + ch: divmod(i, N) for i, ch in enumerate(long_key)} + rc = [i for c in msg for i in row_col[c]] + n = len(msg) + rc = zip(*(rc[:n], rc[n:])) + ch = {i: ch for ch, i in row_col.items()} + rv = ''.join(ch[i] for i in rc) + return rv + + +def bifid_square(key): + """Return characters of ``key`` arranged in a square. + + Examples + ======== + + >>> from sympy.crypto.crypto import ( + ... bifid_square, AZ, padded_key, bifid5) + >>> bifid_square(AZ().replace('J', '')) + Matrix([ + [A, B, C, D, E], + [F, G, H, I, K], + [L, M, N, O, P], + [Q, R, S, T, U], + [V, W, X, Y, Z]]) + + >>> bifid_square(padded_key(AZ('gold bug!'), bifid5)) + Matrix([ + [G, O, L, D, B], + [U, A, C, E, F], + [H, I, K, M, N], + [P, Q, R, S, T], + [V, W, X, Y, Z]]) + + See Also + ======== + + padded_key + + """ + A = ''.join(uniq(''.join(key))) + n = len(A)**.5 + if n != int(n): + raise ValueError( + 'Length of alphabet (%s) is not a square number.' 
% len(A)) + n = int(n) + f = lambda i, j: Symbol(A[n*i + j]) + rv = Matrix(n, n, f) + return rv + + +def encipher_bifid5(msg, key): + r""" + Performs the Bifid cipher encryption on plaintext ``msg``, and + returns the ciphertext. + + Explanation + =========== + + This is the version of the Bifid cipher that uses the `5 \times 5` + Polybius square. The letter "J" is ignored so it must be replaced + with something else (traditionally an "I") before encryption. + + ALGORITHM: (5x5 case) + + STEPS: + 0. Create the `5 \times 5` Polybius square ``S`` associated + to ``key`` as follows: + + a) moving from left-to-right, top-to-bottom, + place the letters of the key into a `5 \times 5` + matrix, + b) if the key has less than 25 letters, add the + letters of the alphabet not in the key until the + `5 \times 5` square is filled. + + 1. Create a list ``P`` of pairs of numbers which are the + coordinates in the Polybius square of the letters in + ``msg``. + 2. Let ``L1`` be the list of all first coordinates of ``P`` + (length of ``L1 = n``), let ``L2`` be the list of all + second coordinates of ``P`` (so the length of ``L2`` + is also ``n``). + 3. Let ``L`` be the concatenation of ``L1`` and ``L2`` + (length ``L = 2*n``), except that consecutive numbers + are paired ``(L[2*i], L[2*i + 1])``. You can regard + ``L`` as a list of pairs of length ``n``. + 4. Let ``C`` be the list of all letters which are of the + form ``S[i, j]``, for all ``(i, j)`` in ``L``. As a + string, this is the ciphertext of ``msg``. + + Parameters + ========== + + msg : str + Plaintext string. + + Converted to upper case and filtered of anything but all letters + except J. + + key + Short string for key; non-alphabetic letters, J and duplicated + characters are ignored and then, if the length is less than 25 + characters, it is padded with other letters of the alphabet + (in alphabetical order). + + Returns + ======= + + ct + Ciphertext (all caps, no spaces). + + Examples + ======== + + >>> from sympy.crypto.crypto import ( + ... encipher_bifid5, decipher_bifid5) + + "J" will be omitted unless it is replaced with something else: + + >>> round_trip = lambda m, k: \ + ... decipher_bifid5(encipher_bifid5(m, k), k) + >>> key = 'a' + >>> msg = "JOSIE" + >>> round_trip(msg, key) + 'OSIE' + >>> round_trip(msg.replace("J", "I"), key) + 'IOSIE' + >>> j = "QIQ" + >>> round_trip(msg.replace("J", j), key).replace(j, "J") + 'JOSIE' + + + Notes + ===== + + The Bifid cipher was invented around 1901 by Felix Delastelle. + It is a *fractional substitution* cipher, where letters are + replaced by pairs of symbols from a smaller alphabet. The + cipher uses a `5 \times 5` square filled with some ordering of the + alphabet, except that "J" is replaced with "I" (this is a so-called + Polybius square; there is a `6 \times 6` analog if you add back in + "J" and also append onto the usual 26 letter alphabet, the digits + 0, 1, ..., 9). + According to Helen Gaines' book *Cryptanalysis*, this type of cipher + was used in the field by the German Army during World War I. + + See Also + ======== + + decipher_bifid5, encipher_bifid + + """ + msg, key, _ = _prep(msg.upper(), key.upper(), None, bifid5) + key = padded_key(key, bifid5) + return encipher_bifid(msg, '', key) + + +def decipher_bifid5(msg, key): + r""" + Return the Bifid cipher decryption of ``msg``. + + Explanation + =========== + + This is the version of the Bifid cipher that uses the `5 \times 5` + Polybius square; the letter "J" is ignored unless a ``key`` of + length 25 is used. 
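+
+    The fractionation can be traced by hand (a worked illustration:
+    with key "gold bug" the square rows are GOLDB, UACEF, HIKMN,
+    PQRST, VWXYZ, so "M" sits at (2, 3) and "E" at (1, 3); the rows
+    [2, 1] and columns [3, 3] concatenate to [2, 1, 3, 3], which
+    regroups into (2, 1) -> "I" and (3, 3) -> "S"):
+
+    >>> from sympy.crypto.crypto import encipher_bifid5
+    >>> encipher_bifid5('ME', 'gold bug')
+    'IS'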
+
+    Parameters
+    ==========
+
+    msg
+        Ciphertext string.
+
+    key
+        Short string for key; duplicated characters are ignored and if
+        the length is less than 25 characters, it will be padded with
+        other letters from the alphabet omitting "J".
+        Non-alphabetic characters are ignored.
+
+    Returns
+    =======
+
+    plaintext
+        Plaintext from Bifid5 cipher (all caps, no spaces).
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_bifid5, decipher_bifid5
+    >>> key = "gold bug"
+    >>> encipher_bifid5('meet me on friday', key)
+    'IEILEHFSTSFXEE'
+    >>> encipher_bifid5('meet me on monday', key)
+    'IEILHHFSTSFQYE'
+    >>> decipher_bifid5(_, key)
+    'MEETMEONMONDAY'
+
+    """
+    msg, key, _ = _prep(msg.upper(), key.upper(), None, bifid5)
+    key = padded_key(key, bifid5)
+    return decipher_bifid(msg, '', key)
+
+
+def bifid5_square(key=None):
+    r"""
+    5x5 Polybius square.
+
+    Produce the Polybius square for the `5 \times 5` Bifid cipher.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import bifid5_square
+    >>> bifid5_square("gold bug")
+    Matrix([
+    [G, O, L, D, B],
+    [U, A, C, E, F],
+    [H, I, K, M, N],
+    [P, Q, R, S, T],
+    [V, W, X, Y, Z]])
+
+    """
+    if not key:
+        key = bifid5
+    else:
+        _, key, _ = _prep('', key.upper(), None, bifid5)
+        key = padded_key(key, bifid5)
+    return bifid_square(key)
+
+
+def encipher_bifid6(msg, key):
+    r"""
+    Performs the Bifid cipher encryption on plaintext ``msg``, and
+    returns the ciphertext.
+
+    This is the version of the Bifid cipher that uses the `6 \times 6`
+    Polybius square.
+
+    Parameters
+    ==========
+
+    msg
+        Plaintext string (digits okay).
+
+    key
+        Short string for key (digits okay).
+
+        If ``key`` is less than 36 characters long, the square will be
+        filled with letters A through Z and digits 0 through 9.
+
+    Returns
+    =======
+
+    ciphertext
+        Ciphertext from Bifid cipher (all caps, no spaces).
+
+    See Also
+    ========
+
+    decipher_bifid6, encipher_bifid
+
+    """
+    msg, key, _ = _prep(msg.upper(), key.upper(), None, bifid6)
+    key = padded_key(key, bifid6)
+    return encipher_bifid(msg, '', key)
+
+
+def decipher_bifid6(msg, key):
+    r"""
+    Performs the Bifid cipher decryption on ciphertext ``msg``, and
+    returns the plaintext.
+
+    This is the version of the Bifid cipher that uses the `6 \times 6`
+    Polybius square.
+
+    Parameters
+    ==========
+
+    msg
+        Ciphertext string (digits okay); converted to upper case.
+
+    key
+        Short string for key (digits okay).
+
+        If ``key`` is less than 36 characters long, the square will be
+        filled with letters A through Z and digits 0 through 9.
+        All letters are converted to uppercase.
+
+    Returns
+    =======
+
+    plaintext
+        Plaintext from Bifid cipher (all caps, no spaces).
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_bifid6, decipher_bifid6
+    >>> key = "gold bug"
+    >>> encipher_bifid6('meet me on monday at 8am', key)
+    'KFKLJJHF5MMMKTFRGPL'
+    >>> decipher_bifid6(_, key)
+    'MEETMEONMONDAYAT8AM'
+
+    """
+    msg, key, _ = _prep(msg.upper(), key.upper(), None, bifid6)
+    key = padded_key(key, bifid6)
+    return decipher_bifid(msg, '', key)
+
+
+def bifid6_square(key=None):
+    r"""
+    6x6 Polybius square.
+
+    Produces the Polybius square for the `6 \times 6` Bifid cipher.
+    Assumes the alphabet of symbols is "A", ..., "Z", "0", ..., "9".
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import bifid6_square
+    >>> key = "gold bug"
+    >>> bifid6_square(key)
+    Matrix([
+    [G, O, L, D, B, U],
+    [A, C, E, F, H, I],
+    [J, K, M, N, P, Q],
+    [R, S, T, V, W, X],
+    [Y, Z, 0, 1, 2, 3],
+    [4, 5, 6, 7, 8, 9]])
+
+    """
+    if not key:
+        key = bifid6
+    else:
+        _, key, _ = _prep('', key.upper(), None, bifid6)
+        key = padded_key(key, bifid6)
+    return bifid_square(key)
+
+
+#################### RSA #############################
+
+def _decipher_rsa_crt(i, d, factors):
+    """Decipher RSA using the Chinese remainder theorem from the
+    information of the relatively-prime factors of the modulus.
+
+    Parameters
+    ==========
+
+    i : integer
+        Ciphertext
+
+    d : integer
+        The exponent component.
+
+    factors : list of relatively-prime integers
+        The integers given must be coprime and the product must equal
+        the modulus component of the original RSA key.
+
+    Examples
+    ========
+
+    How to decrypt RSA with CRT:
+
+    >>> from sympy.crypto.crypto import rsa_public_key, rsa_private_key
+    >>> primes = [61, 53]
+    >>> e = 17
+    >>> args = primes + [e]
+    >>> puk = rsa_public_key(*args)
+    >>> prk = rsa_private_key(*args)
+
+    >>> from sympy.crypto.crypto import encipher_rsa, _decipher_rsa_crt
+    >>> msg = 65
+    >>> crt_primes = primes
+    >>> encrypted = encipher_rsa(msg, puk)
+    >>> decrypted = _decipher_rsa_crt(encrypted, prk[1], primes)
+    >>> decrypted
+    65
+    """
+    # residue of the deciphered message modulo each coprime factor
+    residues = [pow(i, d, p) for p in factors]
+
+    result = crt(factors, residues)
+    if not result:
+        raise ValueError("CRT failed")
+    return result[0]
+
+
+def _rsa_key(*args, public=True, private=True, totient='Euler', index=None, multipower=None):
+    r"""A private subroutine to generate an RSA key.
+
+    Parameters
+    ==========
+
+    public, private : bool, optional
+        Flag to generate either a public key or a private key.
+
+    totient : 'Euler' or 'Carmichael'
+        Different notation used for totient.
+
+    multipower : bool, optional
+        Flag to bypass the warning for multipower RSA.
+    """
+
+    if len(args) < 2:
+        return False
+
+    if totient not in ('Euler', 'Carmichael'):
+        raise ValueError(
+            "The argument totient={} should be either "
+            "'Euler' or 'Carmichael'."
+            .format(totient))
+
+    if totient == 'Euler':
+        _totient = _euler
+    else:
+        _totient = _carmichael
+
+    if index is not None:
+        index = as_int(index)
+        if totient != 'Carmichael':
+            raise ValueError(
+                "Setting the 'index' keyword argument requires totient "
+                "notation to be specified as 'Carmichael'.")
+
+    primes, e = args[:-1], args[-1]
+
+    if not all(isprime(p) for p in primes):
+        new_primes = []
+        for i in primes:
+            new_primes.extend(factorint(i, multiple=True))
+        primes = new_primes
+
+    n = reduce(lambda i, j: i*j, primes)
+
+    tally = multiset(primes)
+    if all(v == 1 for v in tally.values()):
+        multiple = list(tally.keys())
+        phi = _totient._from_distinct_primes(*multiple)
+
+    else:
+        if not multipower:
+            NonInvertibleCipherWarning(
+                'Non-distinctive primes found in the factors {}. '
+                'The cipher may not be decryptable for some numbers '
+                'in the complete residue system Z[{}], but the cipher '
+                'can still be valid if you restrict the domain to be '
+                'the reduced residue system Z*[{}]. You can pass '
+                'the flag multipower=True if you want to suppress this '
+                'warning.'
+    """
+    residues = [pow(i, d, p) for p in factors]
+
+    result = crt(factors, residues)
+    if not result:
+        raise ValueError("CRT failed")
+    return result[0]
+
+
+def _rsa_key(*args, public=True, private=True, totient='Euler', index=None, multipower=None):
+    r"""A private subroutine to generate an RSA key.
+
+    Parameters
+    ==========
+
+    public, private : bool, optional
+        Flags to generate either a public key or a private key.
+
+    totient : 'Euler' or 'Carmichael'
+        Different notation used for totient.
+
+    multipower : bool, optional
+        Flag to bypass warning for multipower RSA.
+    """
+
+    if len(args) < 2:
+        return False
+
+    if totient not in ('Euler', 'Carmichael'):
+        raise ValueError(
+            "The argument totient={} should either be " \
+            "'Euler' or 'Carmichael'." \
+            .format(totient))
+
+    if totient == 'Euler':
+        _totient = _euler
+    else:
+        _totient = _carmichael
+
+    if index is not None:
+        index = as_int(index)
+        if totient != 'Carmichael':
+            raise ValueError(
+                "Setting the 'index' keyword argument requires totient "
+                "notation to be specified as 'Carmichael'.")
+
+    primes, e = args[:-1], args[-1]
+
+    if not all(isprime(p) for p in primes):
+        new_primes = []
+        for i in primes:
+            new_primes.extend(factorint(i, multiple=True))
+        primes = new_primes
+
+    n = reduce(lambda i, j: i*j, primes)
+
+    tally = multiset(primes)
+    if all(v == 1 for v in tally.values()):
+        multiple = list(tally.keys())
+        phi = _totient._from_distinct_primes(*multiple)
+
+    else:
+        if not multipower:
+            NonInvertibleCipherWarning(
+                'Non-distinct primes found in the factors {}. '
+                'The cipher may not be decryptable for some numbers '
+                'in the complete residue system Z[{}], but the cipher '
+                'can still be valid if you restrict the domain to be '
+                'the reduced residue system Z*[{}]. You can pass '
+                'the flag multipower=True if you want to suppress this '
+                'warning.'
+                .format(primes, n, n)
+                # stacklevel=4 because most users will call a function that
+                # calls this function
+                ).warn(stacklevel=4)
+        phi = _totient._from_factors(tally)
+
+    if igcd(e, phi) == 1:
+        if public and not private:
+            if isinstance(index, int):
+                e = e % phi
+                e += index * phi
+            return n, e
+
+        if private and not public:
+            d = mod_inverse(e, phi)
+            if isinstance(index, int):
+                d += index * phi
+            return n, d
+
+    return False
+
+
+def rsa_public_key(*args, **kwargs):
+    r"""Return the RSA *public key* pair, `(n, e)`
+
+    Parameters
+    ==========
+
+    args : naturals
+        If specified as `p, q, e` where `p` and `q` are distinct primes
+        and `e` is a desired public exponent of the RSA, `n = p q` and
+        `e` will be verified against the totient
+        `\phi(n)` (Euler totient) or `\lambda(n)` (Carmichael totient)
+        to be `\gcd(e, \phi(n)) = 1` or `\gcd(e, \lambda(n)) = 1`.
+
+        If specified as `p_1, p_2, \dots, p_n, e` where
+        `p_1, p_2, \dots, p_n` are specified as primes,
+        and `e` is specified as a desired public exponent of the RSA,
+        it will form a multi-prime RSA, which is a more generalized
+        form of the popular 2-prime RSA.
+
+        It is also possible to form a single-prime RSA by specifying
+        the argument as `p, e`, which can be considered a trivial case
+        of a multiprime RSA.
+
+        Furthermore, it is possible to form a multi-power RSA by
+        specifying two or more of the primes to be the same.
+        However, unlike the two-distinct-prime RSA or multi-prime
+        RSA, not every number in the complete residue system
+        (`\mathbb{Z}_n`) will be decryptable, since the mapping
+        `\mathbb{Z}_{n} \rightarrow \mathbb{Z}_{n}`
+        will not be bijective.
+        (The only exception is the trivial case when
+        `e = 1`
+        or, more generally,
+
+        .. math::
+            e \in \left \{ 1 + k \lambda(n)
+            \mid k \in \mathbb{Z} \land k \geq 0 \right \}
+
+        when RSA reduces to the identity.)
+        However, the RSA is still decryptable for numbers in the
+        reduced residue system (`\mathbb{Z}_n^{\times}`), since the
+        mapping
+        `\mathbb{Z}_{n}^{\times} \rightarrow \mathbb{Z}_{n}^{\times}`
+        is still bijective.
+
+        If you pass a non-prime integer to the arguments
+        `p_1, p_2, \dots, p_n`, the particular number will be
+        prime-factored and it will become either a multi-prime RSA or a
+        multi-power RSA in its canonical form, depending on whether the
+        product equals its radical or not.
+        `p_1 p_2 \dots p_n = \text{rad}(p_1 p_2 \dots p_n)`
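+
+        For instance (a small sketch), composite arguments are factored
+        into their prime parts, here yielding a multi-prime key:
+
+        >>> from sympy.crypto.crypto import rsa_public_key
+        >>> rsa_public_key(6, 35, 11)  # 6 -> 2*3, 35 -> 5*7
+        (210, 11)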
+
+    totient : 'Euler' or 'Carmichael', optional
+        If ``'Euler'``, it uses Euler's totient `\phi(n)` which is
+        :meth:`sympy.ntheory.factor_.totient` in SymPy.
+
+        If ``'Carmichael'``, it uses Carmichael's totient `\lambda(n)`
+        which is :meth:`sympy.ntheory.factor_.reduced_totient` in SymPy.
+
+        Unlike private key generation, this is a trivial keyword for
+        public key generation because
+        `\gcd(e, \phi(n)) = 1 \iff \gcd(e, \lambda(n)) = 1`.
+
+    index : nonnegative integer, optional
+        Returns an arbitrary solution of an RSA public key at the index
+        specified among `0, 1, 2, \dots`. This parameter needs to be
+        specified along with ``totient='Carmichael'``.
+
+        Similarly to the non-uniqueness of an RSA private key, as
+        described in the ``index`` parameter documentation in
+        :meth:`rsa_private_key`, an RSA public key is also not unique,
+        and there are infinitely many RSA public exponents that
+        behave in the same manner.
+
+        From any given RSA public exponent `e`, there is another RSA
+        public exponent `e + k \lambda(n)`, where `k` is an integer
+        and `\lambda` is Carmichael's totient function.
+
+        However, considering only the positive cases, there is
+        a principal solution of an RSA public exponent `e_0` in
+        `0 < e_0 < \lambda(n)`, and all the other solutions
+        can be canonicalized in the form `e_0 + k \lambda(n)`.
+
+        ``index`` specifies the `k` above, to yield any possible value
+        an RSA public key can have.
+
+        An example of computing any arbitrary RSA public key:
+
+        >>> from sympy.crypto.crypto import rsa_public_key
+        >>> rsa_public_key(61, 53, 17, totient='Carmichael', index=0)
+        (3233, 17)
+        >>> rsa_public_key(61, 53, 17, totient='Carmichael', index=1)
+        (3233, 797)
+        >>> rsa_public_key(61, 53, 17, totient='Carmichael', index=2)
+        (3233, 1577)
+
+    multipower : bool, optional
+        Any pair of non-distinct primes found in the RSA specification
+        will restrict the domain of the cryptosystem, as noted in the
+        explanation of the parameter ``args``.
+
+        The SymPy RSA key generator may give a warning before
+        dispatching the key as a multi-power RSA; you can disable the
+        warning by passing ``True`` to this keyword.
+
+    Returns
+    =======
+
+    (n, e) : int, int
+        `n` is a product of any arbitrary number of primes given as
+        the argument.
+
+        `e` is relatively prime (coprime) to the Euler totient
+        `\phi(n)`.
+
+    False
+        Returned if less than two arguments are given, or `e` is
+        not relatively prime to the totient of the modulus.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import rsa_public_key
+
+    A public key of a two-prime RSA:
+
+    >>> p, q, e = 3, 5, 7
+    >>> rsa_public_key(p, q, e)
+    (15, 7)
+    >>> rsa_public_key(p, q, 30)
+    False
+
+    A public key of a multiprime RSA:
+
+    >>> primes = [2, 3, 5, 7, 11, 13]
+    >>> e = 7
+    >>> args = primes + [e]
+    >>> rsa_public_key(*args)
+    (30030, 7)
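+
+    As noted for the ``totient`` keyword, public key generation gives
+    the same result under either totient convention (a quick check):
+
+    >>> rsa_public_key(p, q, e) == rsa_public_key(p, q, e, totient='Carmichael')
+    True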
+
+    Notes
+    =====
+
+    Although RSA can be generalized over any modulus `n`, using
+    two large primes became the most popular specification because a
+    product of two large primes is usually the hardest to factor
+    relative to the number of digits `n` has.
+
+    Verifying this claim, however, requires a closer look at the time
+    complexities of the various prime-factoring algorithms.
+
+    See Also
+    ========
+
+    rsa_private_key
+    encipher_rsa
+    decipher_rsa
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/RSA_%28cryptosystem%29
+
+    .. [2] https://cacr.uwaterloo.ca/techreports/2006/cacr2006-16.pdf
+
+    .. [3] https://link.springer.com/content/pdf/10.1007/BFb0055738.pdf
+
+    .. [4] https://www.itiis.org/digital-library/manuscript/1381
+    """
+    return _rsa_key(*args, public=True, private=False, **kwargs)
+
+
+def rsa_private_key(*args, **kwargs):
+    r"""Return the RSA *private key* pair, `(n, d)`
+
+    Parameters
+    ==========
+
+    args : naturals
+        The arguments are identical to ``args`` in
+        :meth:`rsa_public_key`.
+
+    totient : 'Euler' or 'Carmichael', optional
+        If ``'Euler'``, it uses Euler's totient convention `\phi(n)`
+        which is :meth:`sympy.ntheory.factor_.totient` in SymPy.
+
+        If ``'Carmichael'``, it uses Carmichael's totient convention
+        `\lambda(n)` which is
+        :meth:`sympy.ntheory.factor_.reduced_totient` in SymPy.
+
+        The two conventions can produce different private keys, as the
+        examples below show.
+
+        Example using Euler's totient:
+
+        >>> from sympy.crypto.crypto import rsa_private_key
+        >>> rsa_private_key(61, 53, 17, totient='Euler')
+        (3233, 2753)
+
+        Example using Carmichael's totient:
+
+        >>> from sympy.crypto.crypto import rsa_private_key
+        >>> rsa_private_key(61, 53, 17, totient='Carmichael')
+        (3233, 413)
+
+    index : nonnegative integer, optional
+        Returns an arbitrary solution of an RSA private key at the index
+        specified among `0, 1, 2, \dots`. This parameter needs to be
+        specified along with ``totient='Carmichael'``.
+
+        The RSA private exponent is a non-unique solution of
+        `e d \equiv 1 \pmod{\lambda(n)}` and can take any form
+        `d + k \lambda(n)`, where `d` is another already-computed
+        private exponent, `\lambda` is Carmichael's totient function,
+        and `k` is any integer.
+
+        However, considering only the positive cases, there is
+        a principal solution of an RSA private exponent `d_0` in
+        `0 < d_0 < \lambda(n)`, and all the other solutions
+        can be canonicalized in the form `d_0 + k \lambda(n)`.
+
+        ``index`` specifies the `k` above, to yield any possible value
+        an RSA private key can have.
+
+        An example of computing any arbitrary RSA private key:
+
+        >>> from sympy.crypto.crypto import rsa_private_key
+        >>> rsa_private_key(61, 53, 17, totient='Carmichael', index=0)
+        (3233, 413)
+        >>> rsa_private_key(61, 53, 17, totient='Carmichael', index=1)
+        (3233, 1193)
+        >>> rsa_private_key(61, 53, 17, totient='Carmichael', index=2)
+        (3233, 1973)
+
+    multipower : bool, optional
+        The keyword is identical to the ``multipower`` in
+        :meth:`rsa_public_key`.
+
+    Returns
+    =======
+
+    (n, d) : int, int
+        `n` is a product of any arbitrary number of primes given as
+        the argument.
+
+        `d` is the inverse of `e` (mod `\phi(n)`) where `e` is the
+        exponent given, and `\phi` is the Euler totient.
+
+    False
+        Returned if less than two arguments are given, or `e` is
+        not relatively prime to the totient of the modulus.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import rsa_private_key
+
+    A private key of a two-prime RSA:
+
+    >>> p, q, e = 3, 5, 7
+    >>> rsa_private_key(p, q, e)
+    (15, 7)
+    >>> rsa_private_key(p, q, 30)
+    False
+
+    A private key of a multiprime RSA:
+
+    >>> primes = [2, 3, 5, 7, 11, 13]
+    >>> e = 7
+    >>> args = primes + [e]
+    >>> rsa_private_key(*args)
+    (30030, 823)
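+
+    A private key round-trips a message together with the matching
+    public key (a small sanity check using the two-prime key above):
+
+    >>> from sympy.crypto.crypto import rsa_public_key, encipher_rsa, decipher_rsa
+    >>> prk = rsa_private_key(p, q, e)
+    >>> puk = rsa_public_key(p, q, e)
+    >>> decipher_rsa(encipher_rsa(12, prk), puk)
+    12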
The encryption is computed by `i^e \bmod n`.
+
+        The key can be either a public key or a private key; however,
+        a message encrypted with a public key can only be decrypted
+        with the private key, and vice versa, since RSA is an
+        asymmetric cryptosystem.
+
+    factors : list of coprime integers
+        This is identical to the keyword ``factors`` in
+        :meth:`decipher_rsa`.
+
+    Notes
+    =====
+
+    Some specifications may make the RSA not cryptographically
+    meaningful.
+
+    For example, `0` and `1` always remain the same under any number of
+    exponentiations and thus should be avoided.
+
+    Furthermore, if `i^e < n`, `i` may easily be figured out by taking
+    the `e`-th root.
+
+    Also, if the exponent is specified as `1`, or more generally as
+    `1 + k \lambda(n)` where `k` is a nonnegative integer and
+    `\lambda` is the Carmichael totient, the RSA becomes an identity
+    mapping.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_rsa
+    >>> from sympy.crypto.crypto import rsa_public_key, rsa_private_key
+
+    Public Key Encryption:
+
+    >>> p, q, e = 3, 5, 7
+    >>> puk = rsa_public_key(p, q, e)
+    >>> msg = 12
+    >>> encipher_rsa(msg, puk)
+    3
+
+    Private Key Encryption:
+
+    >>> p, q, e = 3, 5, 7
+    >>> prk = rsa_private_key(p, q, e)
+    >>> msg = 12
+    >>> encipher_rsa(msg, prk)
+    3
+
+    Encryption using the Chinese remainder theorem:
+
+    >>> encipher_rsa(msg, prk, factors=[p, q])
+    3
+    """
+    return _encipher_decipher_rsa(i, key, factors=factors)
+
+
+def decipher_rsa(i, key, factors=None):
+    r"""Decrypt the ciphertext with RSA.
+
+    Parameters
+    ==========
+
+    i : integer
+        The ciphertext to be decrypted.
+
+    key : (n, d) where n, d are integers
+        `n` is the modulus of the key and `d` is the exponent of the
+        key. The decryption is computed by `i^d \bmod n`.
+
+        The key can be either a public key or a private key; however,
+        a message encrypted with a public key can only be decrypted
+        with the private key, and vice versa, since RSA is an
+        asymmetric cryptosystem.
+
+    factors : list of coprime integers
+        As the modulus `n` created from RSA key generation is composed
+        of arbitrary prime factors
+        `n = {p_1}^{k_1}{p_2}^{k_2}\dots{p_n}^{k_n}` where
+        `p_1, p_2, \dots, p_n` are distinct primes and
+        `k_1, k_2, \dots, k_n` are positive integers, the Chinese
+        remainder theorem can be used to compute `i^d \bmod n` from the
+        fragmented modulo operations like
+
+        .. math::
+            i^d \bmod {p_1}^{k_1}, i^d \bmod {p_2}^{k_2}, \dots,
+            i^d \bmod {p_n}^{k_n}
+
+        or like
+
+        .. math::
+            i^d \bmod {p_1}^{k_1}{p_2}^{k_2},
+            i^d \bmod {p_3}^{k_3}, \dots ,
+            i^d \bmod {p_n}^{k_n}
+
+        as long as the moduli are pairwise coprime.
+
+        The raw primes used in generating the RSA key pair can be a good
+        option.
+
+        Note that the speed advantage of using this is only noticeable
+        for very large cases (like 2048-bit RSA keys), since the
+        overhead of the pure Python implementation of
+        :meth:`sympy.ntheory.modular.crt` may outweigh the
+        theoretical speed advantage.
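+
+        A toy illustration of this fragmentation, with `n = 15`,
+        `d = 7` and an arbitrary ciphertext `8` (illustrative values
+        only):
+
+        >>> from sympy.ntheory.modular import crt
+        >>> pow(8, 7, 15) == crt([3, 5], [pow(8, 7, 3), pow(8, 7, 5)])[0]
+        True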
+
+    Notes
+    =====
+
+    See the ``Notes`` section in the documentation of
+    :meth:`encipher_rsa`.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import decipher_rsa, encipher_rsa
+    >>> from sympy.crypto.crypto import rsa_public_key, rsa_private_key
+
+    Public Key Encryption and Decryption:
+
+    >>> p, q, e = 3, 5, 7
+    >>> prk = rsa_private_key(p, q, e)
+    >>> puk = rsa_public_key(p, q, e)
+    >>> msg = 12
+    >>> new_msg = encipher_rsa(msg, prk)
+    >>> new_msg
+    3
+    >>> decipher_rsa(new_msg, puk)
+    12
+
+    Private Key Encryption and Decryption:
+
+    >>> p, q, e = 3, 5, 7
+    >>> prk = rsa_private_key(p, q, e)
+    >>> puk = rsa_public_key(p, q, e)
+    >>> msg = 12
+    >>> new_msg = encipher_rsa(msg, puk)
+    >>> new_msg
+    3
+    >>> decipher_rsa(new_msg, prk)
+    12
+
+    Decryption using the Chinese remainder theorem:
+
+    >>> decipher_rsa(new_msg, prk, factors=[p, q])
+    12
+
+    See Also
+    ========
+
+    encipher_rsa
+    """
+    return _encipher_decipher_rsa(i, key, factors=factors)
+
+
+#################### kid krypto (kid RSA) #############################
+
+
+def kid_rsa_public_key(a, b, A, B):
+    r"""
+    Kid RSA is a version of RSA useful to teach grade school children
+    since it does not involve exponentiation.
+
+    Explanation
+    ===========
+
+    Alice wants to talk to Bob. Bob generates keys as follows.
+    Key generation:
+
+    * Select positive integers `a, b, A, B` at random.
+    * Compute `M = a b - 1`, `e = A M + a`, `d = B M + b`,
+      `n = (e d - 1)//M`.
+    * The *public key* is `(n, e)`. Bob sends these to Alice.
+    * The *private key* is `(n, d)`, which Bob keeps secret.
+
+    Encryption: If `p` is the plaintext message then the
+    ciphertext is `c = p e \pmod n`.
+
+    Decryption: If `c` is the ciphertext message then the
+    plaintext is `p = c d \pmod n`.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import kid_rsa_public_key
+    >>> a, b, A, B = 3, 4, 5, 6
+    >>> kid_rsa_public_key(a, b, A, B)
+    (369, 58)
+
+    """
+    M = a*b - 1
+    e = A*M + a
+    d = B*M + b
+    n = (e*d - 1)//M
+    return n, e
+
+
+def kid_rsa_private_key(a, b, A, B):
+    """
+    Compute `M = a b - 1`, `e = A M + a`, `d = B M + b`,
+    `n = (e d - 1) / M`. The *private key* is `(n, d)`, which Bob
+    keeps secret.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import kid_rsa_private_key
+    >>> a, b, A, B = 3, 4, 5, 6
+    >>> kid_rsa_private_key(a, b, A, B)
+    (369, 70)
+
+    """
+    M = a*b - 1
+    e = A*M + a
+    d = B*M + b
+    n = (e*d - 1)//M
+    return n, d
+
+
+def encipher_kid_rsa(msg, key):
+    """
+    Here ``msg`` is the plaintext and ``key`` is the public key.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import (
+    ...     encipher_kid_rsa, kid_rsa_public_key)
+    >>> msg = 200
+    >>> a, b, A, B = 3, 4, 5, 6
+    >>> key = kid_rsa_public_key(a, b, A, B)
+    >>> encipher_kid_rsa(msg, key)
+    161
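+
+    Decryption works because `e d = n M + 1`, so multiplying by `e` and
+    then by `d` is the identity modulo `n` (a quick check):
+
+    >>> from sympy.crypto.crypto import kid_rsa_private_key
+    >>> _, d = kid_rsa_private_key(a, b, A, B)
+    >>> n, e = key
+    >>> (e*d) % n
+    1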
+
+    """
+    n, e = key
+    return (msg*e) % n
+
+
+def decipher_kid_rsa(msg, key):
+    """
+    Here ``msg`` is the ciphertext and ``key`` is the private key.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import (
+    ...     kid_rsa_public_key, kid_rsa_private_key,
+    ...     decipher_kid_rsa, encipher_kid_rsa)
+    >>> a, b, A, B = 3, 4, 5, 6
+    >>> msg = 200
+    >>> pub = kid_rsa_public_key(a, b, A, B)
+    >>> pri = kid_rsa_private_key(a, b, A, B)
+    >>> ct = encipher_kid_rsa(msg, pub)
+    >>> decipher_kid_rsa(ct, pri)
+    200
+
+    """
+    n, d = key
+    return (msg*d) % n
+
+
+#################### Morse Code ######################################
+
+morse_char = {
+    ".-": "A", "-...": "B",
+    "-.-.": "C", "-..": "D",
+    ".": "E", "..-.": "F",
+    "--.": "G", "....": "H",
+    "..": "I", ".---": "J",
+    "-.-": "K", ".-..": "L",
+    "--": "M", "-.": "N",
+    "---": "O", ".--.": "P",
+    "--.-": "Q", ".-.": "R",
+    "...": "S", "-": "T",
+    "..-": "U", "...-": "V",
+    ".--": "W", "-..-": "X",
+    "-.--": "Y", "--..": "Z",
+    "-----": "0", ".----": "1",
+    "..---": "2", "...--": "3",
+    "....-": "4", ".....": "5",
+    "-....": "6", "--...": "7",
+    "---..": "8", "----.": "9",
+    ".-.-.-": ".", "--..--": ",",
+    "---...": ":", "-.-.-.": ";",
+    "..--..": "?", "-....-": "-",
+    "..--.-": "_", "-.--.": "(",
+    "-.--.-": ")", ".----.": "'",
+    "-...-": "=", ".-.-.": "+",
+    "-..-.": "/", ".--.-.": "@",
+    "...-..-": "$", "-.-.--": "!"}
+char_morse = {v: k for k, v in morse_char.items()}
+
+
+def encode_morse(msg, sep='|', mapping=None):
+    """
+    Encodes plaintext into Morse code with letters
+    separated by ``sep`` and words by a double ``sep``.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encode_morse
+    >>> msg = 'ATTACK RIGHT FLANK'
+    >>> encode_morse(msg)
+    '.-|-|-|.-|-.-.|-.-||.-.|..|--.|....|-||..-.|.-..|.-|-.|-.-'
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Morse_code
+
+    """
+
+    mapping = mapping or char_morse
+    assert sep not in mapping
+    word_sep = 2*sep
+    mapping[" "] = word_sep
+    suffix = msg and msg[-1] in whitespace
+
+    # normalize whitespace
+    msg = (' ' if word_sep else '').join(msg.split())
+    # omit unmapped chars
+    chars = set(''.join(msg.split()))
+    ok = set(mapping.keys())
+    msg = translate(msg, None, ''.join(chars - ok))
+
+    morsestring = []
+    words = msg.split()
+    for word in words:
+        morseword = []
+        for letter in word:
+            morseletter = mapping[letter]
+            morseword.append(morseletter)
+
+        word = sep.join(morseword)
+        morsestring.append(word)
+
+    return word_sep.join(morsestring) + (word_sep if suffix else '')
+
+
+def decode_morse(msg, sep='|', mapping=None):
+    """
+    Decodes Morse code with letters separated by ``sep``
+    (default is '|') and words by ``word_sep`` (default is '||')
+    into plaintext.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import decode_morse
+    >>> mc = '--|---|...-|.||.|.-|...|-'
+    >>> decode_morse(mc)
+    'MOVE EAST'
+
+    References
+    ==========
+
+    .. [1] https://en.wikipedia.org/wiki/Morse_code
+
+    """
+
+    mapping = mapping or morse_char
+    word_sep = 2*sep
+    characterstring = []
+    words = msg.strip(word_sep).split(word_sep)
+    for word in words:
+        letters = word.split(sep)
+        chars = [mapping[c] for c in letters]
+        word = ''.join(chars)
+        characterstring.append(word)
+    rv = " ".join(characterstring)
+    return rv
+
+
+#################### LFSRs ##########################################
+
+
+def lfsr_sequence(key, fill, n):
+    r"""
+    This function creates an LFSR sequence.
+
+    Parameters
+    ==========
+
+    key : list
+        A list of finite field elements, `[c_0, c_1, \ldots, c_k].`
+
+    fill : list
+        The list of the initial terms of the LFSR sequence,
+        `[x_0, x_1, \ldots, x_k].`
+
+    n
+        Number of terms of the sequence that the function returns.
+ + Returns + ======= + + L + The LFSR sequence defined by + `x_{n+1} = c_k x_n + \ldots + c_0 x_{n-k}`, for + `n \leq k`. + + Notes + ===== + + S. Golomb [G]_ gives a list of three statistical properties a + sequence of numbers `a = \{a_n\}_{n=1}^\infty`, + `a_n \in \{0,1\}`, should display to be considered + "random". Define the autocorrelation of `a` to be + + .. math:: + + C(k) = C(k,a) = \lim_{N\rightarrow \infty} {1\over N}\sum_{n=1}^N (-1)^{a_n + a_{n+k}}. + + In the case where `a` is periodic with period + `P` then this reduces to + + .. math:: + + C(k) = {1\over P}\sum_{n=1}^P (-1)^{a_n + a_{n+k}}. + + Assume `a` is periodic with period `P`. + + - balance: + + .. math:: + + \left|\sum_{n=1}^P(-1)^{a_n}\right| \leq 1. + + - low autocorrelation: + + .. math:: + + C(k) = \left\{ \begin{array}{cc} 1,& k = 0,\\ \epsilon, & k \ne 0. \end{array} \right. + + (For sequences satisfying these first two properties, it is known + that `\epsilon = -1/P` must hold.) + + - proportional runs property: In each period, half the runs have + length `1`, one-fourth have length `2`, etc. + Moreover, there are as many runs of `1`'s as there are of + `0`'s. + + Examples + ======== + + >>> from sympy.crypto.crypto import lfsr_sequence + >>> from sympy.polys.domains import FF + >>> F = FF(2) + >>> fill = [F(1), F(1), F(0), F(1)] + >>> key = [F(1), F(0), F(0), F(1)] + >>> lfsr_sequence(key, fill, 10) + [1 mod 2, 1 mod 2, 0 mod 2, 1 mod 2, 0 mod 2, + 1 mod 2, 1 mod 2, 0 mod 2, 0 mod 2, 1 mod 2] + + References + ========== + + .. [G] Solomon Golomb, Shift register sequences, Aegean Park Press, + Laguna Hills, Ca, 1967 + + """ + if not isinstance(key, list): + raise TypeError("key must be a list") + if not isinstance(fill, list): + raise TypeError("fill must be a list") + p = key[0].mod + F = FF(p) + s = fill + k = len(fill) + L = [] + for i in range(n): + s0 = s[:] + L.append(s[0]) + s = s[1:k] + x = sum([int(key[i]*s0[i]) for i in range(k)]) + s.append(F(x)) + return L # use [x.to_int() for x in L] for int version + + +def lfsr_autocorrelation(L, P, k): + """ + This function computes the LFSR autocorrelation function. + + Parameters + ========== + + L + A periodic sequence of elements of `GF(2)`. + L must have length larger than P. + + P + The period of L. + + k : int + An integer `k` (`0 < k < P`). + + Returns + ======= + + autocorrelation + The k-th value of the autocorrelation of the LFSR L. + + Examples + ======== + + >>> from sympy.crypto.crypto import ( + ... lfsr_sequence, lfsr_autocorrelation) + >>> from sympy.polys.domains import FF + >>> F = FF(2) + >>> fill = [F(1), F(1), F(0), F(1)] + >>> key = [F(1), F(0), F(0), F(1)] + >>> s = lfsr_sequence(key, fill, 20) + >>> lfsr_autocorrelation(s, 15, 7) + -1/15 + >>> lfsr_autocorrelation(s, 15, 0) + 1 + + """ + if not isinstance(L, list): + raise TypeError("L (=%s) must be a list" % L) + P = int(P) + k = int(k) + L0 = L[:P] # slices makes a copy + L1 = L0 + L0[:k] + L2 = [(-1)**(L1[i].to_int() + L1[i + k].to_int()) for i in range(P)] + tot = sum(L2) + return Rational(tot, P) + + +def lfsr_connection_polynomial(s): + """ + This function computes the LFSR connection polynomial. + + Parameters + ========== + + s + A sequence of elements of even length, with entries in a finite + field. + + Returns + ======= + + C(x) + The connection polynomial of a minimal LFSR yielding s. + + This implements the algorithm in section 3 of J. L. Massey's + article [M]_. + + Examples + ======== + + >>> from sympy.crypto.crypto import ( + ... 
lfsr_sequence, lfsr_connection_polynomial)
+    >>> from sympy.polys.domains import FF
+    >>> F = FF(2)
+    >>> fill = [F(1), F(1), F(0), F(1)]
+    >>> key = [F(1), F(0), F(0), F(1)]
+    >>> s = lfsr_sequence(key, fill, 20)
+    >>> lfsr_connection_polynomial(s)
+    x**4 + x + 1
+    >>> fill = [F(1), F(0), F(0), F(1)]
+    >>> key = [F(1), F(1), F(0), F(1)]
+    >>> s = lfsr_sequence(key, fill, 20)
+    >>> lfsr_connection_polynomial(s)
+    x**3 + 1
+    >>> fill = [F(1), F(0), F(1)]
+    >>> key = [F(1), F(1), F(0)]
+    >>> s = lfsr_sequence(key, fill, 20)
+    >>> lfsr_connection_polynomial(s)
+    x**3 + x**2 + 1
+    >>> fill = [F(1), F(0), F(1)]
+    >>> key = [F(1), F(0), F(1)]
+    >>> s = lfsr_sequence(key, fill, 20)
+    >>> lfsr_connection_polynomial(s)
+    x**3 + x + 1
+
+    References
+    ==========
+
+    .. [M] James L. Massey, "Shift-Register Synthesis and BCH Decoding."
+        IEEE Trans. on Information Theory, vol. 15(1), pp. 122-127,
+        Jan 1969.
+
+    """
+    # Initialization:
+    p = s[0].mod
+    x = Symbol("x")
+    C = 1*x**0
+    B = 1*x**0
+    m = 1
+    b = 1*x**0
+    L = 0
+    N = 0
+    while N < len(s):
+        if L > 0:
+            dC = Poly(C).degree()
+            r = min(L + 1, dC + 1)
+            coeffsC = [C.subs(x, 0)] + [C.coeff(x**i)
+                for i in range(1, dC + 1)]
+            d = (s[N].to_int() + sum([coeffsC[i]*s[N - i].to_int()
+                for i in range(1, r)])) % p
+        if L == 0:
+            d = s[N].to_int()*x**0
+        if d == 0:
+            m += 1
+            N += 1
+        if d > 0:
+            if 2*L > N:
+                C = (C - d*((b**(p - 2)) % p)*x**m*B).expand()
+                m += 1
+                N += 1
+            else:
+                T = C
+                C = (C - d*((b**(p - 2)) % p)*x**m*B).expand()
+                L = N + 1 - L
+                m = 1
+                b = d
+                B = T
+                N += 1
+    dC = Poly(C).degree()
+    coeffsC = [C.subs(x, 0)] + [C.coeff(x**i) for i in range(1, dC + 1)]
+    return sum([coeffsC[i] % p*x**i for i in range(dC + 1)
+        if coeffsC[i] is not None])
+
+
+#################### ElGamal #############################
+
+
+def elgamal_private_key(digit=10, seed=None):
+    r"""
+    Return a three-number tuple as the private key.
+
+    Explanation
+    ===========
+
+    ElGamal encryption is based on the mathematical problem
+    called the Discrete Logarithm Problem (DLP). For example,
+
+    `a^{b} \equiv c \pmod p`
+
+    In general, if ``a`` and ``b`` are known, ``c`` is easily
+    calculated. If ``b`` is unknown, it is hard to use
+    ``a`` and ``c`` to get ``b``.
+
+    Parameters
+    ==========
+
+    digit : int
+        Minimum number of binary digits for key.
+
+    Returns
+    =======
+
+    tuple : (p, r, d)
+        p = prime number.
+
+        r = primitive root.
+
+        d = random number.
+
+    Notes
+    =====
+
+    For testing purposes, the ``seed`` parameter may be set to control
+    the output of this routine. See sympy.core.random._randrange.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import elgamal_private_key
+    >>> from sympy.ntheory import is_primitive_root, isprime
+    >>> a, b, _ = elgamal_private_key()
+    >>> isprime(a)
+    True
+    >>> is_primitive_root(b, a)
+    True
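+
+    With a fixed ``seed`` the output is reproducible; the same triple
+    appears in the ``encipher_elgamal`` examples:
+
+    >>> elgamal_private_key(5, seed=[3])
+    (37, 2, 3)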
+
+    """
+    randrange = _randrange(seed)
+    p = nextprime(2**digit)
+    return p, primitive_root(p), randrange(2, p)
+
+
+def elgamal_public_key(key):
+    r"""
+    Return a three-number tuple as the public key.
+
+    Parameters
+    ==========
+
+    key : (p, r, d)
+        Tuple generated by ``elgamal_private_key``.
+
+    Returns
+    =======
+
+    tuple : (p, r, e)
+        `e = r**d \bmod p`
+
+        `d` is the random number in the private key.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import elgamal_public_key
+    >>> elgamal_public_key((1031, 14, 636))
+    (1031, 14, 212)
+
+    """
+    p, r, e = key
+    return p, r, pow(r, e, p)
+
+
+def encipher_elgamal(i, key, seed=None):
+    r"""
+    Encrypt message with public key.
+
+    Explanation
+    ===========
+
+    ``i`` is a plaintext message expressed as an integer.
+    ``key`` is the public key ``(p, r, e)``. To encrypt
+    a message, a random number ``a`` in ``range(2, p)``
+    is generated and the encrypted message is returned as
+    `c_{1}` and `c_{2}` where:
+
+    `c_{1} \equiv r^{a} \pmod p`
+
+    `c_{2} \equiv m e^{a} \pmod p`
+
+    Parameters
+    ==========
+
+    i
+        Integer of the encoded message.
+
+    key
+        Public key.
+
+    Returns
+    =======
+
+    tuple : (c1, c2)
+        The ciphertext as a pair of numbers.
+
+    Notes
+    =====
+
+    For testing purposes, the ``seed`` parameter may be set to control
+    the output of this routine. See sympy.core.random._randrange.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_elgamal, elgamal_private_key, elgamal_public_key
+    >>> pri = elgamal_private_key(5, seed=[3]); pri
+    (37, 2, 3)
+    >>> pub = elgamal_public_key(pri); pub
+    (37, 2, 8)
+    >>> msg = 36
+    >>> encipher_elgamal(msg, pub, seed=[3])
+    (8, 6)
+
+    """
+    p, r, e = key
+    if i < 0 or i >= p:
+        raise ValueError(
+            'Message (%s) should be in range(%s)' % (i, p))
+    randrange = _randrange(seed)
+    a = randrange(2, p)
+    return pow(r, a, p), i*pow(e, a, p) % p
+
+
+def decipher_elgamal(msg, key):
+    r"""
+    Decrypt message with private key.
+
+    `msg = (c_{1}, c_{2})`
+
+    `key = (p, r, d)`
+
+    By the extended Euclidean algorithm,
+    `u c_{1}^{d} + p n = 1`
+
+    `u \equiv 1/{{c_{1}}^d} \pmod p`
+
+    `u c_{2} \equiv \frac{1}{c_{1}^d} c_{2} \equiv \frac{1}{r^{ad}} c_{2} \pmod p`
+
+    `\frac{1}{r^{ad}} m e^a \equiv \frac{1}{r^{ad}} m {r^{d a}} \equiv m \pmod p`
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import decipher_elgamal
+    >>> from sympy.crypto.crypto import encipher_elgamal
+    >>> from sympy.crypto.crypto import elgamal_private_key
+    >>> from sympy.crypto.crypto import elgamal_public_key
+
+    >>> pri = elgamal_private_key(5, seed=[3])
+    >>> pub = elgamal_public_key(pri); pub
+    (37, 2, 8)
+    >>> msg = 17
+    >>> decipher_elgamal(encipher_elgamal(msg, pub), pri) == msg
+    True
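+
+    The public element is `r^d \bmod p` for this key pair (a quick
+    consistency check):
+
+    >>> p, r, d = pri
+    >>> pow(r, d, p) == pub[2]
+    True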
+
+    """
+    p, _, d = key
+    c1, c2 = msg
+    u = igcdex(c1**d, p)[0]
+    return u * c2 % p
+
+
+################ Diffie-Hellman Key Exchange #########################
+
+def dh_private_key(digit=10, seed=None):
+    r"""
+    Return a three-integer tuple as the private key.
+
+    Explanation
+    ===========
+
+    Diffie-Hellman key exchange is based on the mathematical problem
+    called the Discrete Logarithm Problem (see ElGamal).
+
+    Diffie-Hellman key exchange is divided into the following steps:
+
+    * Alice and Bob agree on a base that consists of a prime ``p``
+      and a primitive root of ``p`` called ``g``.
+    * Alice chooses a number ``a`` and Bob chooses a number ``b`` where
+      ``a`` and ``b`` are random numbers in range `[2, p)`. These are
+      their private keys.
+    * Alice then publicly sends Bob `g^{a} \pmod p` while Bob sends
+      Alice `g^{b} \pmod p`.
+    * They both raise the received value to their secretly chosen
+      number (``a`` or ``b``) and now both have the shared key
+      `g^{ab} \pmod p`.
+
+    Parameters
+    ==========
+
+    digit
+        Minimum number of binary digits required in key.
+
+    Returns
+    =======
+
+    tuple : (p, g, a)
+        p = prime number.
+
+        g = primitive root of p.
+
+        a = random number from 2 through p - 1.
+
+    Notes
+    =====
+
+    For testing purposes, the ``seed`` parameter may be set to control
+    the output of this routine. See sympy.core.random._randrange.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import dh_private_key
+    >>> from sympy.ntheory import isprime, is_primitive_root
+    >>> p, g, _ = dh_private_key()
+    >>> isprime(p)
+    True
+    >>> is_primitive_root(g, p)
+    True
+    >>> p, g, _ = dh_private_key(5)
+    >>> isprime(p)
+    True
+    >>> is_primitive_root(g, p)
+    True
+
+    """
+    p = nextprime(2**digit)
+    g = primitive_root(p)
+    randrange = _randrange(seed)
+    a = randrange(2, p)
+    return p, g, a
+
+
+def dh_public_key(key):
+    r"""
+    Return a three-number tuple as the public key.
+
+    This is the tuple that Alice sends to Bob.
+
+    Parameters
+    ==========
+
+    key : (p, g, a)
+        A tuple generated by ``dh_private_key``.
+
+    Returns
+    =======
+
+    tuple : int, int, int
+        A tuple of `(p, g, g^a \mod p)` with `p`, `g` and `a` given as
+        parameters.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import dh_private_key, dh_public_key
+    >>> p, g, a = dh_private_key()
+    >>> _p, _g, x = dh_public_key((p, g, a))
+    >>> p == _p and g == _g
+    True
+    >>> x == pow(g, a, p)
+    True
+
+    """
+    p, g, a = key
+    return p, g, pow(g, a, p)
+
+
+def dh_shared_key(key, b):
+    """
+    Return an integer that is the shared key.
+
+    This is what Bob and Alice can both calculate using the public
+    keys they received from each other and their private keys.
+
+    Parameters
+    ==========
+
+    key : (p, g, x)
+        Tuple `(p, g, x)` generated by ``dh_public_key``.
+
+    b
+        Random number in the range of `2` to `p - 1`
+        (chosen by the second key exchange member (Bob)).
+
+    Returns
+    =======
+
+    int
+        A shared key.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import (
+    ...     dh_private_key, dh_public_key, dh_shared_key)
+    >>> prk = dh_private_key()
+    >>> p, g, x = dh_public_key(prk)
+    >>> sk = dh_shared_key((p, g, x), 1000)
+    >>> sk == pow(x, 1000, p)
+    True
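+
+    Since `x = g^a \bmod p`, the shared key equals `g^{a b} \bmod p`,
+    which the other party computes the same way (a quick check):
+
+    >>> _, _, a = prk
+    >>> sk == pow(pow(g, 1000, p), a, p)
+    True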
+
+    """
+    p, _, x = key
+    if 1 >= b or b >= p:
+        raise ValueError(filldedent('''
+            Value of b should be greater than 1 and less
+            than prime %s.''' % p))
+
+    return pow(x, b, p)
+
+
+################ Goldwasser-Micali Encryption #########################
+
+
+def _legendre(a, p):
+    """
+    Returns the Legendre symbol of a and p
+    assuming that p is a prime.
+
+    i.e. 1 if a is a quadratic residue mod p
+        -1 if a is not a quadratic residue mod p
+         0 if a is divisible by p
+
+    Parameters
+    ==========
+
+    a : int
+        The number to test.
+
+    p : prime
+        The prime to test ``a`` against.
+
+    Returns
+    =======
+
+    int
+        Legendre symbol (a / p).
+
+    """
+    sig = pow(a, (p - 1)//2, p)
+    if sig == 1:
+        return 1
+    elif sig == 0:
+        return 0
+    else:
+        return -1
+
+
+def _random_coprime_stream(n, seed=None):
+    randrange = _randrange(seed)
+    while True:
+        y = randrange(n)
+        if gcd(y, n) == 1:
+            yield y
+
+
+def gm_private_key(p, q, a=None):
+    r"""
+    Check if ``p`` and ``q`` can be used as private keys for
+    the Goldwasser-Micali encryption. The method works
+    roughly as follows.
+
+    Explanation
+    ===========
+
+    #. Pick two large primes $p$ and $q$.
+    #. Call their product $N$.
+    #. Given a message as an integer $i$, write $i$ in its bit representation $b_0, \dots, b_n$.
+    #. For each $k$,
+
+       if $b_k = 0$:
+           let $a_k$ be a random square
+           (quadratic residue) modulo $p q$
+           such that ``jacobi_symbol(a, p*q) = 1``
+       if $b_k = 1$:
+           let $a_k$ be a random non-square
+           (non-quadratic residue) modulo $p q$
+           such that ``jacobi_symbol(a, p*q) = 1``
+
+    returns $\left[a_1, a_2, \dots\right]$
+
+    $b_k$ can be recovered by checking whether or not
+    $a_k$ is a residue. And from the $b_k$'s, the message
+    can be reconstructed.
+
+    The idea is that, while ``jacobi_symbol(a, p*q)``
+    can be easily computed (and when it is equal to $-1$ will
+    tell you that $a$ is not a square mod $p q$), quadratic
+    residuosity modulo a composite number is hard to compute
+    without knowing its factorization.
+
+    Moreover, approximately half the numbers coprime to $p q$ have
+    :func:`~.jacobi_symbol` equal to $1$. And among those, approximately half
+    are residues and approximately half are not. This maximizes the
+    entropy of the code.
+
+    Parameters
+    ==========
+
+    p, q, a
+        Initialization variables.
+
+    Returns
+    =======
+
+    tuple : (p, q)
+        The input values ``p`` and ``q``.
+
+    Raises
+    ======
+
+    ValueError
+        If ``p`` and ``q`` are not distinct odd primes.
+
+    """
+    if p == q:
+        raise ValueError("expected distinct primes, "
+                         "got two copies of %i" % p)
+    elif not isprime(p) or not isprime(q):
+        raise ValueError("first two arguments must be prime, "
+                         "got %i and %i" % (p, q))
+    elif p == 2 or q == 2:
+        raise ValueError("first two arguments must not be even, "
+                         "got %i and %i" % (p, q))
+    return p, q
+
+
+def gm_public_key(p, q, a=None, seed=None):
+    """
+    Compute public keys for ``p`` and ``q``.
+    Note that in Goldwasser-Micali Encryption,
+    public keys are randomly selected.
+
+    Parameters
+    ==========
+
+    p, q, a : int, int, int
+        Initialization variables.
+
+    Returns
+    =======
+
+    tuple : (a, N)
+        ``a`` is the input ``a`` if it is not ``None``; otherwise
+        some random integer coprime to ``p`` and ``q``.
+
+        ``N`` is the product of ``p`` and ``q``.
+
+    """
+
+    p, q = gm_private_key(p, q)
+    N = p * q
+
+    if a is None:
+        randrange = _randrange(seed)
+        while True:
+            a = randrange(N)
+            if _legendre(a, p) == _legendre(a, q) == -1:
+                break
+    else:
+        if _legendre(a, p) != -1 or _legendre(a, q) != -1:
+            return False
+    return (a, N)
+
+
+def encipher_gm(i, key, seed=None):
+    """
+    Encrypt integer ``i`` using the public key ``key``.
+    Note that Goldwasser-Micali uses randomized encryption.
+
+    Parameters
+    ==========
+
+    i : int
+        The message to encrypt.
+
+    key : (a, N)
+        The public key.
+
+    Returns
+    =======
+
+    list : list of int
+        The randomized encrypted message.
+
+    """
+    if i < 0:
+        raise ValueError(
+            "message must be a non-negative "
+            "integer: got %d instead" % i)
+    a, N = key
+    bits = []
+    while i > 0:
+        bits.append(i % 2)
+        i //= 2
+
+    gen = _random_coprime_stream(N, seed)
+    rev = reversed(bits)
+    encode = lambda b: next(gen)**2*pow(a, b) % N
+    return [encode(b) for b in rev]
+
+
+def decipher_gm(message, key):
+    """
+    Decrypt ``message`` using the private key ``key``.
+
+    Parameters
+    ==========
+
+    message : list of int
+        The randomized encrypted message.
+
+    key : (p, q)
+        The private key.
+
+    Returns
+    =======
+
+    int
+        The decrypted message.
+
+    """
+    p, q = key
+    res = lambda m, p: _legendre(m, p) > 0
+    bits = [res(m, p) * res(m, q) for m in message]
+    m = 0
+    for b in bits:
+        m <<= 1
+        m += not b
+    return m
+
+
+########### RailFence Cipher #############
+
+def encipher_railfence(message, rails):
+    """
+    Performs railfence encryption on plaintext and returns the
+    ciphertext.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import encipher_railfence
+    >>> message = "hello world"
+    >>> encipher_railfence(message, 3)
+    'horel ollwd'
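+
+    With two rails the plaintext simply alternates between the two
+    rows (the same value is asserted in this package's tests):
+
+    >>> encipher_railfence(message, 2)
+    'hlowrdel ol'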
+
+    Parameters
+    ==========
+
+    message : string, the message to encrypt.
+    rails : int, the number of rails.
+
+    Returns
+    =======
+
+    The encrypted string message.
+
+    References
+    ==========
+    .. [1] https://en.wikipedia.org/wiki/Rail_fence_cipher
+
+    """
+    r = list(range(rails))
+    p = cycle(r + r[-2:0:-1])
+    return ''.join(sorted(message, key=lambda i: next(p)))
+
+
+def decipher_railfence(ciphertext, rails):
+    """
+    Decrypt the message using the given rails.
+
+    Examples
+    ========
+
+    >>> from sympy.crypto.crypto import decipher_railfence
+    >>> decipher_railfence("horel ollwd", 3)
+    'hello world'
+
+    Parameters
+    ==========
+
+    ciphertext : string, the message to decrypt.
+    rails : int, the number of rails.
+
+    Returns
+    =======
+
+    The decrypted string message.
+
+    """
+    r = list(range(rails))
+    p = cycle(r + r[-2:0:-1])
+
+    idx = sorted(range(len(ciphertext)), key=lambda i: next(p))
+    res = [''] * len(ciphertext)
+    for i, c in zip(idx, ciphertext):
+        res[i] = c
+    return ''.join(res)
+
+
+################ Blum-Goldwasser cryptosystem #########################
+
+def bg_private_key(p, q):
+    """
+    Check if p and q can be used as private keys for
+    the Blum-Goldwasser cryptosystem.
+
+    Explanation
+    ===========
+
+    The three necessary checks for p and q to pass
+    so that they can be used as private keys:
+
+        1. p and q must both be prime
+        2. p and q must be distinct
+        3. p and q must be congruent to 3 mod 4
+
+    Parameters
+    ==========
+
+    p, q
+        The keys to be checked.
+
+    Returns
+    =======
+
+    p, q
+        Input values.
+
+    Raises
+    ======
+
+    ValueError
+        If p and q do not pass the above conditions.
+
+    """
+
+    if not isprime(p) or not isprime(q):
+        raise ValueError("the two arguments must be prime, "
+                         "got %i and %i" %(p, q))
+    elif p == q:
+        raise ValueError("the two arguments must be distinct, "
+                         "got two copies of %i. " %p)
+    elif (p - 3) % 4 != 0 or (q - 3) % 4 != 0:
+        raise ValueError("the two arguments must be congruent to 3 mod 4, "
+                         "got %i and %i" %(p, q))
+    return p, q
+
+def bg_public_key(p, q):
+    """
+    Calculates public keys from private keys.
+
+    Explanation
+    ===========
+
+    The function first checks the validity of
+    private keys passed as arguments and
+    then returns their product.
+
+    Parameters
+    ==========
+
+    p, q
+        The private keys.
+
+    Returns
+    =======
+
+    N
+        The public key.
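+
+    Examples
+    ========
+
+    A small sketch with two valid primes, both congruent to 3 mod 4
+    (the same product is asserted in this package's tests):
+
+    >>> from sympy.crypto.crypto import bg_public_key
+    >>> bg_public_key(67, 79)
+    5293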
+
+    """
+    p, q = bg_private_key(p, q)
+    N = p * q
+    return N
+
+def encipher_bg(i, key, seed=None):
+    """
+    Encrypts the message using public key and seed.
+
+    Explanation
+    ===========
+
+    ALGORITHM:
+        1. Encode i as a string of L bits, m.
+        2. Select a random element r, where 1 < r < key, and compute
+           x = r^2 mod key.
+        3. Use the BBS pseudo-random number generator to generate L
+           random bits, b, using the initial seed x.
+        4. Compute the encrypted message, c_i = m_i XOR b_i,
+           1 <= i <= L.
+        5. Compute x_L = x^(2^L) mod key.
+        6. Return (c, x_L).
+
+    Parameters
+    ==========
+
+    i
+        Message, a non-negative integer
+
+    key
+        The public key
+
+    Returns
+    =======
+
+    Tuple
+        (encrypted_message, x_L)
+
+    Raises
+    ======
+
+    ValueError
+        If i is negative.
+
+    """
+
+    if i < 0:
+        raise ValueError(
+            "message must be a non-negative "
+            "integer: got %d instead" % i)
+
+    enc_msg = []
+    while i > 0:
+        enc_msg.append(i % 2)
+        i //= 2
+    enc_msg.reverse()
+    L = len(enc_msg)
+
+    r = _randint(seed)(2, key - 1)
+    x = r**2 % key
+    x_L = pow(int(x), int(2**L), int(key))
+
+    rand_bits = []
+    for _ in range(L):
+        rand_bits.append(x % 2)
+        x = x**2 % key
+
+    encrypt_msg = [m ^ b for (m, b) in zip(enc_msg, rand_bits)]
+
+    return (encrypt_msg, x_L)
+
+def decipher_bg(message, key):
+    """
+    Decrypts the message using private keys.
+
+    Explanation
+    ===========
+
+    ALGORITHM:
+        1. Let c be the encrypted message, y the second number received,
+           and p and q the private keys.
+        2. Compute r_p = y^(((p+1)/4)^L) mod p and
+           r_q = y^(((q+1)/4)^L) mod q.
+        3. Compute x_0 = (q(q^-1 mod p)r_p + p(p^-1 mod q)r_q) mod N.
+        4. From x_0, recompute the bits using the BBS generator, as in
+           the encryption algorithm.
+        5. Compute the original message by XORing c and b.
+
+    Parameters
+    ==========
+
+    message
+        Tuple of encrypted message and a non-negative integer.
+
+    key
+        Tuple of private keys.
+
+    Returns
+    =======
+
+    orig_msg
+        The original message
+
+    """
+
+    p, q = key
+    encrypt_msg, y = message
+    public_key = p * q
+    L = len(encrypt_msg)
+    p_t = ((p + 1)/4)**L
+    q_t = ((q + 1)/4)**L
+    r_p = pow(int(y), int(p_t), int(p))
+    r_q = pow(int(y), int(q_t), int(q))
+
+    x = (q * mod_inverse(q, p) * r_p + p * mod_inverse(p, q) * r_q) % public_key
+
+    orig_bits = []
+    for _ in range(L):
+        orig_bits.append(x % 2)
+        x = x**2 % public_key
+
+    orig_msg = 0
+    for (m, b) in zip(encrypt_msg, orig_bits):
+        orig_msg = orig_msg * 2
+        orig_msg += (m ^ b)
+
+    return orig_msg
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..25bd491ae16b0eb2e339fe5c7735a8eba4427af5
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/__init__.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7aaae33a2878b0b4f819130b8ec4eb522b13a6f7
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/__pycache__/test_crypto.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py
new file mode 100644
index 0000000000000000000000000000000000000000..36067bfd018189ff32ee4072befb507b3015bc74
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/sympy/crypto/tests/test_crypto.py
@@ -0,0 +1,562 @@
+from sympy.core import symbols
+from sympy.crypto.crypto import (cycle_list,
+    encipher_shift, encipher_affine, encipher_substitution,
+    check_and_join, encipher_vigenere, decipher_vigenere,
+    encipher_hill, decipher_hill, encipher_bifid5, encipher_bifid6,
+    bifid5_square, bifid6_square, bifid5, bifid6,
+    decipher_bifid5, decipher_bifid6, encipher_kid_rsa,
+    decipher_kid_rsa, kid_rsa_private_key, kid_rsa_public_key,
+    decipher_rsa, rsa_private_key, rsa_public_key, encipher_rsa,
+    lfsr_connection_polynomial, lfsr_autocorrelation, lfsr_sequence,
+    encode_morse, decode_morse, elgamal_private_key, elgamal_public_key,
+    encipher_elgamal, decipher_elgamal, dh_private_key, dh_public_key,
+    dh_shared_key, decipher_shift, decipher_affine, encipher_bifid,
+    decipher_bifid, bifid_square, padded_key, uniq, decipher_gm,
+    encipher_gm, gm_public_key, gm_private_key, encipher_bg, decipher_bg,
+
bg_private_key, bg_public_key, encipher_rot13, decipher_rot13, + encipher_atbash, decipher_atbash, NonInvertibleCipherWarning, + encipher_railfence, decipher_railfence) +from sympy.matrices import Matrix +from sympy.ntheory import isprime, is_primitive_root +from sympy.polys.domains import FF + +from sympy.testing.pytest import raises, warns + +from sympy.core.random import randrange + +def test_encipher_railfence(): + assert encipher_railfence("hello world",2) == "hlowrdel ol" + assert encipher_railfence("hello world",3) == "horel ollwd" + assert encipher_railfence("hello world",4) == "hwe olordll" + +def test_decipher_railfence(): + assert decipher_railfence("hlowrdel ol",2) == "hello world" + assert decipher_railfence("horel ollwd",3) == "hello world" + assert decipher_railfence("hwe olordll",4) == "hello world" + + +def test_cycle_list(): + assert cycle_list(3, 4) == [3, 0, 1, 2] + assert cycle_list(-1, 4) == [3, 0, 1, 2] + assert cycle_list(1, 4) == [1, 2, 3, 0] + + +def test_encipher_shift(): + assert encipher_shift("ABC", 0) == "ABC" + assert encipher_shift("ABC", 1) == "BCD" + assert encipher_shift("ABC", -1) == "ZAB" + assert decipher_shift("ZAB", -1) == "ABC" + +def test_encipher_rot13(): + assert encipher_rot13("ABC") == "NOP" + assert encipher_rot13("NOP") == "ABC" + assert decipher_rot13("ABC") == "NOP" + assert decipher_rot13("NOP") == "ABC" + + +def test_encipher_affine(): + assert encipher_affine("ABC", (1, 0)) == "ABC" + assert encipher_affine("ABC", (1, 1)) == "BCD" + assert encipher_affine("ABC", (-1, 0)) == "AZY" + assert encipher_affine("ABC", (-1, 1), symbols="ABCD") == "BAD" + assert encipher_affine("123", (-1, 1), symbols="1234") == "214" + assert encipher_affine("ABC", (3, 16)) == "QTW" + assert decipher_affine("QTW", (3, 16)) == "ABC" + +def test_encipher_atbash(): + assert encipher_atbash("ABC") == "ZYX" + assert encipher_atbash("ZYX") == "ABC" + assert decipher_atbash("ABC") == "ZYX" + assert decipher_atbash("ZYX") == "ABC" + +def test_encipher_substitution(): + assert encipher_substitution("ABC", "BAC", "ABC") == "BAC" + assert encipher_substitution("123", "1243", "1234") == "124" + + +def test_check_and_join(): + assert check_and_join("abc") == "abc" + assert check_and_join(uniq("aaabc")) == "abc" + assert check_and_join("ab c".split()) == "abc" + assert check_and_join("abc", "a", filter=True) == "a" + raises(ValueError, lambda: check_and_join('ab', 'a')) + + +def test_encipher_vigenere(): + assert encipher_vigenere("ABC", "ABC") == "ACE" + assert encipher_vigenere("ABC", "ABC", symbols="ABCD") == "ACA" + assert encipher_vigenere("ABC", "AB", symbols="ABCD") == "ACC" + assert encipher_vigenere("AB", "ABC", symbols="ABCD") == "AC" + assert encipher_vigenere("A", "ABC", symbols="ABCD") == "A" + + +def test_decipher_vigenere(): + assert decipher_vigenere("ABC", "ABC") == "AAA" + assert decipher_vigenere("ABC", "ABC", symbols="ABCD") == "AAA" + assert decipher_vigenere("ABC", "AB", symbols="ABCD") == "AAC" + assert decipher_vigenere("AB", "ABC", symbols="ABCD") == "AA" + assert decipher_vigenere("A", "ABC", symbols="ABCD") == "A" + + +def test_encipher_hill(): + A = Matrix(2, 2, [1, 2, 3, 5]) + assert encipher_hill("ABCD", A) == "CFIV" + A = Matrix(2, 2, [1, 0, 0, 1]) + assert encipher_hill("ABCD", A) == "ABCD" + assert encipher_hill("ABCD", A, symbols="ABCD") == "ABCD" + A = Matrix(2, 2, [1, 2, 3, 5]) + assert encipher_hill("ABCD", A, symbols="ABCD") == "CBAB" + assert encipher_hill("AB", A, symbols="ABCD") == "CB" + # message length, n, does not need to be a 
multiple of k; + # it is padded + assert encipher_hill("ABA", A) == "CFGC" + assert encipher_hill("ABA", A, pad="Z") == "CFYV" + + +def test_decipher_hill(): + A = Matrix(2, 2, [1, 2, 3, 5]) + assert decipher_hill("CFIV", A) == "ABCD" + A = Matrix(2, 2, [1, 0, 0, 1]) + assert decipher_hill("ABCD", A) == "ABCD" + assert decipher_hill("ABCD", A, symbols="ABCD") == "ABCD" + A = Matrix(2, 2, [1, 2, 3, 5]) + assert decipher_hill("CBAB", A, symbols="ABCD") == "ABCD" + assert decipher_hill("CB", A, symbols="ABCD") == "AB" + # n does not need to be a multiple of k + assert decipher_hill("CFA", A) == "ABAA" + + +def test_encipher_bifid5(): + assert encipher_bifid5("AB", "AB") == "AB" + assert encipher_bifid5("AB", "CD") == "CO" + assert encipher_bifid5("ab", "c") == "CH" + assert encipher_bifid5("a bc", "b") == "BAC" + + +def test_bifid5_square(): + A = bifid5 + f = lambda i, j: symbols(A[5*i + j]) + M = Matrix(5, 5, f) + assert bifid5_square("") == M + + +def test_decipher_bifid5(): + assert decipher_bifid5("AB", "AB") == "AB" + assert decipher_bifid5("CO", "CD") == "AB" + assert decipher_bifid5("ch", "c") == "AB" + assert decipher_bifid5("b ac", "b") == "ABC" + + +def test_encipher_bifid6(): + assert encipher_bifid6("AB", "AB") == "AB" + assert encipher_bifid6("AB", "CD") == "CP" + assert encipher_bifid6("ab", "c") == "CI" + assert encipher_bifid6("a bc", "b") == "BAC" + + +def test_decipher_bifid6(): + assert decipher_bifid6("AB", "AB") == "AB" + assert decipher_bifid6("CP", "CD") == "AB" + assert decipher_bifid6("ci", "c") == "AB" + assert decipher_bifid6("b ac", "b") == "ABC" + + +def test_bifid6_square(): + A = bifid6 + f = lambda i, j: symbols(A[6*i + j]) + M = Matrix(6, 6, f) + assert bifid6_square("") == M + + +def test_rsa_public_key(): + assert rsa_public_key(2, 3, 1) == (6, 1) + assert rsa_public_key(5, 3, 3) == (15, 3) + + with warns(NonInvertibleCipherWarning): + assert rsa_public_key(2, 2, 1) == (4, 1) + assert rsa_public_key(8, 8, 8) is False + + +def test_rsa_private_key(): + assert rsa_private_key(2, 3, 1) == (6, 1) + assert rsa_private_key(5, 3, 3) == (15, 3) + assert rsa_private_key(23,29,5) == (667,493) + + with warns(NonInvertibleCipherWarning): + assert rsa_private_key(2, 2, 1) == (4, 1) + assert rsa_private_key(8, 8, 8) is False + + +def test_rsa_large_key(): + # Sample from + # http://www.herongyang.com/Cryptography/JCE-Public-Key-RSA-Private-Public-Key-Pair-Sample.html + p = int('101565610013301240713207239558950144682174355406589305284428666'\ + '903702505233009') + q = int('894687191887545488935455605955948413812376003053143521429242133'\ + '12069293984003') + e = int('65537') + d = int('893650581832704239530398858744759129594796235440844479456143566'\ + '6999402846577625762582824202269399672579058991442587406384754958587'\ + '400493169361356902030209') + assert rsa_public_key(p, q, e) == (p*q, e) + assert rsa_private_key(p, q, e) == (p*q, d) + + +def test_encipher_rsa(): + puk = rsa_public_key(2, 3, 1) + assert encipher_rsa(2, puk) == 2 + puk = rsa_public_key(5, 3, 3) + assert encipher_rsa(2, puk) == 8 + + with warns(NonInvertibleCipherWarning): + puk = rsa_public_key(2, 2, 1) + assert encipher_rsa(2, puk) == 2 + + +def test_decipher_rsa(): + prk = rsa_private_key(2, 3, 1) + assert decipher_rsa(2, prk) == 2 + prk = rsa_private_key(5, 3, 3) + assert decipher_rsa(8, prk) == 2 + + with warns(NonInvertibleCipherWarning): + prk = rsa_private_key(2, 2, 1) + assert decipher_rsa(2, prk) == 2 + + +def test_mutltiprime_rsa_full_example(): + # Test example from + # 
https://iopscience.iop.org/article/10.1088/1742-6596/995/1/012030 + puk = rsa_public_key(2, 3, 5, 7, 11, 13, 7) + prk = rsa_private_key(2, 3, 5, 7, 11, 13, 7) + assert puk == (30030, 7) + assert prk == (30030, 823) + + msg = 10 + encrypted = encipher_rsa(2 * msg - 15, puk) + assert encrypted == 18065 + decrypted = (decipher_rsa(encrypted, prk) + 15) / 2 + assert decrypted == msg + + # Test example from + # https://www.scirp.org/pdf/JCC_2018032215502008.pdf + puk1 = rsa_public_key(53, 41, 43, 47, 41) + prk1 = rsa_private_key(53, 41, 43, 47, 41) + puk2 = rsa_public_key(53, 41, 43, 47, 97) + prk2 = rsa_private_key(53, 41, 43, 47, 97) + + assert puk1 == (4391633, 41) + assert prk1 == (4391633, 294041) + assert puk2 == (4391633, 97) + assert prk2 == (4391633, 455713) + + msg = 12321 + encrypted = encipher_rsa(encipher_rsa(msg, puk1), puk2) + assert encrypted == 1081588 + decrypted = decipher_rsa(decipher_rsa(encrypted, prk2), prk1) + assert decrypted == msg + + +def test_rsa_crt_extreme(): + p = int( + '10177157607154245068023861503693082120906487143725062283406501' \ + '54082258226204046999838297167140821364638180697194879500245557' \ + '65445186962893346463841419427008800341257468600224049986260471' \ + '92257248163014468841725476918639415726709736077813632961290911' \ + '0256421232977833028677441206049309220354796014376698325101693') + + q = int( + '28752342353095132872290181526607275886182793241660805077850801' \ + '75689512797754286972952273553128181861830576836289738668745250' \ + '34028199691128870676414118458442900035778874482624765513861643' \ + '27966696316822188398336199002306588703902894100476186823849595' \ + '103239410527279605442148285816149368667083114802852804976893') + + r = int( + '17698229259868825776879500736350186838850961935956310134378261' \ + '89771862186717463067541369694816245225291921138038800171125596' \ + '07315449521981157084370187887650624061033066022458512942411841' \ + '18747893789972315277160085086164119879536041875335384844820566' \ + '0287479617671726408053319619892052000850883994343378882717849') + + s = int( + '68925428438585431029269182233502611027091755064643742383515623' \ + '64321310582896893395529367074942808353187138794422745718419645' \ + '28291231865157212604266903677599180789896916456120289112752835' \ + '98502265889669730331688206825220074713977607415178738015831030' \ + '364290585369150502819743827343552098197095520550865360159439' + ) + + t = int( + '69035483433453632820551311892368908779778144568711455301541094' \ + '31487047642322695357696860925747923189635033183069823820910521' \ + '71172909106797748883261493224162414050106920442445896819806600' \ + '15448444826108008217972129130625571421904893252804729877353352' \ + '739420480574842850202181462656251626522910618936534699566291' + ) + + e = 65537 + puk = rsa_public_key(p, q, r, s, t, e) + prk = rsa_private_key(p, q, r, s, t, e) + + plaintext = 1000 + ciphertext_1 = encipher_rsa(plaintext, puk) + ciphertext_2 = encipher_rsa(plaintext, puk, [p, q, r, s, t]) + assert ciphertext_1 == ciphertext_2 + assert decipher_rsa(ciphertext_1, prk) == \ + decipher_rsa(ciphertext_1, prk, [p, q, r, s, t]) + + +def test_rsa_exhaustive(): + p, q = 61, 53 + e = 17 + puk = rsa_public_key(p, q, e, totient='Carmichael') + prk = rsa_private_key(p, q, e, totient='Carmichael') + + for msg in range(puk[0]): + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + 
"(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_rsa_multiprime_exhanstive(): + primes = [3, 5, 7, 11] + e = 7 + args = primes + [e] + puk = rsa_public_key(*args, totient='Carmichael') + prk = rsa_private_key(*args, totient='Carmichael') + n = puk[0] + + for msg in range(n): + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + "(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_rsa_multipower_exhanstive(): + from sympy.core.numbers import igcd + primes = [5, 5, 7] + e = 7 + args = primes + [e] + puk = rsa_public_key(*args, multipower=True) + prk = rsa_private_key(*args, multipower=True) + n = puk[0] + + for msg in range(n): + if igcd(msg, n) != 1: + continue + + encrypted = encipher_rsa(msg, puk) + decrypted = decipher_rsa(encrypted, prk) + try: + assert decrypted == msg + except AssertionError: + raise AssertionError( + "The RSA is not correctly decrypted " \ + "(Original : {}, Encrypted : {}, Decrypted : {})" \ + .format(msg, encrypted, decrypted) + ) + + +def test_kid_rsa_public_key(): + assert kid_rsa_public_key(1, 2, 1, 1) == (5, 2) + assert kid_rsa_public_key(1, 2, 2, 1) == (8, 3) + assert kid_rsa_public_key(1, 2, 1, 2) == (7, 2) + + +def test_kid_rsa_private_key(): + assert kid_rsa_private_key(1, 2, 1, 1) == (5, 3) + assert kid_rsa_private_key(1, 2, 2, 1) == (8, 3) + assert kid_rsa_private_key(1, 2, 1, 2) == (7, 4) + + +def test_encipher_kid_rsa(): + assert encipher_kid_rsa(1, (5, 2)) == 2 + assert encipher_kid_rsa(1, (8, 3)) == 3 + assert encipher_kid_rsa(1, (7, 2)) == 2 + + +def test_decipher_kid_rsa(): + assert decipher_kid_rsa(2, (5, 3)) == 1 + assert decipher_kid_rsa(3, (8, 3)) == 1 + assert decipher_kid_rsa(2, (7, 4)) == 1 + + +def test_encode_morse(): + assert encode_morse('ABC') == '.-|-...|-.-.' + assert encode_morse('SMS ') == '...|--|...||' + assert encode_morse('SMS\n') == '...|--|...||' + assert encode_morse('') == '' + assert encode_morse(' ') == '||' + assert encode_morse(' ', sep='`') == '``' + assert encode_morse(' ', sep='``') == '````' + assert encode_morse('!@#$%^&*()_+') == '-.-.--|.--.-.|...-..-|-.--.|-.--.-|..--.-|.-.-.' + assert encode_morse('12345') == '.----|..---|...--|....-|.....' 
+    assert encode_morse('67890') == '-....|--...|---..|----.|-----'
+
+
+def test_decode_morse():
+    assert decode_morse('-.-|.|-.--') == 'KEY'
+    assert decode_morse('.-.|..-|-.||') == 'RUN'
+    raises(KeyError, lambda: decode_morse('.....----'))
+
+
+def test_lfsr_sequence():
+    raises(TypeError, lambda: lfsr_sequence(1, [1], 1))
+    raises(TypeError, lambda: lfsr_sequence([1], 1, 1))
+    F = FF(2)
+    assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)]
+    assert lfsr_sequence([F(0)], [F(1)], 2) == [F(1), F(0)]
+    F = FF(3)
+    assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)]
+    assert lfsr_sequence([F(0)], [F(2)], 2) == [F(2), F(0)]
+    assert lfsr_sequence([F(1)], [F(2)], 2) == [F(2), F(2)]
+
+
+def test_lfsr_autocorrelation():
+    raises(TypeError, lambda: lfsr_autocorrelation(1, 2, 3))
+    F = FF(2)
+    s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5)
+    assert lfsr_autocorrelation(s, 2, 0) == 1
+    assert lfsr_autocorrelation(s, 2, 1) == -1
+
+
+def test_lfsr_connection_polynomial():
+    F = FF(2)
+    x = symbols("x")
+    s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5)
+    assert lfsr_connection_polynomial(s) == x**2 + 1
+    s = lfsr_sequence([F(1), F(1)], [F(0), F(1)], 5)
+    assert lfsr_connection_polynomial(s) == x**2 + x + 1
+
+
+def test_elgamal_private_key():
+    a, b, _ = elgamal_private_key(digit=100)
+    assert isprime(a)
+    assert is_primitive_root(b, a)
+    assert len(bin(a)) >= 102
+
+
+def test_elgamal():
+    dk = elgamal_private_key(5)
+    ek = elgamal_public_key(dk)
+    P = ek[0]
+    assert P - 1 == decipher_elgamal(encipher_elgamal(P - 1, ek), dk)
+    raises(ValueError, lambda: encipher_elgamal(P, dk))
+    raises(ValueError, lambda: encipher_elgamal(-1, dk))
+
+
+def test_dh_private_key():
+    p, g, _ = dh_private_key(digit=100)
+    assert isprime(p)
+    assert is_primitive_root(g, p)
+    assert len(bin(p)) >= 102
+
+
+def test_dh_public_key():
+    p1, g1, a = dh_private_key(digit=100)
+    p2, g2, ga = dh_public_key((p1, g1, a))
+    assert p1 == p2
+    assert g1 == g2
+    assert ga == pow(g1, a, p1)
+
+
+def test_dh_shared_key():
+    prk = dh_private_key(digit=100)
+    p, _, ga = dh_public_key(prk)
+    b = randrange(2, p)
+    sk = dh_shared_key((p, _, ga), b)
+    assert sk == pow(ga, b, p)
+    raises(ValueError, lambda: dh_shared_key((1031, 14, 565), 2000))
+
+
+def test_padded_key():
+    assert padded_key('b', 'ab') == 'ba'
+    raises(ValueError, lambda: padded_key('ab', 'ace'))
+    raises(ValueError, lambda: padded_key('ab', 'abba'))
+
+
+def test_bifid():
+    raises(ValueError, lambda: encipher_bifid('abc', 'b', 'abcde'))
+    assert encipher_bifid('abc', 'b', 'abcd') == 'bdb'
+    raises(ValueError, lambda: decipher_bifid('bdb', 'b', 'abcde'))
+    assert decipher_bifid('bdb', 'b', 'abcd') == 'abc'
+    raises(ValueError, lambda: bifid_square('abcde'))
+    assert bifid5_square("B") == \
+        bifid5_square('BACDEFGHIKLMNOPQRSTUVWXYZ')
+    assert bifid6_square('B0') == \
+        bifid6_square('B0ACDEFGHIJKLMNOPQRSTUVWXYZ123456789')
+
+
+def test_encipher_decipher_gm():
+    ps = [131, 137, 139, 149, 151, 157, 163, 167,
+          173, 179, 181, 191, 193, 197, 199]
+    qs = [89, 97, 101, 103, 107, 109, 113, 127,
+          131, 137, 139, 149, 151, 157, 47]
+    messages = [
+        0, 32855, 34303, 14805, 1280, 75859, 38368,
+        724, 60356, 51675, 76697, 61854, 18661,
+    ]
+    for p, q in zip(ps, qs):
+        pri = gm_private_key(p, q)
+        for msg in messages:
+            pub = gm_public_key(p, q)
+            enc = encipher_gm(msg, pub)
+            dec = decipher_gm(enc, pri)
+            assert dec == msg
+
+
+def test_gm_private_key():
+    raises(ValueError, lambda: gm_public_key(13, 15))
+    raises(ValueError, lambda: gm_public_key(0, 0))
+    raises(ValueError, lambda: gm_public_key(0, 5))
+    assert (17, 19) == gm_private_key(17, 19)
+
+
+def test_gm_public_key():
+    assert 323 == gm_public_key(17, 19)[1]
+    assert 15 == gm_public_key(3, 5)[1]
+    raises(ValueError, lambda: gm_public_key(15, 19))
+
+
+def test_encipher_decipher_bg():
+    ps = [67, 7, 71, 103, 11, 43, 107, 47,
+          79, 19, 83, 23, 59, 127, 31]
+    qs = [7, 71, 103, 11, 43, 107, 47,
+          79, 19, 83, 23, 59, 127, 31, 67]
+    messages = [
+        0, 328, 343, 148, 1280, 758, 383,
+        724, 603, 516, 766, 618, 186,
+    ]
+
+    for p, q in zip(ps, qs):
+        pri = bg_private_key(p, q)
+        for msg in messages:
+            pub = bg_public_key(p, q)
+            enc = encipher_bg(msg, pub)
+            dec = decipher_bg(enc, pri)
+            assert dec == msg
+
+
+def test_bg_private_key():
+    raises(ValueError, lambda: bg_private_key(8, 16))
+    raises(ValueError, lambda: bg_private_key(8, 8))
+    raises(ValueError, lambda: bg_private_key(13, 17))
+    assert (23, 31) == bg_private_key(23, 31)
+
+
+def test_bg_public_key():
+    assert 5293 == bg_public_key(67, 79)
+    assert 713 == bg_public_key(23, 31)
+    raises(ValueError, lambda: bg_private_key(13, 17))
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e347484e8be4386011bcad021e9e7d61349973a
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/__init__.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_gaussopt.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_gaussopt.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7924d5bc0b395b7f41461fb46e3a4b3bfca54471
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_gaussopt.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_medium.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_medium.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45311bfea75380df14e4fa86d6af4ce8e918f447
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_medium.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_polarization.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_polarization.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bfc65bbb46922fbd146ab66cbcba6530f4afe016
Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_polarization.cpython-310.pyc differ
diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_utils.cpython-310.pyc
b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e544decd61b51892f114e9ef56f25a39fe18793f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_utils.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_waves.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_waves.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2c50a8b100fc212ad9ac91837703a96f66b879f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/__pycache__/test_waves.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_gaussopt.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_gaussopt.py new file mode 100644 index 0000000000000000000000000000000000000000..5271f3cbb69cf5de861ff332d36418b79daeb1b5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_gaussopt.py @@ -0,0 +1,102 @@ +from sympy.core.evalf import N +from sympy.core.numbers import (Float, I, oo, pi) +from sympy.core.symbol import symbols +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.functions.elementary.trigonometric import atan2 +from sympy.matrices.dense import Matrix +from sympy.polys.polytools import factor + +from sympy.physics.optics import (BeamParameter, CurvedMirror, + CurvedRefraction, FlatMirror, FlatRefraction, FreeSpace, GeometricRay, + RayTransferMatrix, ThinLens, conjugate_gauss_beams, + gaussian_conj, geometric_conj_ab, geometric_conj_af, geometric_conj_bf, + rayleigh2waist, waist2rayleigh) + + +def streq(a, b): + return str(a) == str(b) + + +def test_gauss_opt(): + mat = RayTransferMatrix(1, 2, 3, 4) + assert mat == Matrix([[1, 2], [3, 4]]) + assert mat == RayTransferMatrix( Matrix([[1, 2], [3, 4]]) ) + assert [mat.A, mat.B, mat.C, mat.D] == [1, 2, 3, 4] + + d, f, h, n1, n2, R = symbols('d f h n1 n2 R') + lens = ThinLens(f) + assert lens == Matrix([[ 1, 0], [-1/f, 1]]) + assert lens.C == -1/f + assert FreeSpace(d) == Matrix([[ 1, d], [0, 1]]) + assert FlatRefraction(n1, n2) == Matrix([[1, 0], [0, n1/n2]]) + assert CurvedRefraction( + R, n1, n2) == Matrix([[1, 0], [(n1 - n2)/(R*n2), n1/n2]]) + assert FlatMirror() == Matrix([[1, 0], [0, 1]]) + assert CurvedMirror(R) == Matrix([[ 1, 0], [-2/R, 1]]) + assert ThinLens(f) == Matrix([[ 1, 0], [-1/f, 1]]) + + mul = CurvedMirror(R)*FreeSpace(d) + mul_mat = Matrix([[ 1, 0], [-2/R, 1]])*Matrix([[ 1, d], [0, 1]]) + assert mul.A == mul_mat[0, 0] + assert mul.B == mul_mat[0, 1] + assert mul.C == mul_mat[1, 0] + assert mul.D == mul_mat[1, 1] + + angle = symbols('angle') + assert GeometricRay(h, angle) == Matrix([[ h], [angle]]) + assert FreeSpace( + d)*GeometricRay(h, angle) == Matrix([[angle*d + h], [angle]]) + assert GeometricRay( Matrix( ((h,), (angle,)) ) ) == Matrix([[h], [angle]]) + assert (FreeSpace(d)*GeometricRay(h, angle)).height == angle*d + h + assert (FreeSpace(d)*GeometricRay(h, angle)).angle == angle + + p = BeamParameter(530e-9, 1, w=1e-3) + assert streq(p.q, 1 + 1.88679245283019*I*pi) + assert streq(N(p.q), 1.0 + 5.92753330865999*I) + assert streq(N(p.w_0), Float(0.00100000000000000)) + assert streq(N(p.z_r), Float(5.92753330865999)) + fs = FreeSpace(10) + p1 = fs*p + 
assert streq(N(p.w), Float(0.00101413072159615)) + assert streq(N(p1.w), Float(0.00210803120913829)) + + w, wavelen = symbols('w wavelen') + assert waist2rayleigh(w, wavelen) == pi*w**2/wavelen + z_r, wavelen = symbols('z_r wavelen') + assert rayleigh2waist(z_r, wavelen) == sqrt(wavelen*z_r)/sqrt(pi) + + a, b, f = symbols('a b f') + assert geometric_conj_ab(a, b) == a*b/(a + b) + assert geometric_conj_af(a, f) == a*f/(a - f) + assert geometric_conj_bf(b, f) == b*f/(b - f) + assert geometric_conj_ab(oo, b) == b + assert geometric_conj_ab(a, oo) == a + + s_in, z_r_in, f = symbols('s_in z_r_in f') + assert gaussian_conj( + s_in, z_r_in, f)[0] == 1/(-1/(s_in + z_r_in**2/(-f + s_in)) + 1/f) + assert gaussian_conj( + s_in, z_r_in, f)[1] == z_r_in/(1 - s_in**2/f**2 + z_r_in**2/f**2) + assert gaussian_conj( + s_in, z_r_in, f)[2] == 1/sqrt(1 - s_in**2/f**2 + z_r_in**2/f**2) + + l, w_i, w_o, f = symbols('l w_i w_o f') + assert conjugate_gauss_beams(l, w_i, w_o, f=f)[0] == f*( + -sqrt(w_i**2/w_o**2 - pi**2*w_i**4/(f**2*l**2)) + 1) + assert factor(conjugate_gauss_beams(l, w_i, w_o, f=f)[1]) == f*w_o**2*( + w_i**2/w_o**2 - sqrt(w_i**2/w_o**2 - pi**2*w_i**4/(f**2*l**2)))/w_i**2 + assert conjugate_gauss_beams(l, w_i, w_o, f=f)[2] == f + + z, l, w_0 = symbols('z l w_0', positive=True) + p = BeamParameter(l, z, w=w_0) + assert p.radius == z*(pi**2*w_0**4/(l**2*z**2) + 1) + assert p.w == w_0*sqrt(l**2*z**2/(pi**2*w_0**4) + 1) + assert p.w_0 == w_0 + assert p.divergence == l/(pi*w_0) + assert p.gouy == atan2(z, pi*w_0**2/l) + assert p.waist_approximation_limit == 2*l/pi + + p = BeamParameter(530e-9, 1, w=1e-3, n=2) + assert streq(p.q, 1 + 3.77358490566038*I*pi) + assert streq(N(p.z_r), Float(11.8550666173200)) + assert streq(N(p.w_0), Float(0.00100000000000000)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_medium.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_medium.py new file mode 100644 index 0000000000000000000000000000000000000000..dfbb485f5b8e401f38c7f1cfa573f960a2479d7b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_medium.py @@ -0,0 +1,48 @@ +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.physics.optics import Medium +from sympy.abc import epsilon, mu, n +from sympy.physics.units import speed_of_light, u0, e0, m, kg, s, A + +from sympy.testing.pytest import raises + +c = speed_of_light.convert_to(m/s) +e0 = e0.convert_to(A**2*s**4/(kg*m**3)) +u0 = u0.convert_to(m*kg/(A**2*s**2)) + + +def test_medium(): + m1 = Medium('m1') + assert m1.intrinsic_impedance == sqrt(u0/e0) + assert m1.speed == 1/sqrt(e0*u0) + assert m1.refractive_index == c*sqrt(e0*u0) + assert m1.permittivity == e0 + assert m1.permeability == u0 + m2 = Medium('m2', epsilon, mu) + assert m2.intrinsic_impedance == sqrt(mu/epsilon) + assert m2.speed == 1/sqrt(epsilon*mu) + assert m2.refractive_index == c*sqrt(epsilon*mu) + assert m2.permittivity == epsilon + assert m2.permeability == mu + # Increasing electric permittivity and magnetic permeability + # by small amount from its value in vacuum. + m3 = Medium('m3', 9.0*10**(-12)*s**4*A**2/(m**3*kg), 1.45*10**(-6)*kg*m/(A**2*s**2)) + assert m3.refractive_index > m1.refractive_index + assert m3 != m1 + # Decreasing electric permittivity and magnetic permeability + # by small amount from its value in vacuum. 
+ m4 = Medium('m4', 7.0*10**(-12)*s**4*A**2/(m**3*kg), 1.15*10**(-6)*kg*m/(A**2*s**2)) + assert m4.refractive_index < m1.refractive_index + m5 = Medium('m5', permittivity=710*10**(-12)*s**4*A**2/(m**3*kg), n=1.33) + assert abs(m5.intrinsic_impedance - 6.24845417765552*kg*m**2/(A**2*s**3)) \ + < 1e-12*kg*m**2/(A**2*s**3) + assert abs(m5.speed - 225407863.157895*m/s) < 1e-6*m/s + assert abs(m5.refractive_index - 1.33000000000000) < 1e-12 + assert abs(m5.permittivity - 7.1e-10*A**2*s**4/(kg*m**3)) \ + < 1e-20*A**2*s**4/(kg*m**3) + assert abs(m5.permeability - 2.77206575232851e-8*kg*m/(A**2*s**2)) \ + < 1e-20*kg*m/(A**2*s**2) + m6 = Medium('m6', None, mu, n) + assert m6.permittivity == n**2/(c**2*mu) + # test for equality of refractive indices + assert Medium('m7').refractive_index == Medium('m8', e0, u0).refractive_index + raises(ValueError, lambda:Medium('m9', e0, u0, 2)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_polarization.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_polarization.py new file mode 100644 index 0000000000000000000000000000000000000000..99c595d82a4a296066d5075f6182895a8de54d91 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_polarization.py @@ -0,0 +1,57 @@ +from sympy.physics.optics.polarization import (jones_vector, stokes_vector, + jones_2_stokes, linear_polarizer, phase_retarder, half_wave_retarder, + quarter_wave_retarder, transmissive_filter, reflective_filter, + mueller_matrix, polarizing_beam_splitter) +from sympy.core.numbers import (I, pi) +from sympy.core.singleton import S +from sympy.core.symbol import symbols +from sympy.functions.elementary.exponential import exp +from sympy.matrices.dense import Matrix + + +def test_polarization(): + assert jones_vector(0, 0) == Matrix([1, 0]) + assert jones_vector(pi/2, 0) == Matrix([0, 1]) + ################################################################# + assert stokes_vector(0, 0) == Matrix([1, 1, 0, 0]) + assert stokes_vector(pi/2, 0) == Matrix([1, -1, 0, 0]) + ################################################################# + H = jones_vector(0, 0) + V = jones_vector(pi/2, 0) + D = jones_vector(pi/4, 0) + A = jones_vector(-pi/4, 0) + R = jones_vector(0, pi/4) + L = jones_vector(0, -pi/4) + + res = [Matrix([1, 1, 0, 0]), + Matrix([1, -1, 0, 0]), + Matrix([1, 0, 1, 0]), + Matrix([1, 0, -1, 0]), + Matrix([1, 0, 0, 1]), + Matrix([1, 0, 0, -1])] + + assert [jones_2_stokes(e) for e in [H, V, D, A, R, L]] == res + ################################################################# + assert linear_polarizer(0) == Matrix([[1, 0], [0, 0]]) + ################################################################# + delta = symbols("delta", real=True) + res = Matrix([[exp(-I*delta/2), 0], [0, exp(I*delta/2)]]) + assert phase_retarder(0, delta) == res + ################################################################# + assert half_wave_retarder(0) == Matrix([[-I, 0], [0, I]]) + ################################################################# + res = Matrix([[exp(-I*pi/4), 0], [0, I*exp(-I*pi/4)]]) + assert quarter_wave_retarder(0) == res + ################################################################# + assert transmissive_filter(1) == Matrix([[1, 0], [0, 1]]) + ################################################################# + assert reflective_filter(1) == Matrix([[1, 0], [0, -1]]) + + res = Matrix([[S(1)/2, S(1)/2, 0, 0], + [S(1)/2, S(1)/2, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0]]) + assert 
mueller_matrix(linear_polarizer(0)) == res + ################################################################# + res = Matrix([[1, 0, 0, 0], [0, 0, 0, -I], [0, 0, 1, 0], [0, -I, 0, 0]]) + assert polarizing_beam_splitter() == res diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_utils.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6c93883a081d3614a604aeadc8a4b617181de669 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_utils.py @@ -0,0 +1,202 @@ +from sympy.core.numbers import comp, Rational +from sympy.physics.optics.utils import (refraction_angle, fresnel_coefficients, + deviation, brewster_angle, critical_angle, lens_makers_formula, + mirror_formula, lens_formula, hyperfocal_distance, + transverse_magnification) +from sympy.physics.optics.medium import Medium +from sympy.physics.units import e0 + +from sympy.core.numbers import oo +from sympy.core.symbol import symbols +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.matrices.dense import Matrix +from sympy.geometry.point import Point3D +from sympy.geometry.line import Ray3D +from sympy.geometry.plane import Plane + +from sympy.testing.pytest import raises + + +ae = lambda a, b, n: comp(a, b, 10**-n) + + +def test_refraction_angle(): + n1, n2 = symbols('n1, n2') + m1 = Medium('m1') + m2 = Medium('m2') + r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0)) + i = Matrix([1, 1, 1]) + n = Matrix([0, 0, 1]) + normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1)) + P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1]) + assert refraction_angle(r1, 1, 1, n) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle([1, 1, 1], 1, 1, n) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle((1, 1, 1), 1, 1, n) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle(i, 1, 1, [0, 0, 1]) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle(i, 1, 1, (0, 0, 1)) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle(i, 1, 1, normal_ray) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle(i, 1, 1, plane=P) == Matrix([ + [ 1], + [ 1], + [-1]]) + assert refraction_angle(r1, 1, 1, plane=P) == \ + Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1)) + assert refraction_angle(r1, m1, 1.33, plane=P) == \ + Ray3D(Point3D(0, 0, 0), Point3D(Rational(100, 133), Rational(100, 133), -789378201649271*sqrt(3)/1000000000000000)) + assert refraction_angle(r1, 1, m2, plane=P) == \ + Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1)) + assert refraction_angle(r1, n1, n2, plane=P) == \ + Ray3D(Point3D(0, 0, 0), Point3D(n1/n2, n1/n2, -sqrt(3)*sqrt(-2*n1**2/(3*n2**2) + 1))) + assert refraction_angle(r1, 1.33, 1, plane=P) == 0 # TIR + assert refraction_angle(r1, 1, 1, normal_ray) == \ + Ray3D(Point3D(0, 0, 0), direction_ratio=[1, 1, -1]) + assert ae(refraction_angle(0.5, 1, 2), 0.24207, 5) + assert ae(refraction_angle(0.5, 2, 1), 1.28293, 5) + raises(ValueError, lambda: refraction_angle(r1, m1, m2, normal_ray, P)) + raises(TypeError, lambda: refraction_angle(m1, m1, m2)) # can add other values for arg[0] + raises(TypeError, lambda: refraction_angle(r1, m1, m2, None, i)) + raises(TypeError, lambda: refraction_angle(r1, m1, m2, m2)) + + +def test_fresnel_coefficients(): + assert all(ae(i, j, 5) for i, j in zip( + fresnel_coefficients(0.5, 1, 1.33), + [0.11163, -0.17138, 0.83581, 0.82862])) + assert all(ae(i, j, 5) for i, j in zip( + 
fresnel_coefficients(0.5, 1.33, 1), + [-0.07726, 0.20482, 1.22724, 1.20482])) + m1 = Medium('m1') + m2 = Medium('m2', n=2) + assert all(ae(i, j, 5) for i, j in zip( + fresnel_coefficients(0.3, m1, m2), + [0.31784, -0.34865, 0.65892, 0.65135])) + ans = [[-0.23563, -0.97184], [0.81648, -0.57738]] + got = fresnel_coefficients(0.6, m2, m1) + for i, j in zip(got, ans): + for a, b in zip(i.as_real_imag(), j): + assert ae(a, b, 5) + + +def test_deviation(): + n1, n2 = symbols('n1, n2') + r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0)) + n = Matrix([0, 0, 1]) + i = Matrix([-1, -1, -1]) + normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1)) + P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1]) + assert deviation(r1, 1, 1, normal=n) == 0 + assert deviation(r1, 1, 1, plane=P) == 0 + assert deviation(r1, 1, 1.1, plane=P).evalf(3) + 0.119 < 1e-3 + assert deviation(i, 1, 1.1, normal=normal_ray).evalf(3) + 0.119 < 1e-3 + assert deviation(r1, 1.33, 1, plane=P) is None # TIR + assert deviation(r1, 1, 1, normal=[0, 0, 1]) == 0 + assert deviation([-1, -1, -1], 1, 1, normal=[0, 0, 1]) == 0 + assert ae(deviation(0.5, 1, 2), -0.25793, 5) + assert ae(deviation(0.5, 2, 1), 0.78293, 5) + + +def test_brewster_angle(): + m1 = Medium('m1', n=1) + m2 = Medium('m2', n=1.33) + assert ae(brewster_angle(m1, m2), 0.93, 2) + m1 = Medium('m1', permittivity=e0, n=1) + m2 = Medium('m2', permittivity=e0, n=1.33) + assert ae(brewster_angle(m1, m2), 0.93, 2) + assert ae(brewster_angle(1, 1.33), 0.93, 2) + + +def test_critical_angle(): + m1 = Medium('m1', n=1) + m2 = Medium('m2', n=1.33) + assert ae(critical_angle(m2, m1), 0.85, 2) + + +def test_lens_makers_formula(): + n1, n2 = symbols('n1, n2') + m1 = Medium('m1', permittivity=e0, n=1) + m2 = Medium('m2', permittivity=e0, n=1.33) + assert lens_makers_formula(n1, n2, 10, -10) == 5.0*n2/(n1 - n2) + assert ae(lens_makers_formula(m1, m2, 10, -10), -20.15, 2) + assert ae(lens_makers_formula(1.33, 1, 10, -10), 15.15, 2) + + +def test_mirror_formula(): + u, v, f = symbols('u, v, f') + assert mirror_formula(focal_length=f, u=u) == f*u/(-f + u) + assert mirror_formula(focal_length=f, v=v) == f*v/(-f + v) + assert mirror_formula(u=u, v=v) == u*v/(u + v) + assert mirror_formula(u=oo, v=v) == v + assert mirror_formula(u=oo, v=oo) is oo + assert mirror_formula(focal_length=oo, u=u) == -u + assert mirror_formula(u=u, v=oo) == u + assert mirror_formula(focal_length=oo, v=oo) is oo + assert mirror_formula(focal_length=f, v=oo) == f + assert mirror_formula(focal_length=oo, v=v) == -v + assert mirror_formula(focal_length=oo, u=oo) is oo + assert mirror_formula(focal_length=f, u=oo) == f + assert mirror_formula(focal_length=oo, u=u) == -u + raises(ValueError, lambda: mirror_formula(focal_length=f, u=u, v=v)) + + +def test_lens_formula(): + u, v, f = symbols('u, v, f') + assert lens_formula(focal_length=f, u=u) == f*u/(f + u) + assert lens_formula(focal_length=f, v=v) == f*v/(f - v) + assert lens_formula(u=u, v=v) == u*v/(u - v) + assert lens_formula(u=oo, v=v) == v + assert lens_formula(u=oo, v=oo) is oo + assert lens_formula(focal_length=oo, u=u) == u + assert lens_formula(u=u, v=oo) == -u + assert lens_formula(focal_length=oo, v=oo) is -oo + assert lens_formula(focal_length=oo, v=v) == v + assert lens_formula(focal_length=f, v=oo) == -f + assert lens_formula(focal_length=oo, u=oo) is oo + assert lens_formula(focal_length=oo, u=u) == u + assert lens_formula(focal_length=f, u=oo) == f + raises(ValueError, lambda: lens_formula(focal_length=f, u=u, v=v)) + + +def test_hyperfocal_distance(): + f, 
N, c = symbols('f, N, c') + assert hyperfocal_distance(f=f, N=N, c=c) == f**2/(N*c) + assert ae(hyperfocal_distance(f=0.5, N=8, c=0.0033), 9.47, 2) + + +def test_transverse_magnification(): + si, so = symbols('si, so') + assert transverse_magnification(si, so) == -si/so + assert transverse_magnification(30, 15) == -2 + + +def test_lens_makers_formula_thick_lens(): + n1, n2 = symbols('n1, n2') + m1 = Medium('m1', permittivity=e0, n=1) + m2 = Medium('m2', permittivity=e0, n=1.33) + assert ae(lens_makers_formula(m1, m2, 10, -10, d=1), -19.82, 2) + assert lens_makers_formula(n1, n2, 1, -1, d=0.1) == n2/((2.0 - (0.1*n1 - 0.1*n2)/n1)*(n1 - n2)) + + +def test_lens_makers_formula_plano_lens(): + n1, n2 = symbols('n1, n2') + m1 = Medium('m1', permittivity=e0, n=1) + m2 = Medium('m2', permittivity=e0, n=1.33) + assert ae(lens_makers_formula(m1, m2, 10, oo), -40.30, 2) + assert lens_makers_formula(n1, n2, 10, oo) == 10.0*n2/(n1 - n2) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_waves.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_waves.py new file mode 100644 index 0000000000000000000000000000000000000000..3cb8f804fb5be86d6174cb7c7b15fd8979c85ff8 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/optics/tests/test_waves.py @@ -0,0 +1,82 @@ +from sympy.core.function import (Derivative, Function) +from sympy.core.numbers import (I, pi) +from sympy.core.symbol import (Symbol, symbols) +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.functions.elementary.trigonometric import (atan2, cos, sin) +from sympy.simplify.simplify import simplify +from sympy.abc import epsilon, mu +from sympy.functions.elementary.exponential import exp +from sympy.physics.units import speed_of_light, m, s +from sympy.physics.optics import TWave + +from sympy.testing.pytest import raises + +c = speed_of_light.convert_to(m/s) + +def test_twave(): + A1, phi1, A2, phi2, f = symbols('A1, phi1, A2, phi2, f') + n = Symbol('n') # Refractive index + t = Symbol('t') # Time + x = Symbol('x') # Spatial variable + E = Function('E') + w1 = TWave(A1, f, phi1) + w2 = TWave(A2, f, phi2) + assert w1.amplitude == A1 + assert w1.frequency == f + assert w1.phase == phi1 + assert w1.wavelength == c/(f*n) + assert w1.time_period == 1/f + assert w1.angular_velocity == 2*pi*f + assert w1.wavenumber == 2*pi*f*n/c + assert w1.speed == c/n + + w3 = w1 + w2 + assert w3.amplitude == sqrt(A1**2 + 2*A1*A2*cos(phi1 - phi2) + A2**2) + assert w3.frequency == f + assert w3.phase == atan2(A1*sin(phi1) + A2*sin(phi2), A1*cos(phi1) + A2*cos(phi2)) + assert w3.wavelength == c/(f*n) + assert w3.time_period == 1/f + assert w3.angular_velocity == 2*pi*f + assert w3.wavenumber == 2*pi*f*n/c + assert w3.speed == c/n + assert simplify(w3.rewrite(sin) - w2.rewrite(sin) - w1.rewrite(sin)) == 0 + assert w3.rewrite('pde') == epsilon*mu*Derivative(E(x, t), t, t) + Derivative(E(x, t), x, x) + assert w3.rewrite(cos) == sqrt(A1**2 + 2*A1*A2*cos(phi1 - phi2) + + A2**2)*cos(pi*f*n*x*s/(149896229*m) - 2*pi*f*t + atan2(A1*sin(phi1) + + A2*sin(phi2), A1*cos(phi1) + A2*cos(phi2))) + assert w3.rewrite(exp) == sqrt(A1**2 + 2*A1*A2*cos(phi1 - phi2) + + A2**2)*exp(I*(-2*pi*f*t + atan2(A1*sin(phi1) + A2*sin(phi2), A1*cos(phi1) + + A2*cos(phi2)) + pi*s*f*n*x/(149896229*m))) + + w4 = TWave(A1, None, 0, 1/f) + assert w4.frequency == f + + w5 = w1 - w2 + assert w5.amplitude == sqrt(A1**2 - 2*A1*A2*cos(phi1 - phi2) + A2**2) + assert w5.frequency == f + assert w5.phase == 
atan2(A1*sin(phi1) - A2*sin(phi2), A1*cos(phi1) - A2*cos(phi2)) + assert w5.wavelength == c/(f*n) + assert w5.time_period == 1/f + assert w5.angular_velocity == 2*pi*f + assert w5.wavenumber == 2*pi*f*n/c + assert w5.speed == c/n + assert simplify(w5.rewrite(sin) - w1.rewrite(sin) + w2.rewrite(sin)) == 0 + assert w5.rewrite('pde') == epsilon*mu*Derivative(E(x, t), t, t) + Derivative(E(x, t), x, x) + assert w5.rewrite(cos) == sqrt(A1**2 - 2*A1*A2*cos(phi1 - phi2) + + A2**2)*cos(-2*pi*f*t + atan2(A1*sin(phi1) - A2*sin(phi2), A1*cos(phi1) + - A2*cos(phi2)) + pi*s*f*n*x/(149896229*m)) + assert w5.rewrite(exp) == sqrt(A1**2 - 2*A1*A2*cos(phi1 - phi2) + + A2**2)*exp(I*(-2*pi*f*t + atan2(A1*sin(phi1) - A2*sin(phi2), A1*cos(phi1) + - A2*cos(phi2)) + pi*s*f*n*x/(149896229*m))) + + w6 = 2*w1 + assert w6.amplitude == 2*A1 + assert w6.frequency == f + assert w6.phase == phi1 + w7 = -w6 + assert w7.amplitude == -2*A1 + assert w7.frequency == f + assert w7.phase == phi1 + + raises(ValueError, lambda:TWave(A1)) + raises(ValueError, lambda:TWave(A1, f, phi1, t)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92e49b3581450c96e76ea07bbf5001eed8640699 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/quantities.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/quantities.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b830d64ddf4e9c61f752f4e1bc8f22880c50b1c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/__pycache__/quantities.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3cd83c21f8c7b46b00a6d37c8ddc1a8d190c00a8 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/cgs.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/cgs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3eb7fdb1b1a76d039b356b8f4d2588853edf343c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/cgs.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/length_weight_time.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/length_weight_time.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a93937b5abd2d126a934ba2c0f16595001271918 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/length_weight_time.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mks.cpython-310.pyc 
b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2d7b4f0c142cc8dbd0e30742c29bb44c06f09e9 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mks.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mksa.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mksa.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2736ab259f2e103ee21588aa498648ecc2c52c9 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/mksa.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/natural.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/natural.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c70d3f42aecf7c61d5013738c5252f6e838cef3 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/natural.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/si.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/si.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef44f1156d7654311ffadb6c7e2c86f53ca45635 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/__pycache__/si.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/si.py b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/si.py new file mode 100644 index 0000000000000000000000000000000000000000..2bfa7805871b8663c70b8af7da9ca1dc9b4afab3 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/physics/units/systems/si.py @@ -0,0 +1,377 @@ +""" +SI unit system. +Based on MKSA, which stands for "meter, kilogram, second, ampere". +Added kelvin, candela and mole. 
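+
+For example, a quantity can be rewritten in terms of the SI base units with
+``convert_to`` (an illustrative sketch; ``convert_to`` lives in
+``sympy.physics.units``):
+
+    >>> from sympy.physics.units import convert_to, newton
+    >>> from sympy.physics.units import kilogram, meter, second
+    >>> convert_to(newton, [kilogram, meter, second])
+    kilogram*meter/second**2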
+ +""" + +from __future__ import annotations + +from sympy.physics.units import DimensionSystem, Dimension, dHg0 + +from sympy.physics.units.quantities import Quantity + +from sympy.core.numbers import (Rational, pi) +from sympy.core.singleton import S +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.physics.units.definitions.dimension_definitions import ( + acceleration, action, current, impedance, length, mass, time, velocity, + amount_of_substance, temperature, information, frequency, force, pressure, + energy, power, charge, voltage, capacitance, conductance, magnetic_flux, + magnetic_density, inductance, luminous_intensity +) +from sympy.physics.units.definitions import ( + kilogram, newton, second, meter, gram, cd, K, joule, watt, pascal, hertz, + coulomb, volt, ohm, siemens, farad, henry, tesla, weber, dioptre, lux, + katal, gray, becquerel, inch, liter, julian_year, gravitational_constant, + speed_of_light, elementary_charge, planck, hbar, electronvolt, + avogadro_number, avogadro_constant, boltzmann_constant, electron_rest_mass, + stefan_boltzmann_constant, Da, atomic_mass_constant, molar_gas_constant, + faraday_constant, josephson_constant, von_klitzing_constant, + acceleration_due_to_gravity, magnetic_constant, vacuum_permittivity, + vacuum_impedance, coulomb_constant, atmosphere, bar, pound, psi, mmHg, + milli_mass_unit, quart, lightyear, astronomical_unit, planck_mass, + planck_time, planck_temperature, planck_length, planck_charge, planck_area, + planck_volume, planck_momentum, planck_energy, planck_force, planck_power, + planck_density, planck_energy_density, planck_intensity, + planck_angular_frequency, planck_pressure, planck_current, planck_voltage, + planck_impedance, planck_acceleration, bit, byte, kibibyte, mebibyte, + gibibyte, tebibyte, pebibyte, exbibyte, curie, rutherford, radian, degree, + steradian, angular_mil, atomic_mass_unit, gee, kPa, ampere, u0, c, kelvin, + mol, mole, candela, m, kg, s, electric_constant, G, boltzmann +) +from sympy.physics.units.prefixes import PREFIXES, prefix_unit +from sympy.physics.units.systems.mksa import MKSA, dimsys_MKSA + +derived_dims = (frequency, force, pressure, energy, power, charge, voltage, + capacitance, conductance, magnetic_flux, + magnetic_density, inductance, luminous_intensity) +base_dims = (amount_of_substance, luminous_intensity, temperature) + +units = [mol, cd, K, lux, hertz, newton, pascal, joule, watt, coulomb, volt, + farad, ohm, siemens, weber, tesla, henry, candela, lux, becquerel, + gray, katal] + +all_units: list[Quantity] = [] +for u in units: + all_units.extend(prefix_unit(u, PREFIXES)) + +all_units.extend(units) +all_units.extend([mol, cd, K, lux]) + + +dimsys_SI = dimsys_MKSA.extend( + [ + # Dimensional dependencies for other base dimensions: + temperature, + amount_of_substance, + luminous_intensity, + ]) + +dimsys_default = dimsys_SI.extend( + [information], +) + +SI = MKSA.extend(base=(mol, cd, K), units=all_units, name='SI', dimension_system=dimsys_SI, derived_units={ + power: watt, + magnetic_flux: weber, + time: second, + impedance: ohm, + pressure: pascal, + current: ampere, + voltage: volt, + length: meter, + frequency: hertz, + inductance: henry, + temperature: kelvin, + amount_of_substance: mole, + luminous_intensity: candela, + conductance: siemens, + mass: kilogram, + magnetic_density: tesla, + charge: coulomb, + force: newton, + capacitance: farad, + energy: joule, + velocity: meter/second, +}) + +One = S.One + +SI.set_quantity_dimension(radian, One) + 
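+# The base units handled below (ampere, kelvin, mole, candela) carry a
+# scale factor of One by definition.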
+SI.set_quantity_scale_factor(ampere, One) + +SI.set_quantity_scale_factor(kelvin, One) + +SI.set_quantity_scale_factor(mole, One) + +SI.set_quantity_scale_factor(candela, One) + +# MKSA extension to MKS: derived units + +SI.set_quantity_scale_factor(coulomb, One) + +SI.set_quantity_scale_factor(volt, joule/coulomb) + +SI.set_quantity_scale_factor(ohm, volt/ampere) + +SI.set_quantity_scale_factor(siemens, ampere/volt) + +SI.set_quantity_scale_factor(farad, coulomb/volt) + +SI.set_quantity_scale_factor(henry, volt*second/ampere) + +SI.set_quantity_scale_factor(tesla, volt*second/meter**2) + +SI.set_quantity_scale_factor(weber, joule/ampere) + + +SI.set_quantity_dimension(lux, luminous_intensity / length ** 2) +SI.set_quantity_scale_factor(lux, steradian*candela/meter**2) + +# katal is the SI unit of catalytic activity + +SI.set_quantity_dimension(katal, amount_of_substance / time) +SI.set_quantity_scale_factor(katal, mol/second) + +# gray is the SI unit of absorbed dose + +SI.set_quantity_dimension(gray, energy / mass) +SI.set_quantity_scale_factor(gray, meter**2/second**2) + +# becquerel is the SI unit of radioactivity + +SI.set_quantity_dimension(becquerel, 1 / time) +SI.set_quantity_scale_factor(becquerel, 1/second) + +#### CONSTANTS #### + +# elementary charge +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(elementary_charge, charge) +SI.set_quantity_scale_factor(elementary_charge, 1.602176634e-19*coulomb) + +# Electronvolt +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(electronvolt, energy) +SI.set_quantity_scale_factor(electronvolt, 1.602176634e-19*joule) + +# Avogadro number +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(avogadro_number, One) +SI.set_quantity_scale_factor(avogadro_number, 6.02214076e23) + +# Avogadro constant + +SI.set_quantity_dimension(avogadro_constant, amount_of_substance ** -1) +SI.set_quantity_scale_factor(avogadro_constant, avogadro_number / mol) + +# Boltzmann constant +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(boltzmann_constant, energy / temperature) +SI.set_quantity_scale_factor(boltzmann_constant, 1.380649e-23*joule/kelvin) + +# Stefan-Boltzmann constant +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(stefan_boltzmann_constant, energy * time ** -1 * length ** -2 * temperature ** -4) +SI.set_quantity_scale_factor(stefan_boltzmann_constant, pi**2 * boltzmann_constant**4 / (60 * hbar**3 * speed_of_light ** 2)) + +# Atomic mass +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(atomic_mass_constant, mass) +SI.set_quantity_scale_factor(atomic_mass_constant, 1.66053906660e-24*gram) + +# Molar gas constant +# REF: NIST SP 959 (June 2019) + +SI.set_quantity_dimension(molar_gas_constant, energy / (temperature * amount_of_substance)) +SI.set_quantity_scale_factor(molar_gas_constant, boltzmann_constant * avogadro_constant) + +# Faraday constant + +SI.set_quantity_dimension(faraday_constant, charge / amount_of_substance) +SI.set_quantity_scale_factor(faraday_constant, elementary_charge * avogadro_constant) + +# Josephson constant + +SI.set_quantity_dimension(josephson_constant, frequency / voltage) +SI.set_quantity_scale_factor(josephson_constant, 0.5 * planck / elementary_charge) + +# Von Klitzing constant + +SI.set_quantity_dimension(von_klitzing_constant, voltage / current) +SI.set_quantity_scale_factor(von_klitzing_constant, hbar / elementary_charge ** 2) + +# Acceleration due to gravity (on the Earth surface) + +SI.set_quantity_dimension(acceleration_due_to_gravity, acceleration) 
+SI.set_quantity_scale_factor(acceleration_due_to_gravity, 9.80665*meter/second**2) + +# magnetic constant: + +SI.set_quantity_dimension(magnetic_constant, force / current ** 2) +SI.set_quantity_scale_factor(magnetic_constant, 4*pi/10**7 * newton/ampere**2) + +# electric constant: + +SI.set_quantity_dimension(vacuum_permittivity, capacitance / length) +SI.set_quantity_scale_factor(vacuum_permittivity, 1/(u0 * c**2)) + +# vacuum impedance: + +SI.set_quantity_dimension(vacuum_impedance, impedance) +SI.set_quantity_scale_factor(vacuum_impedance, u0 * c) + +# Electron rest mass +SI.set_quantity_dimension(electron_rest_mass, mass) +SI.set_quantity_scale_factor(electron_rest_mass, 9.1093837015e-31*kilogram) + +# Coulomb's constant: +SI.set_quantity_dimension(coulomb_constant, force * length ** 2 / charge ** 2) +SI.set_quantity_scale_factor(coulomb_constant, 1/(4*pi*vacuum_permittivity)) + +SI.set_quantity_dimension(psi, pressure) +SI.set_quantity_scale_factor(psi, pound * gee / inch ** 2) + +SI.set_quantity_dimension(mmHg, pressure) +SI.set_quantity_scale_factor(mmHg, dHg0 * acceleration_due_to_gravity * kilogram / meter**2) + +SI.set_quantity_dimension(milli_mass_unit, mass) +SI.set_quantity_scale_factor(milli_mass_unit, atomic_mass_unit/1000) + +SI.set_quantity_dimension(quart, length ** 3) +SI.set_quantity_scale_factor(quart, Rational(231, 4) * inch**3) + +# Other convenient units and magnitudes + +SI.set_quantity_dimension(lightyear, length) +SI.set_quantity_scale_factor(lightyear, speed_of_light*julian_year) + +SI.set_quantity_dimension(astronomical_unit, length) +SI.set_quantity_scale_factor(astronomical_unit, 149597870691*meter) + +# Fundamental Planck units: + +SI.set_quantity_dimension(planck_mass, mass) +SI.set_quantity_scale_factor(planck_mass, sqrt(hbar*speed_of_light/G)) + +SI.set_quantity_dimension(planck_time, time) +SI.set_quantity_scale_factor(planck_time, sqrt(hbar*G/speed_of_light**5)) + +SI.set_quantity_dimension(planck_temperature, temperature) +SI.set_quantity_scale_factor(planck_temperature, sqrt(hbar*speed_of_light**5/G/boltzmann**2)) + +SI.set_quantity_dimension(planck_length, length) +SI.set_quantity_scale_factor(planck_length, sqrt(hbar*G/speed_of_light**3)) + +SI.set_quantity_dimension(planck_charge, charge) +SI.set_quantity_scale_factor(planck_charge, sqrt(4*pi*electric_constant*hbar*speed_of_light)) + +# Derived Planck units: + +SI.set_quantity_dimension(planck_area, length ** 2) +SI.set_quantity_scale_factor(planck_area, planck_length**2) + +SI.set_quantity_dimension(planck_volume, length ** 3) +SI.set_quantity_scale_factor(planck_volume, planck_length**3) + +SI.set_quantity_dimension(planck_momentum, mass * velocity) +SI.set_quantity_scale_factor(planck_momentum, planck_mass * speed_of_light) + +SI.set_quantity_dimension(planck_energy, energy) +SI.set_quantity_scale_factor(planck_energy, planck_mass * speed_of_light**2) + +SI.set_quantity_dimension(planck_force, force) +SI.set_quantity_scale_factor(planck_force, planck_energy / planck_length) + +SI.set_quantity_dimension(planck_power, power) +SI.set_quantity_scale_factor(planck_power, planck_energy / planck_time) + +SI.set_quantity_dimension(planck_density, mass / length ** 3) +SI.set_quantity_scale_factor(planck_density, planck_mass / planck_length**3) + +SI.set_quantity_dimension(planck_energy_density, energy / length ** 3) +SI.set_quantity_scale_factor(planck_energy_density, planck_energy / planck_length**3) + +SI.set_quantity_dimension(planck_intensity, mass * time ** (-3)) 
+SI.set_quantity_scale_factor(planck_intensity, planck_energy_density * speed_of_light) + +SI.set_quantity_dimension(planck_angular_frequency, 1 / time) +SI.set_quantity_scale_factor(planck_angular_frequency, 1 / planck_time) + +SI.set_quantity_dimension(planck_pressure, pressure) +SI.set_quantity_scale_factor(planck_pressure, planck_force / planck_length**2) + +SI.set_quantity_dimension(planck_current, current) +SI.set_quantity_scale_factor(planck_current, planck_charge / planck_time) + +SI.set_quantity_dimension(planck_voltage, voltage) +SI.set_quantity_scale_factor(planck_voltage, planck_energy / planck_charge) + +SI.set_quantity_dimension(planck_impedance, impedance) +SI.set_quantity_scale_factor(planck_impedance, planck_voltage / planck_current) + +SI.set_quantity_dimension(planck_acceleration, acceleration) +SI.set_quantity_scale_factor(planck_acceleration, speed_of_light / planck_time) + +# Older units for radioactivity + +SI.set_quantity_dimension(curie, 1 / time) +SI.set_quantity_scale_factor(curie, 37000000000*becquerel) + +SI.set_quantity_dimension(rutherford, 1 / time) +SI.set_quantity_scale_factor(rutherford, 1000000*becquerel) + + +# check that scale factors are the right SI dimensions: +for _scale_factor, _dimension in zip( + SI._quantity_scale_factors.values(), + SI._quantity_dimension_map.values() +): + dimex = SI.get_dimensional_expr(_scale_factor) + if dimex != 1: + # XXX: equivalent_dims is an instance method taking two arguments in + # addition to self so this can not work: + if not DimensionSystem.equivalent_dims(_dimension, Dimension(dimex)): # type: ignore + raise ValueError("quantity value and dimension mismatch") +del _scale_factor, _dimension + +__all__ = [ + 'mmHg', 'atmosphere', 'inductance', 'newton', 'meter', + 'vacuum_permittivity', 'pascal', 'magnetic_constant', 'voltage', + 'angular_mil', 'luminous_intensity', 'all_units', + 'julian_year', 'weber', 'exbibyte', 'liter', + 'molar_gas_constant', 'faraday_constant', 'avogadro_constant', + 'lightyear', 'planck_density', 'gee', 'mol', 'bit', 'gray', + 'planck_momentum', 'bar', 'magnetic_density', 'prefix_unit', 'PREFIXES', + 'planck_time', 'dimex', 'gram', 'candela', 'force', 'planck_intensity', + 'energy', 'becquerel', 'planck_acceleration', 'speed_of_light', + 'conductance', 'frequency', 'coulomb_constant', 'degree', 'lux', 'planck', + 'current', 'planck_current', 'tebibyte', 'planck_power', 'MKSA', 'power', + 'K', 'planck_volume', 'quart', 'pressure', 'amount_of_substance', + 'joule', 'boltzmann_constant', 'Dimension', 'c', 'planck_force', 'length', + 'watt', 'action', 'hbar', 'gibibyte', 'DimensionSystem', 'cd', 'volt', + 'planck_charge', 'dioptre', 'vacuum_impedance', 'dimsys_default', 'farad', + 'charge', 'gravitational_constant', 'temperature', 'u0', 'hertz', + 'capacitance', 'tesla', 'steradian', 'planck_mass', 'josephson_constant', + 'planck_area', 'stefan_boltzmann_constant', 'base_dims', + 'astronomical_unit', 'radian', 'planck_voltage', 'impedance', + 'planck_energy', 'Da', 'atomic_mass_constant', 'rutherford', 'second', 'inch', + 'elementary_charge', 'SI', 'electronvolt', 'dimsys_SI', 'henry', + 'planck_angular_frequency', 'ohm', 'pound', 'planck_pressure', 'G', 'psi', + 'dHg0', 'von_klitzing_constant', 'planck_length', 'avogadro_number', + 'mole', 'acceleration', 'information', 'planck_energy_density', + 'mebibyte', 's', 'acceleration_due_to_gravity', 'electron_rest_mass', + 'planck_temperature', 'units', 'mass', 'dimsys_MKSA', 'kelvin', 'kPa', + 'boltzmann', 'milli_mass_unit', 'planck_impedance', 
'electric_constant',
+    'derived_dims', 'kg', 'coulomb', 'siemens', 'byte', 'magnetic_flux',
+    'atomic_mass_unit', 'm', 'kibibyte', 'kilogram', 'One', 'curie', 'u',
+    'time', 'pebibyte', 'velocity', 'ampere', 'katal',
+]
diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/LICENSE b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..7fa40cf5fe093c0c9246746e07a4f04fbc5b565d
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Tsuyoshi Hombashi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..5f4f4544db761506805dc3107d59ecc43af87f59
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/METADATA
@@ -0,0 +1,162 @@
+Metadata-Version: 2.1
+Name: tcolorpy
+Version: 0.1.4
+Summary: tcolorpy is a Python library to apply true color to terminal text.
+Home-page: https://github.com/thombashi/tcolorpy
+Author: Tsuyoshi Hombashi
+Author-email: tsuyoshi.hombashi@gmail.com
+License: MIT License
+Project-URL: Source, https://github.com/thombashi/tcolorpy
+Project-URL: Tracker, https://github.com/thombashi/tcolorpy/issues
+Keywords: ANSI escape,terminal color,truecolor
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Terminals
+Classifier: Topic :: Text Processing
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: test
+Requires-Dist: pytest >=6.0.1 ; extra == 'test'
+Requires-Dist: pytest-md-report >=0.4.1 ; extra == 'test'
+
+.. contents:: **tcolorpy**
+   :backlinks: top
+   :depth: 2
+
+
+Summary
+============================================
+tcolorpy is a Python library to apply true color to terminal text.
+
+.. image:: https://badge.fury.io/py/tcolorpy.svg
+    :target: https://badge.fury.io/py/tcolorpy
+    :alt: PyPI package version
+
+.. image:: https://anaconda.org/conda-forge/tcolorpy/badges/version.svg
+    :target: https://anaconda.org/conda-forge/tcolorpy
+    :alt: conda-forge package version
+
+.. image:: https://img.shields.io/pypi/pyversions/tcolorpy.svg
+    :target: https://pypi.org/project/tcolorpy
+    :alt: Supported Python versions
+
+.. image:: https://img.shields.io/pypi/implementation/tcolorpy.svg
+    :target: https://pypi.org/project/tcolorpy
+    :alt: Supported Python implementations
+
+.. image:: https://github.com/thombashi/tcolorpy/workflows/Tests/badge.svg
+    :target: https://github.com/thombashi/tcolorpy/actions?query=workflow%3ATests
+    :alt: Linux/macOS/Windows CI status
+
+.. image:: https://coveralls.io/repos/github/thombashi/tcolorpy/badge.svg?branch=master
+    :target: https://coveralls.io/github/thombashi/tcolorpy?branch=master
+    :alt: Test coverage: coveralls
+
+
+Installation
+============================================
+
+Installation: pip
+------------------------------
+::
+
+    pip install tcolorpy
+
+Installation: conda
+------------------------------
+::
+
+    conda install -c conda-forge tcolorpy
+
+
+Usage
+============================================
+
+Library usage
+--------------------------------------------
+
+:Sample Code:
+    .. code-block:: python
+
+        from tcolorpy import tcolor
+
+        print(tcolor("tcolopy example", color="#ee1177", styles=["bold", "italic", "underline"]))
+
+:Output:
+    .. figure:: https://cdn.jsdelivr.net/gh/thombashi/tcolorpy@master/ss/oneline.png
+        :scale: 60%
+        :alt: https://github.com/thombashi/tcolorpy/blob/master/ss/oneline.png
+
+You can set the following ``tcolor`` arguments, combined as shown below:
+
+- ``color``/``bg_color``
+    - color names (``"red"``, ``"green"``, etc.) or color code (``"#RRGGBB"``)
+- ``styles``
+    - ``"bold"``, ``"italic"``, etc.
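+
+For example, a foreground color name, a background color code, and multiple
+styles can be combined in a single ``tcolor`` call (a minimal sketch based on
+the arguments listed above; the concrete color and style values are arbitrary):
+
+.. code-block:: python
+
+    from tcolorpy import tcolor
+
+    # foreground by name, background by "#RRGGBB" code, plus two styles
+    print(tcolor("tcolorpy example", color="red", bg_color="#222222", styles=["bold", "underline"]))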
+
+
+Other examples
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Apply true color and styles to text:
+
+.. figure:: https://cdn.jsdelivr.net/gh/thombashi/tcolorpy@master/ss/styles.png
+    :scale: 60%
+    :alt: https://github.com/thombashi/tcolorpy/blob/master/ss/styles.png
+
+    `example source code `__
+
+You can also specify colors by name:
+
+.. figure:: https://cdn.jsdelivr.net/gh/thombashi/tcolorpy@master/ss/ansi_colors.png
+    :scale: 60%
+    :alt: https://github.com/thombashi/tcolorpy/blob/master/ss/ansi_colors.png
+
+    `example source code `__
+
+
+CLI usage
+--------------------------------------------
+``tcolorpy`` can be used via CLI:
+
+::
+
+    $ python3 -m tcolorpy "tcolopy example" -c "#ee1177" -s bold,italic,underline
+
+Command help
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+    usage: __main__.py [-h] [-c COLOR] [-b BG_COLOR] [-s STYLES] [--encode ENCODE] string
+
+    positional arguments:
+      string                string to apply styles.
+
+    options:
+      -h, --help            show this help message and exit
+      -c COLOR, --color COLOR
+                            specify a color code (#XXXXXX) or a name. valid names are: black, red, green, yellow, blue, magenta, cyan, white, lightblack, lightred, lightgreen, lightyellow, lightblue, lightmagenta, lightcyan, lightwhite
+      -b BG_COLOR, --bg-color BG_COLOR
+                            specify a background color code (#XXXXXX) or a name. valid names are: black, red, green, yellow, blue, magenta, cyan, white, lightblack, lightred, lightgreen, lightyellow, lightblue, lightmagenta, lightcyan, lightwhite
+      -s STYLES, --styles STYLES
+                            specify a comma-separated style. valid values are: bold, dim, italic, underline, blink, invert, strike
+      --encode ENCODE       output a text encoded with the specified encoding
+
+
+Dependencies
+============================================
+Python 3.7+.
+No external dependencies.
diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ee34dbd8b70064e4d587b41f5659ec983ccb5242 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/RECORD @@ -0,0 +1,17 @@ +tcolorpy-0.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tcolorpy-0.1.4.dist-info/LICENSE,sha256=9BoEVtXyu6Jf1NflC1GpXeMEdw_x21p5UV0DOXqRTY0,1074 +tcolorpy-0.1.4.dist-info/METADATA,sha256=rFyw79V_YbDAiIAncjF7kdjKdHq0v7CJAlcy0ImSQFw,5716 +tcolorpy-0.1.4.dist-info/RECORD,, +tcolorpy-0.1.4.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 +tcolorpy-0.1.4.dist-info/top_level.txt,sha256=g8LDaQz0FVP61jibPz7OTwQqiseVV9pxUYDeGp2lFAI,9 +tcolorpy/__init__.py,sha256=729PMIfmOYicXFInfS4Uml-uA17tRllqPJ1ZFmSsIQ0,705 +tcolorpy/__main__.py,sha256=gjNpi78hE-X6CpY20ZLMmQ_yaWYIh_eOu2XrLnoGkBE,1701 +tcolorpy/__pycache__/__init__.cpython-310.pyc,, +tcolorpy/__pycache__/__main__.cpython-310.pyc,, +tcolorpy/__pycache__/__version__.cpython-310.pyc,, +tcolorpy/__pycache__/_const.cpython-310.pyc,, +tcolorpy/__pycache__/_truecolor.cpython-310.pyc,, +tcolorpy/__version__.py,sha256=uzTkOQkPBiZDuqTer6QpBlWsMYzrB5PZ-7rn53qkbaQ,201 +tcolorpy/_const.py,sha256=XS2rzsxY7SKxg0HreYTR_kEGeSi_59gOrrntI2_kG1o,1080 +tcolorpy/_truecolor.py,sha256=nzu2GCc6Tu_4no5_Qcksm88-Vm75sCdeOMDQHG_2DhM,7495 +tcolorpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..7e688737d490be3643d705bc16b5a77f7bd567b7 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/top_level.txt b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..eda2ffb721c9f828b59d81ca04550452a1cde8cc --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/tcolorpy-0.1.4.dist-info/top_level.txt @@ -0,0 +1 @@ +tcolorpy diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__init__.py b/env-llmeval/lib/python3.10/site-packages/yaml/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..824936194774d34cd5e7816e519d00517612e7b4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/__init__.py @@ -0,0 +1,390 @@ + +from .error import * + +from .tokens import * +from .events import * +from .nodes import * + +from .loader import * +from .dumper import * + +__version__ = '6.0.1' +try: + from .cyaml import * + __with_libyaml__ = True +except ImportError: + __with_libyaml__ = False + +import io + +#------------------------------------------------------------------------------ +# XXX "Warnings control" is now deprecated. Leaving in the API function to not +# break code that uses it. 
+#------------------------------------------------------------------------------ +def warnings(settings=None): + if settings is None: + return {} + +#------------------------------------------------------------------------------ +def scan(stream, Loader=Loader): + """ + Scan a YAML stream and produce scanning tokens. + """ + loader = Loader(stream) + try: + while loader.check_token(): + yield loader.get_token() + finally: + loader.dispose() + +def parse(stream, Loader=Loader): + """ + Parse a YAML stream and produce parsing events. + """ + loader = Loader(stream) + try: + while loader.check_event(): + yield loader.get_event() + finally: + loader.dispose() + +def compose(stream, Loader=Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding representation tree. + """ + loader = Loader(stream) + try: + return loader.get_single_node() + finally: + loader.dispose() + +def compose_all(stream, Loader=Loader): + """ + Parse all YAML documents in a stream + and produce corresponding representation trees. + """ + loader = Loader(stream) + try: + while loader.check_node(): + yield loader.get_node() + finally: + loader.dispose() + +def load(stream, Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + """ + loader = Loader(stream) + try: + return loader.get_single_data() + finally: + loader.dispose() + +def load_all(stream, Loader): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + """ + loader = Loader(stream) + try: + while loader.check_data(): + yield loader.get_data() + finally: + loader.dispose() + +def full_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load(stream, FullLoader) + +def full_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load_all(stream, FullLoader) + +def safe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load(stream, SafeLoader) + +def safe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load_all(stream, SafeLoader) + +def unsafe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load(stream, UnsafeLoader) + +def unsafe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load_all(stream, UnsafeLoader) + +def emit(events, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + """ + Emit YAML parsing events into a stream. + If stream is None, return the produced string instead. 
+ """ + getvalue = None + if stream is None: + stream = io.StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + try: + for event in events: + dumper.emit(event) + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize_all(nodes, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + """ + Serialize a sequence of representation trees into a YAML stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end) + try: + dumper.open() + for node in nodes: + dumper.serialize(node) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize(node, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a representation tree into a YAML stream. + If stream is None, return the produced string instead. + """ + return serialize_all([node], stream, Dumper=Dumper, **kwds) + +def dump_all(documents, stream=None, Dumper=Dumper, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + """ + Serialize a sequence of Python objects into a YAML stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, default_style=default_style, + default_flow_style=default_flow_style, + canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys) + try: + dumper.open() + for data in documents: + dumper.represent(data) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def dump(data, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a Python object into a YAML stream. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=Dumper, **kwds) + +def safe_dump_all(documents, stream=None, **kwds): + """ + Serialize a sequence of Python objects into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all(documents, stream, Dumper=SafeDumper, **kwds) + +def safe_dump(data, stream=None, **kwds): + """ + Serialize a Python object into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=SafeDumper, **kwds) + +def add_implicit_resolver(tag, regexp, first=None, + Loader=None, Dumper=Dumper): + """ + Add an implicit scalar detector. + If an implicit scalar value matches the given regexp, + the corresponding tag is assigned to the scalar. 
+ first is a sequence of possible initial characters or None. + """ + if Loader is None: + loader.Loader.add_implicit_resolver(tag, regexp, first) + loader.FullLoader.add_implicit_resolver(tag, regexp, first) + loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first) + else: + Loader.add_implicit_resolver(tag, regexp, first) + Dumper.add_implicit_resolver(tag, regexp, first) + +def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper): + """ + Add a path based resolver for the given tag. + A path is a list of keys that forms a path + to a node in the representation tree. + Keys can be string values, integers, or None. + """ + if Loader is None: + loader.Loader.add_path_resolver(tag, path, kind) + loader.FullLoader.add_path_resolver(tag, path, kind) + loader.UnsafeLoader.add_path_resolver(tag, path, kind) + else: + Loader.add_path_resolver(tag, path, kind) + Dumper.add_path_resolver(tag, path, kind) + +def add_constructor(tag, constructor, Loader=None): + """ + Add a constructor for the given tag. + Constructor is a function that accepts a Loader instance + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_constructor(tag, constructor) + loader.FullLoader.add_constructor(tag, constructor) + loader.UnsafeLoader.add_constructor(tag, constructor) + else: + Loader.add_constructor(tag, constructor) + +def add_multi_constructor(tag_prefix, multi_constructor, Loader=None): + """ + Add a multi-constructor for the given tag prefix. + Multi-constructor is called for a node if its tag starts with tag_prefix. + Multi-constructor accepts a Loader instance, a tag suffix, + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_multi_constructor(tag_prefix, multi_constructor) + loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor) + loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor) + else: + Loader.add_multi_constructor(tag_prefix, multi_constructor) + +def add_representer(data_type, representer, Dumper=Dumper): + """ + Add a representer for the given type. + Representer is a function accepting a Dumper instance + and an instance of the given data type + and producing the corresponding representation node. + """ + Dumper.add_representer(data_type, representer) + +def add_multi_representer(data_type, multi_representer, Dumper=Dumper): + """ + Add a representer for the given type. + Multi-representer is a function accepting a Dumper instance + and an instance of the given data type or subtype + and producing the corresponding representation node. + """ + Dumper.add_multi_representer(data_type, multi_representer) + +class YAMLObjectMetaclass(type): + """ + The metaclass for YAMLObject. + """ + def __init__(cls, name, bases, kwds): + super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) + if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: + if isinstance(cls.yaml_loader, list): + for loader in cls.yaml_loader: + loader.add_constructor(cls.yaml_tag, cls.from_yaml) + else: + cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) + + cls.yaml_dumper.add_representer(cls, cls.to_yaml) + +class YAMLObject(metaclass=YAMLObjectMetaclass): + """ + An object that can dump itself to a YAML stream + and load itself from a YAML stream. 
+ """ + + __slots__ = () # no direct instantiation, so allow immutable subclasses + + yaml_loader = [Loader, FullLoader, UnsafeLoader] + yaml_dumper = Dumper + + yaml_tag = None + yaml_flow_style = None + + @classmethod + def from_yaml(cls, loader, node): + """ + Convert a representation node to a Python object. + """ + return loader.construct_yaml_object(node, cls) + + @classmethod + def to_yaml(cls, dumper, data): + """ + Convert a Python object to a representation node. + """ + return dumper.represent_yaml_object(cls.yaml_tag, data, cls, + flow_style=cls.yaml_flow_style) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e2723dd762cf8f9d8d6e7561dc70fd8535ddced Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79d0cb7e897e3399a154bd0c374e86318659b37e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfad434de43bfada90df820c6f4a6ba13f9f48ee Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/cyaml.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/cyaml.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91f09d2dab9ec80c579e27cc45d509fa513e24c2 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/cyaml.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/dumper.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/dumper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..280882f8699f27af3a2dc605d3cf7ae318ced912 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/dumper.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/emitter.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/emitter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3419cafa78d046acb7e15ecc994930b94e1806d4 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/emitter.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..666b516889eadbcd66b0e8497eb3e951690e6f79 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/events.cpython-310.pyc 
b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/events.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eba992218028aeab9c58ed31d11ad285ac860515 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/events.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/loader.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/loader.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..821573eed98745761884734ab6d5755dfbb329a3 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/loader.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/nodes.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ddf922153fa359315bedc11649c678cc1a2e2683 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/nodes.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee4e11451725f32eee0393e78f29876479ea28b6 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2dc5d45b9695cca2093c6e16e3684d4b4728f06b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..860a002043c0151c2773f3ef71f71239d949ed49 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/resolver.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/resolver.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f592a58d07700d7b703f2326914ee23a61ec1b5d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/resolver.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97657718055b22e4cfefd180872210d65716493f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2eca5ab163a40571d79fdc8719050ca93a87ffee Binary files /dev/null and 
b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/tokens.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/tokens.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..930c9412e180c1a512f732caa08de4d32cbce8f5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/yaml/__pycache__/tokens.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/composer.py b/env-llmeval/lib/python3.10/site-packages/yaml/composer.py new file mode 100644 index 0000000000000000000000000000000000000000..6d15cb40e3b4198819c91c6f8d8b32807fcf53b2 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/composer.py @@ -0,0 +1,139 @@ + +__all__ = ['Composer', 'ComposerError'] + +from .error import MarkedYAMLError +from .events import * +from .nodes import * + +class ComposerError(MarkedYAMLError): + pass + +class Composer: + + def __init__(self): + self.anchors = {} + + def check_node(self): + # Drop the STREAM-START event. + if self.check_event(StreamStartEvent): + self.get_event() + + # If there are more documents available? + return not self.check_event(StreamEndEvent) + + def get_node(self): + # Get the root node of the next document. + if not self.check_event(StreamEndEvent): + return self.compose_document() + + def get_single_node(self): + # Drop the STREAM-START event. + self.get_event() + + # Compose a document if the stream is not empty. + document = None + if not self.check_event(StreamEndEvent): + document = self.compose_document() + + # Ensure that the stream contains no more documents. + if not self.check_event(StreamEndEvent): + event = self.get_event() + raise ComposerError("expected a single document in the stream", + document.start_mark, "but found another document", + event.start_mark) + + # Drop the STREAM-END event. + self.get_event() + + return document + + def compose_document(self): + # Drop the DOCUMENT-START event. + self.get_event() + + # Compose the root node. + node = self.compose_node(None, None) + + # Drop the DOCUMENT-END event. 
+ self.get_event() + + self.anchors = {} + return node + + def compose_node(self, parent, index): + if self.check_event(AliasEvent): + event = self.get_event() + anchor = event.anchor + if anchor not in self.anchors: + raise ComposerError(None, None, "found undefined alias %r" + % anchor, event.start_mark) + return self.anchors[anchor] + event = self.peek_event() + anchor = event.anchor + if anchor is not None: + if anchor in self.anchors: + raise ComposerError("found duplicate anchor %r; first occurrence" + % anchor, self.anchors[anchor].start_mark, + "second occurrence", event.start_mark) + self.descend_resolver(parent, index) + if self.check_event(ScalarEvent): + node = self.compose_scalar_node(anchor) + elif self.check_event(SequenceStartEvent): + node = self.compose_sequence_node(anchor) + elif self.check_event(MappingStartEvent): + node = self.compose_mapping_node(anchor) + self.ascend_resolver() + return node + + def compose_scalar_node(self, anchor): + event = self.get_event() + tag = event.tag + if tag is None or tag == '!': + tag = self.resolve(ScalarNode, event.value, event.implicit) + node = ScalarNode(tag, event.value, + event.start_mark, event.end_mark, style=event.style) + if anchor is not None: + self.anchors[anchor] = node + return node + + def compose_sequence_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(SequenceNode, None, start_event.implicit) + node = SequenceNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + index = 0 + while not self.check_event(SequenceEndEvent): + node.value.append(self.compose_node(node, index)) + index += 1 + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + + def compose_mapping_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(MappingNode, None, start_event.implicit) + node = MappingNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + while not self.check_event(MappingEndEvent): + #key_event = self.peek_event() + item_key = self.compose_node(node, None) + #if item_key in node.value: + # raise ComposerError("while composing a mapping", start_event.start_mark, + # "found duplicate key", key_event.start_mark) + item_value = self.compose_node(node, item_key) + #node.value[item_key] = item_value + node.value.append((item_key, item_value)) + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/constructor.py b/env-llmeval/lib/python3.10/site-packages/yaml/constructor.py new file mode 100644 index 0000000000000000000000000000000000000000..619acd3070a4845c653fcf22a626e05158035bc2 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/constructor.py @@ -0,0 +1,748 @@ + +__all__ = [ + 'BaseConstructor', + 'SafeConstructor', + 'FullConstructor', + 'UnsafeConstructor', + 'Constructor', + 'ConstructorError' +] + +from .error import * +from .nodes import * + +import collections.abc, datetime, base64, binascii, re, sys, types + +class ConstructorError(MarkedYAMLError): + pass + +class BaseConstructor: + + yaml_constructors = {} + yaml_multi_constructors = {} + + def __init__(self): + self.constructed_objects = {} + self.recursive_objects = {} + self.state_generators = [] + self.deep_construct = 
False + + def check_data(self): + # If there are more documents available? + return self.check_node() + + def check_state_key(self, key): + """Block special attributes/methods from being set in a newly created + object, to prevent user-controlled methods from being called during + deserialization""" + if self.get_state_keys_blacklist_regexp().match(key): + raise ConstructorError(None, None, + "blacklisted key '%s' in instance state found" % (key,), None) + + def get_data(self): + # Construct and return the next document. + if self.check_node(): + return self.construct_document(self.get_node()) + + def get_single_data(self): + # Ensure that the stream contains a single document and construct it. + node = self.get_single_node() + if node is not None: + return self.construct_document(node) + return None + + def construct_document(self, node): + data = self.construct_object(node) + while self.state_generators: + state_generators = self.state_generators + self.state_generators = [] + for generator in state_generators: + for dummy in generator: + pass + self.constructed_objects = {} + self.recursive_objects = {} + self.deep_construct = False + return data + + def construct_object(self, node, deep=False): + if node in self.constructed_objects: + return self.constructed_objects[node] + if deep: + old_deep = self.deep_construct + self.deep_construct = True + if node in self.recursive_objects: + raise ConstructorError(None, None, + "found unconstructable recursive node", node.start_mark) + self.recursive_objects[node] = None + constructor = None + tag_suffix = None + if node.tag in self.yaml_constructors: + constructor = self.yaml_constructors[node.tag] + else: + for tag_prefix in self.yaml_multi_constructors: + if tag_prefix is not None and node.tag.startswith(tag_prefix): + tag_suffix = node.tag[len(tag_prefix):] + constructor = self.yaml_multi_constructors[tag_prefix] + break + else: + if None in self.yaml_multi_constructors: + tag_suffix = node.tag + constructor = self.yaml_multi_constructors[None] + elif None in self.yaml_constructors: + constructor = self.yaml_constructors[None] + elif isinstance(node, ScalarNode): + constructor = self.__class__.construct_scalar + elif isinstance(node, SequenceNode): + constructor = self.__class__.construct_sequence + elif isinstance(node, MappingNode): + constructor = self.__class__.construct_mapping + if tag_suffix is None: + data = constructor(self, node) + else: + data = constructor(self, tag_suffix, node) + if isinstance(data, types.GeneratorType): + generator = data + data = next(generator) + if self.deep_construct: + for dummy in generator: + pass + else: + self.state_generators.append(generator) + self.constructed_objects[node] = data + del self.recursive_objects[node] + if deep: + self.deep_construct = old_deep + return data + + def construct_scalar(self, node): + if not isinstance(node, ScalarNode): + raise ConstructorError(None, None, + "expected a scalar node, but found %s" % node.id, + node.start_mark) + return node.value + + def construct_sequence(self, node, deep=False): + if not isinstance(node, SequenceNode): + raise ConstructorError(None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + return [self.construct_object(child, deep=deep) + for child in node.value] + + def construct_mapping(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + mapping = {} + for key_node, value_node in 
node.value: + key = self.construct_object(key_node, deep=deep) + if not isinstance(key, collections.abc.Hashable): + raise ConstructorError("while constructing a mapping", node.start_mark, + "found unhashable key", key_node.start_mark) + value = self.construct_object(value_node, deep=deep) + mapping[key] = value + return mapping + + def construct_pairs(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + pairs = [] + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + value = self.construct_object(value_node, deep=deep) + pairs.append((key, value)) + return pairs + + @classmethod + def add_constructor(cls, tag, constructor): + if not 'yaml_constructors' in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + cls.yaml_constructors[tag] = constructor + + @classmethod + def add_multi_constructor(cls, tag_prefix, multi_constructor): + if not 'yaml_multi_constructors' in cls.__dict__: + cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() + cls.yaml_multi_constructors[tag_prefix] = multi_constructor + +class SafeConstructor(BaseConstructor): + + def construct_scalar(self, node): + if isinstance(node, MappingNode): + for key_node, value_node in node.value: + if key_node.tag == 'tag:yaml.org,2002:value': + return self.construct_scalar(value_node) + return super().construct_scalar(node) + + def flatten_mapping(self, node): + merge = [] + index = 0 + while index < len(node.value): + key_node, value_node = node.value[index] + if key_node.tag == 'tag:yaml.org,2002:merge': + del node.value[index] + if isinstance(value_node, MappingNode): + self.flatten_mapping(value_node) + merge.extend(value_node.value) + elif isinstance(value_node, SequenceNode): + submerge = [] + for subnode in value_node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing a mapping", + node.start_mark, + "expected a mapping for merging, but found %s" + % subnode.id, subnode.start_mark) + self.flatten_mapping(subnode) + submerge.append(subnode.value) + submerge.reverse() + for value in submerge: + merge.extend(value) + else: + raise ConstructorError("while constructing a mapping", node.start_mark, + "expected a mapping or list of mappings for merging, but found %s" + % value_node.id, value_node.start_mark) + elif key_node.tag == 'tag:yaml.org,2002:value': + key_node.tag = 'tag:yaml.org,2002:str' + index += 1 + else: + index += 1 + if merge: + node.value = merge + node.value + + def construct_mapping(self, node, deep=False): + if isinstance(node, MappingNode): + self.flatten_mapping(node) + return super().construct_mapping(node, deep=deep) + + def construct_yaml_null(self, node): + self.construct_scalar(node) + return None + + bool_values = { + 'yes': True, + 'no': False, + 'true': True, + 'false': False, + 'on': True, + 'off': False, + } + + def construct_yaml_bool(self, node): + value = self.construct_scalar(node) + return self.bool_values[value.lower()] + + def construct_yaml_int(self, node): + value = self.construct_scalar(node) + value = value.replace('_', '') + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value == '0': + return 0 + elif value.startswith('0b'): + return sign*int(value[2:], 2) + elif value.startswith('0x'): + return sign*int(value[2:], 16) + elif value[0] == '0': + return sign*int(value, 8) + elif ':' in value: + digits = [int(part) 
for part in value.split(':')] + digits.reverse() + base = 1 + value = 0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return sign*int(value) + + inf_value = 1e300 + while inf_value != inf_value*inf_value: + inf_value *= inf_value + nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). + + def construct_yaml_float(self, node): + value = self.construct_scalar(node) + value = value.replace('_', '').lower() + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value == '.inf': + return sign*self.inf_value + elif value == '.nan': + return self.nan_value + elif ':' in value: + digits = [float(part) for part in value.split(':')] + digits.reverse() + base = 1 + value = 0.0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return sign*float(value) + + def construct_yaml_binary(self, node): + try: + value = self.construct_scalar(node).encode('ascii') + except UnicodeEncodeError as exc: + raise ConstructorError(None, None, + "failed to convert base64 data into ascii: %s" % exc, + node.start_mark) + try: + if hasattr(base64, 'decodebytes'): + return base64.decodebytes(value) + else: + return base64.decodestring(value) + except binascii.Error as exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + timestamp_regexp = re.compile( + r'''^(?P[0-9][0-9][0-9][0-9]) + -(?P[0-9][0-9]?) + -(?P[0-9][0-9]?) + (?:(?:[Tt]|[ \t]+) + (?P[0-9][0-9]?) + :(?P[0-9][0-9]) + :(?P[0-9][0-9]) + (?:\.(?P[0-9]*))? + (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) + (?::(?P[0-9][0-9]))?))?)?$''', re.X) + + def construct_yaml_timestamp(self, node): + value = self.construct_scalar(node) + match = self.timestamp_regexp.match(node.value) + values = match.groupdict() + year = int(values['year']) + month = int(values['month']) + day = int(values['day']) + if not values['hour']: + return datetime.date(year, month, day) + hour = int(values['hour']) + minute = int(values['minute']) + second = int(values['second']) + fraction = 0 + tzinfo = None + if values['fraction']: + fraction = values['fraction'][:6] + while len(fraction) < 6: + fraction += '0' + fraction = int(fraction) + if values['tz_sign']: + tz_hour = int(values['tz_hour']) + tz_minute = int(values['tz_minute'] or 0) + delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) + if values['tz_sign'] == '-': + delta = -delta + tzinfo = datetime.timezone(delta) + elif values['tz']: + tzinfo = datetime.timezone.utc + return datetime.datetime(year, month, day, hour, minute, second, fraction, + tzinfo=tzinfo) + + def construct_yaml_omap(self, node): + # Note: we do not check for duplicate keys, because it's too + # CPU-expensive. 
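+        # (Descriptive note: like the other collection constructors below,
+        # this is a generator that yields the empty container first so that
+        # construct_object can register it before it is populated; this is
+        # what makes self-referential/recursive structures constructible.)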
+ omap = [] + yield omap + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node, value_node = subnode.value[0] + key = self.construct_object(key_node) + value = self.construct_object(value_node) + omap.append((key, value)) + + def construct_yaml_pairs(self, node): + # Note: the same code as `construct_yaml_omap`. + pairs = [] + yield pairs + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node, value_node = subnode.value[0] + key = self.construct_object(key_node) + value = self.construct_object(value_node) + pairs.append((key, value)) + + def construct_yaml_set(self, node): + data = set() + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_str(self, node): + return self.construct_scalar(node) + + def construct_yaml_seq(self, node): + data = [] + yield data + data.extend(self.construct_sequence(node)) + + def construct_yaml_map(self, node): + data = {} + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_object(self, node, cls): + data = cls.__new__(cls) + yield data + if hasattr(data, '__setstate__'): + state = self.construct_mapping(node, deep=True) + data.__setstate__(state) + else: + state = self.construct_mapping(node) + data.__dict__.update(state) + + def construct_undefined(self, node): + raise ConstructorError(None, None, + "could not determine a constructor for the tag %r" % node.tag, + node.start_mark) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:null', + SafeConstructor.construct_yaml_null) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:bool', + SafeConstructor.construct_yaml_bool) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:int', + SafeConstructor.construct_yaml_int) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:float', + SafeConstructor.construct_yaml_float) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:binary', + SafeConstructor.construct_yaml_binary) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:timestamp', + SafeConstructor.construct_yaml_timestamp) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:omap', + SafeConstructor.construct_yaml_omap) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:pairs', + SafeConstructor.construct_yaml_pairs) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:set', + SafeConstructor.construct_yaml_set) + +SafeConstructor.add_constructor( + 
'tag:yaml.org,2002:str', + SafeConstructor.construct_yaml_str) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:seq', + SafeConstructor.construct_yaml_seq) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:map', + SafeConstructor.construct_yaml_map) + +SafeConstructor.add_constructor(None, + SafeConstructor.construct_undefined) + +class FullConstructor(SafeConstructor): + # 'extend' is blacklisted because it is used by + # construct_python_object_apply to add `listitems` to a newly generate + # python instance + def get_state_keys_blacklist(self): + return ['^extend$', '^__.*__$'] + + def get_state_keys_blacklist_regexp(self): + if not hasattr(self, 'state_keys_blacklist_regexp'): + self.state_keys_blacklist_regexp = re.compile('(' + '|'.join(self.get_state_keys_blacklist()) + ')') + return self.state_keys_blacklist_regexp + + def construct_python_str(self, node): + return self.construct_scalar(node) + + def construct_python_unicode(self, node): + return self.construct_scalar(node) + + def construct_python_bytes(self, node): + try: + value = self.construct_scalar(node).encode('ascii') + except UnicodeEncodeError as exc: + raise ConstructorError(None, None, + "failed to convert base64 data into ascii: %s" % exc, + node.start_mark) + try: + if hasattr(base64, 'decodebytes'): + return base64.decodebytes(value) + else: + return base64.decodestring(value) + except binascii.Error as exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + def construct_python_long(self, node): + return self.construct_yaml_int(node) + + def construct_python_complex(self, node): + return complex(self.construct_scalar(node)) + + def construct_python_tuple(self, node): + return tuple(self.construct_sequence(node)) + + def find_python_module(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python module", mark, + "expected non-empty name appended to the tag", mark) + if unsafe: + try: + __import__(name) + except ImportError as exc: + raise ConstructorError("while constructing a Python module", mark, + "cannot find module %r (%s)" % (name, exc), mark) + if name not in sys.modules: + raise ConstructorError("while constructing a Python module", mark, + "module %r is not imported" % name, mark) + return sys.modules[name] + + def find_python_name(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python object", mark, + "expected non-empty name appended to the tag", mark) + if '.' 
in name: + module_name, object_name = name.rsplit('.', 1) + else: + module_name = 'builtins' + object_name = name + if unsafe: + try: + __import__(module_name) + except ImportError as exc: + raise ConstructorError("while constructing a Python object", mark, + "cannot find module %r (%s)" % (module_name, exc), mark) + if module_name not in sys.modules: + raise ConstructorError("while constructing a Python object", mark, + "module %r is not imported" % module_name, mark) + module = sys.modules[module_name] + if not hasattr(module, object_name): + raise ConstructorError("while constructing a Python object", mark, + "cannot find %r in the module %r" + % (object_name, module.__name__), mark) + return getattr(module, object_name) + + def construct_python_name(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python name", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_name(suffix, node.start_mark) + + def construct_python_module(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python module", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_module(suffix, node.start_mark) + + def make_python_instance(self, suffix, node, + args=None, kwds=None, newobj=False, unsafe=False): + if not args: + args = [] + if not kwds: + kwds = {} + cls = self.find_python_name(suffix, node.start_mark) + if not (unsafe or isinstance(cls, type)): + raise ConstructorError("while constructing a Python instance", node.start_mark, + "expected a class, but found %r" % type(cls), + node.start_mark) + if newobj and isinstance(cls, type): + return cls.__new__(cls, *args, **kwds) + else: + return cls(*args, **kwds) + + def set_python_instance_state(self, instance, state, unsafe=False): + if hasattr(instance, '__setstate__'): + instance.__setstate__(state) + else: + slotstate = {} + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + if hasattr(instance, '__dict__'): + if not unsafe and state: + for key in state.keys(): + self.check_state_key(key) + instance.__dict__.update(state) + elif state: + slotstate.update(state) + for key, value in slotstate.items(): + if not unsafe: + self.check_state_key(key) + setattr(instance, key, value) + + def construct_python_object(self, suffix, node): + # Format: + # !!python/object:module.name { ... state ... } + instance = self.make_python_instance(suffix, node, newobj=True) + yield instance + deep = hasattr(instance, '__setstate__') + state = self.construct_mapping(node, deep=deep) + self.set_python_instance_state(instance, state) + + def construct_python_object_apply(self, suffix, node, newobj=False): + # Format: + # !!python/object/apply # (or !!python/object/new) + # args: [ ... arguments ... ] + # kwds: { ... keywords ... } + # state: ... state ... + # listitems: [ ... listitems ... ] + # dictitems: { ... dictitems ... } + # or short format: + # !!python/object/apply [ ... arguments ... ] + # The difference between !!python/object/apply and !!python/object/new + # is how an object is created, check make_python_instance for details. 
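+        # (Illustrative example, not from the original source: a document
+        #     !!python/object/apply:module.fn
+        #     args: [1, 2]
+        # constructs the result of calling module.fn(1, 2); "module.fn" is
+        # a hypothetical name used only for illustration.)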
+ if isinstance(node, SequenceNode): + args = self.construct_sequence(node, deep=True) + kwds = {} + state = {} + listitems = [] + dictitems = {} + else: + value = self.construct_mapping(node, deep=True) + args = value.get('args', []) + kwds = value.get('kwds', {}) + state = value.get('state', {}) + listitems = value.get('listitems', []) + dictitems = value.get('dictitems', {}) + instance = self.make_python_instance(suffix, node, args, kwds, newobj) + if state: + self.set_python_instance_state(instance, state) + if listitems: + instance.extend(listitems) + if dictitems: + for key in dictitems: + instance[key] = dictitems[key] + return instance + + def construct_python_object_new(self, suffix, node): + return self.construct_python_object_apply(suffix, node, newobj=True) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/none', + FullConstructor.construct_yaml_null) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bool', + FullConstructor.construct_yaml_bool) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/str', + FullConstructor.construct_python_str) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/unicode', + FullConstructor.construct_python_unicode) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bytes', + FullConstructor.construct_python_bytes) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/int', + FullConstructor.construct_yaml_int) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/long', + FullConstructor.construct_python_long) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/float', + FullConstructor.construct_yaml_float) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/complex', + FullConstructor.construct_python_complex) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/list', + FullConstructor.construct_yaml_seq) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/tuple', + FullConstructor.construct_python_tuple) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/dict', + FullConstructor.construct_yaml_map) + +FullConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/name:', + FullConstructor.construct_python_name) + +class UnsafeConstructor(FullConstructor): + + def find_python_module(self, name, mark): + return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True) + + def find_python_name(self, name, mark): + return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True) + + def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False): + return super(UnsafeConstructor, self).make_python_instance( + suffix, node, args, kwds, newobj, unsafe=True) + + def set_python_instance_state(self, instance, state): + return super(UnsafeConstructor, self).set_python_instance_state( + instance, state, unsafe=True) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/module:', + UnsafeConstructor.construct_python_module) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object:', + UnsafeConstructor.construct_python_object) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/new:', + UnsafeConstructor.construct_python_object_new) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/apply:', + UnsafeConstructor.construct_python_object_apply) + +# Constructor is same as UnsafeConstructor. Need to leave this in place in case +# people have extended it directly. 
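+# (Descriptive note: loaders built on UnsafeConstructor/Constructor can
+# import modules and instantiate arbitrary Python objects from a document,
+# so they are unsafe on untrusted input; safe_load/SafeConstructor resolves
+# only basic YAML tags.)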
+class Constructor(UnsafeConstructor): + pass diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/cyaml.py b/env-llmeval/lib/python3.10/site-packages/yaml/cyaml.py new file mode 100644 index 0000000000000000000000000000000000000000..0c21345879b298bb8668201bebe7d289586b17f9 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/cyaml.py @@ -0,0 +1,101 @@ + +__all__ = [ + 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader', + 'CBaseDumper', 'CSafeDumper', 'CDumper' +] + +from yaml._yaml import CParser, CEmitter + +from .constructor import * + +from .serializer import * +from .representer import * + +from .resolver import * + +class CBaseLoader(CParser, BaseConstructor, BaseResolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class CSafeLoader(CParser, SafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class CFullLoader(CParser, FullConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + FullConstructor.__init__(self) + Resolver.__init__(self) + +class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + UnsafeConstructor.__init__(self) + Resolver.__init__(self) + +class CLoader(CParser, Constructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + Constructor.__init__(self) + Resolver.__init__(self) + +class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CSafeDumper(CEmitter, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CDumper(CEmitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, 
explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/dumper.py b/env-llmeval/lib/python3.10/site-packages/yaml/dumper.py new file mode 100644 index 0000000000000000000000000000000000000000..6aadba551f3836b02f4752277f4b3027073defad --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/dumper.py @@ -0,0 +1,62 @@ + +__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] + +from .emitter import * +from .serializer import * +from .representer import * +from .resolver import * + +class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class Dumper(Emitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/emitter.py b/env-llmeval/lib/python3.10/site-packages/yaml/emitter.py new file mode 100644 index 0000000000000000000000000000000000000000..a664d011162af69184df2f8e59ab7feec818f7c7 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/emitter.py @@ -0,0 +1,1137 @@ + +# Emitter expects events obeying the following grammar: +# stream ::= STREAM-START document* STREAM-END +# document ::= DOCUMENT-START node DOCUMENT-END +# node ::= SCALAR | sequence | mapping +# sequence ::= SEQUENCE-START node* SEQUENCE-END +# mapping ::= 
MAPPING-START (node node)* MAPPING-END + +__all__ = ['Emitter', 'EmitterError'] + +from .error import YAMLError +from .events import * + +class EmitterError(YAMLError): + pass + +class ScalarAnalysis: + def __init__(self, scalar, empty, multiline, + allow_flow_plain, allow_block_plain, + allow_single_quoted, allow_double_quoted, + allow_block): + self.scalar = scalar + self.empty = empty + self.multiline = multiline + self.allow_flow_plain = allow_flow_plain + self.allow_block_plain = allow_block_plain + self.allow_single_quoted = allow_single_quoted + self.allow_double_quoted = allow_double_quoted + self.allow_block = allow_block + +class Emitter: + + DEFAULT_TAG_PREFIXES = { + '!' : '!', + 'tag:yaml.org,2002:' : '!!', + } + + def __init__(self, stream, canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + + # The stream should have the methods `write` and possibly `flush`. + self.stream = stream + + # Encoding can be overridden by STREAM-START. + self.encoding = None + + # Emitter is a state machine with a stack of states to handle nested + # structures. + self.states = [] + self.state = self.expect_stream_start + + # Current event and the event queue. + self.events = [] + self.event = None + + # The current indentation level and the stack of previous indents. + self.indents = [] + self.indent = None + + # Flow level. + self.flow_level = 0 + + # Contexts. + self.root_context = False + self.sequence_context = False + self.mapping_context = False + self.simple_key_context = False + + # Characteristics of the last emitted character: + # - current position. + # - is it a whitespace? + # - is it an indention character + # (indentation space, '-', '?', or ':')? + self.line = 0 + self.column = 0 + self.whitespace = True + self.indention = True + + # Whether the document requires an explicit document indicator + self.open_ended = False + + # Formatting details. + self.canonical = canonical + self.allow_unicode = allow_unicode + self.best_indent = 2 + if indent and 1 < indent < 10: + self.best_indent = indent + self.best_width = 80 + if width and width > self.best_indent*2: + self.best_width = width + self.best_line_break = '\n' + if line_break in ['\r', '\n', '\r\n']: + self.best_line_break = line_break + + # Tag prefixes. + self.tag_prefixes = None + + # Prepared anchor and tag. + self.prepared_anchor = None + self.prepared_tag = None + + # Scalar analysis and style. + self.analysis = None + self.style = None + + def dispose(self): + # Reset the state attributes (to clear self-references) + self.states = [] + self.state = None + + def emit(self, event): + self.events.append(event) + while not self.need_more_events(): + self.event = self.events.pop(0) + self.state() + self.event = None + + # In some cases, we wait for a few next events before emitting. 
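+    # (Descriptive note: the lookahead below lets the emitter peek at
+    # upcoming events, e.g. to detect an empty document, sequence, or
+    # mapping before committing to an output form; see the check_empty_*
+    # helpers used by the expect_* states.)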
+ + def need_more_events(self): + if not self.events: + return True + event = self.events[0] + if isinstance(event, DocumentStartEvent): + return self.need_events(1) + elif isinstance(event, SequenceStartEvent): + return self.need_events(2) + elif isinstance(event, MappingStartEvent): + return self.need_events(3) + else: + return False + + def need_events(self, count): + level = 0 + for event in self.events[1:]: + if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): + level += 1 + elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): + level -= 1 + elif isinstance(event, StreamEndEvent): + level = -1 + if level < 0: + return False + return (len(self.events) < count+1) + + def increase_indent(self, flow=False, indentless=False): + self.indents.append(self.indent) + if self.indent is None: + if flow: + self.indent = self.best_indent + else: + self.indent = 0 + elif not indentless: + self.indent += self.best_indent + + # States. + + # Stream handlers. + + def expect_stream_start(self): + if isinstance(self.event, StreamStartEvent): + if self.event.encoding and not hasattr(self.stream, 'encoding'): + self.encoding = self.event.encoding + self.write_stream_start() + self.state = self.expect_first_document_start + else: + raise EmitterError("expected StreamStartEvent, but got %s" + % self.event) + + def expect_nothing(self): + raise EmitterError("expected nothing, but got %s" % self.event) + + # Document handlers. + + def expect_first_document_start(self): + return self.expect_document_start(first=True) + + def expect_document_start(self, first=False): + if isinstance(self.event, DocumentStartEvent): + if (self.event.version or self.event.tags) and self.open_ended: + self.write_indicator('...', True) + self.write_indent() + if self.event.version: + version_text = self.prepare_version(self.event.version) + self.write_version_directive(version_text) + self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() + if self.event.tags: + handles = sorted(self.event.tags.keys()) + for handle in handles: + prefix = self.event.tags[handle] + self.tag_prefixes[prefix] = handle + handle_text = self.prepare_tag_handle(handle) + prefix_text = self.prepare_tag_prefix(prefix) + self.write_tag_directive(handle_text, prefix_text) + implicit = (first and not self.event.explicit and not self.canonical + and not self.event.version and not self.event.tags + and not self.check_empty_document()) + if not implicit: + self.write_indent() + self.write_indicator('---', True) + if self.canonical: + self.write_indent() + self.state = self.expect_document_root + elif isinstance(self.event, StreamEndEvent): + if self.open_ended: + self.write_indicator('...', True) + self.write_indent() + self.write_stream_end() + self.state = self.expect_nothing + else: + raise EmitterError("expected DocumentStartEvent, but got %s" + % self.event) + + def expect_document_end(self): + if isinstance(self.event, DocumentEndEvent): + self.write_indent() + if self.event.explicit: + self.write_indicator('...', True) + self.write_indent() + self.flush_stream() + self.state = self.expect_document_start + else: + raise EmitterError("expected DocumentEndEvent, but got %s" + % self.event) + + def expect_document_root(self): + self.states.append(self.expect_document_end) + self.expect_node(root=True) + + # Node handlers. 
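+    # (Descriptive note: each expect_* method handles one production of the
+    # event grammar listed at the top of this file; nesting is handled by
+    # pushing the continuation state onto self.states and popping it when
+    # the matching end event arrives.)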
+ + def expect_node(self, root=False, sequence=False, mapping=False, + simple_key=False): + self.root_context = root + self.sequence_context = sequence + self.mapping_context = mapping + self.simple_key_context = simple_key + if isinstance(self.event, AliasEvent): + self.expect_alias() + elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): + self.process_anchor('&') + self.process_tag() + if isinstance(self.event, ScalarEvent): + self.expect_scalar() + elif isinstance(self.event, SequenceStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_sequence(): + self.expect_flow_sequence() + else: + self.expect_block_sequence() + elif isinstance(self.event, MappingStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_mapping(): + self.expect_flow_mapping() + else: + self.expect_block_mapping() + else: + raise EmitterError("expected NodeEvent, but got %s" % self.event) + + def expect_alias(self): + if self.event.anchor is None: + raise EmitterError("anchor is not specified for alias") + self.process_anchor('*') + self.state = self.states.pop() + + def expect_scalar(self): + self.increase_indent(flow=True) + self.process_scalar() + self.indent = self.indents.pop() + self.state = self.states.pop() + + # Flow sequence handlers. + + def expect_flow_sequence(self): + self.write_indicator('[', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_sequence_item + + def expect_first_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator(']', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + def expect_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator(']', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + # Flow mapping handlers. 
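As a quick illustration of the flow handlers, forcing flow style from the public API produces the bracketed forms these methods write; `flow_level` tracks nesting and `best_width` controls line wrapping:

.. code-block:: python

    import yaml

    # default_flow_style=True routes collections through the flow handlers.
    print(yaml.dump({'point': [1, 2]}, default_flow_style=True))
    # {point: [1, 2]}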
+ + def expect_flow_mapping(self): + self.write_indicator('{', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_mapping_key + + def expect_first_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator('}', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator('}', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + def expect_flow_mapping_value(self): + if self.canonical or self.column > self.best_width: + self.write_indent() + self.write_indicator(':', True) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + # Block sequence handlers. + + def expect_block_sequence(self): + indentless = (self.mapping_context and not self.indention) + self.increase_indent(flow=False, indentless=indentless) + self.state = self.expect_first_block_sequence_item + + def expect_first_block_sequence_item(self): + return self.expect_block_sequence_item(first=True) + + def expect_block_sequence_item(self, first=False): + if not first and isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + self.write_indicator('-', True, indention=True) + self.states.append(self.expect_block_sequence_item) + self.expect_node(sequence=True) + + # Block mapping handlers. 
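Two behaviors of the block handlers are easy to observe from the public API: a sequence nested inside a mapping is written "indentless" (the dashes sit at the key's column, per `expect_block_sequence()` above), and a key that fails `check_simple_key()` falls back to the explicit `?` indicator. A small sketch:

.. code-block:: python

    import yaml

    # A block sequence under a mapping key is emitted indentless by default:
    print(yaml.dump({'steps': ['build', 'test']}))
    # steps:
    # - build
    # - test

    # A key of 128 characters or more fails check_simple_key() and is
    # emitted with the explicit '?' indicator:
    print(yaml.dump({'k' * 130: 1}))
    # ? kkkk...k
    # : 1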
+ + def expect_block_mapping(self): + self.increase_indent(flow=False) + self.state = self.expect_first_block_mapping_key + + def expect_first_block_mapping_key(self): + return self.expect_block_mapping_key(first=True) + + def expect_block_mapping_key(self, first=False): + if not first and isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + if self.check_simple_key(): + self.states.append(self.expect_block_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True, indention=True) + self.states.append(self.expect_block_mapping_value) + self.expect_node(mapping=True) + + def expect_block_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + def expect_block_mapping_value(self): + self.write_indent() + self.write_indicator(':', True, indention=True) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + # Checkers. + + def check_empty_sequence(self): + return (isinstance(self.event, SequenceStartEvent) and self.events + and isinstance(self.events[0], SequenceEndEvent)) + + def check_empty_mapping(self): + return (isinstance(self.event, MappingStartEvent) and self.events + and isinstance(self.events[0], MappingEndEvent)) + + def check_empty_document(self): + if not isinstance(self.event, DocumentStartEvent) or not self.events: + return False + event = self.events[0] + return (isinstance(event, ScalarEvent) and event.anchor is None + and event.tag is None and event.implicit and event.value == '') + + def check_simple_key(self): + length = 0 + if isinstance(self.event, NodeEvent) and self.event.anchor is not None: + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + length += len(self.prepared_anchor) + if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ + and self.event.tag is not None: + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(self.event.tag) + length += len(self.prepared_tag) + if isinstance(self.event, ScalarEvent): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + length += len(self.analysis.scalar) + return (length < 128 and (isinstance(self.event, AliasEvent) + or (isinstance(self.event, ScalarEvent) + and not self.analysis.empty and not self.analysis.multiline) + or self.check_empty_sequence() or self.check_empty_mapping())) + + # Anchor, Tag, and Scalar processors. + + def process_anchor(self, indicator): + if self.event.anchor is None: + self.prepared_anchor = None + return + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + if self.prepared_anchor: + self.write_indicator(indicator+self.prepared_anchor, True) + self.prepared_anchor = None + + def process_tag(self): + tag = self.event.tag + if isinstance(self.event, ScalarEvent): + if self.style is None: + self.style = self.choose_scalar_style() + if ((not self.canonical or tag is None) and + ((self.style == '' and self.event.implicit[0]) + or (self.style != '' and self.event.implicit[1]))): + self.prepared_tag = None + return + if self.event.implicit[0] and tag is None: + tag = '!' 
+                self.prepared_tag = None
+        else:
+            if (not self.canonical or tag is None) and self.event.implicit:
+                self.prepared_tag = None
+                return
+        if tag is None:
+            raise EmitterError("tag is not specified")
+        if self.prepared_tag is None:
+            self.prepared_tag = self.prepare_tag(tag)
+        if self.prepared_tag:
+            self.write_indicator(self.prepared_tag, True)
+        self.prepared_tag = None
+
+    def choose_scalar_style(self):
+        if self.analysis is None:
+            self.analysis = self.analyze_scalar(self.event.value)
+        if self.event.style == '"' or self.canonical:
+            return '"'
+        if not self.event.style and self.event.implicit[0]:
+            if (not (self.simple_key_context and
+                    (self.analysis.empty or self.analysis.multiline))
+                and (self.flow_level and self.analysis.allow_flow_plain
+                    or (not self.flow_level and self.analysis.allow_block_plain))):
+                return ''
+        if self.event.style and self.event.style in '|>':
+            if (not self.flow_level and not self.simple_key_context
+                    and self.analysis.allow_block):
+                return self.event.style
+        if not self.event.style or self.event.style == '\'':
+            if (self.analysis.allow_single_quoted and
+                    not (self.simple_key_context and self.analysis.multiline)):
+                return '\''
+        return '"'
+
+    def process_scalar(self):
+        if self.analysis is None:
+            self.analysis = self.analyze_scalar(self.event.value)
+        if self.style is None:
+            self.style = self.choose_scalar_style()
+        split = (not self.simple_key_context)
+        #if self.analysis.multiline and split    \
+        #        and (not self.style or self.style in '\'\"'):
+        #    self.write_indent()
+        if self.style == '"':
+            self.write_double_quoted(self.analysis.scalar, split)
+        elif self.style == '\'':
+            self.write_single_quoted(self.analysis.scalar, split)
+        elif self.style == '>':
+            self.write_folded(self.analysis.scalar)
+        elif self.style == '|':
+            self.write_literal(self.analysis.scalar)
+        else:
+            self.write_plain(self.analysis.scalar, split)
+        self.analysis = None
+        self.style = None
+
+    # Analyzers.
+
+    def prepare_version(self, version):
+        major, minor = version
+        if major != 1:
+            raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+        return '%d.%d' % (major, minor)
+
+    def prepare_tag_handle(self, handle):
+        if not handle:
+            raise EmitterError("tag handle must not be empty")
+        if handle[0] != '!' or handle[-1] != '!':
+            raise EmitterError("tag handle must start and end with '!': %r" % handle)
+        for ch in handle[1:-1]:
+            if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'    \
+                    or ch in '-_'):
+                raise EmitterError("invalid character %r in the tag handle: %r"
+                        % (ch, handle))
+        return handle
+
+    def prepare_tag_prefix(self, prefix):
+        if not prefix:
+            raise EmitterError("tag prefix must not be empty")
+        chunks = []
+        start = end = 0
+        if prefix[0] == '!':
+            end = 1
+        while end < len(prefix):
+            ch = prefix[end]
+            if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+                    or ch in '-;/?!:@&=+$,_.~*\'()[]':
+                end += 1
+            else:
+                if start < end:
+                    chunks.append(prefix[start:end])
+                start = end = end+1
+                data = ch.encode('utf-8')
+                for ch in data:
+                    # iterating over bytes yields ints, so format the int directly
+                    chunks.append('%%%02X' % ch)
+        if start < end:
+            chunks.append(prefix[start:end])
+        return ''.join(chunks)
+
+    def prepare_tag(self, tag):
+        if not tag:
+            raise EmitterError("tag must not be empty")
+        if tag == '!':
+            return tag
+        handle = None
+        suffix = tag
+        prefixes = sorted(self.tag_prefixes.keys())
+        for prefix in prefixes:
+            if tag.startswith(prefix)   \
+                    and (prefix == '!'
or len(prefix) < len(tag)): + handle = self.tag_prefixes[prefix] + suffix = tag[len(prefix):] + chunks = [] + start = end = 0 + while end < len(suffix): + ch = suffix[end] + if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?:@&=+$,_.~*\'()[]' \ + or (ch == '!' and handle != '!'): + end += 1 + else: + if start < end: + chunks.append(suffix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append('%%%02X' % ch) + if start < end: + chunks.append(suffix[start:end]) + suffix_text = ''.join(chunks) + if handle: + return '%s%s' % (handle, suffix_text) + else: + return '!<%s>' % suffix_text + + def prepare_anchor(self, anchor): + if not anchor: + raise EmitterError("anchor must not be empty") + for ch in anchor: + if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_'): + raise EmitterError("invalid character %r in the anchor: %r" + % (ch, anchor)) + return anchor + + def analyze_scalar(self, scalar): + + # Empty scalar is a special case. + if not scalar: + return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, + allow_flow_plain=False, allow_block_plain=True, + allow_single_quoted=True, allow_double_quoted=True, + allow_block=False) + + # Indicators and special characters. + block_indicators = False + flow_indicators = False + line_breaks = False + special_characters = False + + # Important whitespace combinations. + leading_space = False + leading_break = False + trailing_space = False + trailing_break = False + break_space = False + space_break = False + + # Check document indicators. + if scalar.startswith('---') or scalar.startswith('...'): + block_indicators = True + flow_indicators = True + + # First character or preceded by a whitespace. + preceded_by_whitespace = True + + # Last character or followed by a whitespace. + followed_by_whitespace = (len(scalar) == 1 or + scalar[1] in '\0 \t\r\n\x85\u2028\u2029') + + # The previous character is a space. + previous_space = False + + # The previous character is a break. + previous_break = False + + index = 0 + while index < len(scalar): + ch = scalar[index] + + # Check for indicators. + if index == 0: + # Leading indicators are special characters. + if ch in '#,[]{}&*!|>\'\"%@`': + flow_indicators = True + block_indicators = True + if ch in '?:': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '-' and followed_by_whitespace: + flow_indicators = True + block_indicators = True + else: + # Some indicators cannot appear within a scalar as well. + if ch in ',?[]{}': + flow_indicators = True + if ch == ':': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '#' and preceded_by_whitespace: + flow_indicators = True + block_indicators = True + + # Check for line breaks, special, and unicode characters. + if ch in '\n\x85\u2028\u2029': + line_breaks = True + if not (ch == '\n' or '\x20' <= ch <= '\x7E'): + if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' + or '\uE000' <= ch <= '\uFFFD' + or '\U00010000' <= ch < '\U0010ffff') and ch != '\uFEFF': + unicode_characters = True + if not self.allow_unicode: + special_characters = True + else: + special_characters = True + + # Detect important whitespace combinations. 
+ if ch == ' ': + if index == 0: + leading_space = True + if index == len(scalar)-1: + trailing_space = True + if previous_break: + break_space = True + previous_space = True + previous_break = False + elif ch in '\n\x85\u2028\u2029': + if index == 0: + leading_break = True + if index == len(scalar)-1: + trailing_break = True + if previous_space: + space_break = True + previous_space = False + previous_break = True + else: + previous_space = False + previous_break = False + + # Prepare for the next character. + index += 1 + preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') + followed_by_whitespace = (index+1 >= len(scalar) or + scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') + + # Let's decide what styles are allowed. + allow_flow_plain = True + allow_block_plain = True + allow_single_quoted = True + allow_double_quoted = True + allow_block = True + + # Leading and trailing whitespaces are bad for plain scalars. + if (leading_space or leading_break + or trailing_space or trailing_break): + allow_flow_plain = allow_block_plain = False + + # We do not permit trailing spaces for block scalars. + if trailing_space: + allow_block = False + + # Spaces at the beginning of a new line are only acceptable for block + # scalars. + if break_space: + allow_flow_plain = allow_block_plain = allow_single_quoted = False + + # Spaces followed by breaks, as well as special character are only + # allowed for double quoted scalars. + if space_break or special_characters: + allow_flow_plain = allow_block_plain = \ + allow_single_quoted = allow_block = False + + # Although the plain scalar writer supports breaks, we never emit + # multiline plain scalars. + if line_breaks: + allow_flow_plain = allow_block_plain = False + + # Flow indicators are forbidden for flow plain scalars. + if flow_indicators: + allow_flow_plain = False + + # Block indicators are forbidden for block plain scalars. + if block_indicators: + allow_block_plain = False + + return ScalarAnalysis(scalar=scalar, + empty=False, multiline=line_breaks, + allow_flow_plain=allow_flow_plain, + allow_block_plain=allow_block_plain, + allow_single_quoted=allow_single_quoted, + allow_double_quoted=allow_double_quoted, + allow_block=allow_block) + + # Writers. + + def flush_stream(self): + if hasattr(self.stream, 'flush'): + self.stream.flush() + + def write_stream_start(self): + # Write BOM if needed. 
+ if self.encoding and self.encoding.startswith('utf-16'): + self.stream.write('\uFEFF'.encode(self.encoding)) + + def write_stream_end(self): + self.flush_stream() + + def write_indicator(self, indicator, need_whitespace, + whitespace=False, indention=False): + if self.whitespace or not need_whitespace: + data = indicator + else: + data = ' '+indicator + self.whitespace = whitespace + self.indention = self.indention and indention + self.column += len(data) + self.open_ended = False + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_indent(self): + indent = self.indent or 0 + if not self.indention or self.column > indent \ + or (self.column == indent and not self.whitespace): + self.write_line_break() + if self.column < indent: + self.whitespace = True + data = ' '*(indent-self.column) + self.column = indent + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_line_break(self, data=None): + if data is None: + data = self.best_line_break + self.whitespace = True + self.indention = True + self.line += 1 + self.column = 0 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_version_directive(self, version_text): + data = '%%YAML %s' % version_text + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + def write_tag_directive(self, handle_text, prefix_text): + data = '%%TAG %s %s' % (handle_text, prefix_text) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + # Scalar streams. + + def write_single_quoted(self, text, split=True): + self.write_indicator('\'', True) + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch is None or ch != ' ': + if start+1 == end and self.column > self.best_width and split \ + and start != 0 and end != len(text): + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch == '\'': + data = '\'\'' + self.column += 2 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + 1 + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + self.write_indicator('\'', False) + + ESCAPE_REPLACEMENTS = { + '\0': '0', + '\x07': 'a', + '\x08': 'b', + '\x09': 't', + '\x0A': 'n', + '\x0B': 'v', + '\x0C': 'f', + '\x0D': 'r', + '\x1B': 'e', + '\"': '\"', + '\\': '\\', + '\x85': 'N', + '\xA0': '_', + '\u2028': 'L', + '\u2029': 'P', + } + + def write_double_quoted(self, text, split=True): + self.write_indicator('"', True) + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ + or not ('\x20' <= ch <= '\x7E' + or (self.allow_unicode + and ('\xA0' <= ch <= '\uD7FF' + 
or '\uE000' <= ch <= '\uFFFD'))): + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + if ch in self.ESCAPE_REPLACEMENTS: + data = '\\'+self.ESCAPE_REPLACEMENTS[ch] + elif ch <= '\xFF': + data = '\\x%02X' % ord(ch) + elif ch <= '\uFFFF': + data = '\\u%04X' % ord(ch) + else: + data = '\\U%08X' % ord(ch) + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end+1 + if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ + and self.column+(end-start) > self.best_width and split: + data = text[start:end]+'\\' + if start < end: + start = end + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_indent() + self.whitespace = False + self.indention = False + if text[start] == ' ': + data = '\\' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + end += 1 + self.write_indicator('"', False) + + def determine_block_hints(self, text): + hints = '' + if text: + if text[0] in ' \n\x85\u2028\u2029': + hints += str(self.best_indent) + if text[-1] not in '\n\x85\u2028\u2029': + hints += '-' + elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': + hints += '+' + return hints + + def write_folded(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('>'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + leading_space = True + spaces = False + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if not leading_space and ch is not None and ch != ' ' \ + and text[start] == '\n': + self.write_line_break() + leading_space = (ch == ' ') + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + elif spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width: + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + spaces = (ch == ' ') + end += 1 + + def write_literal(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('|'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + else: + if ch is None or ch in '\n\x85\u2028\u2029': + data = text[start:end] + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + 
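The writers above map one-to-one onto the scalar styles that `choose_scalar_style()` can return; the public `default_style` option biases that choice. A short sketch of the resulting output forms:

.. code-block:: python

    import yaml

    print(yaml.dump('hello'))                      # plain:   hello\n...
    print(yaml.dump('hello', default_style='"'))   # double-quoted: "hello"
    print(yaml.dump('a\nb\n', default_style='|'))  # literal block: |\n  a\n  b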
+ def write_plain(self, text, split=True): + if self.root_context: + self.open_ended = True + if not text: + return + if not self.whitespace: + data = ' ' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.whitespace = False + self.indention = False + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width and split: + self.write_indent() + self.whitespace = False + self.indention = False + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + self.whitespace = False + self.indention = False + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/error.py b/env-llmeval/lib/python3.10/site-packages/yaml/error.py new file mode 100644 index 0000000000000000000000000000000000000000..b796b4dc519512c4825ff539a2e6aa20f4d370d0 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/error.py @@ -0,0 +1,75 @@ + +__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] + +class Mark: + + def __init__(self, name, index, line, column, buffer, pointer): + self.name = name + self.index = index + self.line = line + self.column = column + self.buffer = buffer + self.pointer = pointer + + def get_snippet(self, indent=4, max_length=75): + if self.buffer is None: + return None + head = '' + start = self.pointer + while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': + start -= 1 + if self.pointer-start > max_length/2-1: + head = ' ... ' + start += 5 + break + tail = '' + end = self.pointer + while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': + end += 1 + if end-self.pointer > max_length/2-1: + tail = ' ... 
' + end -= 5 + break + snippet = self.buffer[start:end] + return ' '*indent + head + snippet + tail + '\n' \ + + ' '*(indent+self.pointer-start+len(head)) + '^' + + def __str__(self): + snippet = self.get_snippet() + where = " in \"%s\", line %d, column %d" \ + % (self.name, self.line+1, self.column+1) + if snippet is not None: + where += ":\n"+snippet + return where + +class YAMLError(Exception): + pass + +class MarkedYAMLError(YAMLError): + + def __init__(self, context=None, context_mark=None, + problem=None, problem_mark=None, note=None): + self.context = context + self.context_mark = context_mark + self.problem = problem + self.problem_mark = problem_mark + self.note = note + + def __str__(self): + lines = [] + if self.context is not None: + lines.append(self.context) + if self.context_mark is not None \ + and (self.problem is None or self.problem_mark is None + or self.context_mark.name != self.problem_mark.name + or self.context_mark.line != self.problem_mark.line + or self.context_mark.column != self.problem_mark.column): + lines.append(str(self.context_mark)) + if self.problem is not None: + lines.append(self.problem) + if self.problem_mark is not None: + lines.append(str(self.problem_mark)) + if self.note is not None: + lines.append(self.note) + return '\n'.join(lines) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/events.py b/env-llmeval/lib/python3.10/site-packages/yaml/events.py new file mode 100644 index 0000000000000000000000000000000000000000..f79ad389cb6c9517e391dcd25534866bc9ccd36a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/events.py @@ -0,0 +1,86 @@ + +# Abstract classes. + +class Event(object): + def __init__(self, start_mark=None, end_mark=None): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] + if hasattr(self, key)] + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +class NodeEvent(Event): + def __init__(self, anchor, start_mark=None, end_mark=None): + self.anchor = anchor + self.start_mark = start_mark + self.end_mark = end_mark + +class CollectionStartEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, + flow_style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class CollectionEndEvent(Event): + pass + +# Implementations. 
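The concrete classes below are what `yaml.parse()` yields and what `yaml.emit()` consumes, so a small document can be inspected or replayed at the event level:

.. code-block:: python

    import yaml

    for event in yaml.parse('a: [1, 2]'):
        print(type(event).__name__)
    # StreamStartEvent, DocumentStartEvent, MappingStartEvent, ScalarEvent,
    # SequenceStartEvent, ScalarEvent, ScalarEvent, SequenceEndEvent,
    # MappingEndEvent, DocumentEndEvent, StreamEndEvent

    # The same stream round-trips: yaml.emit(yaml.parse('a: [1, 2]'))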
+
+class StreamStartEvent(Event):
+    def __init__(self, start_mark=None, end_mark=None, encoding=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.encoding = encoding
+
+class StreamEndEvent(Event):
+    pass
+
+class DocumentStartEvent(Event):
+    def __init__(self, start_mark=None, end_mark=None,
+            explicit=None, version=None, tags=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.explicit = explicit
+        self.version = version
+        self.tags = tags
+
+class DocumentEndEvent(Event):
+    def __init__(self, start_mark=None, end_mark=None,
+            explicit=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.explicit = explicit
+
+class AliasEvent(NodeEvent):
+    pass
+
+class ScalarEvent(NodeEvent):
+    def __init__(self, anchor, tag, implicit, value,
+            start_mark=None, end_mark=None, style=None):
+        self.anchor = anchor
+        self.tag = tag
+        self.implicit = implicit
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.style = style
+
+class SequenceStartEvent(CollectionStartEvent):
+    pass
+
+class SequenceEndEvent(CollectionEndEvent):
+    pass
+
+class MappingStartEvent(CollectionStartEvent):
+    pass
+
+class MappingEndEvent(CollectionEndEvent):
+    pass
+
diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/loader.py b/env-llmeval/lib/python3.10/site-packages/yaml/loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..e90c11224c38e559cdf0cb205f0692ebd4fb8681
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/yaml/loader.py
@@ -0,0 +1,63 @@
+
+__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
+
+from .reader import *
+from .scanner import *
+from .parser import *
+from .composer import *
+from .constructor import *
+from .resolver import *
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+
+    def __init__(self, stream):
+        Reader.__init__(self, stream)
+        Scanner.__init__(self)
+        Parser.__init__(self)
+        Composer.__init__(self)
+        BaseConstructor.__init__(self)
+        BaseResolver.__init__(self)
+
+class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver):
+
+    def __init__(self, stream):
+        Reader.__init__(self, stream)
+        Scanner.__init__(self)
+        Parser.__init__(self)
+        Composer.__init__(self)
+        FullConstructor.__init__(self)
+        Resolver.__init__(self)
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+
+    def __init__(self, stream):
+        Reader.__init__(self, stream)
+        Scanner.__init__(self)
+        Parser.__init__(self)
+        Composer.__init__(self)
+        SafeConstructor.__init__(self)
+        Resolver.__init__(self)
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+    def __init__(self, stream):
+        Reader.__init__(self, stream)
+        Scanner.__init__(self)
+        Parser.__init__(self)
+        Composer.__init__(self)
+        Constructor.__init__(self)
+        Resolver.__init__(self)
+
+# UnsafeLoader is the same as Loader (which is and was always unsafe on
+# untrusted input). Use of either Loader or UnsafeLoader should be rare, since
+# FullLoader should be able to load almost all YAML safely. Loader is left intact
+# to ensure backwards compatibility.
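To make the comment above concrete, the usual choices from the public API look like this (the document here is trivial, so all three succeed; the difference only shows up on Python-specific tags such as `!!python/object`):

.. code-block:: python

    import yaml

    yaml.safe_load('a: 1')                       # standard YAML tags only
    yaml.load('a: 1', Loader=yaml.FullLoader)    # most Python types, but no
                                                 # arbitrary object construction
    yaml.load('a: 1', Loader=yaml.UnsafeLoader)  # trusted input only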
+class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + Constructor.__init__(self) + Resolver.__init__(self) diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/nodes.py b/env-llmeval/lib/python3.10/site-packages/yaml/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..c4f070c41e1fb1bc01af27d69329e92dded38908 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/nodes.py @@ -0,0 +1,49 @@ + +class Node(object): + def __init__(self, tag, value, start_mark, end_mark): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + value = self.value + #if isinstance(value, list): + # if len(value) == 0: + # value = '' + # elif len(value) == 1: + # value = '<1 item>' + # else: + # value = '<%d items>' % len(value) + #else: + # if len(value) > 75: + # value = repr(value[:70]+u' ... ') + # else: + # value = repr(value) + value = repr(value) + return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) + +class ScalarNode(Node): + id = 'scalar' + def __init__(self, tag, value, + start_mark=None, end_mark=None, style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class CollectionNode(Node): + def __init__(self, tag, value, + start_mark=None, end_mark=None, flow_style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class SequenceNode(CollectionNode): + id = 'sequence' + +class MappingNode(CollectionNode): + id = 'mapping' + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/parser.py b/env-llmeval/lib/python3.10/site-packages/yaml/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..13a5995d292045d0f865a99abf692bd35dc87814 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/parser.py @@ -0,0 +1,589 @@ + +# The following YAML grammar is LL(1) and is parsed by a recursive descent +# parser. +# +# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +# implicit_document ::= block_node DOCUMENT-END* +# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +# block_node_or_indentless_sequence ::= +# ALIAS +# | properties (block_content | indentless_block_sequence)? +# | block_content +# | indentless_block_sequence +# block_node ::= ALIAS +# | properties block_content? +# | block_content +# flow_node ::= ALIAS +# | properties flow_content? +# | flow_content +# properties ::= TAG ANCHOR? | ANCHOR TAG? +# block_content ::= block_collection | flow_collection | SCALAR +# flow_content ::= flow_collection | SCALAR +# block_collection ::= block_sequence | block_mapping +# flow_collection ::= flow_sequence | flow_mapping +# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +# block_mapping ::= BLOCK-MAPPING_START +# ((KEY block_node_or_indentless_sequence?)? +# (VALUE block_node_or_indentless_sequence?)?)* +# BLOCK-END +# flow_sequence ::= FLOW-SEQUENCE-START +# (flow_sequence_entry FLOW-ENTRY)* +# flow_sequence_entry? +# FLOW-SEQUENCE-END +# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+# flow_mapping      ::= FLOW-MAPPING-START
+#                       (flow_mapping_entry FLOW-ENTRY)*
+#                       flow_mapping_entry?
+#                       FLOW-MAPPING-END
+# flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
+__all__ = ['Parser', 'ParserError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+from .events import *
+from .scanner import *
+
+class ParserError(MarkedYAMLError):
+    pass
+
+class Parser:
+    # Since writing a recursive descent parser is a straightforward task, we
+    # do not give many comments here.
+
+    DEFAULT_TAGS = {
+        '!':   '!',
+        '!!':  'tag:yaml.org,2002:',
+    }
+
+    def __init__(self):
+        self.current_event = None
+        self.yaml_version = None
+        self.tag_handles = {}
+        self.states = []
+        self.marks = []
+        self.state = self.parse_stream_start
+
+    def dispose(self):
+        # Reset the state attributes (to clear self-references)
+        self.states = []
+        self.state = None
+
+    def check_event(self, *choices):
+        # Check the type of the next event.
+        if self.current_event is None:
+            if self.state:
+                self.current_event = self.state()
+        if self.current_event is not None:
+            if not choices:
+                return True
+            for choice in choices:
+                if isinstance(self.current_event, choice):
+                    return True
+        return False
+
+    def peek_event(self):
+        # Get the next event.
+        if self.current_event is None:
+            if self.state:
+                self.current_event = self.state()
+        return self.current_event
+
+    def get_event(self):
+        # Get the next event and proceed further.
+        if self.current_event is None:
+            if self.state:
+                self.current_event = self.state()
+        value = self.current_event
+        self.current_event = None
+        return value
+
+    # stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
+    # implicit_document ::= block_node DOCUMENT-END*
+    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+
+    def parse_stream_start(self):
+
+        # Parse the stream start.
+        token = self.get_token()
+        event = StreamStartEvent(token.start_mark, token.end_mark,
+                encoding=token.encoding)
+
+        # Prepare the next state.
+        self.state = self.parse_implicit_document_start
+
+        return event
+
+    def parse_implicit_document_start(self):
+
+        # Parse an implicit document.
+        if not self.check_token(DirectiveToken, DocumentStartToken,
+                StreamEndToken):
+            self.tag_handles = self.DEFAULT_TAGS
+            token = self.peek_token()
+            start_mark = end_mark = token.start_mark
+            event = DocumentStartEvent(start_mark, end_mark,
+                    explicit=False)
+
+            # Prepare the next state.
+            self.states.append(self.parse_document_end)
+            self.state = self.parse_block_node
+
+            return event
+
+        else:
+            return self.parse_document_start()
+
+    def parse_document_start(self):
+
+        # Parse any extra document end indicators.
+        while self.check_token(DocumentEndToken):
+            self.get_token()
+
+        # Parse an explicit document.
+        if not self.check_token(StreamEndToken):
+            token = self.peek_token()
+            start_mark = token.start_mark
+            version, tags = self.process_directives()
+            if not self.check_token(DocumentStartToken):
+                raise ParserError(None, None,
+                        "expected '<document start>', but found %r"
+                        % self.peek_token().id,
+                        self.peek_token().start_mark)
+            token = self.get_token()
+            end_mark = token.end_mark
+            event = DocumentStartEvent(start_mark, end_mark,
+                    explicit=True, version=version, tags=tags)
+            self.states.append(self.parse_document_end)
+            self.state = self.parse_document_content
+        else:
+            # Parse the end of the stream.
+            token = self.get_token()
+            event = StreamEndEvent(token.start_mark, token.end_mark)
+            assert not self.states
+            assert not self.marks
+            self.state = None
+        return event
+
+    def parse_document_end(self):
+
+        # Parse the document end.
+        token = self.peek_token()
+        start_mark = end_mark = token.start_mark
+        explicit = False
+        if self.check_token(DocumentEndToken):
+            token = self.get_token()
+            end_mark = token.end_mark
+            explicit = True
+        event = DocumentEndEvent(start_mark, end_mark,
+                explicit=explicit)
+
+        # Prepare the next state.
+        self.state = self.parse_document_start
+
+        return event
+
+    def parse_document_content(self):
+        if self.check_token(DirectiveToken,
+                DocumentStartToken, DocumentEndToken, StreamEndToken):
+            event = self.process_empty_scalar(self.peek_token().start_mark)
+            self.state = self.states.pop()
+            return event
+        else:
+            return self.parse_block_node()
+
+    def process_directives(self):
+        self.yaml_version = None
+        self.tag_handles = {}
+        while self.check_token(DirectiveToken):
+            token = self.get_token()
+            if token.name == 'YAML':
+                if self.yaml_version is not None:
+                    raise ParserError(None, None,
+                            "found duplicate YAML directive", token.start_mark)
+                major, minor = token.value
+                if major != 1:
+                    raise ParserError(None, None,
+                            "found incompatible YAML document (version 1.* is required)",
+                            token.start_mark)
+                self.yaml_version = token.value
+            elif token.name == 'TAG':
+                handle, prefix = token.value
+                if handle in self.tag_handles:
+                    raise ParserError(None, None,
+                            "duplicate tag handle %r" % handle,
+                            token.start_mark)
+                self.tag_handles[handle] = prefix
+        if self.tag_handles:
+            value = self.yaml_version, self.tag_handles.copy()
+        else:
+            value = self.yaml_version, None
+        for key in self.DEFAULT_TAGS:
+            if key not in self.tag_handles:
+                self.tag_handles[key] = self.DEFAULT_TAGS[key]
+        return value
+
+    # block_node_or_indentless_sequence ::= ALIAS
+    #               | properties (block_content | indentless_block_sequence)?
+    #               | block_content
+    #               | indentless_block_sequence
+    # block_node    ::= ALIAS
+    #                   | properties block_content?
+    #                   | block_content
+    # flow_node     ::= ALIAS
+    #                   | properties flow_content?
+    #                   | flow_content
+    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR + # flow_content ::= flow_collection | SCALAR + # block_collection ::= block_sequence | block_mapping + # flow_collection ::= flow_sequence | flow_mapping + + def parse_block_node(self): + return self.parse_node(block=True) + + def parse_flow_node(self): + return self.parse_node() + + def parse_block_node_or_indentless_sequence(self): + return self.parse_node(block=True, indentless_sequence=True) + + def parse_node(self, block=False, indentless_sequence=False): + if self.check_token(AliasToken): + token = self.get_token() + event = AliasEvent(token.value, token.start_mark, token.end_mark) + self.state = self.states.pop() + else: + anchor = None + tag = None + start_mark = end_mark = tag_mark = None + if self.check_token(AnchorToken): + token = self.get_token() + start_mark = token.start_mark + end_mark = token.end_mark + anchor = token.value + if self.check_token(TagToken): + token = self.get_token() + tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + elif self.check_token(TagToken): + token = self.get_token() + start_mark = tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + if self.check_token(AnchorToken): + token = self.get_token() + end_mark = token.end_mark + anchor = token.value + if tag is not None: + handle, suffix = tag + if handle is not None: + if handle not in self.tag_handles: + raise ParserError("while parsing a node", start_mark, + "found undefined tag handle %r" % handle, + tag_mark) + tag = self.tag_handles[handle]+suffix + else: + tag = suffix + #if tag == '!': + # raise ParserError("while parsing a node", start_mark, + # "found non-specific tag '!'", tag_mark, + # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") + if start_mark is None: + start_mark = end_mark = self.peek_token().start_mark + event = None + implicit = (tag is None or tag == '!') + if indentless_sequence and self.check_token(BlockEntryToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark) + self.state = self.parse_indentless_sequence_entry + else: + if self.check_token(ScalarToken): + token = self.get_token() + end_mark = token.end_mark + if (token.plain and tag is None) or tag == '!': + implicit = (True, False) + elif tag is None: + implicit = (False, True) + else: + implicit = (False, False) + event = ScalarEvent(anchor, tag, implicit, token.value, + start_mark, end_mark, style=token.style) + self.state = self.states.pop() + elif self.check_token(FlowSequenceStartToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_sequence_first_entry + elif self.check_token(FlowMappingStartToken): + end_mark = self.peek_token().end_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_mapping_first_key + elif block and self.check_token(BlockSequenceStartToken): + end_mark = self.peek_token().start_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_sequence_first_entry + elif block and self.check_token(BlockMappingStartToken): + end_mark = self.peek_token().start_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_mapping_first_key + elif anchor is not 
None or tag is not None:
+                # Empty scalars are allowed even if a tag or an anchor is
+                # specified.
+                event = ScalarEvent(anchor, tag, (implicit, False), '',
+                        start_mark, end_mark)
+                self.state = self.states.pop()
+            else:
+                if block:
+                    node = 'block'
+                else:
+                    node = 'flow'
+                token = self.peek_token()
+                raise ParserError("while parsing a %s node" % node, start_mark,
+                        "expected the node content, but found %r" % token.id,
+                        token.start_mark)
+        return event
+
+    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+    def parse_block_sequence_first_entry(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_sequence_entry()
+
+    def parse_block_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken, BlockEndToken):
+                self.states.append(self.parse_block_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_block_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block collection", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = SequenceEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+    def parse_indentless_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken,
+                    KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_indentless_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_indentless_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        token = self.peek_token()
+        event = SequenceEndEvent(token.start_mark, token.start_mark)
+        self.state = self.states.pop()
+        return event
+
+    # block_mapping     ::= BLOCK-MAPPING_START
+    #                       ((KEY block_node_or_indentless_sequence?)?
+    #                       (VALUE block_node_or_indentless_sequence?)?)*
+    #                       BLOCK-END
+
+    def parse_block_mapping_first_key(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_mapping_key()
+
+    def parse_block_mapping_key(self):
+        if self.check_token(KeyToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_value)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_value
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block mapping", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = MappingEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    def parse_block_mapping_value(self):
+        if self.check_token(ValueToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_key)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_key
+                return self.process_empty_scalar(token.end_mark)
+        else:
+            self.state = self.parse_block_mapping_key
+            token = self.peek_token()
+            return self.process_empty_scalar(token.start_mark)
+
+    # flow_sequence     ::= FLOW-SEQUENCE-START
+    #                       (flow_sequence_entry FLOW-ENTRY)*
+    #                       flow_sequence_entry?
+    #                       FLOW-SEQUENCE-END
+    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+    #
+    # Note that while production rules for both flow_sequence_entry and
+    # flow_mapping_entry are equal, their interpretations are different.
+    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+    # generates an inline mapping (set syntax).
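The set-syntax interpretation mentioned above is observable from the public API: a `key: value` entry inside a flow sequence loads as a single-pair mapping:

.. code-block:: python

    import yaml

    print(yaml.safe_load('[a: 1, b]'))
    # [{'a': 1}, 'b']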
+ + def parse_flow_sequence_first_entry(self): + token = self.get_token() + self.marks.append(token.start_mark) + return self.parse_flow_sequence_entry(first=True) + + def parse_flow_sequence_entry(self, first=False): + if not self.check_token(FlowSequenceEndToken): + if not first: + if self.check_token(FlowEntryToken): + self.get_token() + else: + token = self.peek_token() + raise ParserError("while parsing a flow sequence", self.marks[-1], + "expected ',' or ']', but got %r" % token.id, token.start_mark) + + if self.check_token(KeyToken): + token = self.peek_token() + event = MappingStartEvent(None, None, True, + token.start_mark, token.end_mark, + flow_style=True) + self.state = self.parse_flow_sequence_entry_mapping_key + return event + elif not self.check_token(FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry) + return self.parse_flow_node() + token = self.get_token() + event = SequenceEndEvent(token.start_mark, token.end_mark) + self.state = self.states.pop() + self.marks.pop() + return event + + def parse_flow_sequence_entry_mapping_key(self): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_value) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_value + return self.process_empty_scalar(token.end_mark) + + def parse_flow_sequence_entry_mapping_value(self): + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_end) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_end + return self.process_empty_scalar(token.end_mark) + else: + self.state = self.parse_flow_sequence_entry_mapping_end + token = self.peek_token() + return self.process_empty_scalar(token.start_mark) + + def parse_flow_sequence_entry_mapping_end(self): + self.state = self.parse_flow_sequence_entry + token = self.peek_token() + return MappingEndEvent(token.start_mark, token.start_mark) + + # flow_mapping ::= FLOW-MAPPING-START + # (flow_mapping_entry FLOW-ENTRY)* + # flow_mapping_entry? + # FLOW-MAPPING-END + # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
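One consequence of these productions, handled by `parse_flow_mapping_empty_value()` below, is that a lone key inside a flow mapping is legal: it receives an empty scalar as its value, which the resolver loads as `None`:

.. code-block:: python

    import yaml

    print(yaml.safe_load('{a: 1, b}'))
    # {'a': 1, 'b': None}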
+
+    def parse_flow_mapping_first_key(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_flow_mapping_key(first=True)
+
+    def parse_flow_mapping_key(self, first=False):
+        if not self.check_token(FlowMappingEndToken):
+            if not first:
+                if self.check_token(FlowEntryToken):
+                    self.get_token()
+                else:
+                    token = self.peek_token()
+                    raise ParserError("while parsing a flow mapping", self.marks[-1],
+                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
+            if self.check_token(KeyToken):
+                token = self.get_token()
+                if not self.check_token(ValueToken,
+                        FlowEntryToken, FlowMappingEndToken):
+                    self.states.append(self.parse_flow_mapping_value)
+                    return self.parse_flow_node()
+                else:
+                    self.state = self.parse_flow_mapping_value
+                    return self.process_empty_scalar(token.end_mark)
+            elif not self.check_token(FlowMappingEndToken):
+                self.states.append(self.parse_flow_mapping_empty_value)
+                return self.parse_flow_node()
+        token = self.get_token()
+        event = MappingEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    def parse_flow_mapping_value(self):
+        if self.check_token(ValueToken):
+            token = self.get_token()
+            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
+                self.states.append(self.parse_flow_mapping_key)
+                return self.parse_flow_node()
+            else:
+                self.state = self.parse_flow_mapping_key
+                return self.process_empty_scalar(token.end_mark)
+        else:
+            self.state = self.parse_flow_mapping_key
+            token = self.peek_token()
+            return self.process_empty_scalar(token.start_mark)
+
+    def parse_flow_mapping_empty_value(self):
+        self.state = self.parse_flow_mapping_key
+        return self.process_empty_scalar(self.peek_token().start_mark)
+
+    def process_empty_scalar(self, mark):
+        return ScalarEvent(None, None, (True, False), '', mark, mark)
+
diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/reader.py b/env-llmeval/lib/python3.10/site-packages/yaml/reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..774b0219b5932a0ee1c27e637371de5ba8d9cb16
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/yaml/reader.py
@@ -0,0 +1,185 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code here.
+#
+# We define two classes here.
+#
+#   Mark(source, line, column)
+# It's just a record and its only use is producing nice error messages.
+# Parser does not use it for any other purposes.
+#
+#   Reader(source, data)
+# Reader determines the encoding of `data` and converts it to unicode.
+# Reader provides the following methods and attributes:
+#   reader.peek(length=1) - return the next `length` characters
+#   reader.forward(length=1) - move the current position `length` characters forward.
+#   reader.index - the number of the current character.
+#   reader.line, reader.column - the line and the column of the current character.
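A short sketch of the interface described above, using a plain string as the source (so no encoding detection is needed):

.. code-block:: python

    from yaml.reader import Reader

    r = Reader('key: value\n')
    assert r.peek() == 'k'         # look ahead without consuming
    r.forward(5)                   # consume 'key: ', tracking line/column
    assert r.peek() == 'v'
    mark = r.get_mark()
    print(mark.line, mark.column)  # 0 5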
+
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+    def __init__(self, name, position, character, encoding, reason):
+        self.name = name
+        self.character = character
+        self.position = position
+        self.encoding = encoding
+        self.reason = reason
+
+    def __str__(self):
+        if isinstance(self.character, bytes):
+            return "'%s' codec can't decode byte #x%02x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.encoding, ord(self.character), self.reason,
+                            self.name, self.position)
+        else:
+            return "unacceptable character #x%04x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.character, self.reason,
+                            self.name, self.position)
+
+class Reader(object):
+    # Reader:
+    # - determines the data encoding and converts it to a unicode string,
+    # - checks if characters are in allowed range,
+    # - adds '\0' to the end.
+
+    # Reader accepts
+    #  - a `bytes` object,
+    #  - a `str` object,
+    #  - a file-like object with its `read` method returning `bytes`,
+    #  - a file-like object with its `read` method returning `str`.
+
+    # Yeah, it's ugly and slow.
+
+    def __init__(self, stream):
+        self.name = None
+        self.stream = None
+        self.stream_pointer = 0
+        self.eof = True
+        self.buffer = ''
+        self.pointer = 0
+        self.raw_buffer = None
+        self.raw_decode = None
+        self.encoding = None
+        self.index = 0
+        self.line = 0
+        self.column = 0
+        if isinstance(stream, str):
+            self.name = "<unicode string>"
+            self.check_printable(stream)
+            self.buffer = stream+'\0'
+        elif isinstance(stream, bytes):
+            self.name = "<byte string>"
+            self.raw_buffer = stream
+            self.determine_encoding()
+        else:
+            self.stream = stream
+            self.name = getattr(stream, 'name', "<file>")
+            self.eof = False
+            self.raw_buffer = None
+            self.determine_encoding()
+
+    def peek(self, index=0):
+        try:
+            return self.buffer[self.pointer+index]
+        except IndexError:
+            self.update(index+1)
+            return self.buffer[self.pointer+index]
+
+    def prefix(self, length=1):
+        if self.pointer+length >= len(self.buffer):
+            self.update(length)
+        return self.buffer[self.pointer:self.pointer+length]
+
+    def forward(self, length=1):
+        if self.pointer+length+1 >= len(self.buffer):
+            self.update(length+1)
+        while length:
+            ch = self.buffer[self.pointer]
+            self.pointer += 1
+            self.index += 1
+            if ch in '\n\x85\u2028\u2029' \
+                    or (ch == '\r' and self.buffer[self.pointer] != '\n'):
+                self.line += 1
+                self.column = 0
+            elif ch != '\uFEFF':
+                self.column += 1
+            length -= 1
+
+    def get_mark(self):
+        if self.stream is None:
+            return Mark(self.name, self.index, self.line, self.column,
+                    self.buffer, self.pointer)
+        else:
+            return Mark(self.name, self.index, self.line, self.column,
+                    None, None)
+
+    def determine_encoding(self):
+        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
+            self.update_raw()
+        if isinstance(self.raw_buffer, bytes):
+            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+                self.raw_decode = codecs.utf_16_le_decode
+                self.encoding = 'utf-16-le'
+            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+                self.raw_decode = codecs.utf_16_be_decode
+                self.encoding = 'utf-16-be'
+            else:
+                self.raw_decode = codecs.utf_8_decode
+                self.encoding = 'utf-8'
+        self.update(1)
+
+    NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
+    def check_printable(self, data):
+        match = self.NON_PRINTABLE.search(data)
+        if match:
+            character = match.group()
+            position = self.index+(len(self.buffer)-self.pointer)+match.start()
+            raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed") + + def update(self, length): + if self.raw_buffer is None: + return + self.buffer = self.buffer[self.pointer:] + self.pointer = 0 + while len(self.buffer) < length: + if not self.eof: + self.update_raw() + if self.raw_decode is not None: + try: + data, converted = self.raw_decode(self.raw_buffer, + 'strict', self.eof) + except UnicodeDecodeError as exc: + character = self.raw_buffer[exc.start] + if self.stream is not None: + position = self.stream_pointer-len(self.raw_buffer)+exc.start + else: + position = exc.start + raise ReaderError(self.name, position, character, + exc.encoding, exc.reason) + else: + data = self.raw_buffer + converted = len(data) + self.check_printable(data) + self.buffer += data + self.raw_buffer = self.raw_buffer[converted:] + if self.eof: + self.buffer += '\0' + self.raw_buffer = None + break + + def update_raw(self, size=4096): + data = self.stream.read(size) + if self.raw_buffer is None: + self.raw_buffer = data + else: + self.raw_buffer += data + self.stream_pointer += len(data) + if not data: + self.eof = True diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/representer.py b/env-llmeval/lib/python3.10/site-packages/yaml/representer.py new file mode 100644 index 0000000000000000000000000000000000000000..808ca06dfbd60c9a23eb079151b74a82ef688749 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/representer.py @@ -0,0 +1,389 @@ + +__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', + 'RepresenterError'] + +from .error import * +from .nodes import * + +import datetime, copyreg, types, base64, collections + +class RepresenterError(YAMLError): + pass + +class BaseRepresenter: + + yaml_representers = {} + yaml_multi_representers = {} + + def __init__(self, default_style=None, default_flow_style=False, sort_keys=True): + self.default_style = default_style + self.sort_keys = sort_keys + self.default_flow_style = default_flow_style + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent(self, data): + node = self.represent_data(data) + self.serialize(node) + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent_data(self, data): + if self.ignore_aliases(data): + self.alias_key = None + else: + self.alias_key = id(data) + if self.alias_key is not None: + if self.alias_key in self.represented_objects: + node = self.represented_objects[self.alias_key] + #if node is None: + # raise RepresenterError("recursive objects are not allowed: %r" % data) + return node + #self.represented_objects[alias_key] = None + self.object_keeper.append(data) + data_types = type(data).__mro__ + if data_types[0] in self.yaml_representers: + node = self.yaml_representers[data_types[0]](self, data) + else: + for data_type in data_types: + if data_type in self.yaml_multi_representers: + node = self.yaml_multi_representers[data_type](self, data) + break + else: + if None in self.yaml_multi_representers: + node = self.yaml_multi_representers[None](self, data) + elif None in self.yaml_representers: + node = self.yaml_representers[None](self, data) + else: + node = ScalarNode(None, str(data)) + #if alias_key is not None: + # self.represented_objects[alias_key] = node + return node + + @classmethod + def add_representer(cls, data_type, representer): + if not 'yaml_representers' in cls.__dict__: + cls.yaml_representers = cls.yaml_representers.copy() + cls.yaml_representers[data_type] = representer + + @classmethod + def 
add_multi_representer(cls, data_type, representer): + if not 'yaml_multi_representers' in cls.__dict__: + cls.yaml_multi_representers = cls.yaml_multi_representers.copy() + cls.yaml_multi_representers[data_type] = representer + + def represent_scalar(self, tag, value, style=None): + if style is None: + style = self.default_style + node = ScalarNode(tag, value, style=style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + return node + + def represent_sequence(self, tag, sequence, flow_style=None): + value = [] + node = SequenceNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + for item in sequence: + node_item = self.represent_data(item) + if not (isinstance(node_item, ScalarNode) and not node_item.style): + best_style = False + value.append(node_item) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def represent_mapping(self, tag, mapping, flow_style=None): + value = [] + node = MappingNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + if hasattr(mapping, 'items'): + mapping = list(mapping.items()) + if self.sort_keys: + try: + mapping = sorted(mapping) + except TypeError: + pass + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def ignore_aliases(self, data): + return False + +class SafeRepresenter(BaseRepresenter): + + def ignore_aliases(self, data): + if data is None: + return True + if isinstance(data, tuple) and data == (): + return True + if isinstance(data, (str, bytes, bool, int, float)): + return True + + def represent_none(self, data): + return self.represent_scalar('tag:yaml.org,2002:null', 'null') + + def represent_str(self, data): + return self.represent_scalar('tag:yaml.org,2002:str', data) + + def represent_binary(self, data): + if hasattr(base64, 'encodebytes'): + data = base64.encodebytes(data).decode('ascii') + else: + data = base64.encodestring(data).decode('ascii') + return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') + + def represent_bool(self, data): + if data: + value = 'true' + else: + value = 'false' + return self.represent_scalar('tag:yaml.org,2002:bool', value) + + def represent_int(self, data): + return self.represent_scalar('tag:yaml.org,2002:int', str(data)) + + inf_value = 1e300 + while repr(inf_value) != repr(inf_value*inf_value): + inf_value *= inf_value + + def represent_float(self, data): + if data != data or (data == 0.0 and data == 1.0): + value = '.nan' + elif data == self.inf_value: + value = '.inf' + elif data == -self.inf_value: + value = '-.inf' + else: + value = repr(data).lower() + # Note that in some cases `repr(data)` represents a float number + # without the decimal parts. 
For instance: + # >>> repr(1e17) + # '1e17' + # Unfortunately, this is not a valid float representation according + # to the definition of the `!!float` tag. We fix this by adding + # '.0' before the 'e' symbol. + if '.' not in value and 'e' in value: + value = value.replace('e', '.0e', 1) + return self.represent_scalar('tag:yaml.org,2002:float', value) + + def represent_list(self, data): + #pairs = (len(data) > 0 and isinstance(data, list)) + #if pairs: + # for item in data: + # if not isinstance(item, tuple) or len(item) != 2: + # pairs = False + # break + #if not pairs: + return self.represent_sequence('tag:yaml.org,2002:seq', data) + #value = [] + #for item_key, item_value in data: + # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', + # [(item_key, item_value)])) + #return SequenceNode(u'tag:yaml.org,2002:pairs', value) + + def represent_dict(self, data): + return self.represent_mapping('tag:yaml.org,2002:map', data) + + def represent_set(self, data): + value = {} + for key in data: + value[key] = None + return self.represent_mapping('tag:yaml.org,2002:set', value) + + def represent_date(self, data): + value = data.isoformat() + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_datetime(self, data): + value = data.isoformat(' ') + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_yaml_object(self, tag, data, cls, flow_style=None): + if hasattr(data, '__getstate__'): + state = data.__getstate__() + else: + state = data.__dict__.copy() + return self.represent_mapping(tag, state, flow_style=flow_style) + + def represent_undefined(self, data): + raise RepresenterError("cannot represent an object", data) + +SafeRepresenter.add_representer(type(None), + SafeRepresenter.represent_none) + +SafeRepresenter.add_representer(str, + SafeRepresenter.represent_str) + +SafeRepresenter.add_representer(bytes, + SafeRepresenter.represent_binary) + +SafeRepresenter.add_representer(bool, + SafeRepresenter.represent_bool) + +SafeRepresenter.add_representer(int, + SafeRepresenter.represent_int) + +SafeRepresenter.add_representer(float, + SafeRepresenter.represent_float) + +SafeRepresenter.add_representer(list, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(tuple, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(dict, + SafeRepresenter.represent_dict) + +SafeRepresenter.add_representer(set, + SafeRepresenter.represent_set) + +SafeRepresenter.add_representer(datetime.date, + SafeRepresenter.represent_date) + +SafeRepresenter.add_representer(datetime.datetime, + SafeRepresenter.represent_datetime) + +SafeRepresenter.add_representer(None, + SafeRepresenter.represent_undefined) + +class Representer(SafeRepresenter): + + def represent_complex(self, data): + if data.imag == 0.0: + data = '%r' % data.real + elif data.real == 0.0: + data = '%rj' % data.imag + elif data.imag > 0: + data = '%r+%rj' % (data.real, data.imag) + else: + data = '%r%rj' % (data.real, data.imag) + return self.represent_scalar('tag:yaml.org,2002:python/complex', data) + + def represent_tuple(self, data): + return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) + + def represent_name(self, data): + name = '%s.%s' % (data.__module__, data.__name__) + return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') + + def represent_module(self, data): + return self.represent_scalar( + 'tag:yaml.org,2002:python/module:'+data.__name__, '') + + def represent_object(self, data): + # We use __reduce__ 
API to save the data. data.__reduce__ returns
+        # a tuple of length 2-5:
+        #   (function, args, state, listitems, dictitems)
+
+        # For reconstructing, we call function(*args), then set its state,
+        # listitems, and dictitems if they are not None.
+
+        # A special case is when function.__name__ == '__newobj__'. In this
+        # case we create the object with args[0].__new__(*args).
+
+        # Another special case is when __reduce__ returns a string - we don't
+        # support it.
+
+        # We produce a !!python/object, !!python/object/new or
+        # !!python/object/apply node.
+
+        cls = type(data)
+        if cls in copyreg.dispatch_table:
+            reduce = copyreg.dispatch_table[cls](data)
+        elif hasattr(data, '__reduce_ex__'):
+            reduce = data.__reduce_ex__(2)
+        elif hasattr(data, '__reduce__'):
+            reduce = data.__reduce__()
+        else:
+            raise RepresenterError("cannot represent an object", data)
+        reduce = (list(reduce)+[None]*5)[:5]
+        function, args, state, listitems, dictitems = reduce
+        args = list(args)
+        if state is None:
+            state = {}
+        if listitems is not None:
+            listitems = list(listitems)
+        if dictitems is not None:
+            dictitems = dict(dictitems)
+        if function.__name__ == '__newobj__':
+            function = args[0]
+            args = args[1:]
+            tag = 'tag:yaml.org,2002:python/object/new:'
+            newobj = True
+        else:
+            tag = 'tag:yaml.org,2002:python/object/apply:'
+            newobj = False
+        function_name = '%s.%s' % (function.__module__, function.__name__)
+        if not args and not listitems and not dictitems \
+                and isinstance(state, dict) and newobj:
+            return self.represent_mapping(
+                    'tag:yaml.org,2002:python/object:'+function_name, state)
+        if not listitems and not dictitems  \
+                and isinstance(state, dict) and not state:
+            return self.represent_sequence(tag+function_name, args)
+        value = {}
+        if args:
+            value['args'] = args
+        if state or not isinstance(state, dict):
+            value['state'] = state
+        if listitems:
+            value['listitems'] = listitems
+        if dictitems:
+            value['dictitems'] = dictitems
+        return self.represent_mapping(tag+function_name, value)
+
+    def represent_ordered_dict(self, data):
+        # Provide uniform representation across different Python versions.
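+        # For example (illustrative), OrderedDict([('a', 1), ('b', 2)]) becomes
+        # a single-element sequence wrapping the list of item pairs:
+        #   !!python/object/apply:collections.OrderedDict [[[a, 1], [b, 2]]]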
+ data_type = type(data) + tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \ + % (data_type.__module__, data_type.__name__) + items = [[key, value] for key, value in data.items()] + return self.represent_sequence(tag, [items]) + +Representer.add_representer(complex, + Representer.represent_complex) + +Representer.add_representer(tuple, + Representer.represent_tuple) + +Representer.add_multi_representer(type, + Representer.represent_name) + +Representer.add_representer(collections.OrderedDict, + Representer.represent_ordered_dict) + +Representer.add_representer(types.FunctionType, + Representer.represent_name) + +Representer.add_representer(types.BuiltinFunctionType, + Representer.represent_name) + +Representer.add_representer(types.ModuleType, + Representer.represent_module) + +Representer.add_multi_representer(object, + Representer.represent_object) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/resolver.py b/env-llmeval/lib/python3.10/site-packages/yaml/resolver.py new file mode 100644 index 0000000000000000000000000000000000000000..3522bdaaf6358110b608f4e6503b9d314c82d887 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/resolver.py @@ -0,0 +1,227 @@ + +__all__ = ['BaseResolver', 'Resolver'] + +from .error import * +from .nodes import * + +import re + +class ResolverError(YAMLError): + pass + +class BaseResolver: + + DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' + DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' + DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' + + yaml_implicit_resolvers = {} + yaml_path_resolvers = {} + + def __init__(self): + self.resolver_exact_paths = [] + self.resolver_prefix_paths = [] + + @classmethod + def add_implicit_resolver(cls, tag, regexp, first): + if not 'yaml_implicit_resolvers' in cls.__dict__: + implicit_resolvers = {} + for key in cls.yaml_implicit_resolvers: + implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:] + cls.yaml_implicit_resolvers = implicit_resolvers + if first is None: + first = [None] + for ch in first: + cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) + + @classmethod + def add_path_resolver(cls, tag, path, kind=None): + # Note: `add_path_resolver` is experimental. The API could be changed. + # `new_path` is a pattern that is matched against the path from the + # root to the node that is being considered. `node_path` elements are + # tuples `(node_check, index_check)`. `node_check` is a node class: + # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` + # matches any kind of a node. `index_check` could be `None`, a boolean + # value, a string value, or a number. `None` and `False` match against + # any _value_ of sequence and mapping nodes. `True` matches against + # any _key_ of a mapping node. A string `index_check` matches against + # a mapping value that corresponds to a scalar key which content is + # equal to the `index_check` value. An integer `index_check` matches + # against a sequence value with the index equal to `index_check`. 
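+        # A hypothetical example (`MyResolver` is a stand-in for a Resolver
+        # subclass): resolve the scalar value of the top-level mapping key
+        # 'port' as an integer:
+        #   MyResolver.add_path_resolver('tag:yaml.org,2002:int',
+        #                                [(dict, 'port')], kind=str)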
+ if not 'yaml_path_resolvers' in cls.__dict__: + cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() + new_path = [] + for element in path: + if isinstance(element, (list, tuple)): + if len(element) == 2: + node_check, index_check = element + elif len(element) == 1: + node_check = element[0] + index_check = True + else: + raise ResolverError("Invalid path element: %s" % element) + else: + node_check = None + index_check = element + if node_check is str: + node_check = ScalarNode + elif node_check is list: + node_check = SequenceNode + elif node_check is dict: + node_check = MappingNode + elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ + and not isinstance(node_check, str) \ + and node_check is not None: + raise ResolverError("Invalid node checker: %s" % node_check) + if not isinstance(index_check, (str, int)) \ + and index_check is not None: + raise ResolverError("Invalid index checker: %s" % index_check) + new_path.append((node_check, index_check)) + if kind is str: + kind = ScalarNode + elif kind is list: + kind = SequenceNode + elif kind is dict: + kind = MappingNode + elif kind not in [ScalarNode, SequenceNode, MappingNode] \ + and kind is not None: + raise ResolverError("Invalid node kind: %s" % kind) + cls.yaml_path_resolvers[tuple(new_path), kind] = tag + + def descend_resolver(self, current_node, current_index): + if not self.yaml_path_resolvers: + return + exact_paths = {} + prefix_paths = [] + if current_node: + depth = len(self.resolver_prefix_paths) + for path, kind in self.resolver_prefix_paths[-1]: + if self.check_resolver_prefix(depth, path, kind, + current_node, current_index): + if len(path) > depth: + prefix_paths.append((path, kind)) + else: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + for path, kind in self.yaml_path_resolvers: + if not path: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + prefix_paths.append((path, kind)) + self.resolver_exact_paths.append(exact_paths) + self.resolver_prefix_paths.append(prefix_paths) + + def ascend_resolver(self): + if not self.yaml_path_resolvers: + return + self.resolver_exact_paths.pop() + self.resolver_prefix_paths.pop() + + def check_resolver_prefix(self, depth, path, kind, + current_node, current_index): + node_check, index_check = path[depth-1] + if isinstance(node_check, str): + if current_node.tag != node_check: + return + elif node_check is not None: + if not isinstance(current_node, node_check): + return + if index_check is True and current_index is not None: + return + if (index_check is False or index_check is None) \ + and current_index is None: + return + if isinstance(index_check, str): + if not (isinstance(current_index, ScalarNode) + and index_check == current_index.value): + return + elif isinstance(index_check, int) and not isinstance(index_check, bool): + if index_check != current_index: + return + return True + + def resolve(self, kind, value, implicit): + if kind is ScalarNode and implicit[0]: + if value == '': + resolvers = self.yaml_implicit_resolvers.get('', []) + else: + resolvers = self.yaml_implicit_resolvers.get(value[0], []) + wildcard_resolvers = self.yaml_implicit_resolvers.get(None, []) + for tag, regexp in resolvers + wildcard_resolvers: + if regexp.match(value): + return tag + implicit = implicit[1] + if self.yaml_path_resolvers: + exact_paths = self.resolver_exact_paths[-1] + if kind in exact_paths: + return exact_paths[kind] + if None in exact_paths: + return exact_paths[None] + if kind is ScalarNode: + return 
self.DEFAULT_SCALAR_TAG + elif kind is SequenceNode: + return self.DEFAULT_SEQUENCE_TAG + elif kind is MappingNode: + return self.DEFAULT_MAPPING_TAG + +class Resolver(BaseResolver): + pass + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:bool', + re.compile(r'''^(?:yes|Yes|YES|no|No|NO + |true|True|TRUE|false|False|FALSE + |on|On|ON|off|Off|OFF)$''', re.X), + list('yYnNtTfFoO')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:float', + re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? + |\.[0-9][0-9_]*(?:[eE][-+][0-9]+)? + |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* + |[-+]?\.(?:inf|Inf|INF) + |\.(?:nan|NaN|NAN))$''', re.X), + list('-+0123456789.')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:int', + re.compile(r'''^(?:[-+]?0b[0-1_]+ + |[-+]?0[0-7_]+ + |[-+]?(?:0|[1-9][0-9_]*) + |[-+]?0x[0-9a-fA-F_]+ + |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), + list('-+0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:merge', + re.compile(r'^(?:<<)$'), + ['<']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:null', + re.compile(r'''^(?: ~ + |null|Null|NULL + | )$''', re.X), + ['~', 'n', 'N', '']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:timestamp', + re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] + |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? + (?:[Tt]|[ \t]+)[0-9][0-9]? + :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? + (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), + list('0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:value', + re.compile(r'^(?:=)$'), + ['=']) + +# The following resolver is only for documentation purposes. It cannot work +# because plain scalars cannot start with '!', '&', or '*'. +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:yaml', + re.compile(r'^(?:!|&|\*)$'), + list('!&*')) + diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/scanner.py b/env-llmeval/lib/python3.10/site-packages/yaml/scanner.py new file mode 100644 index 0000000000000000000000000000000000000000..de925b07f1eaec33c9c305a8a69f9eb7ac5983c5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/yaml/scanner.py @@ -0,0 +1,1435 @@ + +# Scanner produces tokens of the following types: +# STREAM-START +# STREAM-END +# DIRECTIVE(name, value) +# DOCUMENT-START +# DOCUMENT-END +# BLOCK-SEQUENCE-START +# BLOCK-MAPPING-START +# BLOCK-END +# FLOW-SEQUENCE-START +# FLOW-MAPPING-START +# FLOW-SEQUENCE-END +# FLOW-MAPPING-END +# BLOCK-ENTRY +# FLOW-ENTRY +# KEY +# VALUE +# ALIAS(value) +# ANCHOR(value) +# TAG(value) +# SCALAR(value, plain, style) +# +# Read comments in the Scanner code for more details. +# + +__all__ = ['Scanner', 'ScannerError'] + +from .error import MarkedYAMLError +from .tokens import * + +class ScannerError(MarkedYAMLError): + pass + +class SimpleKey: + # See below simple keys treatment. + + def __init__(self, token_number, required, index, line, column, mark): + self.token_number = token_number + self.required = required + self.index = index + self.line = line + self.column = column + self.mark = mark + +class Scanner: + + def __init__(self): + """Initialize the scanner.""" + # It is assumed that Scanner and Reader will have a common descendant. + # Reader do the dirty work of checking for BOM and converting the + # input data to Unicode. It also adds NUL to the end. 
+
+        #
+        # Reader supports the following methods
+        #   self.peek(i=0)    # peek the next i-th character
+        #   self.prefix(l=1)  # peek the next l characters
+        #   self.forward(l=1) # read the next l characters and move the pointer.
+
+        # Have we reached the end of the stream?
+        self.done = False
+
+        # The number of unclosed '{' and '['. `flow_level == 0` means block
+        # context.
+        self.flow_level = 0
+
+        # List of processed tokens that are not yet emitted.
+        self.tokens = []
+
+        # Add the STREAM-START token.
+        self.fetch_stream_start()
+
+        # Number of tokens that were emitted through the `get_token` method.
+        self.tokens_taken = 0
+
+        # The current indentation level.
+        self.indent = -1
+
+        # Past indentation levels.
+        self.indents = []
+
+        # Variables related to simple keys treatment.
+
+        # A simple key is a key that is not denoted by the '?' indicator.
+        # Example of simple keys:
+        #   ---
+        #   block simple key: value
+        #   ? not a simple key:
+        #   : { flow simple key: value }
+        # We emit the KEY token before all keys, so when we find a potential
+        # simple key, we try to locate the corresponding ':' indicator.
+        # Simple keys should be limited to a single line and 1024 characters.
+
+        # Can a simple key start at the current position? A simple key may
+        # start:
+        # - at the beginning of the line, not counting indentation spaces
+        #       (in block context),
+        # - after '{', '[', ',' (in the flow context),
+        # - after '?', ':', '-' (in the block context).
+        # In the block context, this flag also signifies if a block collection
+        # may start at the current position.
+        self.allow_simple_key = True
+
+        # Keep track of possible simple keys. This is a dictionary. The key
+        # is `flow_level`; there can be no more than one possible simple key
+        # for each level. The value is a SimpleKey record:
+        #   (token_number, required, index, line, column, mark)
+        # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+        # '[', or '{' tokens.
+        self.possible_simple_keys = {}
+
+    # Public methods.
+
+    def check_token(self, *choices):
+        # Check if the next token is one of the given types.
+        while self.need_more_tokens():
+            self.fetch_more_tokens()
+        if self.tokens:
+            if not choices:
+                return True
+            for choice in choices:
+                if isinstance(self.tokens[0], choice):
+                    return True
+        return False
+
+    def peek_token(self):
+        # Return the next token, but do not remove it from the queue.
+        # Return None if no more tokens.
+        while self.need_more_tokens():
+            self.fetch_more_tokens()
+        if self.tokens:
+            return self.tokens[0]
+        else:
+            return None
+
+    def get_token(self):
+        # Return the next token.
+        while self.need_more_tokens():
+            self.fetch_more_tokens()
+        if self.tokens:
+            self.tokens_taken += 1
+            return self.tokens.pop(0)
+
+    # Private methods.
+
+    def need_more_tokens(self):
+        if self.done:
+            return False
+        if not self.tokens:
+            return True
+        # The current token may be a potential simple key, so we
+        # need to look further.
+        self.stale_possible_simple_keys()
+        if self.next_possible_simple_key() == self.tokens_taken:
+            return True
+
+    def fetch_more_tokens(self):
+
+        # Eat whitespace and comments until we reach the next token.
+        self.scan_to_next_token()
+
+        # Remove obsolete possible simple keys.
+        self.stale_possible_simple_keys()
+
+        # Compare the current indentation and column. It may add some tokens
+        # and decrease the current indentation level.
+        self.unwind_indent(self.column)
+
+        # Peek the next character.
+        ch = self.peek()
+
+        # Is it the end of stream?
+ if ch == '\0': + return self.fetch_stream_end() + + # Is it a directive? + if ch == '%' and self.check_directive(): + return self.fetch_directive() + + # Is it the document start? + if ch == '-' and self.check_document_start(): + return self.fetch_document_start() + + # Is it the document end? + if ch == '.' and self.check_document_end(): + return self.fetch_document_end() + + # TODO: support for BOM within a stream. + #if ch == '\uFEFF': + # return self.fetch_bom() <-- issue BOMToken + + # Note: the order of the following checks is NOT significant. + + # Is it the flow sequence start indicator? + if ch == '[': + return self.fetch_flow_sequence_start() + + # Is it the flow mapping start indicator? + if ch == '{': + return self.fetch_flow_mapping_start() + + # Is it the flow sequence end indicator? + if ch == ']': + return self.fetch_flow_sequence_end() + + # Is it the flow mapping end indicator? + if ch == '}': + return self.fetch_flow_mapping_end() + + # Is it the flow entry indicator? + if ch == ',': + return self.fetch_flow_entry() + + # Is it the block entry indicator? + if ch == '-' and self.check_block_entry(): + return self.fetch_block_entry() + + # Is it the key indicator? + if ch == '?' and self.check_key(): + return self.fetch_key() + + # Is it the value indicator? + if ch == ':' and self.check_value(): + return self.fetch_value() + + # Is it an alias? + if ch == '*': + return self.fetch_alias() + + # Is it an anchor? + if ch == '&': + return self.fetch_anchor() + + # Is it a tag? + if ch == '!': + return self.fetch_tag() + + # Is it a literal scalar? + if ch == '|' and not self.flow_level: + return self.fetch_literal() + + # Is it a folded scalar? + if ch == '>' and not self.flow_level: + return self.fetch_folded() + + # Is it a single quoted scalar? + if ch == '\'': + return self.fetch_single() + + # Is it a double quoted scalar? + if ch == '\"': + return self.fetch_double() + + # It must be a plain scalar then. + if self.check_plain(): + return self.fetch_plain() + + # No? It's an error. Let's produce a nice error message. + raise ScannerError("while scanning for the next token", None, + "found character %r that cannot start any token" % ch, + self.get_mark()) + + # Simple keys treatment. + + def next_possible_simple_key(self): + # Return the number of the nearest possible simple key. Actually we + # don't need to loop through the whole dictionary. We may replace it + # with the following code: + # if not self.possible_simple_keys: + # return None + # return self.possible_simple_keys[ + # min(self.possible_simple_keys.keys())].token_number + min_token_number = None + for level in self.possible_simple_keys: + key = self.possible_simple_keys[level] + if min_token_number is None or key.token_number < min_token_number: + min_token_number = key.token_number + return min_token_number + + def stale_possible_simple_keys(self): + # Remove entries that are no longer possible simple keys. According to + # the YAML specification, simple keys + # - should be limited to a single line, + # - should be no longer than 1024 characters. + # Disabling this procedure will allow simple keys of any length and + # height (may cause problems if indentation is broken though). 
+        for level in list(self.possible_simple_keys):
+            key = self.possible_simple_keys[level]
+            if key.line != self.line  \
+                    or self.index-key.index > 1024:
+                if key.required:
+                    raise ScannerError("while scanning a simple key", key.mark,
+                            "could not find expected ':'", self.get_mark())
+                del self.possible_simple_keys[level]
+
+    def save_possible_simple_key(self):
+        # The next token may start a simple key. We check if it's possible
+        # and save its position. This function is called for
+        #   ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+        # Check if a simple key is required at the current position.
+        required = not self.flow_level and self.indent == self.column
+
+        # The next token might be a simple key. Let's save its number and
+        # position.
+        if self.allow_simple_key:
+            self.remove_possible_simple_key()
+            token_number = self.tokens_taken+len(self.tokens)
+            key = SimpleKey(token_number, required,
+                    self.index, self.line, self.column, self.get_mark())
+            self.possible_simple_keys[self.flow_level] = key
+
+    def remove_possible_simple_key(self):
+        # Remove the saved possible key position at the current flow level.
+        if self.flow_level in self.possible_simple_keys:
+            key = self.possible_simple_keys[self.flow_level]
+
+            if key.required:
+                raise ScannerError("while scanning a simple key", key.mark,
+                        "could not find expected ':'", self.get_mark())
+
+            del self.possible_simple_keys[self.flow_level]
+
+    # Indentation functions.
+
+    def unwind_indent(self, column):
+
+        ## In flow context, tokens should respect indentation.
+        ## Actually the condition should be `self.indent >= column` according to
+        ## the spec. But this condition will prohibit intuitively correct
+        ## constructions such as
+        ## key : {
+        ## }
+        #if self.flow_level and self.indent > column:
+        #    raise ScannerError(None, None,
+        #            "invalid indentation or unclosed '[' or '{'",
+        #            self.get_mark())
+
+        # In the flow context, indentation is ignored. We make the scanner less
+        # restrictive than the specification requires.
+        if self.flow_level:
+            return
+
+        # In block context, we may need to issue the BLOCK-END tokens.
+        while self.indent > column:
+            mark = self.get_mark()
+            self.indent = self.indents.pop()
+            self.tokens.append(BlockEndToken(mark, mark))
+
+    def add_indent(self, column):
+        # Check if we need to increase indentation.
+        if self.indent < column:
+            self.indents.append(self.indent)
+            self.indent = column
+            return True
+        return False
+
+    # Fetchers.
+
+    def fetch_stream_start(self):
+        # We always add STREAM-START as the first token and STREAM-END as the
+        # last token.
+
+        # Read the token.
+        mark = self.get_mark()
+
+        # Add STREAM-START.
+        self.tokens.append(StreamStartToken(mark, mark,
+            encoding=self.encoding))
+
+
+    def fetch_stream_end(self):
+
+        # Set the current indentation to -1.
+        self.unwind_indent(-1)
+
+        # Reset simple keys.
+        self.remove_possible_simple_key()
+        self.allow_simple_key = False
+        self.possible_simple_keys = {}
+
+        # Read the token.
+        mark = self.get_mark()
+
+        # Add STREAM-END.
+        self.tokens.append(StreamEndToken(mark, mark))
+
+        # The stream is finished.
+        self.done = True
+
+    def fetch_directive(self):
+
+        # Set the current indentation to -1.
+        self.unwind_indent(-1)
+
+        # Reset simple keys.
+        self.remove_possible_simple_key()
+        self.allow_simple_key = False
+
+        # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive()) + + def fetch_document_start(self): + self.fetch_document_indicator(DocumentStartToken) + + def fetch_document_end(self): + self.fetch_document_indicator(DocumentEndToken) + + def fetch_document_indicator(self, TokenClass): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. Note that there could not be a block collection + # after '---'. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Add DOCUMENT-START or DOCUMENT-END. + start_mark = self.get_mark() + self.forward(3) + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_start(self): + self.fetch_flow_collection_start(FlowSequenceStartToken) + + def fetch_flow_mapping_start(self): + self.fetch_flow_collection_start(FlowMappingStartToken) + + def fetch_flow_collection_start(self, TokenClass): + + # '[' and '{' may start a simple key. + self.save_possible_simple_key() + + # Increase the flow level. + self.flow_level += 1 + + # Simple keys are allowed after '[' and '{'. + self.allow_simple_key = True + + # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_end(self): + self.fetch_flow_collection_end(FlowSequenceEndToken) + + def fetch_flow_mapping_end(self): + self.fetch_flow_collection_end(FlowMappingEndToken) + + def fetch_flow_collection_end(self, TokenClass): + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Decrease the flow level. + self.flow_level -= 1 + + # No simple keys after ']' or '}'. + self.allow_simple_key = False + + # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_entry(self): + + # Simple keys are allowed after ','. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add FLOW-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(FlowEntryToken(start_mark, end_mark)) + + def fetch_block_entry(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a new entry? + if not self.allow_simple_key: + raise ScannerError(None, None, + "sequence entries are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-SEQUENCE-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockSequenceStartToken(mark, mark)) + + # It's an error for the block entry to occur in the flow context, + # but we let the parser detect this. + else: + pass + + # Simple keys are allowed after '-'. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add BLOCK-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(BlockEntryToken(start_mark, end_mark)) + + def fetch_key(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a key (not necessary a simple)? + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping keys are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-MAPPING-START. 
+ if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after '?' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add KEY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(KeyToken(start_mark, end_mark)) + + def fetch_value(self): + + # Do we determine a simple key? + if self.flow_level in self.possible_simple_keys: + + # Add KEY. + key = self.possible_simple_keys[self.flow_level] + del self.possible_simple_keys[self.flow_level] + self.tokens.insert(key.token_number-self.tokens_taken, + KeyToken(key.mark, key.mark)) + + # If this key starts a new block mapping, we need to add + # BLOCK-MAPPING-START. + if not self.flow_level: + if self.add_indent(key.column): + self.tokens.insert(key.token_number-self.tokens_taken, + BlockMappingStartToken(key.mark, key.mark)) + + # There cannot be two simple keys one after another. + self.allow_simple_key = False + + # It must be a part of a complex key. + else: + + # Block context needs additional checks. + # (Do we really need them? They will be caught by the parser + # anyway.) + if not self.flow_level: + + # We are allowed to start a complex value if and only if + # we can start a simple key. + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping values are not allowed here", + self.get_mark()) + + # If this value starts a new block mapping, we need to add + # BLOCK-MAPPING-START. It will be detected as an error later by + # the parser. + if not self.flow_level: + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after ':' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add VALUE. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(ValueToken(start_mark, end_mark)) + + def fetch_alias(self): + + # ALIAS could be a simple key. + self.save_possible_simple_key() + + # No simple keys after ALIAS. + self.allow_simple_key = False + + # Scan and add ALIAS. + self.tokens.append(self.scan_anchor(AliasToken)) + + def fetch_anchor(self): + + # ANCHOR could start a simple key. + self.save_possible_simple_key() + + # No simple keys after ANCHOR. + self.allow_simple_key = False + + # Scan and add ANCHOR. + self.tokens.append(self.scan_anchor(AnchorToken)) + + def fetch_tag(self): + + # TAG could start a simple key. + self.save_possible_simple_key() + + # No simple keys after TAG. + self.allow_simple_key = False + + # Scan and add TAG. + self.tokens.append(self.scan_tag()) + + def fetch_literal(self): + self.fetch_block_scalar(style='|') + + def fetch_folded(self): + self.fetch_block_scalar(style='>') + + def fetch_block_scalar(self, style): + + # A simple key may follow a block scalar. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Scan and add SCALAR. + self.tokens.append(self.scan_block_scalar(style)) + + def fetch_single(self): + self.fetch_flow_scalar(style='\'') + + def fetch_double(self): + self.fetch_flow_scalar(style='"') + + def fetch_flow_scalar(self, style): + + # A flow scalar could be a simple key. 
+ self.save_possible_simple_key() + + # No simple keys after flow scalars. + self.allow_simple_key = False + + # Scan and add SCALAR. + self.tokens.append(self.scan_flow_scalar(style)) + + def fetch_plain(self): + + # A plain scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after plain scalars. But note that `scan_plain` will + # change this flag if the scan is finished at the beginning of the + # line. + self.allow_simple_key = False + + # Scan and add SCALAR. May change `allow_simple_key`. + self.tokens.append(self.scan_plain()) + + # Checkers. + + def check_directive(self): + + # DIRECTIVE: ^ '%' ... + # The '%' indicator is already checked. + if self.column == 0: + return True + + def check_document_start(self): + + # DOCUMENT-START: ^ '---' (' '|'\n') + if self.column == 0: + if self.prefix(3) == '---' \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return True + + def check_document_end(self): + + # DOCUMENT-END: ^ '...' (' '|'\n') + if self.column == 0: + if self.prefix(3) == '...' \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return True + + def check_block_entry(self): + + # BLOCK-ENTRY: '-' (' '|'\n') + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_key(self): + + # KEY(flow context): '?' + if self.flow_level: + return True + + # KEY(block context): '?' (' '|'\n') + else: + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_value(self): + + # VALUE(flow context): ':' + if self.flow_level: + return True + + # VALUE(block context): ':' (' '|'\n') + else: + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_plain(self): + + # A plain scalar may start with any non-space character except: + # '-', '?', ':', ',', '[', ']', '{', '}', + # '#', '&', '*', '!', '|', '>', '\'', '\"', + # '%', '@', '`'. + # + # It may also start with + # '-', '?', ':' + # if it is followed by a non-space character. + # + # Note that we limit the last rule to the block context (except the + # '-' character) because we want the flow context to be space + # independent. + ch = self.peek() + return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ + or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029' + and (ch == '-' or (not self.flow_level and ch in '?:'))) + + # Scanners. + + def scan_to_next_token(self): + # We ignore spaces, line breaks and comments. + # If we find a line break in the block context, we set the flag + # `allow_simple_key` on. + # The byte order mark is stripped if it's the first character in the + # stream. We do not yet support BOM inside the stream as the + # specification requires. Any such mark will be considered as a part + # of the document. + # + # TODO: We need to make tab handling rules more sane. A good rule is + # Tabs cannot precede tokens + # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, + # KEY(block), VALUE(block), BLOCK-ENTRY + # So the checking code is + # if : + # self.allow_simple_keys = False + # We also need to add the check for `allow_simple_keys == True` to + # `unwind_indent` before issuing BLOCK-END. + # Scanners for block, flow, and plain scalars need to be modified. 
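+        #
+        # For example (illustrative): given the input "  # note\nkey: 1",
+        # this method skips the two spaces, the comment and the line break,
+        # leaving the pointer on 'k' with `allow_simple_key` set to True.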
+ + if self.index == 0 and self.peek() == '\uFEFF': + self.forward() + found = False + while not found: + while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + if self.scan_line_break(): + if not self.flow_level: + self.allow_simple_key = True + else: + found = True + + def scan_directive(self): + # See the specification for details. + start_mark = self.get_mark() + self.forward() + name = self.scan_directive_name(start_mark) + value = None + if name == 'YAML': + value = self.scan_yaml_directive_value(start_mark) + end_mark = self.get_mark() + elif name == 'TAG': + value = self.scan_tag_directive_value(start_mark) + end_mark = self.get_mark() + else: + end_mark = self.get_mark() + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + self.scan_directive_ignored_line(start_mark) + return DirectiveToken(name, value, start_mark, end_mark) + + def scan_directive_name(self, start_mark): + # See the specification for details. + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + return value + + def scan_yaml_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + major = self.scan_yaml_directive_number(start_mark) + if self.peek() != '.': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or '.', but found %r" % self.peek(), + self.get_mark()) + self.forward() + minor = self.scan_yaml_directive_number(start_mark) + if self.peek() not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or ' ', but found %r" % self.peek(), + self.get_mark()) + return (major, minor) + + def scan_yaml_directive_number(self, start_mark): + # See the specification for details. + ch = self.peek() + if not ('0' <= ch <= '9'): + raise ScannerError("while scanning a directive", start_mark, + "expected a digit, but found %r" % ch, self.get_mark()) + length = 0 + while '0' <= self.peek(length) <= '9': + length += 1 + value = int(self.prefix(length)) + self.forward(length) + return value + + def scan_tag_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + handle = self.scan_tag_directive_handle(start_mark) + while self.peek() == ' ': + self.forward() + prefix = self.scan_tag_directive_prefix(start_mark) + return (handle, prefix) + + def scan_tag_directive_handle(self, start_mark): + # See the specification for details. + value = self.scan_tag_handle('directive', start_mark) + ch = self.peek() + if ch != ' ': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_tag_directive_prefix(self, start_mark): + # See the specification for details. 
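+        # E.g. (illustrative) for the directive line
+        #   %TAG !e! tag:example.com,2000:app/
+        # this returns 'tag:example.com,2000:app/'.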
+ value = self.scan_tag_uri('directive', start_mark) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_directive_ignored_line(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in '\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a comment or a line break, but found %r" + % ch, self.get_mark()) + self.scan_line_break() + + def scan_anchor(self, TokenClass): + # The specification does not restrict characters for anchors and + # aliases. This may lead to problems, for instance, the document: + # [ *alias, value ] + # can be interpreted in two ways, as + # [ "value" ] + # and + # [ *alias , "value" ] + # Therefore we restrict aliases to numbers and ASCII letters. + start_mark = self.get_mark() + indicator = self.peek() + if indicator == '*': + name = 'alias' + else: + name = 'anchor' + self.forward() + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`': + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + end_mark = self.get_mark() + return TokenClass(value, start_mark, end_mark) + + def scan_tag(self): + # See the specification for details. + start_mark = self.get_mark() + ch = self.peek(1) + if ch == '<': + handle = None + self.forward(2) + suffix = self.scan_tag_uri('tag', start_mark) + if self.peek() != '>': + raise ScannerError("while parsing a tag", start_mark, + "expected '>', but found %r" % self.peek(), + self.get_mark()) + self.forward() + elif ch in '\0 \t\r\n\x85\u2028\u2029': + handle = None + suffix = '!' + self.forward() + else: + length = 1 + use_handle = False + while ch not in '\0 \r\n\x85\u2028\u2029': + if ch == '!': + use_handle = True + break + length += 1 + ch = self.peek(length) + handle = '!' + if use_handle: + handle = self.scan_tag_handle('tag', start_mark) + else: + handle = '!' + self.forward() + suffix = self.scan_tag_uri('tag', start_mark) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a tag", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + value = (handle, suffix) + end_mark = self.get_mark() + return TagToken(value, start_mark, end_mark) + + def scan_block_scalar(self, style): + # See the specification for details. + + if style == '>': + folded = True + else: + folded = False + + chunks = [] + start_mark = self.get_mark() + + # Scan the header. + self.forward() + chomping, increment = self.scan_block_scalar_indicators(start_mark) + self.scan_block_scalar_ignored_line(start_mark) + + # Determine the indentation level and go to the first non-empty line. 
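+        # E.g. (illustrative) at the top level self.indent is -1, so a header
+        # of '|2' fixes the content indentation at min_indent+increment-1 = 2
+        # columns; without an explicit increment it is auto-detected below.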
+ min_indent = self.indent+1 + if min_indent < 1: + min_indent = 1 + if increment is None: + breaks, max_indent, end_mark = self.scan_block_scalar_indentation() + indent = max(min_indent, max_indent) + else: + indent = min_indent+increment-1 + breaks, end_mark = self.scan_block_scalar_breaks(indent) + line_break = '' + + # Scan the inner part of the block scalar. + while self.column == indent and self.peek() != '\0': + chunks.extend(breaks) + leading_non_space = self.peek() not in ' \t' + length = 0 + while self.peek(length) not in '\0\r\n\x85\u2028\u2029': + length += 1 + chunks.append(self.prefix(length)) + self.forward(length) + line_break = self.scan_line_break() + breaks, end_mark = self.scan_block_scalar_breaks(indent) + if self.column == indent and self.peek() != '\0': + + # Unfortunately, folding rules are ambiguous. + # + # This is the folding according to the specification: + + if folded and line_break == '\n' \ + and leading_non_space and self.peek() not in ' \t': + if not breaks: + chunks.append(' ') + else: + chunks.append(line_break) + + # This is Clark Evans's interpretation (also in the spec + # examples): + # + #if folded and line_break == '\n': + # if not breaks: + # if self.peek() not in ' \t': + # chunks.append(' ') + # else: + # chunks.append(line_break) + #else: + # chunks.append(line_break) + else: + break + + # Chomp the tail. + if chomping is not False: + chunks.append(line_break) + if chomping is True: + chunks.extend(breaks) + + # We are done. + return ScalarToken(''.join(chunks), False, start_mark, end_mark, + style) + + def scan_block_scalar_indicators(self, start_mark): + # See the specification for details. + chomping = None + increment = None + ch = self.peek() + if ch in '+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch in '0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + elif ch in '0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + ch = self.peek() + if ch in '+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected chomping or indentation indicators, but found %r" + % ch, self.get_mark()) + return chomping, increment + + def scan_block_scalar_ignored_line(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in '\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected a comment or a line break, but found %r" % ch, + self.get_mark()) + self.scan_line_break() + + def scan_block_scalar_indentation(self): + # See the specification for details. 
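+        # Skips the leading empty lines of the scalar, collecting their line
+        # breaks; the largest column reached becomes the auto-detected indent.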
+ chunks = [] + max_indent = 0 + end_mark = self.get_mark() + while self.peek() in ' \r\n\x85\u2028\u2029': + if self.peek() != ' ': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + else: + self.forward() + if self.column > max_indent: + max_indent = self.column + return chunks, max_indent, end_mark + + def scan_block_scalar_breaks(self, indent): + # See the specification for details. + chunks = [] + end_mark = self.get_mark() + while self.column < indent and self.peek() == ' ': + self.forward() + while self.peek() in '\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + while self.column < indent and self.peek() == ' ': + self.forward() + return chunks, end_mark + + def scan_flow_scalar(self, style): + # See the specification for details. + # Note that we loose indentation rules for quoted scalars. Quoted + # scalars don't need to adhere indentation because " and ' clearly + # mark the beginning and the end of them. Therefore we are less + # restrictive then the specification requires. We only need to check + # that document separators are not included in scalars. + if style == '"': + double = True + else: + double = False + chunks = [] + start_mark = self.get_mark() + quote = self.peek() + self.forward() + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + while self.peek() != quote: + chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + self.forward() + end_mark = self.get_mark() + return ScalarToken(''.join(chunks), False, start_mark, end_mark, + style) + + ESCAPE_REPLACEMENTS = { + '0': '\0', + 'a': '\x07', + 'b': '\x08', + 't': '\x09', + '\t': '\x09', + 'n': '\x0A', + 'v': '\x0B', + 'f': '\x0C', + 'r': '\x0D', + 'e': '\x1B', + ' ': '\x20', + '\"': '\"', + '\\': '\\', + '/': '/', + 'N': '\x85', + '_': '\xA0', + 'L': '\u2028', + 'P': '\u2029', + } + + ESCAPE_CODES = { + 'x': 2, + 'u': 4, + 'U': 8, + } + + def scan_flow_scalar_non_spaces(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + length = 0 + while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029': + length += 1 + if length: + chunks.append(self.prefix(length)) + self.forward(length) + ch = self.peek() + if not double and ch == '\'' and self.peek(1) == '\'': + chunks.append('\'') + self.forward(2) + elif (double and ch == '\'') or (not double and ch in '\"\\'): + chunks.append(ch) + self.forward() + elif double and ch == '\\': + self.forward() + ch = self.peek() + if ch in self.ESCAPE_REPLACEMENTS: + chunks.append(self.ESCAPE_REPLACEMENTS[ch]) + self.forward() + elif ch in self.ESCAPE_CODES: + length = self.ESCAPE_CODES[ch] + self.forward() + for k in range(length): + if self.peek(k) not in '0123456789ABCDEFabcdef': + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "expected escape sequence of %d hexadecimal numbers, but found %r" % + (length, self.peek(k)), self.get_mark()) + code = int(self.prefix(length), 16) + chunks.append(chr(code)) + self.forward(length) + elif ch in '\r\n\x85\u2028\u2029': + self.scan_line_break() + chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) + else: + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "found unknown escape character %r" % ch, self.get_mark()) + else: + return chunks + + def scan_flow_scalar_spaces(self, double, start_mark): + # See the specification for details. 
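+        # Folding (illustrative): inside a quoted scalar a single '\n' break
+        # becomes a space, while n consecutive breaks yield n-1 newlines; the
+        # checks on `line_break` and `breaks` below implement this.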
+ chunks = [] + length = 0 + while self.peek(length) in ' \t': + length += 1 + whitespaces = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch == '\0': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected end of stream", self.get_mark()) + elif ch in '\r\n\x85\u2028\u2029': + line_break = self.scan_line_break() + breaks = self.scan_flow_scalar_breaks(double, start_mark) + if line_break != '\n': + chunks.append(line_break) + elif not breaks: + chunks.append(' ') + chunks.extend(breaks) + else: + chunks.append(whitespaces) + return chunks + + def scan_flow_scalar_breaks(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + # Instead of checking indentation, we check for document + # separators. + prefix = self.prefix(3) + if (prefix == '---' or prefix == '...') \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected document separator", self.get_mark()) + while self.peek() in ' \t': + self.forward() + if self.peek() in '\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + else: + return chunks + + def scan_plain(self): + # See the specification for details. + # We add an additional restriction for the flow context: + # plain scalars in the flow context cannot contain ',' or '?'. + # We also keep track of the `allow_simple_key` flag here. + # Indentation rules are loosed for the flow context. + chunks = [] + start_mark = self.get_mark() + end_mark = start_mark + indent = self.indent+1 + # We allow zero indentation for scalars, but then we need to check for + # document separators at the beginning of the line. + #if indent == 0: + # indent = 1 + spaces = [] + while True: + length = 0 + if self.peek() == '#': + break + while True: + ch = self.peek(length) + if ch in '\0 \t\r\n\x85\u2028\u2029' \ + or (ch == ':' and + self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029' + + (u',[]{}' if self.flow_level else u''))\ + or (self.flow_level and ch in ',?[]{}'): + break + length += 1 + if length == 0: + break + self.allow_simple_key = False + chunks.extend(spaces) + chunks.append(self.prefix(length)) + self.forward(length) + end_mark = self.get_mark() + spaces = self.scan_plain_spaces(indent, start_mark) + if not spaces or self.peek() == '#' \ + or (not self.flow_level and self.column < indent): + break + return ScalarToken(''.join(chunks), True, start_mark, end_mark) + + def scan_plain_spaces(self, indent, start_mark): + # See the specification for details. + # The specification is really confusing about tabs in plain scalars. + # We just forbid them completely. Do not use tabs in YAML! 
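+        # E.g. (illustrative) the plain scalar "a\n  b" folds to "a b", while
+        # a break followed by '---' or '...' ends the scalar instead (this
+        # method then returns None).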
+        chunks = []
+        length = 0
+        while self.peek(length) in ' ':
+            length += 1
+        whitespaces = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch in '\r\n\x85\u2028\u2029':
+            line_break = self.scan_line_break()
+            self.allow_simple_key = True
+            prefix = self.prefix(3)
+            if (prefix == '---' or prefix == '...') \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return
+            breaks = []
+            while self.peek() in ' \r\n\x85\u2028\u2029':
+                if self.peek() == ' ':
+                    self.forward()
+                else:
+                    breaks.append(self.scan_line_break())
+                    prefix = self.prefix(3)
+                    if (prefix == '---' or prefix == '...') \
+                            and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                        return
+            if line_break != '\n':
+                chunks.append(line_break)
+            elif not breaks:
+                chunks.append(' ')
+            chunks.extend(breaks)
+        elif whitespaces:
+            chunks.append(whitespaces)
+        return chunks
+
+    def scan_tag_handle(self, name, start_mark):
+        # See the specification for details.
+        # For some strange reason, the specification does not allow '_' in
+        # tag handles. I have allowed it anyway.
+        ch = self.peek()
+        if ch != '!':
+            raise ScannerError("while scanning a %s" % name, start_mark,
+                    "expected '!', but found %r" % ch, self.get_mark())
+        length = 1
+        ch = self.peek(length)
+        if ch != ' ':
+            while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+                    or ch in '-_':
+                length += 1
+                ch = self.peek(length)
+            if ch != '!':
+                self.forward(length)
+                raise ScannerError("while scanning a %s" % name, start_mark,
+                        "expected '!', but found %r" % ch, self.get_mark())
+            length += 1
+        value = self.prefix(length)
+        self.forward(length)
+        return value
+
+    def scan_tag_uri(self, name, start_mark):
+        # See the specification for details.
+        # Note: we do not check whether the URI is well-formed.
+        chunks = []
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+                or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+            if ch == '%':
+                chunks.append(self.prefix(length))
+                self.forward(length)
+                length = 0
+                chunks.append(self.scan_uri_escapes(name, start_mark))
+            else:
+                length += 1
+            ch = self.peek(length)
+        if length:
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            length = 0
+        if not chunks:
+            raise ScannerError("while parsing a %s" % name, start_mark,
+                    "expected URI, but found %r" % ch, self.get_mark())
+        return ''.join(chunks)
+
+    def scan_uri_escapes(self, name, start_mark):
+        # See the specification for details.
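+        # Decode a run of '%XX' escapes into raw bytes, then interpret the
+        # whole byte sequence as UTF-8 (e.g. '%E2%9C%93' decodes to the single
+        # character U+2713).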
+        codes = []
+        mark = self.get_mark()
+        while self.peek() == '%':
+            self.forward()
+            for k in range(2):
+                if self.peek(k) not in '0123456789ABCDEFabcdef':
+                    raise ScannerError("while scanning a %s" % name, start_mark,
+                            "expected URI escape sequence of 2 hexadecimal digits, but found %r"
+                            % self.peek(k), self.get_mark())
+            codes.append(int(self.prefix(2), 16))
+            self.forward(2)
+        try:
+            value = bytes(codes).decode('utf-8')
+        except UnicodeDecodeError as exc:
+            raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+        return value
+
+    def scan_line_break(self):
+        # Transforms:
+        #   '\r\n'    : '\n'
+        #   '\r'      : '\n'
+        #   '\n'      : '\n'
+        #   '\x85'    : '\n'
+        #   '\u2028'  : '\u2028'
+        #   '\u2029'  : '\u2029'
+        #   default   : ''
+        ch = self.peek()
+        if ch in '\r\n\x85':
+            if self.prefix(2) == '\r\n':
+                self.forward(2)
+            else:
+                self.forward()
+            return '\n'
+        elif ch in '\u2028\u2029':
+            self.forward()
+            return ch
+        return ''
diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/serializer.py b/env-llmeval/lib/python3.10/site-packages/yaml/serializer.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe911e67ae7a739abb491fbbc6834b9c37bbda4b
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+    pass
+
+class Serializer:
+
+    ANCHOR_TEMPLATE = 'id%03d'
+
+    def __init__(self, encoding=None,
+            explicit_start=None, explicit_end=None, version=None, tags=None):
+        self.use_encoding = encoding
+        self.use_explicit_start = explicit_start
+        self.use_explicit_end = explicit_end
+        self.use_version = version
+        self.use_tags = tags
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+        self.closed = None
+
+    def open(self):
+        if self.closed is None:
+            self.emit(StreamStartEvent(encoding=self.use_encoding))
+            self.closed = False
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        else:
+            raise SerializerError("serializer is already opened")
+
+    def close(self):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif not self.closed:
+            self.emit(StreamEndEvent())
+            self.closed = True
+
+    #def __del__(self):
+    #    self.close()
+
+    def serialize(self, node):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+            version=self.use_version, tags=self.use_tags))
+        self.anchor_node(node)
+        self.serialize_node(node, None, None)
+        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+
+    def anchor_node(self, node):
+        if node in self.anchors:
+            if self.anchors[node] is None:
+                self.anchors[node] = self.generate_anchor(node)
+        else:
+            self.anchors[node] = None
+            if isinstance(node, SequenceNode):
+                for item in node.value:
+                    self.anchor_node(item)
+            elif isinstance(node, MappingNode):
+                for key, value in node.value:
+                    self.anchor_node(key)
+                    self.anchor_node(value)
+
+    def generate_anchor(self, node):
+        self.last_anchor_id += 1
+        return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+    def serialize_node(self, node, parent, index):
+        alias = self.anchors[node]
+        if node in self.serialized_nodes:
+            self.emit(AliasEvent(alias))
+        else:
+            self.serialized_nodes[node] = True
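+            # Tell the resolver where this node sits in the tree, so the
+            # implicit-tag checks below match how tags were resolved when the
+            # node was composed.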
+            self.descend_resolver(parent, index)
+            if isinstance(node, ScalarNode):
+                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+                default_tag = self.resolve(ScalarNode, node.value, (False, True))
+                implicit = (node.tag == detected_tag), (node.tag == default_tag)
+                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+                    style=node.style))
+            elif isinstance(node, SequenceNode):
+                implicit = (node.tag
+                            == self.resolve(SequenceNode, node.value, True))
+                self.emit(SequenceStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                index = 0
+                for item in node.value:
+                    self.serialize_node(item, node, index)
+                    index += 1
+                self.emit(SequenceEndEvent())
+            elif isinstance(node, MappingNode):
+                implicit = (node.tag
+                            == self.resolve(MappingNode, node.value, True))
+                self.emit(MappingStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                for key, value in node.value:
+                    self.serialize_node(key, node, None)
+                    self.serialize_node(value, node, key)
+                self.emit(MappingEndEvent())
+            self.ascend_resolver()
+
diff --git a/env-llmeval/lib/python3.10/site-packages/yaml/tokens.py b/env-llmeval/lib/python3.10/site-packages/yaml/tokens.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d0b48a394ac8c019b401516a12f688df361cf90
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+    def __init__(self, start_mark, end_mark):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+    def __repr__(self):
+        attributes = [key for key in self.__dict__
+                if not key.endswith('_mark')]
+        attributes.sort()
+        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+                for key in attributes])
+        return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+#    id = '<byte order mark>'
+
+class DirectiveToken(Token):
+    id = '<directive>'
+    def __init__(self, name, value, start_mark, end_mark):
+        self.name = name
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+    id = '<document start>'
+
+class DocumentEndToken(Token):
+    id = '<document end>'
+
+class StreamStartToken(Token):
+    id = '<stream start>'
+    def __init__(self, start_mark=None, end_mark=None,
+            encoding=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.encoding = encoding
+
+class StreamEndToken(Token):
+    id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+    id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+    id = '<block mapping start>'
+
+class BlockEndToken(Token):
+    id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+    id = '['
+
+class FlowMappingStartToken(Token):
+    id = '{'
+
+class FlowSequenceEndToken(Token):
+    id = ']'
+
+class FlowMappingEndToken(Token):
+    id = '}'
+
+class KeyToken(Token):
+    id = '?'
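+# The tokens below either mark entries and values inside collections (the
+# ':', '-' and ',' indicators) or carry a payload in their `value` attribute:
+# alias/anchor/tag names and scalar text.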
+
+class ValueToken(Token):
+    id = ':'
+
+class BlockEntryToken(Token):
+    id = '-'
+
+class FlowEntryToken(Token):
+    id = ','
+
+class AliasToken(Token):
+    id = '<alias>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class AnchorToken(Token):
+    id = '<anchor>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class TagToken(Token):
+    id = '<tag>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class ScalarToken(Token):
+    id = '<scalar>'
+    def __init__(self, value, plain, start_mark, end_mark, style=None):
+        self.value = value
+        self.plain = plain
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.style = style
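+
+# Illustrative note (not part of the upstream module): Token.__repr__ above
+# omits the *_mark attributes, so for example
+#     ScalarToken('abc', True, None, None)
+# prints as
+#     ScalarToken(plain=True, style=None, value='abc')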