diff --git a/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/exp_avg.pt
new file mode 100644
index 0000000000000000000000000000000000000000..6f7023a3439892ff937360f3dfcd419f97999993
--- /dev/null
+++ b/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/exp_avg.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aaa96c7a3d45d98f2333735e41c9892d74e78dbd0640cda0b31f78334e1db866
+size 33555612
diff --git a/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/fp32.pt b/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bef2067f12f1626ee9d88bc719c8271691288f94
--- /dev/null
+++ b/ckpts/universal/global_step80/zero/11.mlp.dense_h_to_4h.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:61a7aedfce35c129dbc3f286586b7221f7f2f95322a1bbd29755f54d381efeca
+size 33555533
diff --git a/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg.pt
new file mode 100644
index 0000000000000000000000000000000000000000..4e3a8ea2b95e6264055bb97ae622d2f0535668f7
--- /dev/null
+++ b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f93d927de1898973cb8449f1dd99431dd02132485aef105585f0985205b0c24
+size 9372
diff --git a/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg_sq.pt b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg_sq.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fc3ae588c201be3d313d5f34a71768ccf7479407
--- /dev/null
+++ b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/exp_avg_sq.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d39adbcaf379e1e4475e2f4bb1262bbd91a94b917d584c4e2532dc4e0fb998bc
+size 9387
diff --git a/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/fp32.pt b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/fp32.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ede64d1cf1ecf8c578fa41ee7fa6a0a5729d263b
--- /dev/null
+++ b/ckpts/universal/global_step80/zero/28.final_rmsnorm.weight/fp32.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ebbcf3acf60567cdbbf88756e15ad70609d2193ade148e3302ba430fbf15b08
+size 9293
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__init__.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d023d86f2c6f0c64d7ac460c50eedc355e78b21f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/__init__.py
@@ -0,0 +1,3 @@
+from sympy.liealgebras.cartan_type import CartanType
+
+__all__ = ['CartanType']
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..380b2ede2567351d638cd5307288c6e189502743
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_matrix.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_matrix.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8fff1ad86ab2bae3ccede8ce0510d9c3c6082e2a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_matrix.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_type.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_type.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..81ef232d44f62fcab4f855dbdd90c169bbebf40a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/cartan_type.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/dynkin_diagram.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/dynkin_diagram.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6e4e89f22ec9294b8038fdc628a56de32b2bae87
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/dynkin_diagram.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/root_system.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/root_system.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7bdaaa924344a2c5f7a73d3686a3064514f4574
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/root_system.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_a.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_a.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..70cad649b6be15fd7083d3745933ef17a1457965
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_a.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_b.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_b.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..738174692a6bc177ee23bf9293635d7ffd9bd299
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_b.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_c.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_c.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..38c728f0518aeac7e7e102bb2450bc1ce9ed7181
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_c.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_d.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_d.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..db6e926868dcbb56460d2af62b909765175adf1c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_d.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_e.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_e.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..337d5c6ad9b46f9d7bf8bbc1427f43494308dfda
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_e.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_f.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_f.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60e20513d0c9cb29ca3658fd88e008cbd67b5a5c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_f.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_g.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_g.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6ec1b191db9589d07a9ad50b6a32ba68cde5d1a6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/type_g.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/weyl_group.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/weyl_group.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc95c8b80d2112aeddd67daa05810cbb4cd82bd5
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/__pycache__/weyl_group.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_matrix.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_matrix.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d29b37bc9a1a26790ee88b5902951afe4fc4560
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_matrix.py
@@ -0,0 +1,25 @@
+from .cartan_type import CartanType
+
+def CartanMatrix(ct):
+    """Access the Cartan matrix of a specific Lie algebra
+
+    Examples
+    ========
+
+    >>> from sympy.liealgebras.cartan_matrix import CartanMatrix
+    >>> CartanMatrix("A2")
+    Matrix([
+    [ 2, -1],
+    [-1,  2]])
+
+    >>> CartanMatrix(['C', 3])
+    Matrix([
+    [ 2, -1,  0],
+    [-1,  2, -1],
+    [ 0, -2,  2]])
+
+    This method works by returning the Cartan matrix
+    which corresponds to Cartan type t.
+    """
+
+    return CartanType(ct).cartan_matrix()
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_type.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..16bb152469238ea912a30c2d0f8210d6f729bdb1
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/cartan_type.py
@@ -0,0 +1,73 @@
+from sympy.core import Atom, Basic
+
+
+class CartanType_generator():
+    """
+    Constructor for actually creating things
+    """
+    def __call__(self, *args):
+        c = args[0]
+        if isinstance(c, list):
+            letter, n = c[0], int(c[1])
+        elif isinstance(c, str):
+            letter, n = c[0], int(c[1:])
+        else:
+            raise TypeError("Argument must be a string (e.g. 'A3') or a list (e.g. ['A', 3])")
+
+        if n < 0:
+            raise ValueError("Lie algebra rank cannot be negative")
+        if letter == "A":
+            from . import type_a
+            return type_a.TypeA(n)
+        if letter == "B":
+            from . import type_b
+            return type_b.TypeB(n)
+
+        if letter == "C":
+            from . import type_c
+            return type_c.TypeC(n)
+
+        if letter == "D":
+            from . import type_d
+            return type_d.TypeD(n)
+
+        if letter == "E":
+            if n >= 6 and n <= 8:
+                from . import type_e
+                return type_e.TypeE(n)
+
+        if letter == "F":
+            if n == 4:
+                from . import type_f
+                return type_f.TypeF(n)
+
+        if letter == "G":
+            if n == 2:
+                from . import type_g
+                return type_g.TypeG(n)
+
+CartanType = CartanType_generator()
+
+
+class Standard_Cartan(Atom):
+    """
+    Concrete base class for Cartan types such as A4, etc
+    """
+
+    def __new__(cls, series, n):
+        obj = Basic.__new__(cls)
+        obj.n = n
+        obj.series = series
+        return obj
+
+    def rank(self):
+        """
+        Returns the rank of the Lie algebra
+        """
+        return self.n
+
+    def series(self):
+        """
+        Returns the type of the Lie algebra
+        """
+        return self.series
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__init__.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ce776474375bb7049d57d870f62ff5b69b018c12
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_matrix.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_matrix.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df8ba930cb5c29619be21b8e6d84f27af511df80
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_matrix.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_type.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_type.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9c54b3635ec50f7c1e60d1acaf0f54094a7a09a4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_cartan_type.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_dynkin_diagram.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_dynkin_diagram.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f912fac39b284f8f2c5534e69690792440dc334e
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_dynkin_diagram.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_root_system.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_root_system.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0384f9a6fab1d98f5edc534bb889ec020c77e5a4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_root_system.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_A.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_A.cpython-310.pyc
new file mode 100644
index
0000000000000000000000000000000000000000..f33518328dc2ccb4a87f19662fcc8968cf54e4c1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_A.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_B.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_B.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbe9ed2cc83e9ee9860d511cf9dafb4472420f6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_B.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_C.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_C.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b2465a15c0b0fdce22d10936797e8052a1d86119 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_C.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_D.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_D.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ae8adfeda889401f82a532392360553cb7cae98 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_D.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_E.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_E.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0dbad3925c8a351ebbddee5d8c29befba47f000 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_E.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_F.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_F.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9271d856ac20d2f61245d7e5e6501f3162584ee1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_F.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_G.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_G.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cf4ca2ae23be4c327b0b1c7f1651d360f713a72 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_type_G.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_weyl_group.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_weyl_group.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d13c552679dbbadb692ae49f09bcf99d7a0b9bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/__pycache__/test_weyl_group.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_matrix.py 
b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_matrix.py new file mode 100644 index 0000000000000000000000000000000000000000..98b1793dee63e0e87c610768554a8388dfd641a1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_matrix.py @@ -0,0 +1,10 @@ +from sympy.liealgebras.cartan_matrix import CartanMatrix +from sympy.matrices import Matrix + +def test_CartanMatrix(): + c = CartanMatrix("A3") + m = Matrix(3, 3, [2, -1, 0, -1, 2, -1, 0, -1, 2]) + assert c == m + a = CartanMatrix(["G",2]) + mt = Matrix(2, 2, [2, -1, -3, 2]) + assert a == mt diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_type.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_type.py new file mode 100644 index 0000000000000000000000000000000000000000..257eeca41d0f5f2eb240cc270f76d452848ed405 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_cartan_type.py @@ -0,0 +1,12 @@ +from sympy.liealgebras.cartan_type import CartanType, Standard_Cartan + +def test_Standard_Cartan(): + c = CartanType("A4") + assert c.rank() == 4 + assert c.series == "A" + m = Standard_Cartan("A", 2) + assert m.rank() == 2 + assert m.series == "A" + b = CartanType("B12") + assert b.rank() == 12 + assert b.series == "B" diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_dynkin_diagram.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_dynkin_diagram.py new file mode 100644 index 0000000000000000000000000000000000000000..ad2ee4c162945c437ecf83d75c7fef9455c9464a --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_dynkin_diagram.py @@ -0,0 +1,9 @@ +from sympy.liealgebras.dynkin_diagram import DynkinDiagram + +def test_DynkinDiagram(): + c = DynkinDiagram("A3") + diag = "0---0---0\n1 2 3" + assert c == diag + ct = DynkinDiagram(["B", 3]) + diag2 = "0---0=>=0\n1 2 3" + assert ct == diag2 diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_root_system.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_root_system.py new file mode 100644 index 0000000000000000000000000000000000000000..42110da5a1c59a7e6b2e537ee13746bfce361579 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_root_system.py @@ -0,0 +1,18 @@ +from sympy.liealgebras.root_system import RootSystem +from sympy.liealgebras.type_a import TypeA +from sympy.matrices import Matrix + +def test_root_system(): + c = RootSystem("A3") + assert c.cartan_type == TypeA(3) + assert c.simple_roots() == {1: [1, -1, 0, 0], 2: [0, 1, -1, 0], 3: [0, 0, 1, -1]} + assert c.root_space() == "alpha[1] + alpha[2] + alpha[3]" + assert c.cartan_matrix() == Matrix([[ 2, -1, 0], [-1, 2, -1], [ 0, -1, 2]]) + assert c.dynkin_diagram() == "0---0---0\n1 2 3" + assert c.add_simple_roots(1, 2) == [1, 0, -1, 0] + assert c.all_roots() == {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], + 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], 5: [0, 1, 0, -1], + 6: [0, 0, 1, -1], 7: [-1, 1, 0, 0], 8: [-1, 0, 1, 0], + 9: [-1, 0, 0, 1], 10: [0, -1, 1, 0], + 11: [0, -1, 0, 1], 12: [0, 0, -1, 1]} + assert c.add_as_roots([1, 0, -1, 0], [0, 0, 1, -1]) == [1, 0, 0, -1] diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_A.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_A.py new file mode 100644 index 0000000000000000000000000000000000000000..85d6f451ee167cf6db17ab20e59efab86ac0b691 --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_A.py @@ -0,0 +1,17 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + +def test_type_A(): + c = CartanType("A3") + m = Matrix(3, 3, [2, -1, 0, -1, 2, -1, 0, -1, 2]) + assert m == c.cartan_matrix() + assert c.basis() == 8 + assert c.roots() == 12 + assert c.dimension() == 4 + assert c.simple_root(1) == [1, -1, 0, 0] + assert c.highest_root() == [1, 0, 0, -1] + assert c.lie_algebra() == "su(4)" + diag = "0---0---0\n1 2 3" + assert c.dynkin_diagram() == diag + assert c.positive_roots() == {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], + 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], 5: [0, 1, 0, -1], 6: [0, 0, 1, -1]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_B.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_B.py new file mode 100644 index 0000000000000000000000000000000000000000..8f2a9011f96bc647e48d39e16cf10703a99d86b3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_B.py @@ -0,0 +1,17 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + +def test_type_B(): + c = CartanType("B3") + m = Matrix(3, 3, [2, -1, 0, -1, 2, -2, 0, -1, 2]) + assert m == c.cartan_matrix() + assert c.dimension() == 3 + assert c.roots() == 18 + assert c.simple_root(3) == [0, 0, 1] + assert c.basis() == 3 + assert c.lie_algebra() == "so(6)" + diag = "0---0=>=0\n1 2 3" + assert c.dynkin_diagram() == diag + assert c.positive_roots() == {1: [1, -1, 0], 2: [1, 1, 0], 3: [1, 0, -1], + 4: [1, 0, 1], 5: [0, 1, -1], 6: [0, 1, 1], 7: [1, 0, 0], + 8: [0, 1, 0], 9: [0, 0, 1]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_C.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_C.py new file mode 100644 index 0000000000000000000000000000000000000000..8154c201e6c50adb7c74458b240ed98b9a0dd123 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_C.py @@ -0,0 +1,22 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + +def test_type_C(): + c = CartanType("C4") + m = Matrix(4, 4, [2, -1, 0, 0, -1, 2, -1, 0, 0, -1, 2, -1, 0, 0, -2, 2]) + assert c.cartan_matrix() == m + assert c.dimension() == 4 + assert c.simple_root(4) == [0, 0, 0, 2] + assert c.roots() == 32 + assert c.basis() == 36 + assert c.lie_algebra() == "sp(8)" + t = CartanType(['C', 3]) + assert t.dimension() == 3 + diag = "0---0---0=<=0\n1 2 3 4" + assert c.dynkin_diagram() == diag + assert c.positive_roots() == {1: [1, -1, 0, 0], 2: [1, 1, 0, 0], + 3: [1, 0, -1, 0], 4: [1, 0, 1, 0], 5: [1, 0, 0, -1], + 6: [1, 0, 0, 1], 7: [0, 1, -1, 0], 8: [0, 1, 1, 0], + 9: [0, 1, 0, -1], 10: [0, 1, 0, 1], 11: [0, 0, 1, -1], + 12: [0, 0, 1, 1], 13: [2, 0, 0, 0], 14: [0, 2, 0, 0], 15: [0, 0, 2, 0], + 16: [0, 0, 0, 2]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_D.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_D.py new file mode 100644 index 0000000000000000000000000000000000000000..ddf6a34cb5be475cc30042e95bf8eae2376a2223 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_D.py @@ -0,0 +1,19 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + + + +def test_type_D(): + c = CartanType("D4") + m = Matrix(4, 4, [2, -1, 0, 0, -1, 2, -1, -1, 0, -1, 2, 0, 0, -1, 0, 2]) + assert c.cartan_matrix() == m + 
assert c.basis() == 6 + assert c.lie_algebra() == "so(8)" + assert c.roots() == 24 + assert c.simple_root(3) == [0, 0, 1, -1] + diag = " 3\n 0\n |\n |\n0---0---0\n1 2 4" + assert diag == c.dynkin_diagram() + assert c.positive_roots() == {1: [1, -1, 0, 0], 2: [1, 1, 0, 0], + 3: [1, 0, -1, 0], 4: [1, 0, 1, 0], 5: [1, 0, 0, -1], 6: [1, 0, 0, 1], + 7: [0, 1, -1, 0], 8: [0, 1, 1, 0], 9: [0, 1, 0, -1], 10: [0, 1, 0, 1], + 11: [0, 0, 1, -1], 12: [0, 0, 1, 1]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_E.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_E.py new file mode 100644 index 0000000000000000000000000000000000000000..95bb23e205bdfc5cb84e432e92d97b98beff540b --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_E.py @@ -0,0 +1,19 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + +def test_type_E(): + c = CartanType("E6") + m = Matrix(6, 6, [2, 0, -1, 0, 0, 0, 0, 2, 0, -1, 0, 0, + -1, 0, 2, -1, 0, 0, 0, -1, -1, 2, -1, 0, 0, 0, 0, + -1, 2, -1, 0, 0, 0, 0, -1, 2]) + assert c.cartan_matrix() == m + assert c.dimension() == 8 + assert c.simple_root(6) == [0, 0, 0, -1, 1, 0, 0, 0] + assert c.roots() == 72 + assert c.basis() == 78 + diag = " "*8 + "2\n" + " "*8 + "0\n" + " "*8 + "|\n" + " "*8 + "|\n" + diag += "---".join("0" for i in range(1, 6))+"\n" + diag += "1 " + " ".join(str(i) for i in range(3, 7)) + assert c.dynkin_diagram() == diag + posroots = c.positive_roots() + assert posroots[8] == [1, 0, 0, 0, 1, 0, 0, 0] diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_F.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_F.py new file mode 100644 index 0000000000000000000000000000000000000000..fbb58223d0b5886e6044108c9c5cc3bbf371dd14 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_F.py @@ -0,0 +1,24 @@ +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix +from sympy.core.backend import S + +def test_type_F(): + c = CartanType("F4") + m = Matrix(4, 4, [2, -1, 0, 0, -1, 2, -2, 0, 0, -1, 2, -1, 0, 0, -1, 2]) + assert c.cartan_matrix() == m + assert c.dimension() == 4 + assert c.simple_root(1) == [1, -1, 0, 0] + assert c.simple_root(2) == [0, 1, -1, 0] + assert c.simple_root(3) == [0, 0, 0, 1] + assert c.simple_root(4) == [-S.Half, -S.Half, -S.Half, -S.Half] + assert c.roots() == 48 + assert c.basis() == 52 + diag = "0---0=>=0---0\n" + " ".join(str(i) for i in range(1, 5)) + assert c.dynkin_diagram() == diag + assert c.positive_roots() == {1: [1, -1, 0, 0], 2: [1, 1, 0, 0], 3: [1, 0, -1, 0], + 4: [1, 0, 1, 0], 5: [1, 0, 0, -1], 6: [1, 0, 0, 1], 7: [0, 1, -1, 0], + 8: [0, 1, 1, 0], 9: [0, 1, 0, -1], 10: [0, 1, 0, 1], 11: [0, 0, 1, -1], + 12: [0, 0, 1, 1], 13: [1, 0, 0, 0], 14: [0, 1, 0, 0], 15: [0, 0, 1, 0], + 16: [0, 0, 0, 1], 17: [S.Half, S.Half, S.Half, S.Half], 18: [S.Half, -S.Half, S.Half, S.Half], + 19: [S.Half, S.Half, -S.Half, S.Half], 20: [S.Half, S.Half, S.Half, -S.Half], 21: [S.Half, S.Half, -S.Half, -S.Half], + 22: [S.Half, -S.Half, S.Half, -S.Half], 23: [S.Half, -S.Half, -S.Half, S.Half], 24: [S.Half, -S.Half, -S.Half, -S.Half]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_G.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_G.py new file mode 100644 index 0000000000000000000000000000000000000000..c427eeb85bad8fc77d17a1563a7b796d4e0f217f --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_type_G.py @@ -0,0 +1,16 @@ +# coding=utf-8 +from sympy.liealgebras.cartan_type import CartanType +from sympy.matrices import Matrix + +def test_type_G(): + c = CartanType("G2") + m = Matrix(2, 2, [2, -1, -3, 2]) + assert c.cartan_matrix() == m + assert c.simple_root(2) == [1, -2, 1] + assert c.basis() == 14 + assert c.roots() == 12 + assert c.dimension() == 3 + diag = "0≡<≡0\n1 2" + assert diag == c.dynkin_diagram() + assert c.positive_roots() == {1: [0, 1, -1], 2: [1, -2, 1], 3: [1, -1, 0], + 4: [1, 0, 1], 5: [1, 1, -2], 6: [2, -1, -1]} diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_weyl_group.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_weyl_group.py new file mode 100644 index 0000000000000000000000000000000000000000..e4e57246fdcb5a431d8bbd65f1f60e0254a9cdf0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/tests/test_weyl_group.py @@ -0,0 +1,35 @@ +from sympy.liealgebras.weyl_group import WeylGroup +from sympy.matrices import Matrix + +def test_weyl_group(): + c = WeylGroup("A3") + assert c.matrix_form('r1*r2') == Matrix([[0, 0, 1, 0], [1, 0, 0, 0], + [0, 1, 0, 0], [0, 0, 0, 1]]) + assert c.generators() == ['r1', 'r2', 'r3'] + assert c.group_order() == 24.0 + assert c.group_name() == "S4: the symmetric group acting on 4 elements." + assert c.coxeter_diagram() == "0---0---0\n1 2 3" + assert c.element_order('r1*r2*r3') == 4 + assert c.element_order('r1*r3*r2*r3') == 3 + d = WeylGroup("B5") + assert d.group_order() == 3840 + assert d.element_order('r1*r2*r4*r5') == 12 + assert d.matrix_form('r2*r3') == Matrix([[0, 0, 1, 0, 0], [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1]]) + assert d.element_order('r1*r2*r1*r3*r5') == 6 + e = WeylGroup("D5") + assert e.element_order('r2*r3*r5') == 4 + assert e.matrix_form('r2*r3*r5') == Matrix([[1, 0, 0, 0, 0], [0, 0, 0, 0, -1], + [0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, -1, 0]]) + f = WeylGroup("G2") + assert f.element_order('r1*r2*r1*r2') == 3 + assert f.element_order('r2*r1*r1*r2') == 1 + + assert f.matrix_form('r1*r2*r1*r2') == Matrix([[0, 1, 0], [0, 0, 1], [1, 0, 0]]) + g = WeylGroup("F4") + assert g.matrix_form('r2*r3') == Matrix([[1, 0, 0, 0], [0, 1, 0, 0], + [0, 0, 0, -1], [0, 0, 1, 0]]) + + assert g.element_order('r2*r3') == 4 + h = WeylGroup("E6") + assert h.group_order() == 51840 diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/type_b.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_b.py new file mode 100644 index 0000000000000000000000000000000000000000..a08cd606fcbcc45e1b45800f8e8b30e8958bfa7e --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_b.py @@ -0,0 +1,172 @@ +from .cartan_type import Standard_Cartan +from sympy.core.backend import eye + +class TypeB(Standard_Cartan): + + def __new__(cls, n): + if n < 2: + raise ValueError("n cannot be less than 2") + return Standard_Cartan.__new__(cls, "B", n) + + def dimension(self): + """Dimension of the vector space V underlying the Lie algebra + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("B3") + >>> c.dimension() + 3 + """ + + return self.n + + def basic_root(self, i, j): + """ + This is a method just to generate roots + with a 1 iin the ith position and a -1 + in the jth position. 
+ + """ + root = [0]*self.n + root[i] = 1 + root[j] = -1 + return root + + def simple_root(self, i): + """ + Every lie algebra has a unique root system. + Given a root system Q, there is a subset of the + roots such that an element of Q is called a + simple root if it cannot be written as the sum + of two elements in Q. If we let D denote the + set of simple roots, then it is clear that every + element of Q can be written as a linear combination + of elements of D with all coefficients non-negative. + + In B_n the first n-1 simple roots are the same as the + roots in A_(n-1) (a 1 in the ith position, a -1 in + the (i+1)th position, and zeroes elsewhere). The n-th + simple root is the root with a 1 in the nth position + and zeroes elsewhere. + + This method returns the ith simple root for the B series. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("B3") + >>> c.simple_root(2) + [0, 1, -1] + + """ + n = self.n + if i < n: + return self.basic_root(i-1, i) + else: + root = [0]*self.n + root[n-1] = 1 + return root + + def positive_roots(self): + """ + This method generates all the positive roots of + A_n. This is half of all of the roots of B_n; + by multiplying all the positive roots by -1 we + get the negative roots. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("A3") + >>> c.positive_roots() + {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], + 5: [0, 1, 0, -1], 6: [0, 0, 1, -1]} + """ + + n = self.n + posroots = {} + k = 0 + for i in range(0, n-1): + for j in range(i+1, n): + k += 1 + posroots[k] = self.basic_root(i, j) + k += 1 + root = self.basic_root(i, j) + root[j] = 1 + posroots[k] = root + + for i in range(0, n): + k += 1 + root = [0]*n + root[i] = 1 + posroots[k] = root + + return posroots + + def roots(self): + """ + Returns the total number of roots for B_n" + """ + + n = self.n + return 2*(n**2) + + def cartan_matrix(self): + """ + Returns the Cartan matrix for B_n. + The Cartan matrix matrix for a Lie algebra is + generated by assigning an ordering to the simple + roots, (alpha[1], ...., alpha[l]). Then the ijth + entry of the Cartan matrix is (). 
+ + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType('B4') + >>> c.cartan_matrix() + Matrix([ + [ 2, -1, 0, 0], + [-1, 2, -1, 0], + [ 0, -1, 2, -2], + [ 0, 0, -1, 2]]) + + """ + + n = self.n + m = 2* eye(n) + i = 1 + while i < n-1: + m[i, i+1] = -1 + m[i, i-1] = -1 + i += 1 + m[0, 1] = -1 + m[n-2, n-1] = -2 + m[n-1, n-2] = -1 + return m + + def basis(self): + """ + Returns the number of independent generators of B_n + """ + + n = self.n + return (n**2 - n)/2 + + def lie_algebra(self): + """ + Returns the Lie algebra associated with B_n + """ + + n = self.n + return "so(" + str(2*n) + ")" + + def dynkin_diagram(self): + n = self.n + diag = "---".join("0" for i in range(1, n)) + "=>=0\n" + diag += " ".join(str(i) for i in range(1, n+1)) + return diag diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/type_c.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_c.py new file mode 100644 index 0000000000000000000000000000000000000000..7cf4c227fb0cd0d7f3a69e2e4348c67593131dfa --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_c.py @@ -0,0 +1,171 @@ +from .cartan_type import Standard_Cartan +from sympy.core.backend import eye + +class TypeC(Standard_Cartan): + + def __new__(cls, n): + if n < 3: + raise ValueError("n cannot be less than 3") + return Standard_Cartan.__new__(cls, "C", n) + + + def dimension(self): + """Dimension of the vector space V underlying the Lie algebra + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("C3") + >>> c.dimension() + 3 + """ + n = self.n + return n + + def basic_root(self, i, j): + """Generate roots with 1 in ith position and a -1 in jth position + """ + n = self.n + root = [0]*n + root[i] = 1 + root[j] = -1 + return root + + def simple_root(self, i): + """The ith simple root for the C series + + Every lie algebra has a unique root system. + Given a root system Q, there is a subset of the + roots such that an element of Q is called a + simple root if it cannot be written as the sum + of two elements in Q. If we let D denote the + set of simple roots, then it is clear that every + element of Q can be written as a linear combination + of elements of D with all coefficients non-negative. + + In C_n, the first n-1 simple roots are the same as + the roots in A_(n-1) (a 1 in the ith position, a -1 + in the (i+1)th position, and zeroes elsewhere). The + nth simple root is the root in which there is a 2 in + the nth position and zeroes elsewhere. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("C3") + >>> c.simple_root(2) + [0, 1, -1] + + """ + + n = self.n + if i < n: + return self.basic_root(i-1,i) + else: + root = [0]*self.n + root[n-1] = 2 + return root + + + def positive_roots(self): + """Generates all the positive roots of A_n + + This is half of all of the roots of C_n; by multiplying all the + positive roots by -1 we get the negative roots. 
+ + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("A3") + >>> c.positive_roots() + {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], + 5: [0, 1, 0, -1], 6: [0, 0, 1, -1]} + + """ + + n = self.n + posroots = {} + k = 0 + for i in range(0, n-1): + for j in range(i+1, n): + k += 1 + posroots[k] = self.basic_root(i, j) + k += 1 + root = self.basic_root(i, j) + root[j] = 1 + posroots[k] = root + + for i in range(0, n): + k += 1 + root = [0]*n + root[i] = 2 + posroots[k] = root + + return posroots + + def roots(self): + """ + Returns the total number of roots for C_n" + """ + + n = self.n + return 2*(n**2) + + def cartan_matrix(self): + """The Cartan matrix for C_n + + The Cartan matrix matrix for a Lie algebra is + generated by assigning an ordering to the simple + roots, (alpha[1], ...., alpha[l]). Then the ijth + entry of the Cartan matrix is (). + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType('C4') + >>> c.cartan_matrix() + Matrix([ + [ 2, -1, 0, 0], + [-1, 2, -1, 0], + [ 0, -1, 2, -1], + [ 0, 0, -2, 2]]) + + """ + + n = self.n + m = 2 * eye(n) + i = 1 + while i < n-1: + m[i, i+1] = -1 + m[i, i-1] = -1 + i += 1 + m[0,1] = -1 + m[n-1, n-2] = -2 + return m + + + def basis(self): + """ + Returns the number of independent generators of C_n + """ + + n = self.n + return n*(2*n + 1) + + def lie_algebra(self): + """ + Returns the Lie algebra associated with C_n" + """ + + n = self.n + return "sp(" + str(2*n) + ")" + + def dynkin_diagram(self): + n = self.n + diag = "---".join("0" for i in range(1, n)) + "=<=0\n" + diag += " ".join(str(i) for i in range(1, n+1)) + return diag diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/type_d.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_d.py new file mode 100644 index 0000000000000000000000000000000000000000..3c46bd79837fce236ffd6922add0eb4743230aa4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_d.py @@ -0,0 +1,175 @@ +from .cartan_type import Standard_Cartan +from sympy.core.backend import eye + +class TypeD(Standard_Cartan): + + def __new__(cls, n): + if n < 3: + raise ValueError("n cannot be less than 3") + return Standard_Cartan.__new__(cls, "D", n) + + + def dimension(self): + """Dmension of the vector space V underlying the Lie algebra + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("D4") + >>> c.dimension() + 4 + """ + + return self.n + + def basic_root(self, i, j): + """ + This is a method just to generate roots + with a 1 iin the ith position and a -1 + in the jth position. + + """ + + n = self.n + root = [0]*n + root[i] = 1 + root[j] = -1 + return root + + def simple_root(self, i): + """ + Every lie algebra has a unique root system. + Given a root system Q, there is a subset of the + roots such that an element of Q is called a + simple root if it cannot be written as the sum + of two elements in Q. If we let D denote the + set of simple roots, then it is clear that every + element of Q can be written as a linear combination + of elements of D with all coefficients non-negative. + + In D_n, the first n-1 simple roots are the same as + the roots in A_(n-1) (a 1 in the ith position, a -1 + in the (i+1)th position, and zeroes elsewhere). + The nth simple root is the root in which there 1s in + the nth and (n-1)th positions, and zeroes elsewhere. 
+ + This method returns the ith simple root for the D series. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("D4") + >>> c.simple_root(2) + [0, 1, -1, 0] + + """ + + n = self.n + if i < n: + return self.basic_root(i-1, i) + else: + root = [0]*n + root[n-2] = 1 + root[n-1] = 1 + return root + + + def positive_roots(self): + """ + This method generates all the positive roots of + A_n. This is half of all of the roots of D_n + by multiplying all the positive roots by -1 we + get the negative roots. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("A3") + >>> c.positive_roots() + {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], + 5: [0, 1, 0, -1], 6: [0, 0, 1, -1]} + """ + + n = self.n + posroots = {} + k = 0 + for i in range(0, n-1): + for j in range(i+1, n): + k += 1 + posroots[k] = self.basic_root(i, j) + k += 1 + root = self.basic_root(i, j) + root[j] = 1 + posroots[k] = root + return posroots + + def roots(self): + """ + Returns the total number of roots for D_n" + """ + + n = self.n + return 2*n*(n-1) + + def cartan_matrix(self): + """ + Returns the Cartan matrix for D_n. + The Cartan matrix matrix for a Lie algebra is + generated by assigning an ordering to the simple + roots, (alpha[1], ...., alpha[l]). Then the ijth + entry of the Cartan matrix is (). + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType('D4') + >>> c.cartan_matrix() + Matrix([ + [ 2, -1, 0, 0], + [-1, 2, -1, -1], + [ 0, -1, 2, 0], + [ 0, -1, 0, 2]]) + + """ + + n = self.n + m = 2*eye(n) + i = 1 + while i < n-2: + m[i,i+1] = -1 + m[i,i-1] = -1 + i += 1 + m[n-2, n-3] = -1 + m[n-3, n-1] = -1 + m[n-1, n-3] = -1 + m[0, 1] = -1 + return m + + def basis(self): + """ + Returns the number of independent generators of D_n + """ + n = self.n + return n*(n-1)/2 + + def lie_algebra(self): + """ + Returns the Lie algebra associated with D_n" + """ + + n = self.n + return "so(" + str(2*n) + ")" + + def dynkin_diagram(self): + n = self.n + diag = " "*4*(n-3) + str(n-1) + "\n" + diag += " "*4*(n-3) + "0\n" + diag += " "*4*(n-3) +"|\n" + diag += " "*4*(n-3) + "|\n" + diag += "---".join("0" for i in range(1,n)) + "\n" + diag += " ".join(str(i) for i in range(1, n-1)) + " "+str(n) + return diag diff --git a/venv/lib/python3.10/site-packages/sympy/liealgebras/type_f.py b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_f.py new file mode 100644 index 0000000000000000000000000000000000000000..778fcd6d14950c36b63e187b18aeb17c5beb99dd --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/liealgebras/type_f.py @@ -0,0 +1,162 @@ +from .cartan_type import Standard_Cartan +from sympy.core.backend import Matrix, Rational + + +class TypeF(Standard_Cartan): + + def __new__(cls, n): + if n != 4: + raise ValueError("n should be 4") + return Standard_Cartan.__new__(cls, "F", 4) + + def dimension(self): + """Dimension of the vector space V underlying the Lie algebra + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("F4") + >>> c.dimension() + 4 + """ + + return 4 + + + def basic_root(self, i, j): + """Generate roots with 1 in ith position and -1 in jth position + + """ + + n = self.n + root = [0]*n + root[i] = 1 + root[j] = -1 + return root + + def simple_root(self, i): + """The ith simple root of F_4 + + Every lie algebra has a unique root system. 
+ Given a root system Q, there is a subset of the + roots such that an element of Q is called a + simple root if it cannot be written as the sum + of two elements in Q. If we let D denote the + set of simple roots, then it is clear that every + element of Q can be written as a linear combination + of elements of D with all coefficients non-negative. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("F4") + >>> c.simple_root(3) + [0, 0, 0, 1] + + """ + + if i < 3: + return self.basic_root(i-1, i) + if i == 3: + root = [0]*4 + root[3] = 1 + return root + if i == 4: + root = [Rational(-1, 2)]*4 + return root + + def positive_roots(self): + """Generate all the positive roots of A_n + + This is half of all of the roots of F_4; by multiplying all the + positive roots by -1 we get the negative roots. + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType("A3") + >>> c.positive_roots() + {1: [1, -1, 0, 0], 2: [1, 0, -1, 0], 3: [1, 0, 0, -1], 4: [0, 1, -1, 0], + 5: [0, 1, 0, -1], 6: [0, 0, 1, -1]} + + """ + + n = self.n + posroots = {} + k = 0 + for i in range(0, n-1): + for j in range(i+1, n): + k += 1 + posroots[k] = self.basic_root(i, j) + k += 1 + root = self.basic_root(i, j) + root[j] = 1 + posroots[k] = root + + for i in range(0, n): + k += 1 + root = [0]*n + root[i] = 1 + posroots[k] = root + + k += 1 + root = [Rational(1, 2)]*n + posroots[k] = root + for i in range(1, 4): + k += 1 + root = [Rational(1, 2)]*n + root[i] = Rational(-1, 2) + posroots[k] = root + + posroots[k+1] = [Rational(1, 2), Rational(1, 2), Rational(-1, 2), Rational(-1, 2)] + posroots[k+2] = [Rational(1, 2), Rational(-1, 2), Rational(1, 2), Rational(-1, 2)] + posroots[k+3] = [Rational(1, 2), Rational(-1, 2), Rational(-1, 2), Rational(1, 2)] + posroots[k+4] = [Rational(1, 2), Rational(-1, 2), Rational(-1, 2), Rational(-1, 2)] + + return posroots + + + def roots(self): + """ + Returns the total number of roots for F_4 + """ + return 48 + + def cartan_matrix(self): + """The Cartan matrix for F_4 + + The Cartan matrix matrix for a Lie algebra is + generated by assigning an ordering to the simple + roots, (alpha[1], ...., alpha[l]). Then the ijth + entry of the Cartan matrix is (). + + Examples + ======== + + >>> from sympy.liealgebras.cartan_type import CartanType + >>> c = CartanType('A4') + >>> c.cartan_matrix() + Matrix([ + [ 2, -1, 0, 0], + [-1, 2, -1, 0], + [ 0, -1, 2, -1], + [ 0, 0, -1, 2]]) + """ + + m = Matrix( 4, 4, [2, -1, 0, 0, -1, 2, -2, 0, 0, + -1, 2, -1, 0, 0, -1, 2]) + return m + + def basis(self): + """ + Returns the number of independent generators of F_4 + """ + return 52 + + def dynkin_diagram(self): + diag = "0---0=>=0---0\n" + diag += " ".join(str(i) for i in range(1, 5)) + return diag diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__init__.py b/venv/lib/python3.10/site-packages/sympy/testing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..92c50b991d76f0821556ddcd301eff99b1767d7a --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/__init__.py @@ -0,0 +1,7 @@ +"""This module contains code for running the tests in SymPy. 
+""" +from .runtests import test, doctest + +__all__ = [ + 'test', 'doctest', +] diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ff2530d6291ecafca41e4e87840fd9fd6248582c Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/matrices.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/matrices.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16215beca10d9906da40eba9de3260412f2c9e60 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/matrices.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/pytest.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/pytest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6d15d855b63a39328129e3bf125e8340cbeff89 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/pytest.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/quality_unicode.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/quality_unicode.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7943588e07ef83e5cfe13023e7aa6d7cb52ce982 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/quality_unicode.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/randtest.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/randtest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a0eee6ae734c045533793f64176e4707ba4d6f2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/randtest.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/runtests.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/runtests.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d9cfdad51c68c7318209015031b50b441e324d89 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/runtests.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/tmpfiles.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/tmpfiles.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d56457a49ce17f49c3ce7c3b60e3f05186264caa Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/__pycache__/tmpfiles.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/matrices.py b/venv/lib/python3.10/site-packages/sympy/testing/matrices.py new file mode 100644 index 0000000000000000000000000000000000000000..236a384366df7f69d0d92f43f7e007e95c12388c --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/matrices.py @@ -0,0 +1,8 @@ +def allclose(A, B, rtol=1e-05, atol=1e-08): + if len(A) != len(B): + return False + + for x, y in zip(A, B): + if abs(x-y) > atol + rtol * max(abs(x), abs(y)): + return False + return True diff --git 
a/venv/lib/python3.10/site-packages/sympy/testing/pytest.py b/venv/lib/python3.10/site-packages/sympy/testing/pytest.py new file mode 100644 index 0000000000000000000000000000000000000000..ff92cfa0029c4b0f4f76114277cb409153e1474e --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/pytest.py @@ -0,0 +1,388 @@ +"""py.test hacks to support XFAIL/XPASS""" + +import sys +import re +import functools +import os +import contextlib +import warnings +import inspect +import pathlib +from typing import Any, Callable + +from sympy.utilities.exceptions import SymPyDeprecationWarning +# Imported here for backwards compatibility. Note: do not import this from +# here in library code (importing sympy.pytest in library code will break the +# pytest integration). +from sympy.utilities.exceptions import ignore_warnings # noqa:F401 + +ON_CI = os.getenv('CI', None) == "true" + +try: + import pytest + USE_PYTEST = getattr(sys, '_running_pytest', False) +except ImportError: + USE_PYTEST = False + + +raises: Callable[[Any, Any], Any] +XFAIL: Callable[[Any], Any] +skip: Callable[[Any], Any] +SKIP: Callable[[Any], Any] +slow: Callable[[Any], Any] +nocache_fail: Callable[[Any], Any] + + +if USE_PYTEST: + raises = pytest.raises + skip = pytest.skip + XFAIL = pytest.mark.xfail + SKIP = pytest.mark.skip + slow = pytest.mark.slow + nocache_fail = pytest.mark.nocache_fail + from _pytest.outcomes import Failed + +else: + # Not using pytest so define the things that would have been imported from + # there. + + # _pytest._code.code.ExceptionInfo + class ExceptionInfo: + def __init__(self, value): + self.value = value + + def __repr__(self): + return "".format(self.value) + + + def raises(expectedException, code=None): + """ + Tests that ``code`` raises the exception ``expectedException``. + + ``code`` may be a callable, such as a lambda expression or function + name. + + If ``code`` is not given or None, ``raises`` will return a context + manager for use in ``with`` statements; the code to execute then + comes from the scope of the ``with``. + + ``raises()`` does nothing if the callable raises the expected exception, + otherwise it raises an AssertionError. + + Examples + ======== + + >>> from sympy.testing.pytest import raises + + >>> raises(ZeroDivisionError, lambda: 1/0) + + >>> raises(ZeroDivisionError, lambda: 1/2) + Traceback (most recent call last): + ... + Failed: DID NOT RAISE + + >>> with raises(ZeroDivisionError): + ... n = 1/0 + >>> with raises(ZeroDivisionError): + ... n = 1/2 + Traceback (most recent call last): + ... + Failed: DID NOT RAISE + + Note that you cannot test multiple statements via + ``with raises``: + + >>> with raises(ZeroDivisionError): + ... n = 1/0 # will execute and raise, aborting the ``with`` + ... n = 9999/0 # never executed + + This is just what ``with`` is supposed to do: abort the + contained statement sequence at the first exception and let + the context manager deal with the exception. + + To test multiple statements, you'll need a separate ``with`` + for each: + + >>> with raises(ZeroDivisionError): + ... n = 1/0 # will execute and raise + >>> with raises(ZeroDivisionError): + ... 
n = 9999/0 # will also execute and raise + + """ + if code is None: + return RaisesContext(expectedException) + elif callable(code): + try: + code() + except expectedException as e: + return ExceptionInfo(e) + raise Failed("DID NOT RAISE") + elif isinstance(code, str): + raise TypeError( + '\'raises(xxx, "code")\' has been phased out; ' + 'change \'raises(xxx, "expression")\' ' + 'to \'raises(xxx, lambda: expression)\', ' + '\'raises(xxx, "statement")\' ' + 'to \'with raises(xxx): statement\'') + else: + raise TypeError( + 'raises() expects a callable for the 2nd argument.') + + class RaisesContext: + def __init__(self, expectedException): + self.expectedException = expectedException + + def __enter__(self): + return None + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + raise Failed("DID NOT RAISE") + return issubclass(exc_type, self.expectedException) + + class XFail(Exception): + pass + + class XPass(Exception): + pass + + class Skipped(Exception): + pass + + class Failed(Exception): # type: ignore + pass + + def XFAIL(func): + def wrapper(): + try: + func() + except Exception as e: + message = str(e) + if message != "Timeout": + raise XFail(func.__name__) + else: + raise Skipped("Timeout") + raise XPass(func.__name__) + + wrapper = functools.update_wrapper(wrapper, func) + return wrapper + + def skip(str): + raise Skipped(str) + + def SKIP(reason): + """Similar to ``skip()``, but this is a decorator. """ + def wrapper(func): + def func_wrapper(): + raise Skipped(reason) + + func_wrapper = functools.update_wrapper(func_wrapper, func) + return func_wrapper + + return wrapper + + def slow(func): + func._slow = True + + def func_wrapper(): + func() + + func_wrapper = functools.update_wrapper(func_wrapper, func) + func_wrapper.__wrapped__ = func + return func_wrapper + + def nocache_fail(func): + "Dummy decorator for marking tests that fail when cache is disabled" + return func + +@contextlib.contextmanager +def warns(warningcls, *, match='', test_stacklevel=True): + ''' + Like raises but tests that warnings are emitted. + + >>> from sympy.testing.pytest import warns + >>> import warnings + + >>> with warns(UserWarning): + ... warnings.warn('deprecated', UserWarning, stacklevel=2) + + >>> with warns(UserWarning): + ... pass + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type UserWarning\ + was emitted. The list of emitted warnings is: []. + + ``test_stacklevel`` makes it check that the ``stacklevel`` parameter to + ``warn()`` is set so that the warning shows the user line of code (the + code under the warns() context manager). Set this to False if this is + ambiguous or if the context manager does not test the direct user code + that emits the warning. + + If the warning is a ``SymPyDeprecationWarning``, this additionally tests + that the ``active_deprecations_target`` is a real target in the + ``active-deprecations.md`` file. + + ''' + # Absorbs all warnings in warnrec + with warnings.catch_warnings(record=True) as warnrec: + # Any warning other than the one we are looking for is an error + warnings.simplefilter("error") + warnings.filterwarnings("always", category=warningcls) + # Now run the test + yield warnrec + + # Raise if expected warning not found + if not any(issubclass(w.category, warningcls) for w in warnrec): + msg = ('Failed: DID NOT WARN.' + ' No warnings of type %s was emitted.' + ' The list of emitted warnings is: %s.' 
+ ) % (warningcls, [w.message for w in warnrec]) + raise Failed(msg) + + # We don't include the match in the filter above because it would then + # fall to the error filter, so we instead manually check that it matches + # here + for w in warnrec: + # Should always be true due to the filters above + assert issubclass(w.category, warningcls) + if not re.compile(match, re.I).match(str(w.message)): + raise Failed(f"Failed: WRONG MESSAGE. A warning with of the correct category ({warningcls.__name__}) was issued, but it did not match the given match regex ({match!r})") + + if test_stacklevel: + for f in inspect.stack(): + thisfile = f.filename + file = os.path.split(thisfile)[1] + if file.startswith('test_'): + break + elif file == 'doctest.py': + # skip the stacklevel testing in the doctests of this + # function + return + else: + raise RuntimeError("Could not find the file for the given warning to test the stacklevel") + for w in warnrec: + if w.filename != thisfile: + msg = f'''\ +Failed: Warning has the wrong stacklevel. The warning stacklevel needs to be +set so that the line of code shown in the warning message is user code that +calls the deprecated code (the current stacklevel is showing code from +{w.filename} (line {w.lineno}), expected {thisfile})'''.replace('\n', ' ') + raise Failed(msg) + + if warningcls == SymPyDeprecationWarning: + this_file = pathlib.Path(__file__) + active_deprecations_file = (this_file.parent.parent.parent / 'doc' / + 'src' / 'explanation' / + 'active-deprecations.md') + if not active_deprecations_file.exists(): + # We can only test that the active_deprecations_target works if we are + # in the git repo. + return + targets = [] + for w in warnrec: + targets.append(w.message.active_deprecations_target) + with open(active_deprecations_file, encoding="utf-8") as f: + text = f.read() + for target in targets: + if f'({target})=' not in text: + raise Failed(f"The active deprecations target {target!r} does not appear to be a valid target in the active-deprecations.md file ({active_deprecations_file}).") + +def _both_exp_pow(func): + """ + Decorator used to run the test twice: the first time `e^x` is represented + as ``Pow(E, x)``, the second time as ``exp(x)`` (exponential object is not + a power). + + This is a temporary trick helping to manage the elimination of the class + ``exp`` in favor of a replacement by ``Pow(E, ...)``. + """ + from sympy.core.parameters import _exp_is_pow + + def func_wrap(): + with _exp_is_pow(True): + func() + with _exp_is_pow(False): + func() + + wrapper = functools.update_wrapper(func_wrap, func) + return wrapper + + +@contextlib.contextmanager +def warns_deprecated_sympy(): + ''' + Shorthand for ``warns(SymPyDeprecationWarning)`` + + This is the recommended way to test that ``SymPyDeprecationWarning`` is + emitted for deprecated features in SymPy. To test for other warnings use + ``warns``. To suppress warnings without asserting that they are emitted + use ``ignore_warnings``. + + .. note:: + + ``warns_deprecated_sympy()`` is only intended for internal use in the + SymPy test suite to test that a deprecation warning triggers properly. + All other code in the SymPy codebase, including documentation examples, + should not use deprecated behavior. + + If you are a user of SymPy and you want to disable + SymPyDeprecationWarnings, use ``warnings`` filters (see + :ref:`silencing-sympy-deprecation-warnings`). 
+ + >>> from sympy.testing.pytest import warns_deprecated_sympy + >>> from sympy.utilities.exceptions import sympy_deprecation_warning + >>> with warns_deprecated_sympy(): + ... sympy_deprecation_warning("Don't use", + ... deprecated_since_version="1.0", + ... active_deprecations_target="active-deprecations") + + >>> with warns_deprecated_sympy(): + ... pass + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type \ + SymPyDeprecationWarning was emitted. The list of emitted warnings is: []. + + .. note:: + + Sometimes the stacklevel test will fail because the same warning is + emitted multiple times. In this case, you can use + :func:`sympy.utilities.exceptions.ignore_warnings` in the code to + prevent the ``SymPyDeprecationWarning`` from being emitted again + recursively. In rare cases it is impossible to have a consistent + ``stacklevel`` for deprecation warnings because different ways of + calling a function will produce different call stacks.. In those cases, + use ``warns(SymPyDeprecationWarning)`` instead. + + See Also + ======== + sympy.utilities.exceptions.SymPyDeprecationWarning + sympy.utilities.exceptions.sympy_deprecation_warning + sympy.utilities.decorator.deprecated + + ''' + with warns(SymPyDeprecationWarning): + yield + + +def _running_under_pyodide(): + """Test if running under pyodide.""" + try: + import pyodide_js # type: ignore # noqa + except ImportError: + return False + else: + return True + + +def skip_under_pyodide(message): + """Decorator to skip a test if running under pyodide.""" + def decorator(test_func): + @functools.wraps(test_func) + def test_wrapper(): + if _running_under_pyodide(): + skip(message) + return test_func() + return test_wrapper + return decorator diff --git a/venv/lib/python3.10/site-packages/sympy/testing/quality_unicode.py b/venv/lib/python3.10/site-packages/sympy/testing/quality_unicode.py new file mode 100644 index 0000000000000000000000000000000000000000..de575b75e8d81f1995171ff4ed514628d98e39c7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/quality_unicode.py @@ -0,0 +1,97 @@ +import re +import fnmatch + + +message_unicode_B = \ + "File contains a unicode character : %s, line %s. " \ + "But not in the whitelist. " \ + "Add the file to the whitelist in " + __file__ +message_unicode_D = \ + "File does not contain a unicode character : %s." \ + "but is in the whitelist. " \ + "Remove the file from the whitelist in " + __file__ + + +encoding_header_re = re.compile( + r'^[ \t\f]*#.*?coding[:=][ \t]*([-_.a-zA-Z0-9]+)') + +# Whitelist pattern for files which can have unicode. +unicode_whitelist = [ + # Author names can include non-ASCII characters + r'*/bin/authors_update.py', + r'*/bin/mailmap_check.py', + + # These files have functions and test functions for unicode input and + # output. + r'*/sympy/testing/tests/test_code_quality.py', + r'*/sympy/physics/vector/tests/test_printing.py', + r'*/physics/quantum/tests/test_printing.py', + r'*/sympy/vector/tests/test_printing.py', + r'*/sympy/parsing/tests/test_sympy_parser.py', + r'*/sympy/printing/pretty/tests/test_pretty.py', + r'*/sympy/printing/tests/test_conventions.py', + r'*/sympy/printing/tests/test_preview.py', + r'*/liealgebras/type_g.py', + r'*/liealgebras/weyl_group.py', + r'*/liealgebras/tests/test_type_G.py', + + # wigner.py and polarization.py have unicode doctests. 
These probably + # don't need to be there but some of the examples that are there are + # pretty ugly without use_unicode (matrices need to be wrapped across + # multiple lines etc) + r'*/sympy/physics/wigner.py', + r'*/sympy/physics/optics/polarization.py', + + # joint.py uses some unicode for variable names in the docstrings + r'*/sympy/physics/mechanics/joint.py', + + # lll method has unicode in docstring references and author name + r'*/sympy/polys/matrices/domainmatrix.py', +] + +unicode_strict_whitelist = [ + r'*/sympy/parsing/latex/_antlr/__init__.py', + # test_mathematica.py uses some unicode for testing Greek characters are working #24055 + r'*/sympy/parsing/tests/test_mathematica.py', +] + + +def _test_this_file_encoding( + fname, test_file, + unicode_whitelist=unicode_whitelist, + unicode_strict_whitelist=unicode_strict_whitelist): + """Test helper function for unicode test + + The test may have to operate on filewise manner, so it had moved + to a separate process. + """ + has_unicode = False + + is_in_whitelist = False + is_in_strict_whitelist = False + for patt in unicode_whitelist: + if fnmatch.fnmatch(fname, patt): + is_in_whitelist = True + break + for patt in unicode_strict_whitelist: + if fnmatch.fnmatch(fname, patt): + is_in_strict_whitelist = True + is_in_whitelist = True + break + + if is_in_whitelist: + for idx, line in enumerate(test_file): + try: + line.encode(encoding='ascii') + except (UnicodeEncodeError, UnicodeDecodeError): + has_unicode = True + + if not has_unicode and not is_in_strict_whitelist: + assert False, message_unicode_D % fname + + else: + for idx, line in enumerate(test_file): + try: + line.encode(encoding='ascii') + except (UnicodeEncodeError, UnicodeDecodeError): + assert False, message_unicode_B % (fname, idx + 1) diff --git a/venv/lib/python3.10/site-packages/sympy/testing/randtest.py b/venv/lib/python3.10/site-packages/sympy/testing/randtest.py new file mode 100644 index 0000000000000000000000000000000000000000..3ce2c8c031eec1c886532daba32c96d83e9cf85c --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/randtest.py @@ -0,0 +1,19 @@ +""" +.. deprecated:: 1.10 + + ``sympy.testing.randtest`` functions have been moved to + :mod:`sympy.core.random`. + +""" +from sympy.utilities.exceptions import sympy_deprecation_warning + +sympy_deprecation_warning("The sympy.testing.randtest submodule is deprecated. Use sympy.core.random instead.", + deprecated_since_version="1.10", + active_deprecations_target="deprecated-sympy-testing-randtest") + +from sympy.core.random import ( # noqa:F401 + random_complex_number, + verify_numerically, + test_derivative_numerically, + _randrange, + _randint) diff --git a/venv/lib/python3.10/site-packages/sympy/testing/runtests.py b/venv/lib/python3.10/site-packages/sympy/testing/runtests.py new file mode 100644 index 0000000000000000000000000000000000000000..19a16e2436048c5fc044008fefe850e03764bd30 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/runtests.py @@ -0,0 +1,2387 @@ +""" +This is our testing framework. 
+ +Goals: + +* it should be compatible with py.test and operate very similarly + (or identically) +* does not require any external dependencies +* preferably all the functionality should be in this file only +* no magic, just import the test file and execute the test functions, that's it +* portable + +""" + +import os +import sys +import platform +import inspect +import traceback +import pdb +import re +import linecache +import time +from fnmatch import fnmatch +from timeit import default_timer as clock +import doctest as pdoctest # avoid clashing with our doctest() function +from doctest import DocTestFinder, DocTestRunner +import random +import subprocess +import shutil +import signal +import stat +import tempfile +import warnings +from contextlib import contextmanager +from inspect import unwrap + +from sympy.core.cache import clear_cache +from sympy.external import import_module +from sympy.external.gmpy import GROUND_TYPES, HAS_GMPY + +IS_WINDOWS = (os.name == 'nt') +ON_CI = os.getenv('CI', None) + +# empirically generated list of the proportion of time spent running +# an even split of tests. This should periodically be regenerated. +# A list of [.6, .1, .3] would mean that if the tests are evenly split +# into '1/3', '2/3', '3/3', the first split would take 60% of the time, +# the second 10% and the third 30%. These lists are normalized to sum +# to 1, so [60, 10, 30] has the same behavior as [6, 1, 3] or [.6, .1, .3]. +# +# This list can be generated with the code: +# from time import time +# import sympy +# import os +# os.environ["CI"] = 'true' # Mock CI to get more correct densities +# delays, num_splits = [], 30 +# for i in range(1, num_splits + 1): +# tic = time() +# sympy.test(split='{}/{}'.format(i, num_splits), time_balance=False) # Add slow=True for slow tests +# delays.append(time() - tic) +# tot = sum(delays) +# print([round(x / tot, 4) for x in delays]) +SPLIT_DENSITY = [ + 0.0059, 0.0027, 0.0068, 0.0011, 0.0006, + 0.0058, 0.0047, 0.0046, 0.004, 0.0257, + 0.0017, 0.0026, 0.004, 0.0032, 0.0016, + 0.0015, 0.0004, 0.0011, 0.0016, 0.0014, + 0.0077, 0.0137, 0.0217, 0.0074, 0.0043, + 0.0067, 0.0236, 0.0004, 0.1189, 0.0142, + 0.0234, 0.0003, 0.0003, 0.0047, 0.0006, + 0.0013, 0.0004, 0.0008, 0.0007, 0.0006, + 0.0139, 0.0013, 0.0007, 0.0051, 0.002, + 0.0004, 0.0005, 0.0213, 0.0048, 0.0016, + 0.0012, 0.0014, 0.0024, 0.0015, 0.0004, + 0.0005, 0.0007, 0.011, 0.0062, 0.0015, + 0.0021, 0.0049, 0.0006, 0.0006, 0.0011, + 0.0006, 0.0019, 0.003, 0.0044, 0.0054, + 0.0057, 0.0049, 0.0016, 0.0006, 0.0009, + 0.0006, 0.0012, 0.0006, 0.0149, 0.0532, + 0.0076, 0.0041, 0.0024, 0.0135, 0.0081, + 0.2209, 0.0459, 0.0438, 0.0488, 0.0137, + 0.002, 0.0003, 0.0008, 0.0039, 0.0024, + 0.0005, 0.0004, 0.003, 0.056, 0.0026] +SPLIT_DENSITY_SLOW = [0.0086, 0.0004, 0.0568, 0.0003, 0.0032, 0.0005, 0.0004, 0.0013, 0.0016, 0.0648, 0.0198, 0.1285, 0.098, 0.0005, 0.0064, 0.0003, 0.0004, 0.0026, 0.0007, 0.0051, 0.0089, 0.0024, 0.0033, 0.0057, 0.0005, 0.0003, 0.001, 0.0045, 0.0091, 0.0006, 0.0005, 0.0321, 0.0059, 0.1105, 0.216, 0.1489, 0.0004, 0.0003, 0.0006, 0.0483] + +class Skipped(Exception): + pass + +class TimeOutError(Exception): + pass + +class DependencyError(Exception): + pass + + +def _indent(s, indent=4): + """ + Add the given number of space characters to the beginning of + every non-blank line in ``s``, and return the result. + If the string ``s`` is Unicode, it is encoded using the stdout + encoding and the ``backslashreplace`` error handler. 
+ """ + # This regexp matches the start of non-blank lines: + return re.sub('(?m)^(?!$)', indent*' ', s) + + +pdoctest._indent = _indent # type: ignore + +# override reporter to maintain windows and python3 + + +def _report_failure(self, out, test, example, got): + """ + Report that the given example failed. + """ + s = self._checker.output_difference(example, got, self.optionflags) + s = s.encode('raw_unicode_escape').decode('utf8', 'ignore') + out(self._failure_header(test, example) + s) + + +if IS_WINDOWS: + DocTestRunner.report_failure = _report_failure # type: ignore + + +def convert_to_native_paths(lst): + """ + Converts a list of '/' separated paths into a list of + native (os.sep separated) paths and converts to lowercase + if the system is case insensitive. + """ + newlst = [] + for i, rv in enumerate(lst): + rv = os.path.join(*rv.split("/")) + # on windows the slash after the colon is dropped + if sys.platform == "win32": + pos = rv.find(':') + if pos != -1: + if rv[pos + 1] != '\\': + rv = rv[:pos + 1] + '\\' + rv[pos + 1:] + newlst.append(os.path.normcase(rv)) + return newlst + + +def get_sympy_dir(): + """ + Returns the root SymPy directory and set the global value + indicating whether the system is case sensitive or not. + """ + this_file = os.path.abspath(__file__) + sympy_dir = os.path.join(os.path.dirname(this_file), "..", "..") + sympy_dir = os.path.normpath(sympy_dir) + return os.path.normcase(sympy_dir) + + +def setup_pprint(): + from sympy.interactive.printing import init_printing + from sympy.printing.pretty.pretty import pprint_use_unicode + import sympy.interactive.printing as interactive_printing + + # force pprint to be in ascii mode in doctests + use_unicode_prev = pprint_use_unicode(False) + + # hook our nice, hash-stable strprinter + init_printing(pretty_print=False) + + # Prevent init_printing() in doctests from affecting other doctests + interactive_printing.NO_GLOBAL = True + return use_unicode_prev + + +@contextmanager +def raise_on_deprecated(): + """Context manager to make DeprecationWarning raise an error + + This is to catch SymPyDeprecationWarning from library code while running + tests and doctests. It is important to use this context manager around + each individual test/doctest in case some tests modify the warning + filters. + """ + with warnings.catch_warnings(): + warnings.filterwarnings('error', '.*', DeprecationWarning, module='sympy.*') + yield + + +def run_in_subprocess_with_hash_randomization( + function, function_args=(), + function_kwargs=None, command=sys.executable, + module='sympy.testing.runtests', force=False): + """ + Run a function in a Python subprocess with hash randomization enabled. + + If hash randomization is not supported by the version of Python given, it + returns False. Otherwise, it returns the exit value of the command. The + function is passed to sys.exit(), so the return value of the function will + be the return value. + + The environment variable PYTHONHASHSEED is used to seed Python's hash + randomization. If it is set, this function will return False, because + starting a new subprocess is unnecessary in that case. If it is not set, + one is set at random, and the tests are run. Note that if this + environment variable is set when Python starts, hash randomization is + automatically enabled. To force a subprocess to be created even if + PYTHONHASHSEED is set, pass ``force=True``. 
This flag will not force a + subprocess in Python versions that do not support hash randomization (see + below), because those versions of Python do not support the ``-R`` flag. + + ``function`` should be a string name of a function that is importable from + the module ``module``, like "_test". The default for ``module`` is + "sympy.testing.runtests". ``function_args`` and ``function_kwargs`` + should be a repr-able tuple and dict, respectively. The default Python + command is sys.executable, which is the currently running Python command. + + This function is necessary because the seed for hash randomization must be + set by the environment variable before Python starts. Hence, in order to + use a predetermined seed for tests, we must start Python in a separate + subprocess. + + Hash randomization was added in the minor Python versions 2.6.8, 2.7.3, + 3.1.5, and 3.2.3, and is enabled by default in all Python versions after + and including 3.3.0. + + Examples + ======== + + >>> from sympy.testing.runtests import ( + ... run_in_subprocess_with_hash_randomization) + >>> # run the core tests in verbose mode + >>> run_in_subprocess_with_hash_randomization("_test", + ... function_args=("core",), + ... function_kwargs={'verbose': True}) # doctest: +SKIP + # Will return 0 if sys.executable supports hash randomization and tests + # pass, 1 if they fail, and False if it does not support hash + # randomization. + + """ + cwd = get_sympy_dir() + # Note, we must return False everywhere, not None, as subprocess.call will + # sometimes return None. + + # First check if the Python version supports hash randomization + # If it does not have this support, it won't recognize the -R flag + p = subprocess.Popen([command, "-RV"], stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, cwd=cwd) + p.communicate() + if p.returncode != 0: + return False + + hash_seed = os.getenv("PYTHONHASHSEED") + if not hash_seed: + os.environ["PYTHONHASHSEED"] = str(random.randrange(2**32)) + else: + if not force: + return False + + function_kwargs = function_kwargs or {} + + # Now run the command + commandstring = ("import sys; from %s import %s;sys.exit(%s(*%s, **%s))" % + (module, function, function, repr(function_args), + repr(function_kwargs))) + + try: + p = subprocess.Popen([command, "-R", "-c", commandstring], cwd=cwd) + p.communicate() + except KeyboardInterrupt: + p.wait() + finally: + # Put the environment variable back, so that it reads correctly for + # the current Python process. + if hash_seed is None: + del os.environ["PYTHONHASHSEED"] + else: + os.environ["PYTHONHASHSEED"] = hash_seed + return p.returncode + + +def run_all_tests(test_args=(), test_kwargs=None, + doctest_args=(), doctest_kwargs=None, + examples_args=(), examples_kwargs=None): + """ + Run all tests. + + Right now, this runs the regular tests (bin/test), the doctests + (bin/doctest), and the examples (examples/all.py). + + This is what ``setup.py test`` uses. + + You can pass arguments and keyword arguments to the test functions that + support them (for now, test, doctest, and the examples). See the + docstrings of those functions for a description of the available options. + + For example, to run the solvers tests with colors turned off: + + >>> from sympy.testing.runtests import run_all_tests + >>> run_all_tests(test_args=("solvers",), + ... 
test_kwargs={"colors:False"}) # doctest: +SKIP + + """ + tests_successful = True + + test_kwargs = test_kwargs or {} + doctest_kwargs = doctest_kwargs or {} + examples_kwargs = examples_kwargs or {'quiet': True} + + try: + # Regular tests + if not test(*test_args, **test_kwargs): + # some regular test fails, so set the tests_successful + # flag to false and continue running the doctests + tests_successful = False + + # Doctests + print() + if not doctest(*doctest_args, **doctest_kwargs): + tests_successful = False + + # Examples + print() + sys.path.append("examples") # examples/all.py + from all import run_examples # type: ignore + if not run_examples(*examples_args, **examples_kwargs): + tests_successful = False + + if tests_successful: + return + else: + # Return nonzero exit code + sys.exit(1) + except KeyboardInterrupt: + print() + print("DO *NOT* COMMIT!") + sys.exit(1) + + +def test(*paths, subprocess=True, rerun=0, **kwargs): + """ + Run tests in the specified test_*.py files. + + Tests in a particular test_*.py file are run if any of the given strings + in ``paths`` matches a part of the test file's path. If ``paths=[]``, + tests in all test_*.py files are run. + + Notes: + + - If sort=False, tests are run in random order (not default). + - Paths can be entered in native system format or in unix, + forward-slash format. + - Files that are on the blacklist can be tested by providing + their path; they are only excluded if no paths are given. + + **Explanation of test results** + + ====== =============================================================== + Output Meaning + ====== =============================================================== + . passed + F failed + X XPassed (expected to fail but passed) + f XFAILed (expected to fail and indeed failed) + s skipped + w slow + T timeout (e.g., when ``--timeout`` is used) + K KeyboardInterrupt (when running the slow tests with ``--slow``, + you can interrupt one of them without killing the test runner) + ====== =============================================================== + + + Colors have no additional meaning and are used just to facilitate + interpreting the output. + + Examples + ======== + + >>> import sympy + + Run all tests: + + >>> sympy.test() # doctest: +SKIP + + Run one file: + + >>> sympy.test("sympy/core/tests/test_basic.py") # doctest: +SKIP + >>> sympy.test("_basic") # doctest: +SKIP + + Run all tests in sympy/functions/ and some particular file: + + >>> sympy.test("sympy/core/tests/test_basic.py", + ... "sympy/functions") # doctest: +SKIP + + Run all tests in sympy/core and sympy/utilities: + + >>> sympy.test("/core", "/util") # doctest: +SKIP + + Run specific test from a file: + + >>> sympy.test("sympy/core/tests/test_basic.py", + ... 
kw="test_equality") # doctest: +SKIP + + Run specific test from any file: + + >>> sympy.test(kw="subs") # doctest: +SKIP + + Run the tests with verbose mode on: + + >>> sympy.test(verbose=True) # doctest: +SKIP + + Do not sort the test output: + + >>> sympy.test(sort=False) # doctest: +SKIP + + Turn on post-mortem pdb: + + >>> sympy.test(pdb=True) # doctest: +SKIP + + Turn off colors: + + >>> sympy.test(colors=False) # doctest: +SKIP + + Force colors, even when the output is not to a terminal (this is useful, + e.g., if you are piping to ``less -r`` and you still want colors) + + >>> sympy.test(force_colors=False) # doctest: +SKIP + + The traceback verboseness can be set to "short" or "no" (default is + "short") + + >>> sympy.test(tb='no') # doctest: +SKIP + + The ``split`` option can be passed to split the test run into parts. The + split currently only splits the test files, though this may change in the + future. ``split`` should be a string of the form 'a/b', which will run + part ``a`` of ``b``. For instance, to run the first half of the test suite: + + >>> sympy.test(split='1/2') # doctest: +SKIP + + The ``time_balance`` option can be passed in conjunction with ``split``. + If ``time_balance=True`` (the default for ``sympy.test``), SymPy will attempt + to split the tests such that each split takes equal time. This heuristic + for balancing is based on pre-recorded test data. + + >>> sympy.test(split='1/2', time_balance=True) # doctest: +SKIP + + You can disable running the tests in a separate subprocess using + ``subprocess=False``. This is done to support seeding hash randomization, + which is enabled by default in the Python versions where it is supported. + If subprocess=False, hash randomization is enabled/disabled according to + whether it has been enabled or not in the calling Python process. + However, even if it is enabled, the seed cannot be printed unless it is + called from a new Python process. + + Hash randomization was added in the minor Python versions 2.6.8, 2.7.3, + 3.1.5, and 3.2.3, and is enabled by default in all Python versions after + and including 3.3.0. + + If hash randomization is not supported ``subprocess=False`` is used + automatically. + + >>> sympy.test(subprocess=False) # doctest: +SKIP + + To set the hash randomization seed, set the environment variable + ``PYTHONHASHSEED`` before running the tests. This can be done from within + Python using + + >>> import os + >>> os.environ['PYTHONHASHSEED'] = '42' # doctest: +SKIP + + Or from the command line using + + $ PYTHONHASHSEED=42 ./bin/test + + If the seed is not set, a random seed will be chosen. + + Note that to reproduce the same hash values, you must use both the same seed + as well as the same architecture (32-bit vs. 64-bit). 
+ + """ + # count up from 0, do not print 0 + print_counter = lambda i : (print("rerun %d" % (rerun-i)) + if rerun-i else None) + + if subprocess: + # loop backwards so last i is 0 + for i in range(rerun, -1, -1): + print_counter(i) + ret = run_in_subprocess_with_hash_randomization("_test", + function_args=paths, function_kwargs=kwargs) + if ret is False: + break + val = not bool(ret) + # exit on the first failure or if done + if not val or i == 0: + return val + + # rerun even if hash randomization is not supported + for i in range(rerun, -1, -1): + print_counter(i) + val = not bool(_test(*paths, **kwargs)) + if not val or i == 0: + return val + + +def _test(*paths, + verbose=False, tb="short", kw=None, pdb=False, colors=True, + force_colors=False, sort=True, seed=None, timeout=False, + fail_on_timeout=False, slow=False, enhance_asserts=False, split=None, + time_balance=True, blacklist=(), + fast_threshold=None, slow_threshold=None): + """ + Internal function that actually runs the tests. + + All keyword arguments from ``test()`` are passed to this function except for + ``subprocess``. + + Returns 0 if tests passed and 1 if they failed. See the docstring of + ``test()`` for more information. + """ + kw = kw or () + # ensure that kw is a tuple + if isinstance(kw, str): + kw = (kw,) + post_mortem = pdb + if seed is None: + seed = random.randrange(100000000) + if ON_CI and timeout is False: + timeout = 595 + fail_on_timeout = True + if ON_CI: + blacklist = list(blacklist) + ['sympy/plotting/pygletplot/tests'] + blacklist = convert_to_native_paths(blacklist) + r = PyTestReporter(verbose=verbose, tb=tb, colors=colors, + force_colors=force_colors, split=split) + t = SymPyTests(r, kw, post_mortem, seed, + fast_threshold=fast_threshold, + slow_threshold=slow_threshold) + + test_files = t.get_test_files('sympy') + + not_blacklisted = [f for f in test_files + if not any(b in f for b in blacklist)] + + if len(paths) == 0: + matched = not_blacklisted + else: + paths = convert_to_native_paths(paths) + matched = [] + for f in not_blacklisted: + basename = os.path.basename(f) + for p in paths: + if p in f or fnmatch(basename, p): + matched.append(f) + break + + density = None + if time_balance: + if slow: + density = SPLIT_DENSITY_SLOW + else: + density = SPLIT_DENSITY + + if split: + matched = split_list(matched, split, density=density) + + t._testfiles.extend(matched) + + return int(not t.test(sort=sort, timeout=timeout, slow=slow, + enhance_asserts=enhance_asserts, fail_on_timeout=fail_on_timeout)) + + +def doctest(*paths, subprocess=True, rerun=0, **kwargs): + r""" + Runs doctests in all \*.py files in the SymPy directory which match + any of the given strings in ``paths`` or all tests if paths=[]. + + Notes: + + - Paths can be entered in native system format or in unix, + forward-slash format. + - Files that are on the blacklist can be tested by providing + their path; they are only excluded if no paths are given. 
+ + Examples + ======== + + >>> import sympy + + Run all tests: + + >>> sympy.doctest() # doctest: +SKIP + + Run one file: + + >>> sympy.doctest("sympy/core/basic.py") # doctest: +SKIP + >>> sympy.doctest("polynomial.rst") # doctest: +SKIP + + Run all tests in sympy/functions/ and some particular file: + + >>> sympy.doctest("/functions", "basic.py") # doctest: +SKIP + + Run any file having polynomial in its name, doc/src/modules/polynomial.rst, + sympy/functions/special/polynomials.py, and sympy/polys/polynomial.py: + + >>> sympy.doctest("polynomial") # doctest: +SKIP + + The ``split`` option can be passed to split the test run into parts. The + split currently only splits the test files, though this may change in the + future. ``split`` should be a string of the form 'a/b', which will run + part ``a`` of ``b``. Note that the regular doctests and the Sphinx + doctests are split independently. For instance, to run the first half of + the test suite: + + >>> sympy.doctest(split='1/2') # doctest: +SKIP + + The ``subprocess`` and ``verbose`` options are the same as with the function + ``test()`` (see the docstring of that function for more information) except + that ``verbose`` may also be set equal to ``2`` in order to print + individual doctest lines, as they are being tested. + """ + # count up from 0, do not print 0 + print_counter = lambda i : (print("rerun %d" % (rerun-i)) + if rerun-i else None) + + if subprocess: + # loop backwards so last i is 0 + for i in range(rerun, -1, -1): + print_counter(i) + ret = run_in_subprocess_with_hash_randomization("_doctest", + function_args=paths, function_kwargs=kwargs) + if ret is False: + break + val = not bool(ret) + # exit on the first failure or if done + if not val or i == 0: + return val + + # rerun even if hash randomization is not supported + for i in range(rerun, -1, -1): + print_counter(i) + val = not bool(_doctest(*paths, **kwargs)) + if not val or i == 0: + return val + + +def _get_doctest_blacklist(): + '''Get the default blacklist for the doctests''' + blacklist = [] + + blacklist.extend([ + "doc/src/modules/plotting.rst", # generates live plots + "doc/src/modules/physics/mechanics/autolev_parser.rst", + "sympy/codegen/array_utils.py", # raises deprecation warning + "sympy/core/compatibility.py", # backwards compatibility shim, importing it triggers a deprecation warning + "sympy/core/trace.py", # backwards compatibility shim, importing it triggers a deprecation warning + "sympy/galgebra.py", # no longer part of SymPy + "sympy/parsing/autolev/_antlr/autolevlexer.py", # generated code + "sympy/parsing/autolev/_antlr/autolevlistener.py", # generated code + "sympy/parsing/autolev/_antlr/autolevparser.py", # generated code + "sympy/parsing/latex/_antlr/latexlexer.py", # generated code + "sympy/parsing/latex/_antlr/latexparser.py", # generated code + "sympy/plotting/pygletplot/__init__.py", # crashes on some systems + "sympy/plotting/pygletplot/plot.py", # crashes on some systems + "sympy/printing/ccode.py", # backwards compatibility shim, importing it breaks the codegen doctests + "sympy/printing/cxxcode.py", # backwards compatibility shim, importing it breaks the codegen doctests + "sympy/printing/fcode.py", # backwards compatibility shim, importing it breaks the codegen doctests + "sympy/testing/randtest.py", # backwards compatibility shim, importing it triggers a deprecation warning + "sympy/this.py", # prints text + ]) + # autolev parser tests + num = 12 + for i in range (1, num+1): + 
blacklist.append("sympy/parsing/autolev/test-examples/ruletest" + str(i) + ".py") + blacklist.extend(["sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.py", + "sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.py", + "sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.py", + "sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.py"]) + + if import_module('numpy') is None: + blacklist.extend([ + "sympy/plotting/experimental_lambdify.py", + "sympy/plotting/plot_implicit.py", + "examples/advanced/autowrap_integrators.py", + "examples/advanced/autowrap_ufuncify.py", + "examples/intermediate/sample.py", + "examples/intermediate/mplot2d.py", + "examples/intermediate/mplot3d.py", + "doc/src/modules/numeric-computation.rst" + ]) + else: + if import_module('matplotlib') is None: + blacklist.extend([ + "examples/intermediate/mplot2d.py", + "examples/intermediate/mplot3d.py" + ]) + else: + # Use a non-windowed backend, so that the tests work on CI + import matplotlib + matplotlib.use('Agg') + + if ON_CI or import_module('pyglet') is None: + blacklist.extend(["sympy/plotting/pygletplot"]) + + if import_module('aesara') is None: + blacklist.extend([ + "sympy/printing/aesaracode.py", + "doc/src/modules/numeric-computation.rst", + ]) + + if import_module('cupy') is None: + blacklist.extend([ + "doc/src/modules/numeric-computation.rst", + ]) + + if import_module('jax') is None: + blacklist.extend([ + "doc/src/modules/numeric-computation.rst", + ]) + + if import_module('antlr4') is None: + blacklist.extend([ + "sympy/parsing/autolev/__init__.py", + "sympy/parsing/latex/_parse_latex_antlr.py", + ]) + + if import_module('lfortran') is None: + #throws ImportError when lfortran not installed + blacklist.extend([ + "sympy/parsing/sym_expr.py", + ]) + + if import_module("scipy") is None: + # throws ModuleNotFoundError when scipy not installed + blacklist.extend([ + "doc/src/guides/solving/solve-numerically.md", + "doc/src/guides/solving/solve-ode.md", + ]) + + if import_module("numpy") is None: + # throws ModuleNotFoundError when numpy not installed + blacklist.extend([ + "doc/src/guides/solving/solve-ode.md", + "doc/src/guides/solving/solve-numerically.md", + ]) + + # disabled because of doctest failures in asmeurer's bot + blacklist.extend([ + "sympy/utilities/autowrap.py", + "examples/advanced/autowrap_integrators.py", + "examples/advanced/autowrap_ufuncify.py" + ]) + + blacklist.extend([ + "sympy/conftest.py", # Depends on pytest + ]) + + # These are deprecated stubs to be removed: + blacklist.extend([ + "sympy/utilities/tmpfiles.py", + "sympy/utilities/pytest.py", + "sympy/utilities/runtests.py", + "sympy/utilities/quality_unicode.py", + "sympy/utilities/randtest.py", + ]) + + blacklist = convert_to_native_paths(blacklist) + return blacklist + + +def _doctest(*paths, **kwargs): + """ + Internal function that actually runs the doctests. + + All keyword arguments from ``doctest()`` are passed to this function + except for ``subprocess``. + + Returns 0 if tests passed and 1 if they failed. See the docstrings of + ``doctest()`` and ``test()`` for more information. 
+ """ + from sympy.printing.pretty.pretty import pprint_use_unicode + + normal = kwargs.get("normal", False) + verbose = kwargs.get("verbose", False) + colors = kwargs.get("colors", True) + force_colors = kwargs.get("force_colors", False) + blacklist = kwargs.get("blacklist", []) + split = kwargs.get('split', None) + + blacklist.extend(_get_doctest_blacklist()) + + # Use a non-windowed backend, so that the tests work on CI + if import_module('matplotlib') is not None: + import matplotlib + matplotlib.use('Agg') + + # Disable warnings for external modules + import sympy.external + sympy.external.importtools.WARN_OLD_VERSION = False + sympy.external.importtools.WARN_NOT_INSTALLED = False + + # Disable showing up of plots + from sympy.plotting.plot import unset_show + unset_show() + + r = PyTestReporter(verbose, split=split, colors=colors,\ + force_colors=force_colors) + t = SymPyDocTests(r, normal) + + test_files = t.get_test_files('sympy') + test_files.extend(t.get_test_files('examples', init_only=False)) + + not_blacklisted = [f for f in test_files + if not any(b in f for b in blacklist)] + if len(paths) == 0: + matched = not_blacklisted + else: + # take only what was requested...but not blacklisted items + # and allow for partial match anywhere or fnmatch of name + paths = convert_to_native_paths(paths) + matched = [] + for f in not_blacklisted: + basename = os.path.basename(f) + for p in paths: + if p in f or fnmatch(basename, p): + matched.append(f) + break + + matched.sort() + + if split: + matched = split_list(matched, split) + + t._testfiles.extend(matched) + + # run the tests and record the result for this *py portion of the tests + if t._testfiles: + failed = not t.test() + else: + failed = False + + # N.B. + # -------------------------------------------------------------------- + # Here we test *.rst and *.md files at or below doc/src. Code from these + # must be self supporting in terms of imports since there is no importing + # of necessary modules by doctest.testfile. If you try to pass *.py files + # through this they might fail because they will lack the needed imports + # and smarter parsing that can be done with source code. + # + test_files_rst = t.get_test_files('doc/src', '*.rst', init_only=False) + test_files_md = t.get_test_files('doc/src', '*.md', init_only=False) + test_files = test_files_rst + test_files_md + test_files.sort() + + not_blacklisted = [f for f in test_files + if not any(b in f for b in blacklist)] + + if len(paths) == 0: + matched = not_blacklisted + else: + # Take only what was requested as long as it's not on the blacklist. + # Paths were already made native in *py tests so don't repeat here. + # There's no chance of having a *py file slip through since we + # only have *rst files in test_files. 
+ matched = [] + for f in not_blacklisted: + basename = os.path.basename(f) + for p in paths: + if p in f or fnmatch(basename, p): + matched.append(f) + break + + if split: + matched = split_list(matched, split) + + first_report = True + for rst_file in matched: + if not os.path.isfile(rst_file): + continue + old_displayhook = sys.displayhook + try: + use_unicode_prev = setup_pprint() + out = sympytestfile( + rst_file, module_relative=False, encoding='utf-8', + optionflags=pdoctest.ELLIPSIS | pdoctest.NORMALIZE_WHITESPACE | + pdoctest.IGNORE_EXCEPTION_DETAIL) + finally: + # make sure we return to the original displayhook in case some + # doctest has changed that + sys.displayhook = old_displayhook + # The NO_GLOBAL flag overrides the no_global flag to init_printing + # if True + import sympy.interactive.printing as interactive_printing + interactive_printing.NO_GLOBAL = False + pprint_use_unicode(use_unicode_prev) + + rstfailed, tested = out + if tested: + failed = rstfailed or failed + if first_report: + first_report = False + msg = 'rst/md doctests start' + if not t._testfiles: + r.start(msg=msg) + else: + r.write_center(msg) + print() + # use as the id, everything past the first 'sympy' + file_id = rst_file[rst_file.find('sympy') + len('sympy') + 1:] + print(file_id, end=" ") + # get at least the name out so it is know who is being tested + wid = r.terminal_width - len(file_id) - 1 # update width + test_file = '[%s]' % (tested) + report = '[%s]' % (rstfailed or 'OK') + print(''.join( + [test_file, ' '*(wid - len(test_file) - len(report)), report]) + ) + + # the doctests for *py will have printed this message already if there was + # a failure, so now only print it if there was intervening reporting by + # testing the *rst as evidenced by first_report no longer being True. + if not first_report and failed: + print() + print("DO *NOT* COMMIT!") + + return int(failed) + +sp = re.compile(r'([0-9]+)/([1-9][0-9]*)') + +def split_list(l, split, density=None): + """ + Splits a list into part a of b + + split should be a string of the form 'a/b'. For instance, '1/3' would give + the split one of three. + + If the length of the list is not divisible by the number of splits, the + last split will have more items. + + `density` may be specified as a list. If specified, + tests will be balanced so that each split has as equal-as-possible + amount of mass according to `density`. 
+ + >>> from sympy.testing.runtests import split_list + >>> a = list(range(10)) + >>> split_list(a, '1/3') + [0, 1, 2] + >>> split_list(a, '2/3') + [3, 4, 5] + >>> split_list(a, '3/3') + [6, 7, 8, 9] + """ + m = sp.match(split) + if not m: + raise ValueError("split must be a string of the form a/b where a and b are ints") + i, t = map(int, m.groups()) + + if not density: + return l[(i - 1)*len(l)//t : i*len(l)//t] + + # normalize density + tot = sum(density) + density = [x / tot for x in density] + + def density_inv(x): + """Interpolate the inverse to the cumulative + distribution function given by density""" + if x <= 0: + return 0 + if x >= sum(density): + return 1 + + # find the first time the cumulative sum surpasses x + # and linearly interpolate + cumm = 0 + for i, d in enumerate(density): + cumm += d + if cumm >= x: + break + frac = (d - (cumm - x)) / d + return (i + frac) / len(density) + + lower_frac = density_inv((i - 1) / t) + higher_frac = density_inv(i / t) + return l[int(lower_frac*len(l)) : int(higher_frac*len(l))] + +from collections import namedtuple +SymPyTestResults = namedtuple('SymPyTestResults', 'failed attempted') + +def sympytestfile(filename, module_relative=True, name=None, package=None, + globs=None, verbose=None, report=True, optionflags=0, + extraglobs=None, raise_on_error=False, + parser=pdoctest.DocTestParser(), encoding=None): + + """ + Test examples in the given file. Return (#failures, #tests). + + Optional keyword arg ``module_relative`` specifies how filenames + should be interpreted: + + - If ``module_relative`` is True (the default), then ``filename`` + specifies a module-relative path. By default, this path is + relative to the calling module's directory; but if the + ``package`` argument is specified, then it is relative to that + package. To ensure os-independence, ``filename`` should use + "/" characters to separate path segments, and should not + be an absolute path (i.e., it may not begin with "/"). + + - If ``module_relative`` is False, then ``filename`` specifies an + os-specific path. The path may be absolute or relative (to + the current working directory). + + Optional keyword arg ``name`` gives the name of the test; by default + use the file's basename. + + Optional keyword argument ``package`` is a Python package or the + name of a Python package whose directory should be used as the + base directory for a module relative filename. If no package is + specified, then the calling module's directory is used as the base + directory for module relative filenames. It is an error to + specify ``package`` if ``module_relative`` is False. + + Optional keyword arg ``globs`` gives a dict to be used as the globals + when executing examples; by default, use {}. A copy of this dict + is actually used for each docstring, so that each docstring's + examples start with a clean slate. + + Optional keyword arg ``extraglobs`` gives a dictionary that should be + merged into the globals that are used to execute examples. By + default, no extra globals are used. + + Optional keyword arg ``verbose`` prints lots of stuff if true, prints + only failures if false; by default, it's true iff "-v" is in sys.argv. + + Optional keyword arg ``report`` prints a summary at the end when true, + else prints nothing at the end. In verbose mode, the summary is + detailed, else very brief (in fact, empty if all tests passed). + + Optional keyword arg ``optionflags`` or's together module constants, + and defaults to 0. 
Possible values (see the docs for details): + + - DONT_ACCEPT_TRUE_FOR_1 + - DONT_ACCEPT_BLANKLINE + - NORMALIZE_WHITESPACE + - ELLIPSIS + - SKIP + - IGNORE_EXCEPTION_DETAIL + - REPORT_UDIFF + - REPORT_CDIFF + - REPORT_NDIFF + - REPORT_ONLY_FIRST_FAILURE + + Optional keyword arg ``raise_on_error`` raises an exception on the + first unexpected exception or failure. This allows failures to be + post-mortem debugged. + + Optional keyword arg ``parser`` specifies a DocTestParser (or + subclass) that should be used to extract tests from the files. + + Optional keyword arg ``encoding`` specifies an encoding that should + be used to convert the file to unicode. + + Advanced tomfoolery: testmod runs methods of a local instance of + class doctest.Tester, then merges the results into (or creates) + global Tester instance doctest.master. Methods of doctest.master + can be called directly too, if you want to do something unusual. + Passing report=0 to testmod is especially useful then, to delay + displaying a summary. Invoke doctest.master.summarize(verbose) + when you're done fiddling. + """ + if package and not module_relative: + raise ValueError("Package may only be specified for module-" + "relative paths.") + + # Relativize the path + text, filename = pdoctest._load_testfile( + filename, package, module_relative, encoding) + + # If no name was given, then use the file's name. + if name is None: + name = os.path.basename(filename) + + # Assemble the globals. + if globs is None: + globs = {} + else: + globs = globs.copy() + if extraglobs is not None: + globs.update(extraglobs) + if '__name__' not in globs: + globs['__name__'] = '__main__' + + if raise_on_error: + runner = pdoctest.DebugRunner(verbose=verbose, optionflags=optionflags) + else: + runner = SymPyDocTestRunner(verbose=verbose, optionflags=optionflags) + runner._checker = SymPyOutputChecker() + + # Read the file, convert it to a test, and run it. + test = parser.get_doctest(text, globs, name, filename, 0) + runner.run(test) + + if report: + runner.summarize() + + if pdoctest.master is None: + pdoctest.master = runner + else: + pdoctest.master.merge(runner) + + return SymPyTestResults(runner.failures, runner.tries) + + +class SymPyTests: + + def __init__(self, reporter, kw="", post_mortem=False, + seed=None, fast_threshold=None, slow_threshold=None): + self._post_mortem = post_mortem + self._kw = kw + self._count = 0 + self._root_dir = get_sympy_dir() + self._reporter = reporter + self._reporter.root_dir(self._root_dir) + self._testfiles = [] + self._seed = seed if seed is not None else random.random() + + # Defaults in seconds, from human / UX design limits + # http://www.nngroup.com/articles/response-times-3-important-limits/ + # + # These defaults are *NOT* set in stone as we are measuring different + # things, so others feel free to come up with a better yardstick :) + if fast_threshold: + self._fast_threshold = float(fast_threshold) + else: + self._fast_threshold = 8 + if slow_threshold: + self._slow_threshold = float(slow_threshold) + else: + self._slow_threshold = 10 + + def test(self, sort=False, timeout=False, slow=False, + enhance_asserts=False, fail_on_timeout=False): + """ + Runs the tests returning True if all tests pass, otherwise False. + + If sort=False run tests in random order. 
+ """ + if sort: + self._testfiles.sort() + elif slow: + pass + else: + random.seed(self._seed) + random.shuffle(self._testfiles) + self._reporter.start(self._seed) + for f in self._testfiles: + try: + self.test_file(f, sort, timeout, slow, + enhance_asserts, fail_on_timeout) + except KeyboardInterrupt: + print(" interrupted by user") + self._reporter.finish() + raise + return self._reporter.finish() + + def _enhance_asserts(self, source): + from ast import (NodeTransformer, Compare, Name, Store, Load, Tuple, + Assign, BinOp, Str, Mod, Assert, parse, fix_missing_locations) + + ops = {"Eq": '==', "NotEq": '!=', "Lt": '<', "LtE": '<=', + "Gt": '>', "GtE": '>=', "Is": 'is', "IsNot": 'is not', + "In": 'in', "NotIn": 'not in'} + + class Transform(NodeTransformer): + def visit_Assert(self, stmt): + if isinstance(stmt.test, Compare): + compare = stmt.test + values = [compare.left] + compare.comparators + names = [ "_%s" % i for i, _ in enumerate(values) ] + names_store = [ Name(n, Store()) for n in names ] + names_load = [ Name(n, Load()) for n in names ] + target = Tuple(names_store, Store()) + value = Tuple(values, Load()) + assign = Assign([target], value) + new_compare = Compare(names_load[0], compare.ops, names_load[1:]) + msg_format = "\n%s " + "\n%s ".join([ ops[op.__class__.__name__] for op in compare.ops ]) + "\n%s" + msg = BinOp(Str(msg_format), Mod(), Tuple(names_load, Load())) + test = Assert(new_compare, msg, lineno=stmt.lineno, col_offset=stmt.col_offset) + return [assign, test] + else: + return stmt + + tree = parse(source) + new_tree = Transform().visit(tree) + return fix_missing_locations(new_tree) + + def test_file(self, filename, sort=True, timeout=False, slow=False, + enhance_asserts=False, fail_on_timeout=False): + reporter = self._reporter + funcs = [] + try: + gl = {'__file__': filename} + try: + open_file = lambda: open(filename, encoding="utf8") + + with open_file() as f: + source = f.read() + if self._kw: + for l in source.splitlines(): + if l.lstrip().startswith('def '): + if any(l.lower().find(k.lower()) != -1 for k in self._kw): + break + else: + return + + if enhance_asserts: + try: + source = self._enhance_asserts(source) + except ImportError: + pass + + code = compile(source, filename, "exec", flags=0, dont_inherit=True) + exec(code, gl) + except (SystemExit, KeyboardInterrupt): + raise + except ImportError: + reporter.import_error(filename, sys.exc_info()) + return + except Exception: + reporter.test_exception(sys.exc_info()) + + clear_cache() + self._count += 1 + random.seed(self._seed) + disabled = gl.get("disabled", False) + if not disabled: + # we need to filter only those functions that begin with 'test_' + # We have to be careful about decorated functions. As long as + # the decorator uses functools.wraps, we can detect it. + funcs = [] + for f in gl: + if (f.startswith("test_") and (inspect.isfunction(gl[f]) + or inspect.ismethod(gl[f]))): + func = gl[f] + # Handle multiple decorators + while hasattr(func, '__wrapped__'): + func = func.__wrapped__ + + if inspect.getsourcefile(func) == filename: + funcs.append(gl[f]) + if slow: + funcs = [f for f in funcs if getattr(f, '_slow', False)] + # Sorting of XFAILed functions isn't fixed yet :-( + funcs.sort(key=lambda x: inspect.getsourcelines(x)[1]) + i = 0 + while i < len(funcs): + if inspect.isgeneratorfunction(funcs[i]): + # some tests can be generators, that return the actual + # test functions. 
We unpack it below: + f = funcs.pop(i) + for fg in f(): + func = fg[0] + args = fg[1:] + fgw = lambda: func(*args) + funcs.insert(i, fgw) + i += 1 + else: + i += 1 + # drop functions that are not selected with the keyword expression: + funcs = [x for x in funcs if self.matches(x)] + + if not funcs: + return + except Exception: + reporter.entering_filename(filename, len(funcs)) + raise + + reporter.entering_filename(filename, len(funcs)) + if not sort: + random.shuffle(funcs) + + for f in funcs: + start = time.time() + reporter.entering_test(f) + try: + if getattr(f, '_slow', False) and not slow: + raise Skipped("Slow") + with raise_on_deprecated(): + if timeout: + self._timeout(f, timeout, fail_on_timeout) + else: + random.seed(self._seed) + f() + except KeyboardInterrupt: + if getattr(f, '_slow', False): + reporter.test_skip("KeyboardInterrupt") + else: + raise + except Exception: + if timeout: + signal.alarm(0) # Disable the alarm. It could not be handled before. + t, v, tr = sys.exc_info() + if t is AssertionError: + reporter.test_fail((t, v, tr)) + if self._post_mortem: + pdb.post_mortem(tr) + elif t.__name__ == "Skipped": + reporter.test_skip(v) + elif t.__name__ == "XFail": + reporter.test_xfail() + elif t.__name__ == "XPass": + reporter.test_xpass(v) + else: + reporter.test_exception((t, v, tr)) + if self._post_mortem: + pdb.post_mortem(tr) + else: + reporter.test_pass() + taken = time.time() - start + if taken > self._slow_threshold: + filename = os.path.relpath(filename, reporter._root_dir) + reporter.slow_test_functions.append( + (filename + "::" + f.__name__, taken)) + if getattr(f, '_slow', False) and slow: + if taken < self._fast_threshold: + filename = os.path.relpath(filename, reporter._root_dir) + reporter.fast_test_functions.append( + (filename + "::" + f.__name__, taken)) + reporter.leaving_filename() + + def _timeout(self, function, timeout, fail_on_timeout): + def callback(x, y): + signal.alarm(0) + if fail_on_timeout: + raise TimeOutError("Timed out after %d seconds" % timeout) + else: + raise Skipped("Timeout") + signal.signal(signal.SIGALRM, callback) + signal.alarm(timeout) # Set an alarm with a given timeout + function() + signal.alarm(0) # Disable the alarm + + def matches(self, x): + """ + Does the keyword expression self._kw match "x"? Returns True/False. + + Always returns True if self._kw is "". + """ + if not self._kw: + return True + for kw in self._kw: + if x.__name__.lower().find(kw.lower()) != -1: + return True + return False + + def get_test_files(self, dir, pat='test_*.py'): + """ + Returns the list of test_*.py (default) files at or below directory + ``dir`` relative to the SymPy home directory. + """ + dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0]) + + g = [] + for path, folders, files in os.walk(dir): + g.extend([os.path.join(path, f) for f in files if fnmatch(f, pat)]) + + return sorted([os.path.normcase(gi) for gi in g]) + + +class SymPyDocTests: + + def __init__(self, reporter, normal): + self._count = 0 + self._root_dir = get_sympy_dir() + self._reporter = reporter + self._reporter.root_dir(self._root_dir) + self._normal = normal + + self._testfiles = [] + + def test(self): + """ + Runs the tests and returns True if all tests pass, otherwise False. 
+ """ + self._reporter.start() + for f in self._testfiles: + try: + self.test_file(f) + except KeyboardInterrupt: + print(" interrupted by user") + self._reporter.finish() + raise + return self._reporter.finish() + + def test_file(self, filename): + clear_cache() + + from io import StringIO + import sympy.interactive.printing as interactive_printing + from sympy.printing.pretty.pretty import pprint_use_unicode + + rel_name = filename[len(self._root_dir) + 1:] + dirname, file = os.path.split(filename) + module = rel_name.replace(os.sep, '.')[:-3] + + if rel_name.startswith("examples"): + # Examples files do not have __init__.py files, + # So we have to temporarily extend sys.path to import them + sys.path.insert(0, dirname) + module = file[:-3] # remove ".py" + try: + module = pdoctest._normalize_module(module) + tests = SymPyDocTestFinder().find(module) + except (SystemExit, KeyboardInterrupt): + raise + except ImportError: + self._reporter.import_error(filename, sys.exc_info()) + return + finally: + if rel_name.startswith("examples"): + del sys.path[0] + + tests = [test for test in tests if len(test.examples) > 0] + # By default tests are sorted by alphabetical order by function name. + # We sort by line number so one can edit the file sequentially from + # bottom to top. However, if there are decorated functions, their line + # numbers will be too large and for now one must just search for these + # by text and function name. + tests.sort(key=lambda x: -x.lineno) + + if not tests: + return + self._reporter.entering_filename(filename, len(tests)) + for test in tests: + assert len(test.examples) != 0 + + if self._reporter._verbose: + self._reporter.write("\n{} ".format(test.name)) + + # check if there are external dependencies which need to be met + if '_doctest_depends_on' in test.globs: + try: + self._check_dependencies(**test.globs['_doctest_depends_on']) + except DependencyError as e: + self._reporter.test_skip(v=str(e)) + continue + + runner = SymPyDocTestRunner(verbose=self._reporter._verbose==2, + optionflags=pdoctest.ELLIPSIS | + pdoctest.NORMALIZE_WHITESPACE | + pdoctest.IGNORE_EXCEPTION_DETAIL) + runner._checker = SymPyOutputChecker() + old = sys.stdout + new = old if self._reporter._verbose==2 else StringIO() + sys.stdout = new + # If the testing is normal, the doctests get importing magic to + # provide the global namespace. If not normal (the default) then + # then must run on their own; all imports must be explicit within + # a function's docstring. Once imported that import will be + # available to the rest of the tests in a given function's + # docstring (unless clear_globs=True below). + if not self._normal: + test.globs = {} + # if this is uncommented then all the test would get is what + # comes by default with a "from sympy import *" + #exec('from sympy import *') in test.globs + old_displayhook = sys.displayhook + use_unicode_prev = setup_pprint() + + try: + f, t = runner.run(test, + out=new.write, clear_globs=False) + except KeyboardInterrupt: + raise + finally: + sys.stdout = old + if f > 0: + self._reporter.doctest_fail(test.name, new.getvalue()) + else: + self._reporter.test_pass() + sys.displayhook = old_displayhook + interactive_printing.NO_GLOBAL = False + pprint_use_unicode(use_unicode_prev) + + self._reporter.leaving_filename() + + def get_test_files(self, dir, pat='*.py', init_only=True): + r""" + Returns the list of \*.py files (default) from which docstrings + will be tested which are at or below directory ``dir``. 
By default, + only those that have an __init__.py in their parent directory + and do not start with ``test_`` will be included. + """ + def importable(x): + """ + Checks if given pathname x is an importable module by checking for + __init__.py file. + + Returns True/False. + + Currently we only test if the __init__.py file exists in the + directory with the file "x" (in theory we should also test all the + parent dirs). + """ + init_py = os.path.join(os.path.dirname(x), "__init__.py") + return os.path.exists(init_py) + + dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0]) + + g = [] + for path, folders, files in os.walk(dir): + g.extend([os.path.join(path, f) for f in files + if not f.startswith('test_') and fnmatch(f, pat)]) + if init_only: + # skip files that are not importable (i.e. missing __init__.py) + g = [x for x in g if importable(x)] + + return [os.path.normcase(gi) for gi in g] + + def _check_dependencies(self, + executables=(), + modules=(), + disable_viewers=(), + python_version=(3, 5)): + """ + Checks if the dependencies for the test are installed. + + Raises ``DependencyError`` it at least one dependency is not installed. + """ + + for executable in executables: + if not shutil.which(executable): + raise DependencyError("Could not find %s" % executable) + + for module in modules: + if module == 'matplotlib': + matplotlib = import_module( + 'matplotlib', + import_kwargs={'fromlist': + ['pyplot', 'cm', 'collections']}, + min_module_version='1.0.0', catch=(RuntimeError,)) + if matplotlib is None: + raise DependencyError("Could not import matplotlib") + else: + if not import_module(module): + raise DependencyError("Could not import %s" % module) + + if disable_viewers: + tempdir = tempfile.mkdtemp() + os.environ['PATH'] = '%s:%s' % (tempdir, os.environ['PATH']) + + vw = ('#!/usr/bin/env python3\n' + 'import sys\n' + 'if len(sys.argv) <= 1:\n' + ' exit("wrong number of args")\n') + + for viewer in disable_viewers: + with open(os.path.join(tempdir, viewer), 'w') as fh: + fh.write(vw) + + # make the file executable + os.chmod(os.path.join(tempdir, viewer), + stat.S_IREAD | stat.S_IWRITE | stat.S_IXUSR) + + if python_version: + if sys.version_info < python_version: + raise DependencyError("Requires Python >= " + '.'.join(map(str, python_version))) + + if 'pyglet' in modules: + # monkey-patch pyglet s.t. it does not open a window during + # doctesting + import pyglet + class DummyWindow: + def __init__(self, *args, **kwargs): + self.has_exit = True + self.width = 600 + self.height = 400 + + def set_vsync(self, x): + pass + + def switch_to(self): + pass + + def push_handlers(self, x): + pass + + def close(self): + pass + + pyglet.window.Window = DummyWindow + + +class SymPyDocTestFinder(DocTestFinder): + """ + A class used to extract the DocTests that are relevant to a given + object, from its docstring and the docstrings of its contained + objects. Doctests can currently be extracted from the following + object types: modules, functions, classes, methods, staticmethods, + classmethods, and properties. + + Modified from doctest's version to look harder for code that + appears comes from a different module. For example, the @vectorize + decorator makes it look like functions come from multidimensional.py + even though their code exists elsewhere. + """ + + def _find(self, tests, obj, name, module, source_lines, globs, seen): + """ + Find tests for the given object and any contained objects, and + add them to ``tests``. 
+ """ + if self._verbose: + print('Finding tests in %s' % name) + + # If we've already processed this object, then ignore it. + if id(obj) in seen: + return + seen[id(obj)] = 1 + + # Make sure we don't run doctests for classes outside of sympy, such + # as in numpy or scipy. + if inspect.isclass(obj): + if obj.__module__.split('.')[0] != 'sympy': + return + + # Find a test for this object, and add it to the list of tests. + test = self._get_test(obj, name, module, globs, source_lines) + if test is not None: + tests.append(test) + + if not self._recurse: + return + + # Look for tests in a module's contained objects. + if inspect.ismodule(obj): + for rawname, val in obj.__dict__.items(): + # Recurse to functions & classes. + if inspect.isfunction(val) or inspect.isclass(val): + # Make sure we don't run doctests functions or classes + # from different modules + if val.__module__ != module.__name__: + continue + + assert self._from_module(module, val), \ + "%s is not in module %s (rawname %s)" % (val, module, rawname) + + try: + valname = '%s.%s' % (name, rawname) + self._find(tests, val, valname, module, + source_lines, globs, seen) + except KeyboardInterrupt: + raise + + # Look for tests in a module's __test__ dictionary. + for valname, val in getattr(obj, '__test__', {}).items(): + if not isinstance(valname, str): + raise ValueError("SymPyDocTestFinder.find: __test__ keys " + "must be strings: %r" % + (type(valname),)) + if not (inspect.isfunction(val) or inspect.isclass(val) or + inspect.ismethod(val) or inspect.ismodule(val) or + isinstance(val, str)): + raise ValueError("SymPyDocTestFinder.find: __test__ values " + "must be strings, functions, methods, " + "classes, or modules: %r" % + (type(val),)) + valname = '%s.__test__.%s' % (name, valname) + self._find(tests, val, valname, module, source_lines, + globs, seen) + + + # Look for tests in a class's contained objects. + if inspect.isclass(obj): + for valname, val in obj.__dict__.items(): + # Special handling for staticmethod/classmethod. + if isinstance(val, staticmethod): + val = getattr(obj, valname) + if isinstance(val, classmethod): + val = getattr(obj, valname).__func__ + + + # Recurse to methods, properties, and nested classes. + if ((inspect.isfunction(unwrap(val)) or + inspect.isclass(val) or + isinstance(val, property)) and + self._from_module(module, val)): + # Make sure we don't run doctests functions or classes + # from different modules + if isinstance(val, property): + if hasattr(val.fget, '__module__'): + if val.fget.__module__ != module.__name__: + continue + else: + if val.__module__ != module.__name__: + continue + + assert self._from_module(module, val), \ + "%s is not in module %s (valname %s)" % ( + val, module, valname) + + valname = '%s.%s' % (name, valname) + self._find(tests, val, valname, module, source_lines, + globs, seen) + + def _get_test(self, obj, name, module, globs, source_lines): + """ + Return a DocTest for the given object, if it defines a docstring; + otherwise, return None. + """ + + lineno = None + + # Extract the object's docstring. If it does not have one, + # then return None (no test for this object). + if isinstance(obj, str): + # obj is a string in the case for objects in the polys package. + # Note that source_lines is a binary string (compiled polys + # modules), which can't be handled by _find_lineno so determine + # the line number here. 
+ + docstring = obj + + matches = re.findall(r"line \d+", name) + assert len(matches) == 1, \ + "string '%s' does not contain lineno " % name + + # NOTE: this is not the exact linenumber but its better than no + # lineno ;) + lineno = int(matches[0][5:]) + + else: + try: + if obj.__doc__ is None: + docstring = '' + else: + docstring = obj.__doc__ + if not isinstance(docstring, str): + docstring = str(docstring) + except (TypeError, AttributeError): + docstring = '' + + # Don't bother if the docstring is empty. + if self._exclude_empty and not docstring: + return None + + # check that properties have a docstring because _find_lineno + # assumes it + if isinstance(obj, property): + if obj.fget.__doc__ is None: + return None + + # Find the docstring's location in the file. + if lineno is None: + obj = unwrap(obj) + # handling of properties is not implemented in _find_lineno so do + # it here + if hasattr(obj, 'func_closure') and obj.func_closure is not None: + tobj = obj.func_closure[0].cell_contents + elif isinstance(obj, property): + tobj = obj.fget + else: + tobj = obj + lineno = self._find_lineno(tobj, source_lines) + + if lineno is None: + return None + + # Return a DocTest for this object. + if module is None: + filename = None + else: + filename = getattr(module, '__file__', module.__name__) + if filename[-4:] in (".pyc", ".pyo"): + filename = filename[:-1] + + globs['_doctest_depends_on'] = getattr(obj, '_doctest_depends_on', {}) + + return self._parser.get_doctest(docstring, globs, name, + filename, lineno) + + +class SymPyDocTestRunner(DocTestRunner): + """ + A class used to run DocTest test cases, and accumulate statistics. + The ``run`` method is used to process a single DocTest case. It + returns a tuple ``(f, t)``, where ``t`` is the number of test cases + tried, and ``f`` is the number of test cases that failed. + + Modified from the doctest version to not reset the sys.displayhook (see + issue 5140). + + See the docstring of the original DocTestRunner for more information. + """ + + def run(self, test, compileflags=None, out=None, clear_globs=True): + """ + Run the examples in ``test``, and display the results using the + writer function ``out``. + + The examples are run in the namespace ``test.globs``. If + ``clear_globs`` is true (the default), then this namespace will + be cleared after the test runs, to help with garbage + collection. If you would like to examine the namespace after + the test completes, then use ``clear_globs=False``. + + ``compileflags`` gives the set of flags that should be used by + the Python compiler when running the examples. If not + specified, then it will default to the set of future-import + flags that apply to ``globs``. + + The output of each example is checked using + ``SymPyDocTestRunner.check_output``, and the results are + formatted by the ``SymPyDocTestRunner.report_*`` methods. + """ + self.test = test + + # Remove ``` from the end of example, which may appear in Markdown + # files + for example in test.examples: + example.want = example.want.replace('```\n', '') + example.exc_msg = example.exc_msg and example.exc_msg.replace('```\n', '') + + + if compileflags is None: + compileflags = pdoctest._extract_future_flags(test.globs) + + save_stdout = sys.stdout + if out is None: + out = save_stdout.write + sys.stdout = self._fakeout + + # Patch pdb.set_trace to restore sys.stdout during interactive + # debugging (so it's not still redirected to self._fakeout). 
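+ # (pdoctest._OutputRedirectingPdb, instantiated just below, is the doctest
+ # helper class that performs this stdout-restoring redirection.)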
+ # Note that the interactive output will go to *our* + # save_stdout, even if that's not the real sys.stdout; this + # allows us to write test cases for the set_trace behavior. + save_set_trace = pdb.set_trace + self.debugger = pdoctest._OutputRedirectingPdb(save_stdout) + self.debugger.reset() + pdb.set_trace = self.debugger.set_trace + + # Patch linecache.getlines, so we can see the example's source + # when we're inside the debugger. + self.save_linecache_getlines = pdoctest.linecache.getlines + linecache.getlines = self.__patched_linecache_getlines + + # Fail for deprecation warnings + with raise_on_deprecated(): + try: + return self.__run(test, compileflags, out) + finally: + sys.stdout = save_stdout + pdb.set_trace = save_set_trace + linecache.getlines = self.save_linecache_getlines + if clear_globs: + test.globs.clear() + + +# We have to override the name mangled methods. +monkeypatched_methods = [ + 'patched_linecache_getlines', + 'run', + 'record_outcome' +] +for method in monkeypatched_methods: + oldname = '_DocTestRunner__' + method + newname = '_SymPyDocTestRunner__' + method + setattr(SymPyDocTestRunner, newname, getattr(DocTestRunner, oldname)) + + +class SymPyOutputChecker(pdoctest.OutputChecker): + """ + Compared to the OutputChecker from the stdlib our OutputChecker class + supports numerical comparison of floats occurring in the output of the + doctest examples + """ + + def __init__(self): + # NOTE OutputChecker is an old-style class with no __init__ method, + # so we can't call the base class version of __init__ here + + got_floats = r'(\d+\.\d*|\.\d+)' + + # floats in the 'want' string may contain ellipses + want_floats = got_floats + r'(\.{3})?' + + front_sep = r'\s|\+|\-|\*|,' + back_sep = front_sep + r'|j|e' + + fbeg = r'^%s(?=%s|$)' % (got_floats, back_sep) + fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, got_floats, back_sep) + self.num_got_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend)) + + fbeg = r'^%s(?=%s|$)' % (want_floats, back_sep) + fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, want_floats, back_sep) + self.num_want_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend)) + + def check_output(self, want, got, optionflags): + """ + Return True iff the actual output from an example (`got`) + matches the expected output (`want`). These strings are + always considered to match if they are identical; but + depending on what option flags the test runner is using, + several non-exact match types are also possible. See the + documentation for `TestRunner` for more information about + option flags. + """ + # Handle the common case first, for efficiency: + # if they're string-identical, always return true. + if got == want: + return True + + # TODO parse integers as well ? + # Parse floats and compare them. If some of the parsed floats contain + # ellipses, skip the comparison. + matches = self.num_got_rgx.finditer(got) + numbers_got = [match.group(1) for match in matches] # list of strs + matches = self.num_want_rgx.finditer(want) + numbers_want = [match.group(1) for match in matches] # list of strs + if len(numbers_got) != len(numbers_want): + return False + + if len(numbers_got) > 0: + nw_ = [] + for ng, nw in zip(numbers_got, numbers_want): + if '...' in nw: + nw_.append(ng) + continue + else: + nw_.append(nw) + + if abs(float(ng)-float(nw)) > 1e-5: + return False + + got = self.num_got_rgx.sub(r'%s', got) + got = got % tuple(nw_) + + # can be used as a special sequence to signify a + # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. 
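+ # (The special sequence meant here is pdoctest.BLANKLINE_MARKER, i.e. the
+ # literal string "<BLANKLINE>", which the next block substitutes away when
+ # DONT_ACCEPT_BLANKLINE is not set.)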
+ if not (optionflags & pdoctest.DONT_ACCEPT_BLANKLINE): + # Replace in want with a blank line. + want = re.sub(r'(?m)^%s\s*?$' % re.escape(pdoctest.BLANKLINE_MARKER), + '', want) + # If a line in got contains only spaces, then remove the + # spaces. + got = re.sub(r'(?m)^\s*?$', '', got) + if got == want: + return True + + # This flag causes doctest to ignore any differences in the + # contents of whitespace strings. Note that this can be used + # in conjunction with the ELLIPSIS flag. + if optionflags & pdoctest.NORMALIZE_WHITESPACE: + got = ' '.join(got.split()) + want = ' '.join(want.split()) + if got == want: + return True + + # The ELLIPSIS flag says to let the sequence "..." in `want` + # match any substring in `got`. + if optionflags & pdoctest.ELLIPSIS: + if pdoctest._ellipsis_match(want, got): + return True + + # We didn't find any match; return false. + return False + + +class Reporter: + """ + Parent class for all reporters. + """ + pass + + +class PyTestReporter(Reporter): + """ + Py.test like reporter. Should produce output identical to py.test. + """ + + def __init__(self, verbose=False, tb="short", colors=True, + force_colors=False, split=None): + self._verbose = verbose + self._tb_style = tb + self._colors = colors + self._force_colors = force_colors + self._xfailed = 0 + self._xpassed = [] + self._failed = [] + self._failed_doctest = [] + self._passed = 0 + self._skipped = 0 + self._exceptions = [] + self._terminal_width = None + self._default_width = 80 + self._split = split + self._active_file = '' + self._active_f = None + + # TODO: Should these be protected? + self.slow_test_functions = [] + self.fast_test_functions = [] + + # this tracks the x-position of the cursor (useful for positioning + # things on the screen), without the need for any readline library: + self._write_pos = 0 + self._line_wrap = False + + def root_dir(self, dir): + self._root_dir = dir + + @property + def terminal_width(self): + if self._terminal_width is not None: + return self._terminal_width + + def findout_terminal_width(): + if sys.platform == "win32": + # Windows support is based on: + # + # http://code.activestate.com/recipes/ + # 440694-determine-size-of-console-window-on-windows/ + + from ctypes import windll, create_string_buffer + + h = windll.kernel32.GetStdHandle(-12) + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) + + if res: + import struct + (_, _, _, _, _, left, _, right, _, _, _) = \ + struct.unpack("hhhhHhhhhhh", csbi.raw) + return right - left + else: + return self._default_width + + if hasattr(sys.stdout, 'isatty') and not sys.stdout.isatty(): + return self._default_width # leave PIPEs alone + + try: + process = subprocess.Popen(['stty', '-a'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + stdout = stdout.decode("utf-8") + except OSError: + pass + else: + # We support the following output formats from stty: + # + # 1) Linux -> columns 80 + # 2) OS X -> 80 columns + # 3) Solaris -> columns = 80 + + re_linux = r"columns\s+(?P\d+);" + re_osx = r"(?P\d+)\s*columns;" + re_solaris = r"columns\s+=\s+(?P\d+);" + + for regex in (re_linux, re_osx, re_solaris): + match = re.search(regex, stdout) + + if match is not None: + columns = match.group('columns') + + try: + width = int(columns) + except ValueError: + pass + if width != 0: + return width + + return self._default_width + + width = findout_terminal_width() + self._terminal_width = width + + return width + + def write(self, text, 
color="", align="left", width=None, + force_colors=False): + """ + Prints a text on the screen. + + It uses sys.stdout.write(), so no readline library is necessary. + + Parameters + ========== + + color : choose from the colors below, "" means default color + align : "left"/"right", "left" is a normal print, "right" is aligned on + the right-hand side of the screen, filled with spaces if + necessary + width : the screen width + + """ + color_templates = ( + ("Black", "0;30"), + ("Red", "0;31"), + ("Green", "0;32"), + ("Brown", "0;33"), + ("Blue", "0;34"), + ("Purple", "0;35"), + ("Cyan", "0;36"), + ("LightGray", "0;37"), + ("DarkGray", "1;30"), + ("LightRed", "1;31"), + ("LightGreen", "1;32"), + ("Yellow", "1;33"), + ("LightBlue", "1;34"), + ("LightPurple", "1;35"), + ("LightCyan", "1;36"), + ("White", "1;37"), + ) + + colors = {} + + for name, value in color_templates: + colors[name] = value + c_normal = '\033[0m' + c_color = '\033[%sm' + + if width is None: + width = self.terminal_width + + if align == "right": + if self._write_pos + len(text) > width: + # we don't fit on the current line, create a new line + self.write("\n") + self.write(" "*(width - self._write_pos - len(text))) + + if not self._force_colors and hasattr(sys.stdout, 'isatty') and not \ + sys.stdout.isatty(): + # the stdout is not a terminal, this for example happens if the + # output is piped to less, e.g. "bin/test | less". In this case, + # the terminal control sequences would be printed verbatim, so + # don't use any colors. + color = "" + elif sys.platform == "win32": + # Windows consoles don't support ANSI escape sequences + color = "" + elif not self._colors: + color = "" + + if self._line_wrap: + if text[0] != "\n": + sys.stdout.write("\n") + + # Avoid UnicodeEncodeError when printing out test failures + if IS_WINDOWS: + text = text.encode('raw_unicode_escape').decode('utf8', 'ignore') + elif not sys.stdout.encoding.lower().startswith('utf'): + text = text.encode(sys.stdout.encoding, 'backslashreplace' + ).decode(sys.stdout.encoding) + + if color == "": + sys.stdout.write(text) + else: + sys.stdout.write("%s%s%s" % + (c_color % colors[color], text, c_normal)) + sys.stdout.flush() + l = text.rfind("\n") + if l == -1: + self._write_pos += len(text) + else: + self._write_pos = len(text) - l - 1 + self._line_wrap = self._write_pos >= width + self._write_pos %= width + + def write_center(self, text, delim="="): + width = self.terminal_width + if text != "": + text = " %s " % text + idx = (width - len(text)) // 2 + t = delim*idx + text + delim*(width - idx - len(text)) + self.write(t + "\n") + + def write_exception(self, e, val, tb): + # remove the first item, as that is always runtests.py + tb = tb.tb_next + t = traceback.format_exception(e, val, tb) + self.write("".join(t)) + + def start(self, seed=None, msg="test process starts"): + self.write_center(msg) + executable = sys.executable + v = tuple(sys.version_info) + python_version = "%s.%s.%s-%s-%s" % v + implementation = platform.python_implementation() + if implementation == 'PyPy': + implementation += " %s.%s.%s-%s-%s" % sys.pypy_version_info + self.write("executable: %s (%s) [%s]\n" % + (executable, python_version, implementation)) + from sympy.utilities.misc import ARCH + self.write("architecture: %s\n" % ARCH) + from sympy.core.cache import USE_CACHE + self.write("cache: %s\n" % USE_CACHE) + version = '' + if GROUND_TYPES =='gmpy': + if HAS_GMPY == 1: + import gmpy + elif HAS_GMPY == 2: + import gmpy2 as gmpy + version = gmpy.version() + self.write("ground 
types: %s %s\n" % (GROUND_TYPES, version)) + numpy = import_module('numpy') + self.write("numpy: %s\n" % (None if not numpy else numpy.__version__)) + if seed is not None: + self.write("random seed: %d\n" % seed) + from sympy.utilities.misc import HASH_RANDOMIZATION + self.write("hash randomization: ") + hash_seed = os.getenv("PYTHONHASHSEED") or '0' + if HASH_RANDOMIZATION and (hash_seed == "random" or int(hash_seed)): + self.write("on (PYTHONHASHSEED=%s)\n" % hash_seed) + else: + self.write("off\n") + if self._split: + self.write("split: %s\n" % self._split) + self.write('\n') + self._t_start = clock() + + def finish(self): + self._t_end = clock() + self.write("\n") + global text, linelen + text = "tests finished: %d passed, " % self._passed + linelen = len(text) + + def add_text(mytext): + global text, linelen + """Break new text if too long.""" + if linelen + len(mytext) > self.terminal_width: + text += '\n' + linelen = 0 + text += mytext + linelen += len(mytext) + + if len(self._failed) > 0: + add_text("%d failed, " % len(self._failed)) + if len(self._failed_doctest) > 0: + add_text("%d failed, " % len(self._failed_doctest)) + if self._skipped > 0: + add_text("%d skipped, " % self._skipped) + if self._xfailed > 0: + add_text("%d expected to fail, " % self._xfailed) + if len(self._xpassed) > 0: + add_text("%d expected to fail but passed, " % len(self._xpassed)) + if len(self._exceptions) > 0: + add_text("%d exceptions, " % len(self._exceptions)) + add_text("in %.2f seconds" % (self._t_end - self._t_start)) + + if self.slow_test_functions: + self.write_center('slowest tests', '_') + sorted_slow = sorted(self.slow_test_functions, key=lambda r: r[1]) + for slow_func_name, taken in sorted_slow: + print('%s - Took %.3f seconds' % (slow_func_name, taken)) + + if self.fast_test_functions: + self.write_center('unexpectedly fast tests', '_') + sorted_fast = sorted(self.fast_test_functions, + key=lambda r: r[1]) + for fast_func_name, taken in sorted_fast: + print('%s - Took %.3f seconds' % (fast_func_name, taken)) + + if len(self._xpassed) > 0: + self.write_center("xpassed tests", "_") + for e in self._xpassed: + self.write("%s: %s\n" % (e[0], e[1])) + self.write("\n") + + if self._tb_style != "no" and len(self._exceptions) > 0: + for e in self._exceptions: + filename, f, (t, val, tb) = e + self.write_center("", "_") + if f is None: + s = "%s" % filename + else: + s = "%s:%s" % (filename, f.__name__) + self.write_center(s, "_") + self.write_exception(t, val, tb) + self.write("\n") + + if self._tb_style != "no" and len(self._failed) > 0: + for e in self._failed: + filename, f, (t, val, tb) = e + self.write_center("", "_") + self.write_center("%s:%s" % (filename, f.__name__), "_") + self.write_exception(t, val, tb) + self.write("\n") + + if self._tb_style != "no" and len(self._failed_doctest) > 0: + for e in self._failed_doctest: + filename, msg = e + self.write_center("", "_") + self.write_center("%s" % filename, "_") + self.write(msg) + self.write("\n") + + self.write_center(text) + ok = len(self._failed) == 0 and len(self._exceptions) == 0 and \ + len(self._failed_doctest) == 0 + if not ok: + self.write("DO *NOT* COMMIT!\n") + return ok + + def entering_filename(self, filename, n): + rel_name = filename[len(self._root_dir) + 1:] + self._active_file = rel_name + self._active_file_error = False + self.write(rel_name) + self.write("[%d] " % n) + + def leaving_filename(self): + self.write(" ") + if self._active_file_error: + self.write("[FAIL]", "Red", align="right") + else: + self.write("[OK]", 
"Green", align="right") + self.write("\n") + if self._verbose: + self.write("\n") + + def entering_test(self, f): + self._active_f = f + if self._verbose: + self.write("\n" + f.__name__ + " ") + + def test_xfail(self): + self._xfailed += 1 + self.write("f", "Green") + + def test_xpass(self, v): + message = str(v) + self._xpassed.append((self._active_file, message)) + self.write("X", "Green") + + def test_fail(self, exc_info): + self._failed.append((self._active_file, self._active_f, exc_info)) + self.write("F", "Red") + self._active_file_error = True + + def doctest_fail(self, name, error_msg): + # the first line contains "******", remove it: + error_msg = "\n".join(error_msg.split("\n")[1:]) + self._failed_doctest.append((name, error_msg)) + self.write("F", "Red") + self._active_file_error = True + + def test_pass(self, char="."): + self._passed += 1 + if self._verbose: + self.write("ok", "Green") + else: + self.write(char, "Green") + + def test_skip(self, v=None): + char = "s" + self._skipped += 1 + if v is not None: + message = str(v) + if message == "KeyboardInterrupt": + char = "K" + elif message == "Timeout": + char = "T" + elif message == "Slow": + char = "w" + if self._verbose: + if v is not None: + self.write(message + ' ', "Blue") + else: + self.write(" - ", "Blue") + self.write(char, "Blue") + + def test_exception(self, exc_info): + self._exceptions.append((self._active_file, self._active_f, exc_info)) + if exc_info[0] is TimeOutError: + self.write("T", "Red") + else: + self.write("E", "Red") + self._active_file_error = True + + def import_error(self, filename, exc_info): + self._exceptions.append((filename, None, exc_info)) + rel_name = filename[len(self._root_dir) + 1:] + self.write(rel_name) + self.write("[?] Failed to import", "Red") + self.write(" ") + self.write("[FAIL]", "Red", align="right") + self.write("\n") diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__init__.py b/venv/lib/python3.10/site-packages/sympy/testing/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0a73bee19136b528ca931dbad26a7b2a9bb3ce73 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/diagnose_imports.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/diagnose_imports.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb95b90bbb4da0eafb132507b70f01b72cec7385 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/diagnose_imports.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_code_quality.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_code_quality.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a5eb12451b1f6d1b3ffb2b550ff837e1d20886a Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_code_quality.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_deprecated.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_deprecated.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd84b6154755a0be6823617427aa97eca68e3923 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_deprecated.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_module_imports.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_module_imports.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab1e85326d2f4a504295243a11af7babf6446915 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_module_imports.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_pytest.cpython-310.pyc b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_pytest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f63aff910a349d07605338e42225344afbf51e37 Binary files /dev/null and b/venv/lib/python3.10/site-packages/sympy/testing/tests/__pycache__/test_pytest.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/diagnose_imports.py b/venv/lib/python3.10/site-packages/sympy/testing/tests/diagnose_imports.py new file mode 100644 index 0000000000000000000000000000000000000000..7ddad652f055d41950cb825eee3a943425fa2fa7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tests/diagnose_imports.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python + +""" +Import diagnostics. Run bin/diagnose_imports.py --help for details. +""" + +from __future__ import annotations + +if __name__ == "__main__": + + import sys + import inspect + import builtins + + import optparse + + from os.path import abspath, dirname, join, normpath + this_file = abspath(__file__) + sympy_dir = join(dirname(this_file), '..', '..', '..') + sympy_dir = normpath(sympy_dir) + sys.path.insert(0, sympy_dir) + + option_parser = optparse.OptionParser( + usage= + "Usage: %prog option [options]\n" + "\n" + "Import analysis for imports between SymPy modules.") + option_group = optparse.OptionGroup( + option_parser, + 'Analysis options', + 'Options that define what to do. Exactly one of these must be given.') + option_group.add_option( + '--problems', + help= + 'Print all import problems, that is: ' + 'If an import pulls in a package instead of a module ' + '(e.g. sympy.core instead of sympy.core.add); ' # see ##PACKAGE## + 'if it imports a symbol that is already present; ' # see ##DUPLICATE## + 'if it imports a symbol ' + 'from somewhere other than the defining module.', # see ##ORIGIN## + action='count') + option_group.add_option( + '--origins', + help= + 'For each imported symbol in each module, ' + 'print the module that defined it. ' + '(This is useful for import refactoring.)', + action='count') + option_parser.add_option_group(option_group) + option_group = optparse.OptionGroup( + option_parser, + 'Sort options', + 'These options define the sort order for output lines. ' + 'At most one of these options is allowed. 
' + 'Unsorted output will reflect the order in which imports happened.') + option_group.add_option( + '--by-importer', + help='Sort output lines by name of importing module.', + action='count') + option_group.add_option( + '--by-origin', + help='Sort output lines by name of imported module.', + action='count') + option_parser.add_option_group(option_group) + (options, args) = option_parser.parse_args() + if args: + option_parser.error( + 'Unexpected arguments %s (try %s --help)' % (args, sys.argv[0])) + if options.problems > 1: + option_parser.error('--problems must not be given more than once.') + if options.origins > 1: + option_parser.error('--origins must not be given more than once.') + if options.by_importer > 1: + option_parser.error('--by-importer must not be given more than once.') + if options.by_origin > 1: + option_parser.error('--by-origin must not be given more than once.') + options.problems = options.problems == 1 + options.origins = options.origins == 1 + options.by_importer = options.by_importer == 1 + options.by_origin = options.by_origin == 1 + if not options.problems and not options.origins: + option_parser.error( + 'At least one of --problems and --origins is required') + if options.problems and options.origins: + option_parser.error( + 'At most one of --problems and --origins is allowed') + if options.by_importer and options.by_origin: + option_parser.error( + 'At most one of --by-importer and --by-origin is allowed') + options.by_process = not options.by_importer and not options.by_origin + + builtin_import = builtins.__import__ + + class Definition: + """Information about a symbol's definition.""" + def __init__(self, name, value, definer): + self.name = name + self.value = value + self.definer = definer + def __hash__(self): + return hash(self.name) + def __eq__(self, other): + return self.name == other.name and self.value == other.value + def __ne__(self, other): + return not (self == other) + def __repr__(self): + return 'Definition(%s, ..., %s)' % ( + repr(self.name), repr(self.definer)) + + # Maps each function/variable to name of module to define it + symbol_definers: dict[Definition, str] = {} + + def in_module(a, b): + """Is a the same module as or a submodule of b?""" + return a == b or a != None and b != None and a.startswith(b + '.') + + def relevant(module): + """Is module relevant for import checking? + + Only imports between relevant modules will be checked.""" + return in_module(module, 'sympy') + + sorted_messages = [] + + def msg(msg, *args): + global options, sorted_messages + if options.by_process: + print(msg % args) + else: + sorted_messages.append(msg % args) + + def tracking_import(module, globals=globals(), locals=[], fromlist=None, level=-1): + """__import__ wrapper - does not change imports at all, but tracks them. + + Default order is implemented by doing output directly. + All other orders are implemented by collecting output information into + a sorted list that will be emitted after all imports are processed. + + Indirect imports can only occur after the requested symbol has been + imported directly (because the indirect import would not have a module + to pick the symbol up from). + So this code detects indirect imports by checking whether the symbol in + question was already imported. 
+ + Keeps the semantics of __import__ unchanged.""" + global options, symbol_definers + caller_frame = inspect.getframeinfo(sys._getframe(1)) + importer_filename = caller_frame.filename + importer_module = globals['__name__'] + if importer_filename == caller_frame.filename: + importer_reference = '%s line %s' % ( + importer_filename, str(caller_frame.lineno)) + else: + importer_reference = importer_filename + result = builtin_import(module, globals, locals, fromlist, level) + importee_module = result.__name__ + # We're only interested if importer and importee are in SymPy + if relevant(importer_module) and relevant(importee_module): + for symbol in result.__dict__.iterkeys(): + definition = Definition( + symbol, result.__dict__[symbol], importer_module) + if definition not in symbol_definers: + symbol_definers[definition] = importee_module + if hasattr(result, '__path__'): + ##PACKAGE## + # The existence of __path__ is documented in the tutorial on modules. + # Python 3.3 documents this in http://docs.python.org/3.3/reference/import.html + if options.by_origin: + msg('Error: %s (a package) is imported by %s', + module, importer_reference) + else: + msg('Error: %s contains package import %s', + importer_reference, module) + if fromlist != None: + symbol_list = fromlist + if '*' in symbol_list: + if (importer_filename.endswith('__init__.py') + or importer_filename.endswith('__init__.pyc') + or importer_filename.endswith('__init__.pyo')): + # We do not check starred imports inside __init__ + # That's the normal "please copy over its imports to my namespace" + symbol_list = [] + else: + symbol_list = result.__dict__.iterkeys() + for symbol in symbol_list: + if symbol not in result.__dict__: + if options.by_origin: + msg('Error: %s.%s is not defined (yet), but %s tries to import it', + importee_module, symbol, importer_reference) + else: + msg('Error: %s tries to import %s.%s, which did not define it (yet)', + importer_reference, importee_module, symbol) + else: + definition = Definition( + symbol, result.__dict__[symbol], importer_module) + symbol_definer = symbol_definers[definition] + if symbol_definer == importee_module: + ##DUPLICATE## + if options.by_origin: + msg('Error: %s.%s is imported again into %s', + importee_module, symbol, importer_reference) + else: + msg('Error: %s imports %s.%s again', + importer_reference, importee_module, symbol) + else: + ##ORIGIN## + if options.by_origin: + msg('Error: %s.%s is imported by %s, which should import %s.%s instead', + importee_module, symbol, importer_reference, symbol_definer, symbol) + else: + msg('Error: %s imports %s.%s but should import %s.%s instead', + importer_reference, importee_module, symbol, symbol_definer, symbol) + return result + + builtins.__import__ = tracking_import + __import__('sympy') + + sorted_messages.sort() + for message in sorted_messages: + print(message) diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/test_code_quality.py b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_code_quality.py new file mode 100644 index 0000000000000000000000000000000000000000..41c4e5df29523909df715ecc1954e6789c6d7948 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_code_quality.py @@ -0,0 +1,510 @@ +# coding=utf-8 +from os import walk, sep, pardir +from os.path import split, join, abspath, exists, isfile +from glob import glob +import re +import random +import ast + +from sympy.testing.pytest import raises +from sympy.testing.quality_unicode import _test_this_file_encoding + +# 
System path separator (usually slash or backslash) to be +# used with excluded files, e.g. +# exclude = set([ +# "%(sep)smpmath%(sep)s" % sepd, +# ]) +sepd = {"sep": sep} + +# path and sympy_path +SYMPY_PATH = abspath(join(split(__file__)[0], pardir, pardir)) # go to sympy/ +assert exists(SYMPY_PATH) + +TOP_PATH = abspath(join(SYMPY_PATH, pardir)) +BIN_PATH = join(TOP_PATH, "bin") +EXAMPLES_PATH = join(TOP_PATH, "examples") + +# Error messages +message_space = "File contains trailing whitespace: %s, line %s." +message_implicit = "File contains an implicit import: %s, line %s." +message_tabs = "File contains tabs instead of spaces: %s, line %s." +message_carriage = "File contains carriage returns at end of line: %s, line %s" +message_str_raise = "File contains string exception: %s, line %s" +message_gen_raise = "File contains generic exception: %s, line %s" +message_old_raise = "File contains old-style raise statement: %s, line %s, \"%s\"" +message_eof = "File does not end with a newline: %s, line %s" +message_multi_eof = "File ends with more than 1 newline: %s, line %s" +message_test_suite_def = "Function should start with 'test_' or '_': %s, line %s" +message_duplicate_test = "This is a duplicate test function: %s, line %s" +message_self_assignments = "File contains assignments to self/cls: %s, line %s." +message_func_is = "File contains '.func is': %s, line %s." +message_bare_expr = "File contains bare expression: %s, line %s." + +implicit_test_re = re.compile(r'^\s*(>>> )?(\.\.\. )?from .* import .*\*') +str_raise_re = re.compile( + r'^\s*(>>> )?(\.\.\. )?raise(\s+(\'|\")|\s*(\(\s*)+(\'|\"))') +gen_raise_re = re.compile( + r'^\s*(>>> )?(\.\.\. )?raise(\s+Exception|\s*(\(\s*)+Exception)') +old_raise_re = re.compile(r'^\s*(>>> )?(\.\.\. )?raise((\s*\(\s*)|\s+)\w+\s*,') +test_suite_def_re = re.compile(r'^def\s+(?!(_|test))[^(]*\(\s*\)\s*:$') +test_ok_def_re = re.compile(r'^def\s+test_.*:$') +test_file_re = re.compile(r'.*[/\\]test_.*\.py$') +func_is_re = re.compile(r'\.\s*func\s+is') + + +def tab_in_leading(s): + """Returns True if there are tabs in the leading whitespace of a line, + including the whitespace of docstring code samples.""" + n = len(s) - len(s.lstrip()) + if not s[n:n + 3] in ['...', '>>>']: + check = s[:n] + else: + smore = s[n + 3:] + check = s[:n] + smore[:len(smore) - len(smore.lstrip())] + return not (check.expandtabs() == check) + + +def find_self_assignments(s): + """Returns a list of "bad" assignments: if there are instances + of assigning to the first argument of the class method (except + for staticmethod's). + """ + t = [n for n in ast.parse(s).body if isinstance(n, ast.ClassDef)] + + bad = [] + for c in t: + for n in c.body: + if not isinstance(n, ast.FunctionDef): + continue + if any(d.id == 'staticmethod' + for d in n.decorator_list if isinstance(d, ast.Name)): + continue + if n.name == '__new__': + continue + if not n.args.args: + continue + first_arg = n.args.args[0].arg + + for m in ast.walk(n): + if isinstance(m, ast.Assign): + for a in m.targets: + if isinstance(a, ast.Name) and a.id == first_arg: + bad.append(m) + elif (isinstance(a, ast.Tuple) and + any(q.id == first_arg for q in a.elts + if isinstance(q, ast.Name))): + bad.append(m) + + return bad + + +def check_directory_tree(base_path, file_check, exclusions=set(), pattern="*.py"): + """ + Checks all files in the directory tree (with base_path as starting point) + with the file_check function provided, skipping files that contain + any of the strings in the set provided by exclusions. 
+ """ + if not base_path: + return + for root, dirs, files in walk(base_path): + check_files(glob(join(root, pattern)), file_check, exclusions) + + +def check_files(files, file_check, exclusions=set(), pattern=None): + """ + Checks all files with the file_check function provided, skipping files + that contain any of the strings in the set provided by exclusions. + """ + if not files: + return + for fname in files: + if not exists(fname) or not isfile(fname): + continue + if any(ex in fname for ex in exclusions): + continue + if pattern is None or re.match(pattern, fname): + file_check(fname) + + +class _Visit(ast.NodeVisitor): + """return the line number corresponding to the + line on which a bare expression appears if it is a binary op + or a comparison that is not in a with block. + + EXAMPLES + ======== + + >>> import ast + >>> class _Visit(ast.NodeVisitor): + ... def visit_Expr(self, node): + ... if isinstance(node.value, (ast.BinOp, ast.Compare)): + ... print(node.lineno) + ... def visit_With(self, node): + ... pass # no checking there + ... + >>> code='''x = 1 # line 1 + ... for i in range(3): + ... x == 2 # <-- 3 + ... if x == 2: + ... x == 3 # <-- 5 + ... x + 1 # <-- 6 + ... x = 1 + ... if x == 1: + ... print(1) + ... while x != 1: + ... x == 1 # <-- 11 + ... with raises(TypeError): + ... c == 1 + ... raise TypeError + ... assert x == 1 + ... ''' + >>> _Visit().visit(ast.parse(code)) + 3 + 5 + 6 + 11 + """ + def visit_Expr(self, node): + if isinstance(node.value, (ast.BinOp, ast.Compare)): + assert None, message_bare_expr % ('', node.lineno) + def visit_With(self, node): + pass + + +BareExpr = _Visit() + + +def line_with_bare_expr(code): + """return None or else 0-based line number of code on which + a bare expression appeared. + """ + tree = ast.parse(code) + try: + BareExpr.visit(tree) + except AssertionError as msg: + assert msg.args + msg = msg.args[0] + assert msg.startswith(message_bare_expr.split(':', 1)[0]) + return int(msg.rsplit(' ', 1)[1].rstrip('.')) # the line number + + +def test_files(): + """ + This test tests all files in SymPy and checks that: + o no lines contains a trailing whitespace + o no lines end with \r\n + o no line uses tabs instead of spaces + o that the file ends with a single newline + o there are no general or string exceptions + o there are no old style raise statements + o name of arg-less test suite functions start with _ or test_ + o no duplicate function names that start with test_ + o no assignments to self variable in class methods + o no lines contain ".func is" except in the test suite + o there is no do-nothing expression like `a == b` or `x + 1` + """ + + def test(fname): + with open(fname, encoding="utf8") as test_file: + test_this_file(fname, test_file) + with open(fname, encoding='utf8') as test_file: + _test_this_file_encoding(fname, test_file) + + def test_this_file(fname, test_file): + idx = None + code = test_file.read() + test_file.seek(0) # restore reader to head + py = fname if sep not in fname else fname.rsplit(sep, 1)[-1] + if py.startswith('test_'): + idx = line_with_bare_expr(code) + if idx is not None: + assert False, message_bare_expr % (fname, idx + 1) + + line = None # to flag the case where there were no lines in file + tests = 0 + test_set = set() + for idx, line in enumerate(test_file): + if test_file_re.match(fname): + if test_suite_def_re.match(line): + assert False, message_test_suite_def % (fname, idx + 1) + if test_ok_def_re.match(line): + tests += 1 + test_set.add(line[3:].split('(')[0].strip()) + if 
len(test_set) != tests: + assert False, message_duplicate_test % (fname, idx + 1) + if line.endswith(" \n") or line.endswith("\t\n"): + assert False, message_space % (fname, idx + 1) + if line.endswith("\r\n"): + assert False, message_carriage % (fname, idx + 1) + if tab_in_leading(line): + assert False, message_tabs % (fname, idx + 1) + if str_raise_re.search(line): + assert False, message_str_raise % (fname, idx + 1) + if gen_raise_re.search(line): + assert False, message_gen_raise % (fname, idx + 1) + if (implicit_test_re.search(line) and + not list(filter(lambda ex: ex in fname, import_exclude))): + assert False, message_implicit % (fname, idx + 1) + if func_is_re.search(line) and not test_file_re.search(fname): + assert False, message_func_is % (fname, idx + 1) + + result = old_raise_re.search(line) + + if result is not None: + assert False, message_old_raise % ( + fname, idx + 1, result.group(2)) + + if line is not None: + if line == '\n' and idx > 0: + assert False, message_multi_eof % (fname, idx + 1) + elif not line.endswith('\n'): + # eof newline check + assert False, message_eof % (fname, idx + 1) + + + # Files to test at top level + top_level_files = [join(TOP_PATH, file) for file in [ + "isympy.py", + "build.py", + "setup.py", + ]] + # Files to exclude from all tests + exclude = { + "%(sep)ssympy%(sep)sparsing%(sep)sautolev%(sep)s_antlr%(sep)sautolevparser.py" % sepd, + "%(sep)ssympy%(sep)sparsing%(sep)sautolev%(sep)s_antlr%(sep)sautolevlexer.py" % sepd, + "%(sep)ssympy%(sep)sparsing%(sep)sautolev%(sep)s_antlr%(sep)sautolevlistener.py" % sepd, + "%(sep)ssympy%(sep)sparsing%(sep)slatex%(sep)s_antlr%(sep)slatexparser.py" % sepd, + "%(sep)ssympy%(sep)sparsing%(sep)slatex%(sep)s_antlr%(sep)slatexlexer.py" % sepd, + } + # Files to exclude from the implicit import test + import_exclude = { + # glob imports are allowed in top-level __init__.py: + "%(sep)ssympy%(sep)s__init__.py" % sepd, + # these __init__.py should be fixed: + # XXX: not really, they use useful import pattern (DRY) + "%(sep)svector%(sep)s__init__.py" % sepd, + "%(sep)smechanics%(sep)s__init__.py" % sepd, + "%(sep)squantum%(sep)s__init__.py" % sepd, + "%(sep)spolys%(sep)s__init__.py" % sepd, + "%(sep)spolys%(sep)sdomains%(sep)s__init__.py" % sepd, + # interactive SymPy executes ``from sympy import *``: + "%(sep)sinteractive%(sep)ssession.py" % sepd, + # isympy.py executes ``from sympy import *``: + "%(sep)sisympy.py" % sepd, + # these two are import timing tests: + "%(sep)sbin%(sep)ssympy_time.py" % sepd, + "%(sep)sbin%(sep)ssympy_time_cache.py" % sepd, + # Taken from Python stdlib: + "%(sep)sparsing%(sep)ssympy_tokenize.py" % sepd, + # this one should be fixed: + "%(sep)splotting%(sep)spygletplot%(sep)s" % sepd, + # False positive in the docstring + "%(sep)sbin%(sep)stest_external_imports.py" % sepd, + "%(sep)sbin%(sep)stest_submodule_imports.py" % sepd, + # These are deprecated stubs that can be removed at some point: + "%(sep)sutilities%(sep)sruntests.py" % sepd, + "%(sep)sutilities%(sep)spytest.py" % sepd, + "%(sep)sutilities%(sep)srandtest.py" % sepd, + "%(sep)sutilities%(sep)stmpfiles.py" % sepd, + "%(sep)sutilities%(sep)squality_unicode.py" % sepd, + } + check_files(top_level_files, test) + check_directory_tree(BIN_PATH, test, {"~", ".pyc", ".sh", ".mjs"}, "*") + check_directory_tree(SYMPY_PATH, test, exclude) + check_directory_tree(EXAMPLES_PATH, test, exclude) + + +def _with_space(c): + # return c with a random amount of leading space + return random.randint(0, 10)*' ' + c + + +def 
test_raise_statement_regular_expression(): + candidates_ok = [ + "some text # raise Exception, 'text'", + "raise ValueError('text') # raise Exception, 'text'", + "raise ValueError('text')", + "raise ValueError", + "raise ValueError('text')", + "raise ValueError('text') #,", + # Talking about an exception in a docstring + ''''"""This function will raise ValueError, except when it doesn't"""''', + "raise (ValueError('text')", + ] + str_candidates_fail = [ + "raise 'exception'", + "raise 'Exception'", + 'raise "exception"', + 'raise "Exception"', + "raise 'ValueError'", + ] + gen_candidates_fail = [ + "raise Exception('text') # raise Exception, 'text'", + "raise Exception('text')", + "raise Exception", + "raise Exception('text')", + "raise Exception('text') #,", + "raise Exception, 'text'", + "raise Exception, 'text' # raise Exception('text')", + "raise Exception, 'text' # raise Exception, 'text'", + ">>> raise Exception, 'text'", + ">>> raise Exception, 'text' # raise Exception('text')", + ">>> raise Exception, 'text' # raise Exception, 'text'", + ] + old_candidates_fail = [ + "raise Exception, 'text'", + "raise Exception, 'text' # raise Exception('text')", + "raise Exception, 'text' # raise Exception, 'text'", + ">>> raise Exception, 'text'", + ">>> raise Exception, 'text' # raise Exception('text')", + ">>> raise Exception, 'text' # raise Exception, 'text'", + "raise ValueError, 'text'", + "raise ValueError, 'text' # raise Exception('text')", + "raise ValueError, 'text' # raise Exception, 'text'", + ">>> raise ValueError, 'text'", + ">>> raise ValueError, 'text' # raise Exception('text')", + ">>> raise ValueError, 'text' # raise Exception, 'text'", + "raise(ValueError,", + "raise (ValueError,", + "raise( ValueError,", + "raise ( ValueError,", + "raise(ValueError ,", + "raise (ValueError ,", + "raise( ValueError ,", + "raise ( ValueError ,", + ] + + for c in candidates_ok: + assert str_raise_re.search(_with_space(c)) is None, c + assert gen_raise_re.search(_with_space(c)) is None, c + assert old_raise_re.search(_with_space(c)) is None, c + for c in str_candidates_fail: + assert str_raise_re.search(_with_space(c)) is not None, c + for c in gen_candidates_fail: + assert gen_raise_re.search(_with_space(c)) is not None, c + for c in old_candidates_fail: + assert old_raise_re.search(_with_space(c)) is not None, c + + +def test_implicit_imports_regular_expression(): + candidates_ok = [ + "from sympy import something", + ">>> from sympy import something", + "from sympy.somewhere import something", + ">>> from sympy.somewhere import something", + "import sympy", + ">>> import sympy", + "import sympy.something.something", + "... import sympy", + "... import sympy.something.something", + "... from sympy import something", + "... from sympy.somewhere import something", + ">> from sympy import *", # To allow 'fake' docstrings + "# from sympy import *", + "some text # from sympy import *", + ] + candidates_fail = [ + "from sympy import *", + ">>> from sympy import *", + "from sympy.somewhere import *", + ">>> from sympy.somewhere import *", + "... from sympy import *", + "... 
from sympy.somewhere import *", + ] + for c in candidates_ok: + assert implicit_test_re.search(_with_space(c)) is None, c + for c in candidates_fail: + assert implicit_test_re.search(_with_space(c)) is not None, c + + +def test_test_suite_defs(): + candidates_ok = [ + " def foo():\n", + "def foo(arg):\n", + "def _foo():\n", + "def test_foo():\n", + ] + candidates_fail = [ + "def foo():\n", + "def foo() :\n", + "def foo( ):\n", + "def foo():\n", + ] + for c in candidates_ok: + assert test_suite_def_re.search(c) is None, c + for c in candidates_fail: + assert test_suite_def_re.search(c) is not None, c + + +def test_test_duplicate_defs(): + candidates_ok = [ + "def foo():\ndef foo():\n", + "def test():\ndef test_():\n", + "def test_():\ndef test__():\n", + ] + candidates_fail = [ + "def test_():\ndef test_ ():\n", + "def test_1():\ndef test_1():\n", + ] + ok = (None, 'check') + def check(file): + tests = 0 + test_set = set() + for idx, line in enumerate(file.splitlines()): + if test_ok_def_re.match(line): + tests += 1 + test_set.add(line[3:].split('(')[0].strip()) + if len(test_set) != tests: + return False, message_duplicate_test % ('check', idx + 1) + return None, 'check' + for c in candidates_ok: + assert check(c) == ok + for c in candidates_fail: + assert check(c) != ok + + +def test_find_self_assignments(): + candidates_ok = [ + "class A(object):\n def foo(self, arg): arg = self\n", + "class A(object):\n def foo(self, arg): self.prop = arg\n", + "class A(object):\n def foo(self, arg): obj, obj2 = arg, self\n", + "class A(object):\n @classmethod\n def bar(cls, arg): arg = cls\n", + "class A(object):\n def foo(var, arg): arg = var\n", + ] + candidates_fail = [ + "class A(object):\n def foo(self, arg): self = arg\n", + "class A(object):\n def foo(self, arg): obj, self = arg, arg\n", + "class A(object):\n def foo(self, arg):\n if arg: self = arg", + "class A(object):\n @classmethod\n def foo(cls, arg): cls = arg\n", + "class A(object):\n def foo(var, arg): var = arg\n", + ] + + for c in candidates_ok: + assert find_self_assignments(c) == [] + for c in candidates_fail: + assert find_self_assignments(c) != [] + + +def test_test_unicode_encoding(): + unicode_whitelist = ['foo'] + unicode_strict_whitelist = ['bar'] + + fname = 'abc' + test_file = ['α'] + raises(AssertionError, lambda: _test_this_file_encoding( + fname, test_file, unicode_whitelist, unicode_strict_whitelist)) + + fname = 'abc' + test_file = ['abc'] + _test_this_file_encoding( + fname, test_file, unicode_whitelist, unicode_strict_whitelist) + + fname = 'foo' + test_file = ['abc'] + raises(AssertionError, lambda: _test_this_file_encoding( + fname, test_file, unicode_whitelist, unicode_strict_whitelist)) + + fname = 'bar' + test_file = ['abc'] + _test_this_file_encoding( + fname, test_file, unicode_whitelist, unicode_strict_whitelist) diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/test_deprecated.py b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_deprecated.py new file mode 100644 index 0000000000000000000000000000000000000000..696933d96d6232ea869da1002ec9ebee5309724d --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_deprecated.py @@ -0,0 +1,5 @@ +from sympy.testing.pytest import warns_deprecated_sympy + +def test_deprecated_testing_randtest(): + with warns_deprecated_sympy(): + import sympy.testing.randtest # noqa:F401 diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/test_module_imports.py 
b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_module_imports.py new file mode 100644 index 0000000000000000000000000000000000000000..d16dbaa98156c287c18b46ff07c0ede5d26e069a --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_module_imports.py @@ -0,0 +1,42 @@ +""" +Checks that SymPy does not contain indirect imports. + +An indirect import is importing a symbol from a module that itself imported the +symbol from elsewhere. Such a constellation makes it harder to diagnose +inter-module dependencies and import order problems, and is therefore strongly +discouraged. + +(Indirect imports from end-user code is fine and in fact a best practice.) + +Implementation note: Forcing Python into actually unloading already-imported +submodules is a tricky and partly undocumented process. To avoid these issues, +the actual diagnostic code is in bin/diagnose_imports, which is run as a +separate, pristine Python process. +""" + +import subprocess +import sys +from os.path import abspath, dirname, join, normpath +import inspect + +from sympy.testing.pytest import XFAIL + +@XFAIL +def test_module_imports_are_direct(): + my_filename = abspath(inspect.getfile(inspect.currentframe())) + my_dirname = dirname(my_filename) + diagnose_imports_filename = join(my_dirname, 'diagnose_imports.py') + diagnose_imports_filename = normpath(diagnose_imports_filename) + + process = subprocess.Popen( + [ + sys.executable, + normpath(diagnose_imports_filename), + '--problems', + '--by-importer' + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=-1) + output, _ = process.communicate() + assert output == '', "There are import problems:\n" + output.decode() diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tests/test_pytest.py b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_pytest.py new file mode 100644 index 0000000000000000000000000000000000000000..7080a4ed4602707a3efb4d9025e8e9cbe23a5ef8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tests/test_pytest.py @@ -0,0 +1,211 @@ +import warnings + +from sympy.testing.pytest import (raises, warns, ignore_warnings, + warns_deprecated_sympy, Failed) +from sympy.utilities.exceptions import sympy_deprecation_warning + + + +# Test callables + + +def test_expected_exception_is_silent_callable(): + def f(): + raise ValueError() + raises(ValueError, f) + + +# Under pytest raises will raise Failed rather than AssertionError +def test_lack_of_exception_triggers_AssertionError_callable(): + try: + raises(Exception, lambda: 1 + 1) + assert False + except Failed as e: + assert "DID NOT RAISE" in str(e) + + +def test_unexpected_exception_is_passed_through_callable(): + def f(): + raise ValueError("some error message") + try: + raises(TypeError, f) + assert False + except ValueError as e: + assert str(e) == "some error message" + +# Test with statement + +def test_expected_exception_is_silent_with(): + with raises(ValueError): + raise ValueError() + + +def test_lack_of_exception_triggers_AssertionError_with(): + try: + with raises(Exception): + 1 + 1 + assert False + except Failed as e: + assert "DID NOT RAISE" in str(e) + + +def test_unexpected_exception_is_passed_through_with(): + try: + with raises(TypeError): + raise ValueError("some error message") + assert False + except ValueError as e: + assert str(e) == "some error message" + +# Now we can use raises() instead of try/catch +# to test that a specific exception class is raised + + +def test_second_argument_should_be_callable_or_string(): + 
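+ # Passing something that is not callable (here the int 42) as the second
+ # argument is itself an error, so the outer raises(TypeError, ...) asserts
+ # that TypeError is what comes out.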
raises(TypeError, lambda: raises("irrelevant", 42)) + + +def test_warns_catches_warning(): + with warnings.catch_warnings(record=True) as w: + with warns(UserWarning): + warnings.warn('this is the warning message') + assert len(w) == 0 + + +def test_warns_raises_without_warning(): + with raises(Failed): + with warns(UserWarning): + pass + + +def test_warns_hides_other_warnings(): + with raises(RuntimeWarning): + with warns(UserWarning): + warnings.warn('this is the warning message', UserWarning) + warnings.warn('this is the other message', RuntimeWarning) + + +def test_warns_continues_after_warning(): + with warnings.catch_warnings(record=True) as w: + finished = False + with warns(UserWarning): + warnings.warn('this is the warning message') + finished = True + assert finished + assert len(w) == 0 + + +def test_warns_many_warnings(): + with warns(UserWarning): + warnings.warn('this is the warning message', UserWarning) + warnings.warn('this is the other warning message', UserWarning) + + +def test_warns_match_matching(): + with warnings.catch_warnings(record=True) as w: + with warns(UserWarning, match='this is the warning message'): + warnings.warn('this is the warning message', UserWarning) + assert len(w) == 0 + + +def test_warns_match_non_matching(): + with warnings.catch_warnings(record=True) as w: + with raises(Failed): + with warns(UserWarning, match='this is the warning message'): + warnings.warn('this is not the expected warning message', UserWarning) + assert len(w) == 0 + +def _warn_sympy_deprecation(stacklevel=3): + sympy_deprecation_warning( + "feature", + active_deprecations_target="active-deprecations", + deprecated_since_version="0.0.0", + stacklevel=stacklevel, + ) + +def test_warns_deprecated_sympy_catches_warning(): + with warnings.catch_warnings(record=True) as w: + with warns_deprecated_sympy(): + _warn_sympy_deprecation() + assert len(w) == 0 + + +def test_warns_deprecated_sympy_raises_without_warning(): + with raises(Failed): + with warns_deprecated_sympy(): + pass + +def test_warns_deprecated_sympy_wrong_stacklevel(): + with raises(Failed): + with warns_deprecated_sympy(): + _warn_sympy_deprecation(stacklevel=1) + +def test_warns_deprecated_sympy_doesnt_hide_other_warnings(): + # Unlike pytest's deprecated_call, we should not hide other warnings. + with raises(RuntimeWarning): + with warns_deprecated_sympy(): + _warn_sympy_deprecation() + warnings.warn('this is the other message', RuntimeWarning) + + +def test_warns_deprecated_sympy_continues_after_warning(): + with warnings.catch_warnings(record=True) as w: + finished = False + with warns_deprecated_sympy(): + _warn_sympy_deprecation() + finished = True + assert finished + assert len(w) == 0 + +def test_ignore_ignores_warning(): + with warnings.catch_warnings(record=True) as w: + with ignore_warnings(UserWarning): + warnings.warn('this is the warning message') + assert len(w) == 0 + + +def test_ignore_does_not_raise_without_warning(): + with warnings.catch_warnings(record=True) as w: + with ignore_warnings(UserWarning): + pass + assert len(w) == 0 + + +def test_ignore_allows_other_warnings(): + with warnings.catch_warnings(record=True) as w: + # This is needed when pytest is run as -Werror + # the setting is reverted at the end of the catch_Warnings block. 
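+ # "always" lets every warning reach the recorder instead of being escalated to an error.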
+ warnings.simplefilter("always") + with ignore_warnings(UserWarning): + warnings.warn('this is the warning message', UserWarning) + warnings.warn('this is the other message', RuntimeWarning) + assert len(w) == 1 + assert isinstance(w[0].message, RuntimeWarning) + assert str(w[0].message) == 'this is the other message' + + +def test_ignore_continues_after_warning(): + with warnings.catch_warnings(record=True) as w: + finished = False + with ignore_warnings(UserWarning): + warnings.warn('this is the warning message') + finished = True + assert finished + assert len(w) == 0 + + +def test_ignore_many_warnings(): + with warnings.catch_warnings(record=True) as w: + # This is needed when pytest is run as -Werror + # the setting is reverted at the end of the catch_Warnings block. + warnings.simplefilter("always") + with ignore_warnings(UserWarning): + warnings.warn('this is the warning message', UserWarning) + warnings.warn('this is the other message', RuntimeWarning) + warnings.warn('this is the warning message', UserWarning) + warnings.warn('this is the other message', RuntimeWarning) + warnings.warn('this is the other message', RuntimeWarning) + assert len(w) == 3 + for wi in w: + assert isinstance(wi.message, RuntimeWarning) + assert str(wi.message) == 'this is the other message' diff --git a/venv/lib/python3.10/site-packages/sympy/testing/tmpfiles.py b/venv/lib/python3.10/site-packages/sympy/testing/tmpfiles.py new file mode 100644 index 0000000000000000000000000000000000000000..1d5c69cb58aa11f77679855f3df21f03a10d3b2b --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/testing/tmpfiles.py @@ -0,0 +1,46 @@ +""" +This module adds context manager for temporary files generated by the tests. +""" + +import shutil +import os + + +class TmpFileManager: + """ + A class to track record of every temporary files created by the tests. + """ + tmp_files = set('') + tmp_folders = set('') + + @classmethod + def tmp_file(cls, name=''): + cls.tmp_files.add(name) + return name + + @classmethod + def tmp_folder(cls, name=''): + cls.tmp_folders.add(name) + return name + + @classmethod + def cleanup(cls): + while cls.tmp_files: + file = cls.tmp_files.pop() + if os.path.isfile(file): + os.remove(file) + while cls.tmp_folders: + folder = cls.tmp_folders.pop() + shutil.rmtree(folder) + +def cleanup_tmp_files(test_func): + """ + A decorator to help test codes remove temporary files after the tests. 
+ """ + def wrapper_function(): + try: + test_func() + finally: + TmpFileManager.cleanup() + + return wrapper_function diff --git a/venv/lib/python3.10/site-packages/sympy/vector/coordsysrect.py b/venv/lib/python3.10/site-packages/sympy/vector/coordsysrect.py new file mode 100644 index 0000000000000000000000000000000000000000..b852c7cb4a2747683f3f2e4e653e0977449cc477 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/vector/coordsysrect.py @@ -0,0 +1,1034 @@ +from collections.abc import Callable + +from sympy.core.basic import Basic +from sympy.core.cache import cacheit +from sympy.core import S, Dummy, Lambda +from sympy.core.symbol import Str +from sympy.core.symbol import symbols +from sympy.matrices.immutable import ImmutableDenseMatrix as Matrix +from sympy.matrices.matrices import MatrixBase +from sympy.solvers import solve +from sympy.vector.scalar import BaseScalar +from sympy.core.containers import Tuple +from sympy.core.function import diff +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.functions.elementary.trigonometric import (acos, atan2, cos, sin) +from sympy.matrices.dense import eye +from sympy.matrices.immutable import ImmutableDenseMatrix +from sympy.simplify.simplify import simplify +from sympy.simplify.trigsimp import trigsimp +import sympy.vector +from sympy.vector.orienters import (Orienter, AxisOrienter, BodyOrienter, + SpaceOrienter, QuaternionOrienter) + + +class CoordSys3D(Basic): + """ + Represents a coordinate system in 3-D space. + """ + + def __new__(cls, name, transformation=None, parent=None, location=None, + rotation_matrix=None, vector_names=None, variable_names=None): + """ + The orientation/location parameters are necessary if this system + is being defined at a certain orientation or location wrt another. + + Parameters + ========== + + name : str + The name of the new CoordSys3D instance. + + transformation : Lambda, Tuple, str + Transformation defined by transformation equations or chosen + from predefined ones. + + location : Vector + The position vector of the new system's origin wrt the parent + instance. + + rotation_matrix : SymPy ImmutableMatrix + The rotation matrix of the new coordinate system with respect + to the parent. In other words, the output of + new_system.rotation_matrix(parent). + + parent : CoordSys3D + The coordinate system wrt which the orientation/location + (or both) is being defined. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. 
+ + """ + + name = str(name) + Vector = sympy.vector.Vector + Point = sympy.vector.Point + + if not isinstance(name, str): + raise TypeError("name should be a string") + + if transformation is not None: + if (location is not None) or (rotation_matrix is not None): + raise ValueError("specify either `transformation` or " + "`location`/`rotation_matrix`") + if isinstance(transformation, (Tuple, tuple, list)): + if isinstance(transformation[0], MatrixBase): + rotation_matrix = transformation[0] + location = transformation[1] + else: + transformation = Lambda(transformation[0], + transformation[1]) + elif isinstance(transformation, Callable): + x1, x2, x3 = symbols('x1 x2 x3', cls=Dummy) + transformation = Lambda((x1, x2, x3), + transformation(x1, x2, x3)) + elif isinstance(transformation, str): + transformation = Str(transformation) + elif isinstance(transformation, (Str, Lambda)): + pass + else: + raise TypeError("transformation: " + "wrong type {}".format(type(transformation))) + + # If orientation information has been provided, store + # the rotation matrix accordingly + if rotation_matrix is None: + rotation_matrix = ImmutableDenseMatrix(eye(3)) + else: + if not isinstance(rotation_matrix, MatrixBase): + raise TypeError("rotation_matrix should be an Immutable" + + "Matrix instance") + rotation_matrix = rotation_matrix.as_immutable() + + # If location information is not given, adjust the default + # location as Vector.zero + if parent is not None: + if not isinstance(parent, CoordSys3D): + raise TypeError("parent should be a " + + "CoordSys3D/None") + if location is None: + location = Vector.zero + else: + if not isinstance(location, Vector): + raise TypeError("location should be a Vector") + # Check that location does not contain base + # scalars + for x in location.free_symbols: + if isinstance(x, BaseScalar): + raise ValueError("location should not contain" + + " BaseScalars") + origin = parent.origin.locate_new(name + '.origin', + location) + else: + location = Vector.zero + origin = Point(name + '.origin') + + if transformation is None: + transformation = Tuple(rotation_matrix, location) + + if isinstance(transformation, Tuple): + lambda_transformation = CoordSys3D._compose_rotation_and_translation( + transformation[0], + transformation[1], + parent + ) + r, l = transformation + l = l._projections + lambda_lame = CoordSys3D._get_lame_coeff('cartesian') + lambda_inverse = lambda x, y, z: r.inv()*Matrix( + [x-l[0], y-l[1], z-l[2]]) + elif isinstance(transformation, Str): + trname = transformation.name + lambda_transformation = CoordSys3D._get_transformation_lambdas(trname) + if parent is not None: + if parent.lame_coefficients() != (S.One, S.One, S.One): + raise ValueError('Parent for pre-defined coordinate ' + 'system should be Cartesian.') + lambda_lame = CoordSys3D._get_lame_coeff(trname) + lambda_inverse = CoordSys3D._set_inv_trans_equations(trname) + elif isinstance(transformation, Lambda): + if not CoordSys3D._check_orthogonality(transformation): + raise ValueError("The transformation equation does not " + "create orthogonal coordinate system") + lambda_transformation = transformation + lambda_lame = CoordSys3D._calculate_lame_coeff(lambda_transformation) + lambda_inverse = None + else: + lambda_transformation = lambda x, y, z: transformation(x, y, z) + lambda_lame = CoordSys3D._get_lame_coeff(transformation) + lambda_inverse = None + + if variable_names is None: + if isinstance(transformation, Lambda): + variable_names = ["x1", "x2", "x3"] + elif isinstance(transformation, Str): 
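+ # Pre-defined transformations ('spherical', 'cylindrical') get their conventional coordinate names.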
+ if transformation.name == 'spherical': + variable_names = ["r", "theta", "phi"] + elif transformation.name == 'cylindrical': + variable_names = ["r", "theta", "z"] + else: + variable_names = ["x", "y", "z"] + else: + variable_names = ["x", "y", "z"] + if vector_names is None: + vector_names = ["i", "j", "k"] + + # All systems that are defined as 'roots' are unequal, unless + # they have the same name. + # Systems defined at same orientation/position wrt the same + # 'parent' are equal, irrespective of the name. + # This is true even if the same orientation is provided via + # different methods like Axis/Body/Space/Quaternion. + # However, coincident systems may be seen as unequal if + # positioned/oriented wrt different parents, even though + # they may actually be 'coincident' wrt the root system. + if parent is not None: + obj = super().__new__( + cls, Str(name), transformation, parent) + else: + obj = super().__new__( + cls, Str(name), transformation) + obj._name = name + # Initialize the base vectors + + _check_strings('vector_names', vector_names) + vector_names = list(vector_names) + latex_vects = [(r'\mathbf{\hat{%s}_{%s}}' % (x, name)) for + x in vector_names] + pretty_vects = ['%s_%s' % (x, name) for x in vector_names] + + obj._vector_names = vector_names + + v1 = BaseVector(0, obj, pretty_vects[0], latex_vects[0]) + v2 = BaseVector(1, obj, pretty_vects[1], latex_vects[1]) + v3 = BaseVector(2, obj, pretty_vects[2], latex_vects[2]) + + obj._base_vectors = (v1, v2, v3) + + # Initialize the base scalars + + _check_strings('variable_names', vector_names) + variable_names = list(variable_names) + latex_scalars = [(r"\mathbf{{%s}_{%s}}" % (x, name)) for + x in variable_names] + pretty_scalars = ['%s_%s' % (x, name) for x in variable_names] + + obj._variable_names = variable_names + obj._vector_names = vector_names + + x1 = BaseScalar(0, obj, pretty_scalars[0], latex_scalars[0]) + x2 = BaseScalar(1, obj, pretty_scalars[1], latex_scalars[1]) + x3 = BaseScalar(2, obj, pretty_scalars[2], latex_scalars[2]) + + obj._base_scalars = (x1, x2, x3) + + obj._transformation = transformation + obj._transformation_lambda = lambda_transformation + obj._lame_coefficients = lambda_lame(x1, x2, x3) + obj._transformation_from_parent_lambda = lambda_inverse + + setattr(obj, variable_names[0], x1) + setattr(obj, variable_names[1], x2) + setattr(obj, variable_names[2], x3) + + setattr(obj, vector_names[0], v1) + setattr(obj, vector_names[1], v2) + setattr(obj, vector_names[2], v3) + + # Assign params + obj._parent = parent + if obj._parent is not None: + obj._root = obj._parent._root + else: + obj._root = obj + + obj._parent_rotation_matrix = rotation_matrix + obj._origin = origin + + # Return the instance + return obj + + def _sympystr(self, printer): + return self._name + + def __iter__(self): + return iter(self.base_vectors()) + + @staticmethod + def _check_orthogonality(equations): + """ + Helper method for _connect_to_cartesian. 
It checks if + set of transformation equations create orthogonal curvilinear + coordinate system + + Parameters + ========== + + equations : Lambda + Lambda of transformation equations + + """ + + x1, x2, x3 = symbols("x1, x2, x3", cls=Dummy) + equations = equations(x1, x2, x3) + v1 = Matrix([diff(equations[0], x1), + diff(equations[1], x1), diff(equations[2], x1)]) + + v2 = Matrix([diff(equations[0], x2), + diff(equations[1], x2), diff(equations[2], x2)]) + + v3 = Matrix([diff(equations[0], x3), + diff(equations[1], x3), diff(equations[2], x3)]) + + if any(simplify(i[0] + i[1] + i[2]) == 0 for i in (v1, v2, v3)): + return False + else: + if simplify(v1.dot(v2)) == 0 and simplify(v2.dot(v3)) == 0 \ + and simplify(v3.dot(v1)) == 0: + return True + else: + return False + + @staticmethod + def _set_inv_trans_equations(curv_coord_name): + """ + Store information about inverse transformation equations for + pre-defined coordinate systems. + + Parameters + ========== + + curv_coord_name : str + Name of coordinate system + + """ + if curv_coord_name == 'cartesian': + return lambda x, y, z: (x, y, z) + + if curv_coord_name == 'spherical': + return lambda x, y, z: ( + sqrt(x**2 + y**2 + z**2), + acos(z/sqrt(x**2 + y**2 + z**2)), + atan2(y, x) + ) + if curv_coord_name == 'cylindrical': + return lambda x, y, z: ( + sqrt(x**2 + y**2), + atan2(y, x), + z + ) + raise ValueError('Wrong set of parameters.' + 'Type of coordinate system is defined') + + def _calculate_inv_trans_equations(self): + """ + Helper method for set_coordinate_type. It calculates inverse + transformation equations for given transformations equations. + + """ + x1, x2, x3 = symbols("x1, x2, x3", cls=Dummy, reals=True) + x, y, z = symbols("x, y, z", cls=Dummy) + + equations = self._transformation(x1, x2, x3) + + solved = solve([equations[0] - x, + equations[1] - y, + equations[2] - z], (x1, x2, x3), dict=True)[0] + solved = solved[x1], solved[x2], solved[x3] + self._transformation_from_parent_lambda = \ + lambda x1, x2, x3: tuple(i.subs(list(zip((x, y, z), (x1, x2, x3)))) for i in solved) + + @staticmethod + def _get_lame_coeff(curv_coord_name): + """ + Store information about Lame coefficients for pre-defined + coordinate systems. + + Parameters + ========== + + curv_coord_name : str + Name of coordinate system + + """ + if isinstance(curv_coord_name, str): + if curv_coord_name == 'cartesian': + return lambda x, y, z: (S.One, S.One, S.One) + if curv_coord_name == 'spherical': + return lambda r, theta, phi: (S.One, r, r*sin(theta)) + if curv_coord_name == 'cylindrical': + return lambda r, theta, h: (S.One, r, S.One) + raise ValueError('Wrong set of parameters.' + ' Type of coordinate system is not defined') + return CoordSys3D._calculate_lame_coefficients(curv_coord_name) + + @staticmethod + def _calculate_lame_coeff(equations): + """ + It calculates Lame coefficients + for given transformations equations. + + Parameters + ========== + + equations : Lambda + Lambda of transformation equations. + + """ + return lambda x1, x2, x3: ( + sqrt(diff(equations(x1, x2, x3)[0], x1)**2 + + diff(equations(x1, x2, x3)[1], x1)**2 + + diff(equations(x1, x2, x3)[2], x1)**2), + sqrt(diff(equations(x1, x2, x3)[0], x2)**2 + + diff(equations(x1, x2, x3)[1], x2)**2 + + diff(equations(x1, x2, x3)[2], x2)**2), + sqrt(diff(equations(x1, x2, x3)[0], x3)**2 + + diff(equations(x1, x2, x3)[1], x3)**2 + + diff(equations(x1, x2, x3)[2], x3)**2) + ) + + def _inverse_rotation_matrix(self): + """ + Returns inverse rotation matrix. 
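+ For an orthogonal rotation matrix this equals the transpose; here the explicit inverse is computed and simplified.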
+ """ + return simplify(self._parent_rotation_matrix**-1) + + @staticmethod + def _get_transformation_lambdas(curv_coord_name): + """ + Store information about transformation equations for pre-defined + coordinate systems. + + Parameters + ========== + + curv_coord_name : str + Name of coordinate system + + """ + if isinstance(curv_coord_name, str): + if curv_coord_name == 'cartesian': + return lambda x, y, z: (x, y, z) + if curv_coord_name == 'spherical': + return lambda r, theta, phi: ( + r*sin(theta)*cos(phi), + r*sin(theta)*sin(phi), + r*cos(theta) + ) + if curv_coord_name == 'cylindrical': + return lambda r, theta, h: ( + r*cos(theta), + r*sin(theta), + h + ) + raise ValueError('Wrong set of parameters.' + 'Type of coordinate system is defined') + + @classmethod + def _rotation_trans_equations(cls, matrix, equations): + """ + Returns the transformation equations obtained from rotation matrix. + + Parameters + ========== + + matrix : Matrix + Rotation matrix + + equations : tuple + Transformation equations + + """ + return tuple(matrix * Matrix(equations)) + + @property + def origin(self): + return self._origin + + def base_vectors(self): + return self._base_vectors + + def base_scalars(self): + return self._base_scalars + + def lame_coefficients(self): + return self._lame_coefficients + + def transformation_to_parent(self): + return self._transformation_lambda(*self.base_scalars()) + + def transformation_from_parent(self): + if self._parent is None: + raise ValueError("no parent coordinate system, use " + "`transformation_from_parent_function()`") + return self._transformation_from_parent_lambda( + *self._parent.base_scalars()) + + def transformation_from_parent_function(self): + return self._transformation_from_parent_lambda + + def rotation_matrix(self, other): + """ + Returns the direction cosine matrix(DCM), also known as the + 'rotation matrix' of this coordinate system with respect to + another system. + + If v_a is a vector defined in system 'A' (in matrix format) + and v_b is the same vector defined in system 'B', then + v_a = A.rotation_matrix(B) * v_b. + + A SymPy Matrix is returned. + + Parameters + ========== + + other : CoordSys3D + The system which the DCM is generated to. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q1 = symbols('q1') + >>> N = CoordSys3D('N') + >>> A = N.orient_new_axis('A', q1, N.i) + >>> N.rotation_matrix(A) + Matrix([ + [1, 0, 0], + [0, cos(q1), -sin(q1)], + [0, sin(q1), cos(q1)]]) + + """ + from sympy.vector.functions import _path + if not isinstance(other, CoordSys3D): + raise TypeError(str(other) + + " is not a CoordSys3D") + # Handle special cases + if other == self: + return eye(3) + elif other == self._parent: + return self._parent_rotation_matrix + elif other._parent == self: + return other._parent_rotation_matrix.T + # Else, use tree to calculate position + rootindex, path = _path(self, other) + result = eye(3) + i = -1 + for i in range(rootindex): + result *= path[i]._parent_rotation_matrix + i += 2 + while i < len(path): + result *= path[i]._parent_rotation_matrix.T + i += 1 + return result + + @cacheit + def position_wrt(self, other): + """ + Returns the position vector of the origin of this coordinate + system with respect to another Point/CoordSys3D. + + Parameters + ========== + + other : Point/CoordSys3D + If other is a Point, the position of this system's origin + wrt it is returned. If its an instance of CoordSyRect, + the position wrt its origin is returned. 
+ + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> N = CoordSys3D('N') + >>> N1 = N.locate_new('N1', 10 * N.i) + >>> N.position_wrt(N1) + (-10)*N.i + + """ + return self.origin.position_wrt(other) + + def scalar_map(self, other): + """ + Returns a dictionary which expresses the coordinate variables + (base scalars) of this frame in terms of the variables of + otherframe. + + Parameters + ========== + + otherframe : CoordSys3D + The other system to map the variables to. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import Symbol + >>> A = CoordSys3D('A') + >>> q = Symbol('q') + >>> B = A.orient_new_axis('B', q, A.k) + >>> A.scalar_map(B) + {A.x: B.x*cos(q) - B.y*sin(q), A.y: B.x*sin(q) + B.y*cos(q), A.z: B.z} + + """ + + origin_coords = tuple(self.position_wrt(other).to_matrix(other)) + relocated_scalars = [x - origin_coords[i] + for i, x in enumerate(other.base_scalars())] + + vars_matrix = (self.rotation_matrix(other) * + Matrix(relocated_scalars)) + return {x: trigsimp(vars_matrix[i]) + for i, x in enumerate(self.base_scalars())} + + def locate_new(self, name, position, vector_names=None, + variable_names=None): + """ + Returns a CoordSys3D with its origin located at the given + position wrt this coordinate system's origin. + + Parameters + ========== + + name : str + The name of the new CoordSys3D instance. + + position : Vector + The position vector of the new system's origin wrt this + one. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> A = CoordSys3D('A') + >>> B = A.locate_new('B', 10 * A.i) + >>> B.origin.position_wrt(A.origin) + 10*A.i + + """ + if variable_names is None: + variable_names = self._variable_names + if vector_names is None: + vector_names = self._vector_names + + return CoordSys3D(name, location=position, + vector_names=vector_names, + variable_names=variable_names, + parent=self) + + def orient_new(self, name, orienters, location=None, + vector_names=None, variable_names=None): + """ + Creates a new CoordSys3D oriented in the user-specified way + with respect to this system. + + Please refer to the documentation of the orienter classes + for more information about the orientation procedure. + + Parameters + ========== + + name : str + The name of the new CoordSys3D instance. + + orienters : iterable/Orienter + An Orienter or an iterable of Orienters for orienting the + new coordinate system. + If an Orienter is provided, it is applied to get the new + system. + If an iterable is provided, the orienters will be applied + in the order in which they appear in the iterable. + + location : Vector(optional) + The location of the new coordinate system's origin wrt this + system's origin. If not specified, the origins are taken to + be coincident. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. 
+ + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q0, q1, q2, q3 = symbols('q0 q1 q2 q3') + >>> N = CoordSys3D('N') + + Using an AxisOrienter + + >>> from sympy.vector import AxisOrienter + >>> axis_orienter = AxisOrienter(q1, N.i + 2 * N.j) + >>> A = N.orient_new('A', (axis_orienter, )) + + Using a BodyOrienter + + >>> from sympy.vector import BodyOrienter + >>> body_orienter = BodyOrienter(q1, q2, q3, '123') + >>> B = N.orient_new('B', (body_orienter, )) + + Using a SpaceOrienter + + >>> from sympy.vector import SpaceOrienter + >>> space_orienter = SpaceOrienter(q1, q2, q3, '312') + >>> C = N.orient_new('C', (space_orienter, )) + + Using a QuaternionOrienter + + >>> from sympy.vector import QuaternionOrienter + >>> q_orienter = QuaternionOrienter(q0, q1, q2, q3) + >>> D = N.orient_new('D', (q_orienter, )) + """ + if variable_names is None: + variable_names = self._variable_names + if vector_names is None: + vector_names = self._vector_names + + if isinstance(orienters, Orienter): + if isinstance(orienters, AxisOrienter): + final_matrix = orienters.rotation_matrix(self) + else: + final_matrix = orienters.rotation_matrix() + # TODO: trigsimp is needed here so that the matrix becomes + # canonical (scalar_map also calls trigsimp; without this, you can + # end up with the same CoordinateSystem that compares differently + # due to a differently formatted matrix). However, this is + # probably not so good for performance. + final_matrix = trigsimp(final_matrix) + else: + final_matrix = Matrix(eye(3)) + for orienter in orienters: + if isinstance(orienter, AxisOrienter): + final_matrix *= orienter.rotation_matrix(self) + else: + final_matrix *= orienter.rotation_matrix() + + return CoordSys3D(name, rotation_matrix=final_matrix, + vector_names=vector_names, + variable_names=variable_names, + location=location, + parent=self) + + def orient_new_axis(self, name, angle, axis, location=None, + vector_names=None, variable_names=None): + """ + Axis rotation is a rotation about an arbitrary axis by + some angle. The angle is supplied as a SymPy expr scalar, and + the axis is supplied as a Vector. + + Parameters + ========== + + name : string + The name of the new coordinate system + + angle : Expr + The angle by which the new system is to be rotated + + axis : Vector + The axis around which the rotation has to be performed + + location : Vector(optional) + The location of the new coordinate system's origin wrt this + system's origin. If not specified, the origins are taken to + be coincident. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q1 = symbols('q1') + >>> N = CoordSys3D('N') + >>> B = N.orient_new_axis('B', q1, N.i + 2 * N.j) + + """ + if variable_names is None: + variable_names = self._variable_names + if vector_names is None: + vector_names = self._vector_names + + orienter = AxisOrienter(angle, axis) + return self.orient_new(name, orienter, + location=location, + vector_names=vector_names, + variable_names=variable_names) + + def orient_new_body(self, name, angle1, angle2, angle3, + rotation_order, location=None, + vector_names=None, variable_names=None): + """ + Body orientation takes this coordinate system through three + successive simple rotations. 
+ + Body fixed rotations include both Euler Angles and + Tait-Bryan Angles, see https://en.wikipedia.org/wiki/Euler_angles. + + Parameters + ========== + + name : string + The name of the new coordinate system + + angle1, angle2, angle3 : Expr + Three successive angles to rotate the coordinate system by + + rotation_order : string + String defining the order of axes for rotation + + location : Vector(optional) + The location of the new coordinate system's origin wrt this + system's origin. If not specified, the origins are taken to + be coincident. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q1, q2, q3 = symbols('q1 q2 q3') + >>> N = CoordSys3D('N') + + A 'Body' fixed rotation is described by three angles and + three body-fixed rotation axes. To orient a coordinate system D + with respect to N, each sequential rotation is always about + the orthogonal unit vectors fixed to D. For example, a '123' + rotation will specify rotations about N.i, then D.j, then + D.k. (Initially, D.i is same as N.i) + Therefore, + + >>> D = N.orient_new_body('D', q1, q2, q3, '123') + + is same as + + >>> D = N.orient_new_axis('D', q1, N.i) + >>> D = D.orient_new_axis('D', q2, D.j) + >>> D = D.orient_new_axis('D', q3, D.k) + + Acceptable rotation orders are of length 3, expressed in XYZ or + 123, and cannot have a rotation about about an axis twice in a row. + + >>> B = N.orient_new_body('B', q1, q2, q3, '123') + >>> B = N.orient_new_body('B', q1, q2, 0, 'ZXZ') + >>> B = N.orient_new_body('B', 0, 0, 0, 'XYX') + + """ + + orienter = BodyOrienter(angle1, angle2, angle3, rotation_order) + return self.orient_new(name, orienter, + location=location, + vector_names=vector_names, + variable_names=variable_names) + + def orient_new_space(self, name, angle1, angle2, angle3, + rotation_order, location=None, + vector_names=None, variable_names=None): + """ + Space rotation is similar to Body rotation, but the rotations + are applied in the opposite order. + + Parameters + ========== + + name : string + The name of the new coordinate system + + angle1, angle2, angle3 : Expr + Three successive angles to rotate the coordinate system by + + rotation_order : string + String defining the order of axes for rotation + + location : Vector(optional) + The location of the new coordinate system's origin wrt this + system's origin. If not specified, the origins are taken to + be coincident. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. + + See Also + ======== + + CoordSys3D.orient_new_body : method to orient via Euler + angles + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q1, q2, q3 = symbols('q1 q2 q3') + >>> N = CoordSys3D('N') + + To orient a coordinate system D with respect to N, each + sequential rotation is always about N's orthogonal unit vectors. + For example, a '123' rotation will specify rotations about + N.i, then N.j, then N.k. 
+ Therefore, + + >>> D = N.orient_new_space('D', q1, q2, q3, '312') + + is same as + + >>> B = N.orient_new_axis('B', q1, N.i) + >>> C = B.orient_new_axis('C', q2, N.j) + >>> D = C.orient_new_axis('D', q3, N.k) + + """ + + orienter = SpaceOrienter(angle1, angle2, angle3, rotation_order) + return self.orient_new(name, orienter, + location=location, + vector_names=vector_names, + variable_names=variable_names) + + def orient_new_quaternion(self, name, q0, q1, q2, q3, location=None, + vector_names=None, variable_names=None): + """ + Quaternion orientation orients the new CoordSys3D with + Quaternions, defined as a finite rotation about lambda, a unit + vector, by some amount theta. + + This orientation is described by four parameters: + + q0 = cos(theta/2) + + q1 = lambda_x sin(theta/2) + + q2 = lambda_y sin(theta/2) + + q3 = lambda_z sin(theta/2) + + Quaternion does not take in a rotation order. + + Parameters + ========== + + name : string + The name of the new coordinate system + + q0, q1, q2, q3 : Expr + The quaternions to rotate the coordinate system by + + location : Vector(optional) + The location of the new coordinate system's origin wrt this + system's origin. If not specified, the origins are taken to + be coincident. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q0, q1, q2, q3 = symbols('q0 q1 q2 q3') + >>> N = CoordSys3D('N') + >>> B = N.orient_new_quaternion('B', q0, q1, q2, q3) + + """ + + orienter = QuaternionOrienter(q0, q1, q2, q3) + return self.orient_new(name, orienter, + location=location, + vector_names=vector_names, + variable_names=variable_names) + + def create_new(self, name, transformation, variable_names=None, vector_names=None): + """ + Returns a CoordSys3D which is connected to self by transformation. + + Parameters + ========== + + name : str + The name of the new CoordSys3D instance. + + transformation : Lambda, Tuple, str + Transformation defined by transformation equations or chosen + from predefined ones. + + vector_names, variable_names : iterable(optional) + Iterables of 3 strings each, with custom names for base + vectors and base scalars of the new system respectively. + Used for simple str printing. 
+ + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> a = CoordSys3D('a') + >>> b = a.create_new('b', transformation='spherical') + >>> b.transformation_to_parent() + (b.r*sin(b.theta)*cos(b.phi), b.r*sin(b.phi)*sin(b.theta), b.r*cos(b.theta)) + >>> b.transformation_from_parent() + (sqrt(a.x**2 + a.y**2 + a.z**2), acos(a.z/sqrt(a.x**2 + a.y**2 + a.z**2)), atan2(a.y, a.x)) + + """ + return CoordSys3D(name, parent=self, transformation=transformation, + variable_names=variable_names, vector_names=vector_names) + + def __init__(self, name, location=None, rotation_matrix=None, + parent=None, vector_names=None, variable_names=None, + latex_vects=None, pretty_vects=None, latex_scalars=None, + pretty_scalars=None, transformation=None): + # Dummy initializer for setting docstring + pass + + __init__.__doc__ = __new__.__doc__ + + @staticmethod + def _compose_rotation_and_translation(rot, translation, parent): + r = lambda x, y, z: CoordSys3D._rotation_trans_equations(rot, (x, y, z)) + if parent is None: + return r + + dx, dy, dz = [translation.dot(i) for i in parent.base_vectors()] + t = lambda x, y, z: ( + x + dx, + y + dy, + z + dz, + ) + return lambda x, y, z: t(*r(x, y, z)) + + +def _check_strings(arg_name, arg): + errorstr = arg_name + " must be an iterable of 3 string-types" + if len(arg) != 3: + raise ValueError(errorstr) + for s in arg: + if not isinstance(s, str): + raise TypeError(errorstr) + + +# Delayed import to avoid cyclic import problems: +from sympy.vector.vector import BaseVector diff --git a/venv/lib/python3.10/site-packages/sympy/vector/orienters.py b/venv/lib/python3.10/site-packages/sympy/vector/orienters.py new file mode 100644 index 0000000000000000000000000000000000000000..0c22089e568bc817c943c1beecebde0fea46b6ae --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/vector/orienters.py @@ -0,0 +1,398 @@ +from sympy.core.basic import Basic +from sympy.core.sympify import sympify +from sympy.functions.elementary.trigonometric import (cos, sin) +from sympy.matrices.dense import (eye, rot_axis1, rot_axis2, rot_axis3) +from sympy.matrices.immutable import ImmutableDenseMatrix as Matrix +from sympy.core.cache import cacheit +from sympy.core.symbol import Str +import sympy.vector + + +class Orienter(Basic): + """ + Super-class for all orienter classes. + """ + + def rotation_matrix(self): + """ + The rotation matrix corresponding to this orienter + instance. + """ + return self._parent_orient + + +class AxisOrienter(Orienter): + """ + Class to denote an axis orienter. + """ + + def __new__(cls, angle, axis): + if not isinstance(axis, sympy.vector.Vector): + raise TypeError("axis should be a Vector") + angle = sympify(angle) + + obj = super().__new__(cls, angle, axis) + obj._angle = angle + obj._axis = axis + + return obj + + def __init__(self, angle, axis): + """ + Axis rotation is a rotation about an arbitrary axis by + some angle. The angle is supplied as a SymPy expr scalar, and + the axis is supplied as a Vector. 
+ + Parameters + ========== + + angle : Expr + The angle by which the new system is to be rotated + + axis : Vector + The axis around which the rotation has to be performed + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q1 = symbols('q1') + >>> N = CoordSys3D('N') + >>> from sympy.vector import AxisOrienter + >>> orienter = AxisOrienter(q1, N.i + 2 * N.j) + >>> B = N.orient_new('B', (orienter, )) + + """ + # Dummy initializer for docstrings + pass + + @cacheit + def rotation_matrix(self, system): + """ + The rotation matrix corresponding to this orienter + instance. + + Parameters + ========== + + system : CoordSys3D + The coordinate system wrt which the rotation matrix + is to be computed + """ + + axis = sympy.vector.express(self.axis, system).normalize() + axis = axis.to_matrix(system) + theta = self.angle + parent_orient = ((eye(3) - axis * axis.T) * cos(theta) + + Matrix([[0, -axis[2], axis[1]], + [axis[2], 0, -axis[0]], + [-axis[1], axis[0], 0]]) * sin(theta) + + axis * axis.T) + parent_orient = parent_orient.T + return parent_orient + + @property + def angle(self): + return self._angle + + @property + def axis(self): + return self._axis + + +class ThreeAngleOrienter(Orienter): + """ + Super-class for Body and Space orienters. + """ + + def __new__(cls, angle1, angle2, angle3, rot_order): + if isinstance(rot_order, Str): + rot_order = rot_order.name + + approved_orders = ('123', '231', '312', '132', '213', + '321', '121', '131', '212', '232', + '313', '323', '') + original_rot_order = rot_order + rot_order = str(rot_order).upper() + if not (len(rot_order) == 3): + raise TypeError('rot_order should be a str of length 3') + rot_order = [i.replace('X', '1') for i in rot_order] + rot_order = [i.replace('Y', '2') for i in rot_order] + rot_order = [i.replace('Z', '3') for i in rot_order] + rot_order = ''.join(rot_order) + if rot_order not in approved_orders: + raise TypeError('Invalid rot_type parameter') + a1 = int(rot_order[0]) + a2 = int(rot_order[1]) + a3 = int(rot_order[2]) + angle1 = sympify(angle1) + angle2 = sympify(angle2) + angle3 = sympify(angle3) + if cls._in_order: + parent_orient = (_rot(a1, angle1) * + _rot(a2, angle2) * + _rot(a3, angle3)) + else: + parent_orient = (_rot(a3, angle3) * + _rot(a2, angle2) * + _rot(a1, angle1)) + parent_orient = parent_orient.T + + obj = super().__new__( + cls, angle1, angle2, angle3, Str(rot_order)) + obj._angle1 = angle1 + obj._angle2 = angle2 + obj._angle3 = angle3 + obj._rot_order = original_rot_order + obj._parent_orient = parent_orient + + return obj + + @property + def angle1(self): + return self._angle1 + + @property + def angle2(self): + return self._angle2 + + @property + def angle3(self): + return self._angle3 + + @property + def rot_order(self): + return self._rot_order + + +class BodyOrienter(ThreeAngleOrienter): + """ + Class to denote a body-orienter. + """ + + _in_order = True + + def __new__(cls, angle1, angle2, angle3, rot_order): + obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3, + rot_order) + return obj + + def __init__(self, angle1, angle2, angle3, rot_order): + """ + Body orientation takes this coordinate system through three + successive simple rotations. + + Body fixed rotations include both Euler Angles and + Tait-Bryan Angles, see https://en.wikipedia.org/wiki/Euler_angles. 
+ + Parameters + ========== + + angle1, angle2, angle3 : Expr + Three successive angles to rotate the coordinate system by + + rotation_order : string + String defining the order of axes for rotation + + Examples + ======== + + >>> from sympy.vector import CoordSys3D, BodyOrienter + >>> from sympy import symbols + >>> q1, q2, q3 = symbols('q1 q2 q3') + >>> N = CoordSys3D('N') + + A 'Body' fixed rotation is described by three angles and + three body-fixed rotation axes. To orient a coordinate system D + with respect to N, each sequential rotation is always about + the orthogonal unit vectors fixed to D. For example, a '123' + rotation will specify rotations about N.i, then D.j, then + D.k. (Initially, D.i is same as N.i) + Therefore, + + >>> body_orienter = BodyOrienter(q1, q2, q3, '123') + >>> D = N.orient_new('D', (body_orienter, )) + + is same as + + >>> from sympy.vector import AxisOrienter + >>> axis_orienter1 = AxisOrienter(q1, N.i) + >>> D = N.orient_new('D', (axis_orienter1, )) + >>> axis_orienter2 = AxisOrienter(q2, D.j) + >>> D = D.orient_new('D', (axis_orienter2, )) + >>> axis_orienter3 = AxisOrienter(q3, D.k) + >>> D = D.orient_new('D', (axis_orienter3, )) + + Acceptable rotation orders are of length 3, expressed in XYZ or + 123, and cannot have a rotation about about an axis twice in a row. + + >>> body_orienter1 = BodyOrienter(q1, q2, q3, '123') + >>> body_orienter2 = BodyOrienter(q1, q2, 0, 'ZXZ') + >>> body_orienter3 = BodyOrienter(0, 0, 0, 'XYX') + + """ + # Dummy initializer for docstrings + pass + + +class SpaceOrienter(ThreeAngleOrienter): + """ + Class to denote a space-orienter. + """ + + _in_order = False + + def __new__(cls, angle1, angle2, angle3, rot_order): + obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3, + rot_order) + return obj + + def __init__(self, angle1, angle2, angle3, rot_order): + """ + Space rotation is similar to Body rotation, but the rotations + are applied in the opposite order. + + Parameters + ========== + + angle1, angle2, angle3 : Expr + Three successive angles to rotate the coordinate system by + + rotation_order : string + String defining the order of axes for rotation + + See Also + ======== + + BodyOrienter : Orienter to orient systems wrt Euler angles. + + Examples + ======== + + >>> from sympy.vector import CoordSys3D, SpaceOrienter + >>> from sympy import symbols + >>> q1, q2, q3 = symbols('q1 q2 q3') + >>> N = CoordSys3D('N') + + To orient a coordinate system D with respect to N, each + sequential rotation is always about N's orthogonal unit vectors. + For example, a '123' rotation will specify rotations about + N.i, then N.j, then N.k. + Therefore, + + >>> space_orienter = SpaceOrienter(q1, q2, q3, '312') + >>> D = N.orient_new('D', (space_orienter, )) + + is same as + + >>> from sympy.vector import AxisOrienter + >>> axis_orienter1 = AxisOrienter(q1, N.i) + >>> B = N.orient_new('B', (axis_orienter1, )) + >>> axis_orienter2 = AxisOrienter(q2, N.j) + >>> C = B.orient_new('C', (axis_orienter2, )) + >>> axis_orienter3 = AxisOrienter(q3, N.k) + >>> D = C.orient_new('C', (axis_orienter3, )) + + """ + # Dummy initializer for docstrings + pass + + +class QuaternionOrienter(Orienter): + """ + Class to denote a quaternion-orienter. 
+ """ + + def __new__(cls, q0, q1, q2, q3): + q0 = sympify(q0) + q1 = sympify(q1) + q2 = sympify(q2) + q3 = sympify(q3) + parent_orient = (Matrix([[q0 ** 2 + q1 ** 2 - q2 ** 2 - + q3 ** 2, + 2 * (q1 * q2 - q0 * q3), + 2 * (q0 * q2 + q1 * q3)], + [2 * (q1 * q2 + q0 * q3), + q0 ** 2 - q1 ** 2 + + q2 ** 2 - q3 ** 2, + 2 * (q2 * q3 - q0 * q1)], + [2 * (q1 * q3 - q0 * q2), + 2 * (q0 * q1 + q2 * q3), + q0 ** 2 - q1 ** 2 - + q2 ** 2 + q3 ** 2]])) + parent_orient = parent_orient.T + + obj = super().__new__(cls, q0, q1, q2, q3) + obj._q0 = q0 + obj._q1 = q1 + obj._q2 = q2 + obj._q3 = q3 + obj._parent_orient = parent_orient + + return obj + + def __init__(self, angle1, angle2, angle3, rot_order): + """ + Quaternion orientation orients the new CoordSys3D with + Quaternions, defined as a finite rotation about lambda, a unit + vector, by some amount theta. + + This orientation is described by four parameters: + + q0 = cos(theta/2) + + q1 = lambda_x sin(theta/2) + + q2 = lambda_y sin(theta/2) + + q3 = lambda_z sin(theta/2) + + Quaternion does not take in a rotation order. + + Parameters + ========== + + q0, q1, q2, q3 : Expr + The quaternions to rotate the coordinate system by + + Examples + ======== + + >>> from sympy.vector import CoordSys3D + >>> from sympy import symbols + >>> q0, q1, q2, q3 = symbols('q0 q1 q2 q3') + >>> N = CoordSys3D('N') + >>> from sympy.vector import QuaternionOrienter + >>> q_orienter = QuaternionOrienter(q0, q1, q2, q3) + >>> B = N.orient_new('B', (q_orienter, )) + + """ + # Dummy initializer for docstrings + pass + + @property + def q0(self): + return self._q0 + + @property + def q1(self): + return self._q1 + + @property + def q2(self): + return self._q2 + + @property + def q3(self): + return self._q3 + + +def _rot(axis, angle): + """DCM for simple axis 1, 2 or 3 rotations. """ + if axis == 1: + return Matrix(rot_axis1(angle).T) + elif axis == 2: + return Matrix(rot_axis2(angle).T) + elif axis == 3: + return Matrix(rot_axis3(angle).T) diff --git a/venv/lib/python3.10/site-packages/sympy/vector/parametricregion.py b/venv/lib/python3.10/site-packages/sympy/vector/parametricregion.py new file mode 100644 index 0000000000000000000000000000000000000000..0d55f790b52121fd982225f02505336a05536928 --- /dev/null +++ b/venv/lib/python3.10/site-packages/sympy/vector/parametricregion.py @@ -0,0 +1,189 @@ +from functools import singledispatch +from sympy.core.numbers import pi +from sympy.functions.elementary.trigonometric import tan +from sympy.simplify import trigsimp +from sympy.core import Basic, Tuple +from sympy.core.symbol import _symbol +from sympy.solvers import solve +from sympy.geometry import Point, Segment, Curve, Ellipse, Polygon +from sympy.vector import ImplicitRegion + + +class ParametricRegion(Basic): + """ + Represents a parametric region in space. 
+ + Examples + ======== + + >>> from sympy import cos, sin, pi + >>> from sympy.abc import r, theta, t, a, b, x, y + >>> from sympy.vector import ParametricRegion + + >>> ParametricRegion((t, t**2), (t, -1, 2)) + ParametricRegion((t, t**2), (t, -1, 2)) + >>> ParametricRegion((x, y), (x, 3, 4), (y, 5, 6)) + ParametricRegion((x, y), (x, 3, 4), (y, 5, 6)) + >>> ParametricRegion((r*cos(theta), r*sin(theta)), (r, -2, 2), (theta, 0, pi)) + ParametricRegion((r*cos(theta), r*sin(theta)), (r, -2, 2), (theta, 0, pi)) + >>> ParametricRegion((a*cos(t), b*sin(t)), t) + ParametricRegion((a*cos(t), b*sin(t)), t) + + >>> circle = ParametricRegion((r*cos(theta), r*sin(theta)), r, (theta, 0, pi)) + >>> circle.parameters + (r, theta) + >>> circle.definition + (r*cos(theta), r*sin(theta)) + >>> circle.limits + {theta: (0, pi)} + + Dimension of a parametric region determines whether a region is a curve, surface + or volume region. It does not represent its dimensions in space. + + >>> circle.dimensions + 1 + + Parameters + ========== + + definition : tuple to define base scalars in terms of parameters. + + bounds : Parameter or a tuple of length 3 to define parameter and corresponding lower and upper bound. + + """ + def __new__(cls, definition, *bounds): + parameters = () + limits = {} + + if not isinstance(bounds, Tuple): + bounds = Tuple(*bounds) + + for bound in bounds: + if isinstance(bound, (tuple, Tuple)): + if len(bound) != 3: + raise ValueError("Tuple should be in the form (parameter, lowerbound, upperbound)") + parameters += (bound[0],) + limits[bound[0]] = (bound[1], bound[2]) + else: + parameters += (bound,) + + if not isinstance(definition, (tuple, Tuple)): + definition = (definition,) + + obj = super().__new__(cls, Tuple(*definition), *bounds) + obj._parameters = parameters + obj._limits = limits + + return obj + + @property + def definition(self): + return self.args[0] + + @property + def limits(self): + return self._limits + + @property + def parameters(self): + return self._parameters + + @property + def dimensions(self): + return len(self.limits) + + +@singledispatch +def parametric_region_list(reg): + """ + Returns a list of ParametricRegion objects representing the geometric region. 
+ + Examples + ======== + + >>> from sympy.abc import t + >>> from sympy.vector import parametric_region_list + >>> from sympy.geometry import Point, Curve, Ellipse, Segment, Polygon + + >>> p = Point(2, 5) + >>> parametric_region_list(p) + [ParametricRegion((2, 5))] + + >>> c = Curve((t**3, 4*t), (t, -3, 4)) + >>> parametric_region_list(c) + [ParametricRegion((t**3, 4*t), (t, -3, 4))] + + >>> e = Ellipse(Point(1, 3), 2, 3) + >>> parametric_region_list(e) + [ParametricRegion((2*cos(t) + 1, 3*sin(t) + 3), (t, 0, 2*pi))] + + >>> s = Segment(Point(1, 3), Point(2, 6)) + >>> parametric_region_list(s) + [ParametricRegion((t + 1, 3*t + 3), (t, 0, 1))] + + >>> p1, p2, p3, p4 = [(0, 1), (2, -3), (5, 3), (-2, 3)] + >>> poly = Polygon(p1, p2, p3, p4) + >>> parametric_region_list(poly) + [ParametricRegion((2*t, 1 - 4*t), (t, 0, 1)), ParametricRegion((3*t + 2, 6*t - 3), (t, 0, 1)),\ + ParametricRegion((5 - 7*t, 3), (t, 0, 1)), ParametricRegion((2*t - 2, 3 - 2*t), (t, 0, 1))] + + """ + raise ValueError("SymPy cannot determine parametric representation of the region.") + + +@parametric_region_list.register(Point) +def _(obj): + return [ParametricRegion(obj.args)] + + +@parametric_region_list.register(Curve) # type: ignore +def _(obj): + definition = obj.arbitrary_point(obj.parameter).args + bounds = obj.limits + return [ParametricRegion(definition, bounds)] + + +@parametric_region_list.register(Ellipse) # type: ignore +def _(obj, parameter='t'): + definition = obj.arbitrary_point(parameter).args + t = _symbol(parameter, real=True) + bounds = (t, 0, 2*pi) + return [ParametricRegion(definition, bounds)] + + +@parametric_region_list.register(Segment) # type: ignore +def _(obj, parameter='t'): + t = _symbol(parameter, real=True) + definition = obj.arbitrary_point(t).args + + for i in range(0, 3): + lower_bound = solve(definition[i] - obj.points[0].args[i], t) + upper_bound = solve(definition[i] - obj.points[1].args[i], t) + + if len(lower_bound) == 1 and len(upper_bound) == 1: + bounds = t, lower_bound[0], upper_bound[0] + break + + definition_tuple = obj.arbitrary_point(parameter).args + return [ParametricRegion(definition_tuple, bounds)] + + +@parametric_region_list.register(Polygon) # type: ignore +def _(obj, parameter='t'): + l = [parametric_region_list(side, parameter)[0] for side in obj.sides] + return l + + +@parametric_region_list.register(ImplicitRegion) # type: ignore +def _(obj, parameters=('t', 's')): + definition = obj.rational_parametrization(parameters) + bounds = [] + + for i in range(len(obj.variables) - 1): + # Each parameter is replaced by its tangent to simplify intergation + parameter = _symbol(parameters[i], real=True) + definition = [trigsimp(elem.subs(parameter, tan(parameter/2))) for elem in definition] + bounds.append((parameter, 0, 2*pi),) + + definition = Tuple(*definition) + return [ParametricRegion(definition, *bounds)]
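A minimal usage sketch for the ParametricRegion helpers added above, assuming only the public sympy.vector exports that the module's own doctests rely on:

    from sympy import pi, cos, sin
    from sympy.abc import r, theta
    from sympy.vector import ParametricRegion, parametric_region_list
    from sympy.geometry import Point, Segment

    # Two bounded parameters make a surface region: dimensions == 2.
    half_disc = ParametricRegion((r*cos(theta), r*sin(theta)), (r, 0, 2), (theta, 0, pi))
    assert half_disc.dimensions == 2
    assert half_disc.limits == {r: (0, 2), theta: (0, pi)}

    # Geometry entities are converted through the singledispatch helper.
    print(parametric_region_list(Segment(Point(1, 3), Point(2, 6))))
    # [ParametricRegion((t + 1, 3*t + 3), (t, 0, 1))]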