diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2 b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2 new file mode 100644 index 0000000000000000000000000000000000000000..80818a8baa1e2481b62bed06bb2b95f4a614cc3a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f2af67ea667c1f5315ddcab06bfa447005863c1c0fd88bb7e04a0b8acb9a54b +size 1021 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz new file mode 100644 index 0000000000000000000000000000000000000000..1cd5660dd8d32121e8dd7b40534187add5981639 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04d7e68907e978b56975f9309492b8849e42a60974beb795c9e93273977f3cd3 +size 752 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz new file mode 100644 index 0000000000000000000000000000000000000000..cec2871b09ae347e07c81eb55e7979300748ccd1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02cf30d8b196c303662b2dd035d2a58caeb762ae3a82345ffd1274961e7f5aa0 +size 752 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl new file mode 100644 index 0000000000000000000000000000000000000000..e739b6d035cdf110063dbb8b2cdceb116e187019 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3d4cbc690d3ce9e5323a714ea546f32c01ab1710285c420184f6cdf4b26fc25 +size 691 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy new file mode 100644 index 0000000000000000000000000000000000000000..f00f08fbeeda280fa3ce00069c313c5412a33eca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c1cf36cb781fbcc21b953bb0a0b45df092da0eae0e765882e5963ccd70105b1 +size 120 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy new file mode 100644 index 0000000000000000000000000000000000000000..e9b5e77c73268dfff541b576126f06fc6fed3d59 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ecbe244294ba93e08479b16c1b9a9411e3569ff660ed0459dca1d241381df05 +size 104 diff --git 
a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy new file mode 100644 index 0000000000000000000000000000000000000000..73976395be90d4b2b2d955c79a90721e16cebc82 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ede9a64a52b25d7db30950956c978ec0b3932b7d14acd5abc63216e64babde7 +size 307 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy new file mode 100644 index 0000000000000000000000000000000000000000..f00f08fbeeda280fa3ce00069c313c5412a33eca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c1cf36cb781fbcc21b953bb0a0b45df092da0eae0e765882e5963ccd70105b1 +size 120 diff --git a/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy new file mode 100644 index 0000000000000000000000000000000000000000..e9b5e77c73268dfff541b576126f06fc6fed3d59 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ecbe244294ba93e08479b16c1b9a9411e3569ff660ed0459dca1d241381df05 +size 104 diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4aa55672d1047de7adbec8557983c00cd2b9ce1f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_accumulationbounds.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_accumulationbounds.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ba3af65fb185c82b30c871588fdc58177bc0641 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_accumulationbounds.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_euler.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_euler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e4f5648f865b9b2f3d6fdc2bd865c902ee920bab Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_euler.cpython-310.pyc differ diff --git 
a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_finite_diff.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_finite_diff.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75b063b8833ef79b3a7fb09040a6d984c1a78a8b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_finite_diff.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_singularities.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_singularities.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7334f3e821e220ef73289f944cf06a5a8997013e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_singularities.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_util.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1aa6dc8f837ea77dea60ec8c7cbadad27d71bcbd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/__pycache__/test_util.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_euler.py b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_euler.py new file mode 100644 index 0000000000000000000000000000000000000000..56371c8c787d9459d1390e18c306fddde94d2745 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_euler.py @@ -0,0 +1,74 @@ +from sympy.core.function import (Derivative as D, Function) +from sympy.core.relational import Eq +from sympy.core.symbol import (Symbol, symbols) +from sympy.functions.elementary.trigonometric import (cos, sin) +from sympy.testing.pytest import raises +from sympy.calculus.euler import euler_equations as euler + + +def test_euler_interface(): + x = Function('x') + y = Symbol('y') + t = Symbol('t') + raises(TypeError, lambda: euler()) + raises(TypeError, lambda: euler(D(x(t), t)*y(t), [x(t), y])) + raises(ValueError, lambda: euler(D(x(t), t)*x(y), [x(t), x(y)])) + raises(TypeError, lambda: euler(D(x(t), t)**2, x(0))) + raises(TypeError, lambda: euler(D(x(t), t)*y(t), [t])) + assert euler(D(x(t), t)**2/2, {x(t)}) == [Eq(-D(x(t), t, t), 0)] + assert euler(D(x(t), t)**2/2, x(t), {t}) == [Eq(-D(x(t), t, t), 0)] + + +def test_euler_pendulum(): + x = Function('x') + t = Symbol('t') + L = D(x(t), t)**2/2 + cos(x(t)) + assert euler(L, x(t), t) == [Eq(-sin(x(t)) - D(x(t), t, t), 0)] + + +def test_euler_henonheiles(): + x = Function('x') + y = Function('y') + t = Symbol('t') + L = sum(D(z(t), t)**2/2 - z(t)**2/2 for z in [x, y]) + L += -x(t)**2*y(t) + y(t)**3/3 + assert euler(L, [x(t), y(t)], t) == [Eq(-2*x(t)*y(t) - x(t) - + D(x(t), t, t), 0), + Eq(-x(t)**2 + y(t)**2 - + y(t) - D(y(t), t, t), 0)] + + +def test_euler_sineg(): + psi = Function('psi') + t = Symbol('t') + x = Symbol('x') + L = D(psi(t, x), t)**2/2 - D(psi(t, x), x)**2/2 + cos(psi(t, x)) + assert euler(L, psi(t, x), [t, x]) == [Eq(-sin(psi(t, x)) - + D(psi(t, x), t, t) + + D(psi(t, x), x, x), 0)] + + +def test_euler_high_order(): + # an example from hep-th/0309038 + m = Symbol('m') + k = Symbol('k') + x = Function('x') + y = Function('y') + t = Symbol('t') + L = 
(m*D(x(t), t)**2/2 + m*D(y(t), t)**2/2 - + k*D(x(t), t)*D(y(t), t, t) + k*D(y(t), t)*D(x(t), t, t)) + assert euler(L, [x(t), y(t)]) == [Eq(2*k*D(y(t), t, t, t) - + m*D(x(t), t, t), 0), + Eq(-2*k*D(x(t), t, t, t) - + m*D(y(t), t, t), 0)] + + w = Symbol('w') + L = D(x(t, w), t, w)**2/2 + assert euler(L) == [Eq(D(x(t, w), t, t, w, w), 0)] + +def test_issue_18653(): + x, y, z = symbols("x y z") + f, g, h = symbols("f g h", cls=Function, args=(x, y)) + f, g, h = f(), g(), h() + expr2 = f.diff(x)*h.diff(z) + assert euler(expr2, (f,), (x, y)) == [] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_singularities.py b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_singularities.py new file mode 100644 index 0000000000000000000000000000000000000000..26e1cb875bf6363a7337b634f14ffe5411a55983 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/calculus/tests/test_singularities.py @@ -0,0 +1,103 @@ +from sympy.core.numbers import (I, Rational, oo) +from sympy.core.singleton import S +from sympy.core.symbol import Symbol +from sympy.functions.elementary.exponential import (exp, log) +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.calculus.singularities import ( + singularities, + is_increasing, + is_strictly_increasing, + is_decreasing, + is_strictly_decreasing, + is_monotonic +) +from sympy.sets import Interval, FiniteSet +from sympy.testing.pytest import raises +from sympy.abc import x, y + + +def test_singularities(): + x = Symbol('x') + assert singularities(x**2, x) == S.EmptySet + assert singularities(x/(x**2 + 3*x + 2), x) == FiniteSet(-2, -1) + assert singularities(1/(x**2 + 1), x) == FiniteSet(I, -I) + assert singularities(x/(x**3 + 1), x) == \ + FiniteSet(-1, (1 - sqrt(3) * I) / 2, (1 + sqrt(3) * I) / 2) + assert singularities(1/(y**2 + 2*I*y + 1), y) == \ + FiniteSet(-I + sqrt(2)*I, -I - sqrt(2)*I) + + x = Symbol('x', real=True) + assert singularities(1/(x**2 + 1), x) == S.EmptySet + assert singularities(exp(1/x), x, S.Reals) == FiniteSet(0) + assert singularities(exp(1/x), x, Interval(1, 2)) == S.EmptySet + assert singularities(log((x - 2)**2), x, Interval(1, 3)) == FiniteSet(2) + raises(NotImplementedError, lambda: singularities(x**-oo, x)) + + +def test_is_increasing(): + """Test whether is_increasing returns correct value.""" + a = Symbol('a', negative=True) + + assert is_increasing(x**3 - 3*x**2 + 4*x, S.Reals) + assert is_increasing(-x**2, Interval(-oo, 0)) + assert not is_increasing(-x**2, Interval(0, oo)) + assert not is_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval(-2, 3)) + assert is_increasing(x**2 + y, Interval(1, oo), x) + assert is_increasing(-x**2*a, Interval(1, oo), x) + assert is_increasing(1) + + assert is_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval(-2, 3)) is False + + +def test_is_strictly_increasing(): + """Test whether is_strictly_increasing returns correct value.""" + assert is_strictly_increasing( + 4*x**3 - 6*x**2 - 72*x + 30, Interval.Ropen(-oo, -2)) + assert is_strictly_increasing( + 4*x**3 - 6*x**2 - 72*x + 30, Interval.Lopen(3, oo)) + assert not is_strictly_increasing( + 4*x**3 - 6*x**2 - 72*x + 30, Interval.open(-2, 3)) + assert not is_strictly_increasing(-x**2, Interval(0, oo)) + assert not is_strictly_decreasing(1) + + assert is_strictly_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval.open(-2, 3)) is False + + +def test_is_decreasing(): + """Test whether is_decreasing returns correct value.""" + b = Symbol('b', positive=True) + + assert is_decreasing(1/(x**2 - 3*x), 
Interval.open(Rational(3,2), 3)) + assert is_decreasing(1/(x**2 - 3*x), Interval.open(1.5, 3)) + assert is_decreasing(1/(x**2 - 3*x), Interval.Lopen(3, oo)) + assert not is_decreasing(1/(x**2 - 3*x), Interval.Ropen(-oo, Rational(3, 2))) + assert not is_decreasing(-x**2, Interval(-oo, 0)) + assert not is_decreasing(-x**2*b, Interval(-oo, 0), x) + + +def test_is_strictly_decreasing(): + """Test whether is_strictly_decreasing returns correct value.""" + assert is_strictly_decreasing(1/(x**2 - 3*x), Interval.Lopen(3, oo)) + assert not is_strictly_decreasing( + 1/(x**2 - 3*x), Interval.Ropen(-oo, Rational(3, 2))) + assert not is_strictly_decreasing(-x**2, Interval(-oo, 0)) + assert not is_strictly_decreasing(1) + assert is_strictly_decreasing(1/(x**2 - 3*x), Interval.open(Rational(3,2), 3)) + assert is_strictly_decreasing(1/(x**2 - 3*x), Interval.open(1.5, 3)) + + +def test_is_monotonic(): + """Test whether is_monotonic returns correct value.""" + assert is_monotonic(1/(x**2 - 3*x), Interval.open(Rational(3,2), 3)) + assert is_monotonic(1/(x**2 - 3*x), Interval.open(1.5, 3)) + assert is_monotonic(1/(x**2 - 3*x), Interval.Lopen(3, oo)) + assert is_monotonic(x**3 - 3*x**2 + 4*x, S.Reals) + assert not is_monotonic(-x**2, S.Reals) + assert is_monotonic(x**2 + y + 1, Interval(1, 2), x) + raises(NotImplementedError, lambda: is_monotonic(x**2 + y + 1)) + + +def test_issue_23401(): + x = Symbol('x') + expr = (x + 1)/(-1.0e-3*x**2 + 0.1*x + 0.1) + assert is_increasing(expr, Interval(1,2), x) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..220c27a42975555f8fe7770fcef0dd18475c89e1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__init__.py @@ -0,0 +1,43 @@ +from sympy.combinatorics.permutations import Permutation, Cycle +from sympy.combinatorics.prufer import Prufer +from sympy.combinatorics.generators import cyclic, alternating, symmetric, dihedral +from sympy.combinatorics.subsets import Subset +from sympy.combinatorics.partitions import (Partition, IntegerPartition, + RGS_rank, RGS_unrank, RGS_enum) +from sympy.combinatorics.polyhedron import (Polyhedron, tetrahedron, cube, + octahedron, dodecahedron, icosahedron) +from sympy.combinatorics.perm_groups import PermutationGroup, Coset, SymmetricPermutationGroup +from sympy.combinatorics.group_constructs import DirectProduct +from sympy.combinatorics.graycode import GrayCode +from sympy.combinatorics.named_groups import (SymmetricGroup, DihedralGroup, + CyclicGroup, AlternatingGroup, AbelianGroup, RubikGroup) +from sympy.combinatorics.pc_groups import PolycyclicGroup, Collector +from sympy.combinatorics.free_groups import free_group + +__all__ = [ + 'Permutation', 'Cycle', + + 'Prufer', + + 'cyclic', 'alternating', 'symmetric', 'dihedral', + + 'Subset', + + 'Partition', 'IntegerPartition', 'RGS_rank', 'RGS_unrank', 'RGS_enum', + + 'Polyhedron', 'tetrahedron', 'cube', 'octahedron', 'dodecahedron', + 'icosahedron', + + 'PermutationGroup', 'Coset', 'SymmetricPermutationGroup', + + 'DirectProduct', + + 'GrayCode', + + 'SymmetricGroup', 'DihedralGroup', 'CyclicGroup', 'AlternatingGroup', + 'AbelianGroup', 'RubikGroup', + + 'PolycyclicGroup', 'Collector', + + 'free_group', +] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/__init__.cpython-310.pyc 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51e8cbadde385fd3dbf8dad2fb1e4ec41f66d660 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/coset_table.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/coset_table.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91cbd07b5fe85a0d152d2f9fea713b4ef8d99149 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/coset_table.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/fp_groups.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/fp_groups.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a075c261998bfd1586bf2c5fb370a10295129b8 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/fp_groups.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/free_groups.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/free_groups.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d5c67c467bddb7b2160dd0ffc2d457c927ecf5c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/free_groups.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/galois.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/galois.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9d4092b06b8f489d12528e19f351d692459a5bd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/galois.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/generators.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/generators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cdb4ad6f065fd5f86fa7cb214313a1c4d174b7dc Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/generators.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/graycode.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/graycode.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b120381e57a308d8fe5d9ec51c50af8ff053ffe Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/graycode.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_constructs.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_constructs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..003a11b30a297652e5ff2ba4ac4da8c96bf4ce8c Binary files /dev/null and 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_constructs.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_numbers.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_numbers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1b9b0b4a80acebfce8f3085b3cee2180410803b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/group_numbers.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/homomorphisms.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/homomorphisms.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e167345e8f222e0cc600ab0258d622fcd52e39e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/homomorphisms.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/named_groups.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/named_groups.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d6a3218f27dc8db60f5a2c457f6d39a7bf1745ef Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/named_groups.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/partitions.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/partitions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8ed3f723c286eac8cfec65c5a405b7b814235e0 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/partitions.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/pc_groups.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/pc_groups.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18c24baeb940cefd7d5a5d7578fa20d6c3899fce Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/pc_groups.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/perm_groups.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/perm_groups.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a4a3bd7a87f3f824ce287777ad93efe28cb9f3b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/perm_groups.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/permutations.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/permutations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..299e4408fa66fb71bfb74115a2422efe4cd0c9b1 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/permutations.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/polyhedron.cpython-310.pyc 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/polyhedron.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98a4300afe19a2124a86b3272c42c4d9f70a0281 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/polyhedron.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/prufer.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/prufer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eabf73d6dbaccc49de16550f3c30a2efa065863f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/prufer.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da852eabfbfc8bc42b257a46ef79390e30b0ca61 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem_fsm.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem_fsm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45829c4a80814e1661874e3f712c8938f7059008 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/rewritingsystem_fsm.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/schur_number.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/schur_number.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d40d6c3688e3fe9632a81f6949cc047ae8d4e8b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/schur_number.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/subsets.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/subsets.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91735c51978920273787af38fc8997fa159c26b5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/subsets.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/tensor_can.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/tensor_can.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9fa1787f71705400a57d92ba646aa59eea3aecd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/tensor_can.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/testutil.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/testutil.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68dbbc643de391fd9f3a6e9514b2f0930ca520ab Binary files /dev/null and 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/testutil.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/util.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74a0c6fe44228b84c143de002ce9f49e0d865813 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/__pycache__/util.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/coset_table.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/coset_table.py new file mode 100644 index 0000000000000000000000000000000000000000..06cc427c87c6860892f34735112f2f833d3f7f6d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/coset_table.py @@ -0,0 +1,1255 @@ +from sympy.combinatorics.free_groups import free_group +from sympy.printing.defaults import DefaultPrinting + +from itertools import chain, product +from bisect import bisect_left + + +############################################################################### +# COSET TABLE # +############################################################################### + +class CosetTable(DefaultPrinting): + # coset_table: Mathematically a coset table + # represented using a list of lists + # alpha: Mathematically a coset (precisely, a live coset) + # represented by an integer between i with 1 <= i <= n + # alpha in c + # x: Mathematically an element of "A" (set of generators and + # their inverses), represented using "FpGroupElement" + # fp_grp: Finitely Presented Group with < X|R > as presentation. + # H: subgroup of fp_grp. + # NOTE: We start with H as being only a list of words in generators + # of "fp_grp". Since `.subgroup` method has not been implemented. + + r""" + + Properties + ========== + + [1] `0 \in \Omega` and `\tau(1) = \epsilon` + [2] `\alpha^x = \beta \Leftrightarrow \beta^{x^{-1}} = \alpha` + [3] If `\alpha^x = \beta`, then `H \tau(\alpha)x = H \tau(\beta)` + [4] `\forall \alpha \in \Omega, 1^{\tau(\alpha)} = \alpha` + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of Computational Group Theory" + + .. [2] John J. Cannon; Lucien A. Dimino; George Havas; Jane M. Watson + Mathematics of Computation, Vol. 27, No. 123. (Jul., 1973), pp. 463-490. + "Implementation and Analysis of the Todd-Coxeter Algorithm" + + """ + # default limit for the number of cosets allowed in a + # coset enumeration. + coset_table_max_limit = 4096000 + # limit for the current instance + coset_table_limit = None + # maximum size of deduction stack above or equal to + # which it is emptied + max_stack_size = 100 + + def __init__(self, fp_grp, subgroup, max_cosets=None): + if not max_cosets: + max_cosets = CosetTable.coset_table_max_limit + self.fp_group = fp_grp + self.subgroup = subgroup + self.coset_table_limit = max_cosets + # "p" is setup independent of Omega and n + self.p = [0] + # a list of the form `[gen_1, gen_1^{-1}, ... , gen_k, gen_k^{-1}]` + self.A = list(chain.from_iterable((gen, gen**-1) \ + for gen in self.fp_group.generators)) + #P[alpha, x] Only defined when alpha^x is defined. 
+ self.P = [[None]*len(self.A)] + # the mathematical coset table which is a list of lists + self.table = [[None]*len(self.A)] + self.A_dict = {x: self.A.index(x) for x in self.A} + self.A_dict_inv = {} + for x, index in self.A_dict.items(): + if index % 2 == 0: + self.A_dict_inv[x] = self.A_dict[x] + 1 + else: + self.A_dict_inv[x] = self.A_dict[x] - 1 + # used in the coset-table based method of coset enumeration. Each of + # the element is called a "deduction" which is the form (alpha, x) whenever + # a value is assigned to alpha^x during a definition or "deduction process" + self.deduction_stack = [] + # Attributes for modified methods. + H = self.subgroup + self._grp = free_group(', ' .join(["a_%d" % i for i in range(len(H))]))[0] + self.P = [[None]*len(self.A)] + self.p_p = {} + + @property + def omega(self): + """Set of live cosets. """ + return [coset for coset in range(len(self.p)) if self.p[coset] == coset] + + def copy(self): + """ + Return a shallow copy of Coset Table instance ``self``. + + """ + self_copy = self.__class__(self.fp_group, self.subgroup) + self_copy.table = [list(perm_rep) for perm_rep in self.table] + self_copy.p = list(self.p) + self_copy.deduction_stack = list(self.deduction_stack) + return self_copy + + def __str__(self): + return "Coset Table on %s with %s as subgroup generators" \ + % (self.fp_group, self.subgroup) + + __repr__ = __str__ + + @property + def n(self): + """The number `n` represents the length of the sublist containing the + live cosets. + + """ + if not self.table: + return 0 + return max(self.omega) + 1 + + # Pg. 152 [1] + def is_complete(self): + r""" + The coset table is called complete if it has no undefined entries + on the live cosets; that is, `\alpha^x` is defined for all + `\alpha \in \Omega` and `x \in A`. + + """ + return not any(None in self.table[coset] for coset in self.omega) + + # Pg. 153 [1] + def define(self, alpha, x, modified=False): + r""" + This routine is used in the relator-based strategy of Todd-Coxeter + algorithm if some `\alpha^x` is undefined. We check whether there is + space available for defining a new coset. If there is enough space + then we remedy this by adjoining a new coset `\beta` to `\Omega` + (i.e to set of live cosets) and put that equal to `\alpha^x`, then + make an assignment satisfying Property[1]. If there is not enough space + then we halt the Coset Table creation. The maximum amount of space that + can be used by Coset Table can be manipulated using the class variable + ``CosetTable.coset_table_max_limit``. + + See Also + ======== + + define_c + + """ + A = self.A + table = self.table + len_table = len(table) + if len_table >= self.coset_table_limit: + # abort the further generation of cosets + raise ValueError("the coset enumeration has defined more than " + "%s cosets. Try with a greater value max number of cosets " + % self.coset_table_limit) + table.append([None]*len(A)) + self.P.append([None]*len(self.A)) + # beta is the new coset generated + beta = len_table + self.p.append(beta) + table[alpha][self.A_dict[x]] = beta + table[beta][self.A_dict_inv[x]] = alpha + # P[alpha][x] = epsilon, P[beta][x**-1] = epsilon + if modified: + self.P[alpha][self.A_dict[x]] = self._grp.identity + self.P[beta][self.A_dict_inv[x]] = self._grp.identity + self.p_p[beta] = self._grp.identity + + def define_c(self, alpha, x): + r""" + A variation of ``define`` routine, described on Pg. 165 [1], used in + the coset table-based strategy of Todd-Coxeter algorithm. 
It differs + from ``define`` routine in that for each definition it also adds the + tuple `(\alpha, x)` to the deduction stack. + + See Also + ======== + + define + + """ + A = self.A + table = self.table + len_table = len(table) + if len_table >= self.coset_table_limit: + # abort the further generation of cosets + raise ValueError("the coset enumeration has defined more than " + "%s cosets. Try with a greater value max number of cosets " + % self.coset_table_limit) + table.append([None]*len(A)) + # beta is the new coset generated + beta = len_table + self.p.append(beta) + table[alpha][self.A_dict[x]] = beta + table[beta][self.A_dict_inv[x]] = alpha + # append to deduction stack + self.deduction_stack.append((alpha, x)) + + def scan_c(self, alpha, word): + """ + A variation of ``scan`` routine, described on pg. 165 of [1], which + puts at tuple, whenever a deduction occurs, to deduction stack. + + See Also + ======== + + scan, scan_check, scan_and_fill, scan_and_fill_c + + """ + # alpha is an integer representing a "coset" + # since scanning can be in two cases + # 1. for alpha=0 and w in Y (i.e generating set of H) + # 2. alpha in Omega (set of live cosets), w in R (relators) + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + f = alpha + i = 0 + r = len(word) + b = alpha + j = r - 1 + # list of union of generators and their inverses + while i <= j and table[f][A_dict[word[i]]] is not None: + f = table[f][A_dict[word[i]]] + i += 1 + if i > j: + if f != b: + self.coincidence_c(f, b) + return + while j >= i and table[b][A_dict_inv[word[j]]] is not None: + b = table[b][A_dict_inv[word[j]]] + j -= 1 + if j < i: + # we have an incorrect completed scan with coincidence f ~ b + # run the "coincidence" routine + self.coincidence_c(f, b) + elif j == i: + # deduction process + table[f][A_dict[word[i]]] = b + table[b][A_dict_inv[word[i]]] = f + self.deduction_stack.append((f, word[i])) + # otherwise scan is incomplete and yields no information + + # alpha, beta coincide, i.e. alpha, beta represent the pair of cosets where + # coincidence occurs + def coincidence_c(self, alpha, beta): + """ + A variation of ``coincidence`` routine used in the coset-table based + method of coset enumeration. The only difference being on addition of + a new coset in coset table(i.e new coset introduction), then it is + appended to ``deduction_stack``. + + See Also + ======== + + coincidence + + """ + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + # behaves as a queue + q = [] + self.merge(alpha, beta, q) + while len(q) > 0: + gamma = q.pop(0) + for x in A_dict: + delta = table[gamma][A_dict[x]] + if delta is not None: + table[delta][A_dict_inv[x]] = None + # only line of difference from ``coincidence`` routine + self.deduction_stack.append((delta, x**-1)) + mu = self.rep(gamma) + nu = self.rep(delta) + if table[mu][A_dict[x]] is not None: + self.merge(nu, table[mu][A_dict[x]], q) + elif table[nu][A_dict_inv[x]] is not None: + self.merge(mu, table[nu][A_dict_inv[x]], q) + else: + table[mu][A_dict[x]] = nu + table[nu][A_dict_inv[x]] = mu + + def scan(self, alpha, word, y=None, fill=False, modified=False): + r""" + ``scan`` performs a scanning process on the input ``word``. + It first locates the largest prefix ``s`` of ``word`` for which + `\alpha^s` is defined (i.e is not ``None``), ``s`` may be empty. Let + ``word=sv``, let ``t`` be the longest suffix of ``v`` for which + `\alpha^{t^{-1}}` is defined, and let ``v=ut``. Then three + possibilities are there: + + 1. 
If ``t=v``, then we say that the scan completes, and if, in addition + `\alpha^s = \alpha^{t^{-1}}`, then we say that the scan completes + correctly. + + 2. It can also happen that scan does not complete, but `|u|=1`; that + is, the word ``u`` consists of a single generator `x \in A`. In that + case, if `\alpha^s = \beta` and `\alpha^{t^{-1}} = \gamma`, then we can + set `\beta^x = \gamma` and `\gamma^{x^{-1}} = \beta`. These assignments + are known as deductions and enable the scan to complete correctly. + + 3. See ``coicidence`` routine for explanation of third condition. + + Notes + ===== + + The code for the procedure of scanning `\alpha \in \Omega` + under `w \in A*` is defined on pg. 155 [1] + + See Also + ======== + + scan_c, scan_check, scan_and_fill, scan_and_fill_c + + Scan and Fill + ============= + + Performed when the default argument fill=True. + + Modified Scan + ============= + + Performed when the default argument modified=True + + """ + # alpha is an integer representing a "coset" + # since scanning can be in two cases + # 1. for alpha=0 and w in Y (i.e generating set of H) + # 2. alpha in Omega (set of live cosets), w in R (relators) + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + f = alpha + i = 0 + r = len(word) + b = alpha + j = r - 1 + b_p = y + if modified: + f_p = self._grp.identity + flag = 0 + while fill or flag == 0: + flag = 1 + while i <= j and table[f][A_dict[word[i]]] is not None: + if modified: + f_p = f_p*self.P[f][A_dict[word[i]]] + f = table[f][A_dict[word[i]]] + i += 1 + if i > j: + if f != b: + if modified: + self.modified_coincidence(f, b, f_p**-1*y) + else: + self.coincidence(f, b) + return + while j >= i and table[b][A_dict_inv[word[j]]] is not None: + if modified: + b_p = b_p*self.P[b][self.A_dict_inv[word[j]]] + b = table[b][A_dict_inv[word[j]]] + j -= 1 + if j < i: + # we have an incorrect completed scan with coincidence f ~ b + # run the "coincidence" routine + if modified: + self.modified_coincidence(f, b, f_p**-1*b_p) + else: + self.coincidence(f, b) + elif j == i: + # deduction process + table[f][A_dict[word[i]]] = b + table[b][A_dict_inv[word[i]]] = f + if modified: + self.P[f][self.A_dict[word[i]]] = f_p**-1*b_p + self.P[b][self.A_dict_inv[word[i]]] = b_p**-1*f_p + return + elif fill: + self.define(f, word[i], modified=modified) + # otherwise scan is incomplete and yields no information + + # used in the low-index subgroups algorithm + def scan_check(self, alpha, word): + r""" + Another version of ``scan`` routine, described on, it checks whether + `\alpha` scans correctly under `word`, it is a straightforward + modification of ``scan``. ``scan_check`` returns ``False`` (rather than + calling ``coincidence``) if the scan completes incorrectly; otherwise + it returns ``True``. + + See Also + ======== + + scan, scan_c, scan_and_fill, scan_and_fill_c + + """ + # alpha is an integer representing a "coset" + # since scanning can be in two cases + # 1. for alpha=0 and w in Y (i.e generating set of H) + # 2. 
alpha in Omega (set of live cosets), w in R (relators) + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + f = alpha + i = 0 + r = len(word) + b = alpha + j = r - 1 + while i <= j and table[f][A_dict[word[i]]] is not None: + f = table[f][A_dict[word[i]]] + i += 1 + if i > j: + return f == b + while j >= i and table[b][A_dict_inv[word[j]]] is not None: + b = table[b][A_dict_inv[word[j]]] + j -= 1 + if j < i: + # we have an incorrect completed scan with coincidence f ~ b + # return False, instead of calling coincidence routine + return False + elif j == i: + # deduction process + table[f][A_dict[word[i]]] = b + table[b][A_dict_inv[word[i]]] = f + return True + + def merge(self, k, lamda, q, w=None, modified=False): + """ + Merge two classes with representatives ``k`` and ``lamda``, described + on Pg. 157 [1] (for pseudocode), start by putting ``p[k] = lamda``. + It is more efficient to choose the new representative from the larger + of the two classes being merged, i.e larger among ``k`` and ``lamda``. + procedure ``merge`` performs the merging operation, adds the deleted + class representative to the queue ``q``. + + Parameters + ========== + + 'k', 'lamda' being the two class representatives to be merged. + + Notes + ===== + + Pg. 86-87 [1] contains a description of this method. + + See Also + ======== + + coincidence, rep + + """ + p = self.p + rep = self.rep + phi = rep(k, modified=modified) + psi = rep(lamda, modified=modified) + if phi != psi: + mu = min(phi, psi) + v = max(phi, psi) + p[v] = mu + if modified: + if v == phi: + self.p_p[phi] = self.p_p[k]**-1*w*self.p_p[lamda] + else: + self.p_p[psi] = self.p_p[lamda]**-1*w**-1*self.p_p[k] + q.append(v) + + def rep(self, k, modified=False): + r""" + Parameters + ========== + + `k \in [0 \ldots n-1]`, as for ``self`` only array ``p`` is used + + Returns + ======= + + Representative of the class containing ``k``. + + Returns the representative of `\sim` class containing ``k``, it also + makes some modification to array ``p`` of ``self`` to ease further + computations, described on Pg. 157 [1]. + + The information on classes under `\sim` is stored in array `p` of + ``self`` argument, which will always satisfy the property: + + `p[\alpha] \sim \alpha` and `p[\alpha]=\alpha \iff \alpha=rep(\alpha)` + `\forall \in [0 \ldots n-1]`. + + So, for `\alpha \in [0 \ldots n-1]`, we find `rep(self, \alpha)` by + continually replacing `\alpha` by `p[\alpha]` until it becomes + constant (i.e satisfies `p[\alpha] = \alpha`):w + + To increase the efficiency of later ``rep`` calculations, whenever we + find `rep(self, \alpha)=\beta`, we set + `p[\gamma] = \beta \forall \gamma \in p-chain` from `\alpha` to `\beta` + + Notes + ===== + + ``rep`` routine is also described on Pg. 85-87 [1] in Atkinson's + algorithm, this results from the fact that ``coincidence`` routine + introduces functionality similar to that introduced by the + ``minimal_block`` routine on Pg. 85-87 [1]. + + See Also + ======== + + coincidence, merge + + """ + p = self.p + lamda = k + rho = p[lamda] + if modified: + s = p[:] + while rho != lamda: + if modified: + s[rho] = lamda + lamda = rho + rho = p[lamda] + if modified: + rho = s[lamda] + while rho != k: + mu = rho + rho = s[mu] + p[rho] = lamda + self.p_p[rho] = self.p_p[rho]*self.p_p[mu] + else: + mu = k + rho = p[mu] + while rho != lamda: + p[mu] = lamda + mu = rho + rho = p[mu] + return lamda + + # alpha, beta coincide, i.e. 
alpha, beta represent the pair of cosets + # where coincidence occurs + def coincidence(self, alpha, beta, w=None, modified=False): + r""" + The third situation described in ``scan`` routine is handled by this + routine, described on Pg. 156-161 [1]. + + The unfortunate situation when the scan completes but not correctly, + then ``coincidence`` routine is run. i.e when for some `i` with + `1 \le i \le r+1`, we have `w=st` with `s = x_1 x_2 \dots x_{i-1}`, + `t = x_i x_{i+1} \dots x_r`, and `\beta = \alpha^s` and + `\gamma = \alpha^{t-1}` are defined but unequal. This means that + `\beta` and `\gamma` represent the same coset of `H` in `G`. Described + on Pg. 156 [1]. ``rep`` + + See Also + ======== + + scan + + """ + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + # behaves as a queue + q = [] + if modified: + self.modified_merge(alpha, beta, w, q) + else: + self.merge(alpha, beta, q) + while len(q) > 0: + gamma = q.pop(0) + for x in A_dict: + delta = table[gamma][A_dict[x]] + if delta is not None: + table[delta][A_dict_inv[x]] = None + mu = self.rep(gamma, modified=modified) + nu = self.rep(delta, modified=modified) + if table[mu][A_dict[x]] is not None: + if modified: + v = self.p_p[delta]**-1*self.P[gamma][self.A_dict[x]]**-1 + v = v*self.p_p[gamma]*self.P[mu][self.A_dict[x]] + self.modified_merge(nu, table[mu][self.A_dict[x]], v, q) + else: + self.merge(nu, table[mu][A_dict[x]], q) + elif table[nu][A_dict_inv[x]] is not None: + if modified: + v = self.p_p[gamma]**-1*self.P[gamma][self.A_dict[x]] + v = v*self.p_p[delta]*self.P[mu][self.A_dict_inv[x]] + self.modified_merge(mu, table[nu][self.A_dict_inv[x]], v, q) + else: + self.merge(mu, table[nu][A_dict_inv[x]], q) + else: + table[mu][A_dict[x]] = nu + table[nu][A_dict_inv[x]] = mu + if modified: + v = self.p_p[gamma]**-1*self.P[gamma][self.A_dict[x]]*self.p_p[delta] + self.P[mu][self.A_dict[x]] = v + self.P[nu][self.A_dict_inv[x]] = v**-1 + + # method used in the HLT strategy + def scan_and_fill(self, alpha, word): + """ + A modified version of ``scan`` routine used in the relator-based + method of coset enumeration, described on pg. 162-163 [1], which + follows the idea that whenever the procedure is called and the scan + is incomplete then it makes new definitions to enable the scan to + complete; i.e it fills in the gaps in the scan of the relator or + subgroup generator. + + """ + self.scan(alpha, word, fill=True) + + def scan_and_fill_c(self, alpha, word): + """ + A modified version of ``scan`` routine, described on Pg. 165 second + para. [1], with modification similar to that of ``scan_anf_fill`` the + only difference being it calls the coincidence procedure used in the + coset-table based method i.e. the routine ``coincidence_c`` is used. + + See Also + ======== + + scan, scan_and_fill + + """ + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + r = len(word) + f = alpha + i = 0 + b = alpha + j = r - 1 + # loop until it has filled the alpha row in the table. 
+ while True: + # do the forward scanning + while i <= j and table[f][A_dict[word[i]]] is not None: + f = table[f][A_dict[word[i]]] + i += 1 + if i > j: + if f != b: + self.coincidence_c(f, b) + return + # forward scan was incomplete, scan backwards + while j >= i and table[b][A_dict_inv[word[j]]] is not None: + b = table[b][A_dict_inv[word[j]]] + j -= 1 + if j < i: + self.coincidence_c(f, b) + elif j == i: + table[f][A_dict[word[i]]] = b + table[b][A_dict_inv[word[i]]] = f + self.deduction_stack.append((f, word[i])) + else: + self.define_c(f, word[i]) + + # method used in the HLT strategy + def look_ahead(self): + """ + When combined with the HLT method this is known as HLT+Lookahead + method of coset enumeration, described on pg. 164 [1]. Whenever + ``define`` aborts due to lack of space available this procedure is + executed. This routine helps in recovering space resulting from + "coincidence" of cosets. + + """ + R = self.fp_group.relators + p = self.p + # complete scan all relators under all cosets(obviously live) + # without making new definitions + for beta in self.omega: + for w in R: + self.scan(beta, w) + if p[beta] < beta: + break + + # Pg. 166 + def process_deductions(self, R_c_x, R_c_x_inv): + """ + Processes the deductions that have been pushed onto ``deduction_stack``, + described on Pg. 166 [1] and is used in coset-table based enumeration. + + See Also + ======== + + deduction_stack + + """ + p = self.p + table = self.table + while len(self.deduction_stack) > 0: + if len(self.deduction_stack) >= CosetTable.max_stack_size: + self.look_ahead() + del self.deduction_stack[:] + continue + else: + alpha, x = self.deduction_stack.pop() + if p[alpha] == alpha: + for w in R_c_x: + self.scan_c(alpha, w) + if p[alpha] < alpha: + break + beta = table[alpha][self.A_dict[x]] + if beta is not None and p[beta] == beta: + for w in R_c_x_inv: + self.scan_c(beta, w) + if p[beta] < beta: + break + + def process_deductions_check(self, R_c_x, R_c_x_inv): + """ + A variation of ``process_deductions``, this calls ``scan_check`` + wherever ``process_deductions`` calls ``scan``, described on Pg. [1]. + + See Also + ======== + + process_deductions + + """ + table = self.table + while len(self.deduction_stack) > 0: + alpha, x = self.deduction_stack.pop() + for w in R_c_x: + if not self.scan_check(alpha, w): + return False + beta = table[alpha][self.A_dict[x]] + if beta is not None: + for w in R_c_x_inv: + if not self.scan_check(beta, w): + return False + return True + + def switch(self, beta, gamma): + r"""Switch the elements `\beta, \gamma \in \Omega` of ``self``, used + by the ``standardize`` procedure, described on Pg. 167 [1]. + + See Also + ======== + + standardize + + """ + A = self.A + A_dict = self.A_dict + table = self.table + for x in A: + z = table[gamma][A_dict[x]] + table[gamma][A_dict[x]] = table[beta][A_dict[x]] + table[beta][A_dict[x]] = z + for alpha in range(len(self.p)): + if self.p[alpha] == alpha: + if table[alpha][A_dict[x]] == beta: + table[alpha][A_dict[x]] = gamma + elif table[alpha][A_dict[x]] == gamma: + table[alpha][A_dict[x]] = beta + + def standardize(self): + r""" + A coset table is standardized if when running through the cosets and + within each coset through the generator images (ignoring generator + inverses), the cosets appear in order of the integers + `0, 1, \dots, n`. "Standardize" reorders the elements of `\Omega` + such that, if we scan the coset table first by elements of `\Omega` + and then by elements of A, then the cosets occur in ascending order. 
+ ``standardize()`` is used at the end of an enumeration to permute the + cosets so that they occur in some sort of standard order. + + Notes + ===== + + procedure is described on pg. 167-168 [1], it also makes use of the + ``switch`` routine to replace by smaller integer value. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_r + >>> F, x, y = free_group("x, y") + + # Example 5.3 from [1] + >>> f = FpGroup(F, [x**2*y**2, x**3*y**5]) + >>> C = coset_enumeration_r(f, []) + >>> C.compress() + >>> C.table + [[1, 3, 1, 3], [2, 0, 2, 0], [3, 1, 3, 1], [0, 2, 0, 2]] + >>> C.standardize() + >>> C.table + [[1, 2, 1, 2], [3, 0, 3, 0], [0, 3, 0, 3], [2, 1, 2, 1]] + + """ + A = self.A + A_dict = self.A_dict + gamma = 1 + for alpha, x in product(range(self.n), A): + beta = self.table[alpha][A_dict[x]] + if beta >= gamma: + if beta > gamma: + self.switch(gamma, beta) + gamma += 1 + if gamma == self.n: + return + + # Compression of a Coset Table + def compress(self): + """Removes the non-live cosets from the coset table, described on + pg. 167 [1]. + + """ + gamma = -1 + A = self.A + A_dict = self.A_dict + A_dict_inv = self.A_dict_inv + table = self.table + chi = tuple([i for i in range(len(self.p)) if self.p[i] != i]) + for alpha in self.omega: + gamma += 1 + if gamma != alpha: + # replace alpha by gamma in coset table + for x in A: + beta = table[alpha][A_dict[x]] + table[gamma][A_dict[x]] = beta + table[beta][A_dict_inv[x]] == gamma + # all the cosets in the table are live cosets + self.p = list(range(gamma + 1)) + # delete the useless columns + del table[len(self.p):] + # re-define values + for row in table: + for j in range(len(self.A)): + row[j] -= bisect_left(chi, row[j]) + + def conjugates(self, R): + R_c = list(chain.from_iterable((rel.cyclic_conjugates(), \ + (rel**-1).cyclic_conjugates()) for rel in R)) + R_set = set() + for conjugate in R_c: + R_set = R_set.union(conjugate) + R_c_list = [] + for x in self.A: + r = {word for word in R_set if word[0] == x} + R_c_list.append(r) + R_set.difference_update(r) + return R_c_list + + def coset_representative(self, coset): + ''' + Compute the coset representative of a given coset. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_r + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + >>> C = coset_enumeration_r(f, [x]) + >>> C.compress() + >>> C.table + [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1]] + >>> C.coset_representative(0) + + >>> C.coset_representative(1) + y + >>> C.coset_representative(2) + y**-1 + + ''' + for x in self.A: + gamma = self.table[coset][self.A_dict[x]] + if coset == 0: + return self.fp_group.identity + if gamma < coset: + return self.coset_representative(gamma)*x**-1 + + ############################## + # Modified Methods # + ############################## + + def modified_define(self, alpha, x): + r""" + Define a function p_p from from [1..n] to A* as + an additional component of the modified coset table. + + Parameters + ========== + + \alpha \in \Omega + x \in A* + + See Also + ======== + + define + + """ + self.define(alpha, x, modified=True) + + def modified_scan(self, alpha, w, y, fill=False): + r""" + Parameters + ========== + \alpha \in \Omega + w \in A* + y \in (YUY^-1) + fill -- `modified_scan_and_fill` when set to True. 
+ + See Also + ======== + + scan + """ + self.scan(alpha, w, y=y, fill=fill, modified=True) + + def modified_scan_and_fill(self, alpha, w, y): + self.modified_scan(alpha, w, y, fill=True) + + def modified_merge(self, k, lamda, w, q): + r""" + Parameters + ========== + + 'k', 'lamda' -- the two class representatives to be merged. + q -- queue of length l of elements to be deleted from `\Omega` *. + w -- Word in (YUY^-1) + + See Also + ======== + + merge + """ + self.merge(k, lamda, q, w=w, modified=True) + + def modified_rep(self, k): + r""" + Parameters + ========== + + `k \in [0 \ldots n-1]` + + See Also + ======== + + rep + """ + self.rep(k, modified=True) + + def modified_coincidence(self, alpha, beta, w): + r""" + Parameters + ========== + + A coincident pair `\alpha, \beta \in \Omega, w \in Y \cup Y^{-1}` + + See Also + ======== + + coincidence + + """ + self.coincidence(alpha, beta, w=w, modified=True) + +############################################################################### +# COSET ENUMERATION # +############################################################################### + +# relator-based method +def coset_enumeration_r(fp_grp, Y, max_cosets=None, draft=None, + incomplete=False, modified=False): + """ + This is easier of the two implemented methods of coset enumeration. + and is often called the HLT method, after Hazelgrove, Leech, Trotter + The idea is that we make use of ``scan_and_fill`` makes new definitions + whenever the scan is incomplete to enable the scan to complete; this way + we fill in the gaps in the scan of the relator or subgroup generator, + that's why the name relator-based method. + + An instance of `CosetTable` for `fp_grp` can be passed as the keyword + argument `draft` in which case the coset enumeration will start with + that instance and attempt to complete it. + + When `incomplete` is `True` and the function is unable to complete for + some reason, the partially complete table will be returned. + + # TODO: complete the docstring + + See Also + ======== + + scan_and_fill, + + Examples + ======== + + >>> from sympy.combinatorics.free_groups import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_r + >>> F, x, y = free_group("x, y") + + # Example 5.1 from [1] + >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + >>> C = coset_enumeration_r(f, [x]) + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... print(C.table[i]) + [0, 0, 1, 2] + [1, 1, 2, 0] + [2, 2, 0, 1] + >>> C.p + [0, 1, 2, 1, 1] + + # Example from exercises Q2 [1] + >>> f = FpGroup(F, [x**2*y**2, y**-1*x*y*x**-3]) + >>> C = coset_enumeration_r(f, []) + >>> C.compress(); C.standardize() + >>> C.table + [[1, 2, 3, 4], + [5, 0, 6, 7], + [0, 5, 7, 6], + [7, 6, 5, 0], + [6, 7, 0, 5], + [2, 1, 4, 3], + [3, 4, 2, 1], + [4, 3, 1, 2]] + + # Example 5.2 + >>> f = FpGroup(F, [x**2, y**3, (x*y)**3]) + >>> Y = [x*y] + >>> C = coset_enumeration_r(f, Y) + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... print(C.table[i]) + [1, 1, 2, 1] + [0, 0, 0, 2] + [3, 3, 1, 0] + [2, 2, 3, 3] + + # Example 5.3 + >>> f = FpGroup(F, [x**2*y**2, x**3*y**5]) + >>> Y = [] + >>> C = coset_enumeration_r(f, Y) + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... 
print(C.table[i]) + [1, 3, 1, 3] + [2, 0, 2, 0] + [3, 1, 3, 1] + [0, 2, 0, 2] + + # Example 5.4 + >>> F, a, b, c, d, e = free_group("a, b, c, d, e") + >>> f = FpGroup(F, [a*b*c**-1, b*c*d**-1, c*d*e**-1, d*e*a**-1, e*a*b**-1]) + >>> Y = [a] + >>> C = coset_enumeration_r(f, Y) + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... print(C.table[i]) + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + + # example of "compress" method + >>> C.compress() + >>> C.table + [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]] + + # Exercises Pg. 161, Q2. + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**2*y**2, y**-1*x*y*x**-3]) + >>> Y = [] + >>> C = coset_enumeration_r(f, Y) + >>> C.compress() + >>> C.standardize() + >>> C.table + [[1, 2, 3, 4], + [5, 0, 6, 7], + [0, 5, 7, 6], + [7, 6, 5, 0], + [6, 7, 0, 5], + [2, 1, 4, 3], + [3, 4, 2, 1], + [4, 3, 1, 2]] + + # John J. Cannon; Lucien A. Dimino; George Havas; Jane M. Watson + # Mathematics of Computation, Vol. 27, No. 123. (Jul., 1973), pp. 463-490 + # from 1973chwd.pdf + # Table 1. Ex. 1 + >>> F, r, s, t = free_group("r, s, t") + >>> E1 = FpGroup(F, [t**-1*r*t*r**-2, r**-1*s*r*s**-2, s**-1*t*s*t**-2]) + >>> C = coset_enumeration_r(E1, [r]) + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... print(C.table[i]) + [0, 0, 0, 0, 0, 0] + + Ex. 2 + >>> F, a, b = free_group("a, b") + >>> Cox = FpGroup(F, [a**6, b**6, (a*b)**2, (a**2*b**2)**2, (a**3*b**3)**5]) + >>> C = coset_enumeration_r(Cox, [a]) + >>> index = 0 + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... index += 1 + >>> index + 500 + + # Ex. 3 + >>> F, a, b = free_group("a, b") + >>> B_2_4 = FpGroup(F, [a**4, b**4, (a*b)**4, (a**-1*b)**4, (a**2*b)**4, \ + (a*b**2)**4, (a**2*b**2)**4, (a**-1*b*a*b)**4, (a*b**-1*a*b)**4]) + >>> C = coset_enumeration_r(B_2_4, [a]) + >>> index = 0 + >>> for i in range(len(C.p)): + ... if C.p[i] == i: + ... index += 1 + >>> index + 1024 + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of computational group theory" + + """ + # 1. Initialize a coset table C for < X|R > + C = CosetTable(fp_grp, Y, max_cosets=max_cosets) + # Define coset table methods. + if modified: + _scan_and_fill = C.modified_scan_and_fill + _define = C.modified_define + else: + _scan_and_fill = C.scan_and_fill + _define = C.define + if draft: + C.table = draft.table[:] + C.p = draft.p[:] + R = fp_grp.relators + A_dict = C.A_dict + p = C.p + for i in range(len(Y)): + if modified: + _scan_and_fill(0, Y[i], C._grp.generators[i]) + else: + _scan_and_fill(0, Y[i]) + alpha = 0 + while alpha < C.n: + if p[alpha] == alpha: + try: + for w in R: + if modified: + _scan_and_fill(alpha, w, C._grp.identity) + else: + _scan_and_fill(alpha, w) + # if alpha was eliminated during the scan then break + if p[alpha] < alpha: + break + if p[alpha] == alpha: + for x in A_dict: + if C.table[alpha][A_dict[x]] is None: + _define(alpha, x) + except ValueError as e: + if incomplete: + return C + raise e + alpha += 1 + return C + +def modified_coset_enumeration_r(fp_grp, Y, max_cosets=None, draft=None, + incomplete=False): + r""" + Introduce a new set of symbols y \in Y that correspond to the + generators of the subgroup. Store the elements of Y as a + word P[\alpha, x] and compute the coset table similar to that of + the regular coset enumeration methods. 
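+
+     This is a thin wrapper: it simply calls ``coset_enumeration_r`` with
+     ``modified=True`` (see the return statement below), so ``max_cosets``,
+     ``draft`` and ``incomplete`` behave exactly as they do for the
+     relator-based method. A minimal equivalence sketch (editor's
+     illustration, with ``f`` and ``x`` as in the example below)::
+
+         C1 = modified_coset_enumeration_r(f, [x])
+         C2 = coset_enumeration_r(f, [x], modified=True)
+         # both calls run the same code path and build the same table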
+ + Examples + ======== + + >>> from sympy.combinatorics.free_groups import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup + >>> from sympy.combinatorics.coset_table import modified_coset_enumeration_r + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + >>> C = modified_coset_enumeration_r(f, [x]) + >>> C.table + [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1], [None, 1, None, None], [1, 3, None, None]] + + See Also + ======== + + coset_enumertation_r + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E., + "Handbook of Computational Group Theory", + Section 5.3.2 + """ + return coset_enumeration_r(fp_grp, Y, max_cosets=max_cosets, draft=draft, + incomplete=incomplete, modified=True) + +# Pg. 166 +# coset-table based method +def coset_enumeration_c(fp_grp, Y, max_cosets=None, draft=None, + incomplete=False): + """ + >>> from sympy.combinatorics.free_groups import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_c + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + >>> C = coset_enumeration_c(f, [x]) + >>> C.table + [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1]] + + """ + # Initialize a coset table C for < X|R > + X = fp_grp.generators + R = fp_grp.relators + C = CosetTable(fp_grp, Y, max_cosets=max_cosets) + if draft: + C.table = draft.table[:] + C.p = draft.p[:] + C.deduction_stack = draft.deduction_stack + for alpha, x in product(range(len(C.table)), X): + if C.table[alpha][C.A_dict[x]] is not None: + C.deduction_stack.append((alpha, x)) + A = C.A + # replace all the elements by cyclic reductions + R_cyc_red = [rel.identity_cyclic_reduction() for rel in R] + R_c = list(chain.from_iterable((rel.cyclic_conjugates(), (rel**-1).cyclic_conjugates()) \ + for rel in R_cyc_red)) + R_set = set() + for conjugate in R_c: + R_set = R_set.union(conjugate) + # a list of subsets of R_c whose words start with "x". + R_c_list = [] + for x in C.A: + r = {word for word in R_set if word[0] == x} + R_c_list.append(r) + R_set.difference_update(r) + for w in Y: + C.scan_and_fill_c(0, w) + for x in A: + C.process_deductions(R_c_list[C.A_dict[x]], R_c_list[C.A_dict_inv[x]]) + alpha = 0 + while alpha < len(C.table): + if C.p[alpha] == alpha: + try: + for x in C.A: + if C.p[alpha] != alpha: + break + if C.table[alpha][C.A_dict[x]] is None: + C.define_c(alpha, x) + C.process_deductions(R_c_list[C.A_dict[x]], R_c_list[C.A_dict_inv[x]]) + except ValueError as e: + if incomplete: + return C + raise e + alpha += 1 + return C diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/fp_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/fp_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..9ab8c47d4e6fa211522a5b3e85a8b06c0fa402e5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/fp_groups.py @@ -0,0 +1,1348 @@ +"""Finitely Presented Groups and its algorithms. 
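+
+A minimal usage sketch (editor's addition, mirroring the doctests further
+down in this module; the group below is the direct product of two cyclic
+groups of order 3):
+
+    >>> from sympy.combinatorics.free_groups import free_group
+    >>> from sympy.combinatorics.fp_groups import FpGroup
+    >>> F, x, y = free_group("x, y")
+    >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y])
+    >>> f.order()
+    9
+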
""" + +from sympy.core.singleton import S +from sympy.core.symbol import symbols +from sympy.combinatorics.free_groups import (FreeGroup, FreeGroupElement, + free_group) +from sympy.combinatorics.rewritingsystem import RewritingSystem +from sympy.combinatorics.coset_table import (CosetTable, + coset_enumeration_r, + coset_enumeration_c) +from sympy.combinatorics import PermutationGroup +from sympy.matrices.normalforms import invariant_factors +from sympy.matrices import Matrix +from sympy.polys.polytools import gcd +from sympy.printing.defaults import DefaultPrinting +from sympy.utilities import public +from sympy.utilities.magic import pollute + +from itertools import product + + +@public +def fp_group(fr_grp, relators=()): + _fp_group = FpGroup(fr_grp, relators) + return (_fp_group,) + tuple(_fp_group._generators) + +@public +def xfp_group(fr_grp, relators=()): + _fp_group = FpGroup(fr_grp, relators) + return (_fp_group, _fp_group._generators) + +# Does not work. Both symbols and pollute are undefined. Never tested. +@public +def vfp_group(fr_grpm, relators): + _fp_group = FpGroup(symbols, relators) + pollute([sym.name for sym in _fp_group.symbols], _fp_group.generators) + return _fp_group + + +def _parse_relators(rels): + """Parse the passed relators.""" + return rels + + +############################################################################### +# FINITELY PRESENTED GROUPS # +############################################################################### + + +class FpGroup(DefaultPrinting): + """ + The FpGroup would take a FreeGroup and a list/tuple of relators, the + relators would be specified in such a way that each of them be equal to the + identity of the provided free group. + + """ + is_group = True + is_FpGroup = True + is_PermutationGroup = False + + def __init__(self, fr_grp, relators): + relators = _parse_relators(relators) + self.free_group = fr_grp + self.relators = relators + self.generators = self._generators() + self.dtype = type("FpGroupElement", (FpGroupElement,), {"group": self}) + + # CosetTable instance on identity subgroup + self._coset_table = None + # returns whether coset table on identity subgroup + # has been standardized + self._is_standardized = False + + self._order = None + self._center = None + + self._rewriting_system = RewritingSystem(self) + self._perm_isomorphism = None + return + + def _generators(self): + return self.free_group.generators + + def make_confluent(self): + ''' + Try to make the group's rewriting system confluent + + ''' + self._rewriting_system.make_confluent() + return + + def reduce(self, word): + ''' + Return the reduced form of `word` in `self` according to the group's + rewriting system. If it's confluent, the reduced form is the unique normal + form of the word in the group. + + ''' + return self._rewriting_system.reduce(word) + + def equals(self, word1, word2): + ''' + Compare `word1` and `word2` for equality in the group + using the group's rewriting system. If the system is + confluent, the returned answer is necessarily correct. 
+ (If it is not, `False` could be returned in some cases + where in fact `word1 == word2`) + + ''' + if self.reduce(word1*word2**-1) == self.identity: + return True + elif self._rewriting_system.is_confluent: + return False + return None + + @property + def identity(self): + return self.free_group.identity + + def __contains__(self, g): + return g in self.free_group + + def subgroup(self, gens, C=None, homomorphism=False): + ''' + Return the subgroup generated by `gens` using the + Reidemeister-Schreier algorithm + homomorphism -- When set to True, return a dictionary containing the images + of the presentation generators in the original group. + + Examples + ======== + + >>> from sympy.combinatorics.fp_groups import FpGroup + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**3, y**5, (x*y)**2]) + >>> H = [x*y, x**-1*y**-1*x*y*x] + >>> K, T = f.subgroup(H, homomorphism=True) + >>> T(K.generators) + [x*y, x**-1*y**2*x**-1] + + ''' + + if not all(isinstance(g, FreeGroupElement) for g in gens): + raise ValueError("Generators must be `FreeGroupElement`s") + if not all(g.group == self.free_group for g in gens): + raise ValueError("Given generators are not members of the group") + if homomorphism: + g, rels, _gens = reidemeister_presentation(self, gens, C=C, homomorphism=True) + else: + g, rels = reidemeister_presentation(self, gens, C=C) + if g: + g = FpGroup(g[0].group, rels) + else: + g = FpGroup(free_group('')[0], []) + if homomorphism: + from sympy.combinatorics.homomorphisms import homomorphism + return g, homomorphism(g, self, g.generators, _gens, check=False) + return g + + def coset_enumeration(self, H, strategy="relator_based", max_cosets=None, + draft=None, incomplete=False): + """ + Return an instance of ``coset table``, when Todd-Coxeter algorithm is + run over the ``self`` with ``H`` as subgroup, using ``strategy`` + argument as strategy. The returned coset table is compressed but not + standardized. + + An instance of `CosetTable` for `fp_grp` can be passed as the keyword + argument `draft` in which case the coset enumeration will start with + that instance and attempt to complete it. + + When `incomplete` is `True` and the function is unable to complete for + some reason, the partially complete table will be returned. + + """ + if not max_cosets: + max_cosets = CosetTable.coset_table_max_limit + if strategy == 'relator_based': + C = coset_enumeration_r(self, H, max_cosets=max_cosets, + draft=draft, incomplete=incomplete) + else: + C = coset_enumeration_c(self, H, max_cosets=max_cosets, + draft=draft, incomplete=incomplete) + if C.is_complete(): + C.compress() + return C + + def standardize_coset_table(self): + """ + Standardized the coset table ``self`` and makes the internal variable + ``_is_standardized`` equal to ``True``. + + """ + self._coset_table.standardize() + self._is_standardized = True + + def coset_table(self, H, strategy="relator_based", max_cosets=None, + draft=None, incomplete=False): + """ + Return the mathematical coset table of ``self`` in ``H``. 
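+
+         If ``H`` is empty, the table over the trivial subgroup is computed
+         once, cached on ``self._coset_table`` and standardized before its
+         ``table`` attribute is returned. A small illustration (editor's
+         sketch, reusing the presentation from the enumeration examples
+         elsewhere in this file):
+
+             >>> from sympy.combinatorics import free_group
+             >>> from sympy.combinatorics.fp_groups import FpGroup
+             >>> F, x, y = free_group("x, y")
+             >>> f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y])
+             >>> f.coset_table([x])
+             [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1]]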
+ + """ + if not H: + if self._coset_table is not None: + if not self._is_standardized: + self.standardize_coset_table() + else: + C = self.coset_enumeration([], strategy, max_cosets=max_cosets, + draft=draft, incomplete=incomplete) + self._coset_table = C + self.standardize_coset_table() + return self._coset_table.table + else: + C = self.coset_enumeration(H, strategy, max_cosets=max_cosets, + draft=draft, incomplete=incomplete) + C.standardize() + return C.table + + def order(self, strategy="relator_based"): + """ + Returns the order of the finitely presented group ``self``. It uses + the coset enumeration with identity group as subgroup, i.e ``H=[]``. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x, y**2]) + >>> f.order(strategy="coset_table_based") + 2 + + """ + if self._order is not None: + return self._order + if self._coset_table is not None: + self._order = len(self._coset_table.table) + elif len(self.relators) == 0: + self._order = self.free_group.order() + elif len(self.generators) == 1: + self._order = abs(gcd([r.array_form[0][1] for r in self.relators])) + elif self._is_infinite(): + self._order = S.Infinity + else: + gens, C = self._finite_index_subgroup() + if C: + ind = len(C.table) + self._order = ind*self.subgroup(gens, C=C).order() + else: + self._order = self.index([]) + return self._order + + def _is_infinite(self): + ''' + Test if the group is infinite. Return `True` if the test succeeds + and `None` otherwise + + ''' + used_gens = set() + for r in self.relators: + used_gens.update(r.contains_generators()) + if not set(self.generators) <= used_gens: + return True + # Abelianisation test: check is the abelianisation is infinite + abelian_rels = [] + for rel in self.relators: + abelian_rels.append([rel.exponent_sum(g) for g in self.generators]) + m = Matrix(Matrix(abelian_rels)) + if 0 in invariant_factors(m): + return True + else: + return None + + + def _finite_index_subgroup(self, s=None): + ''' + Find the elements of `self` that generate a finite index subgroup + and, if found, return the list of elements and the coset table of `self` by + the subgroup, otherwise return `(None, None)` + + ''' + gen = self.most_frequent_generator() + rels = list(self.generators) + rels.extend(self.relators) + if not s: + if len(self.generators) == 2: + s = [gen] + [g for g in self.generators if g != gen] + else: + rand = self.free_group.identity + i = 0 + while ((rand in rels or rand**-1 in rels or rand.is_identity) + and i<10): + rand = self.random() + i += 1 + s = [gen, rand] + [g for g in self.generators if g != gen] + mid = (len(s)+1)//2 + half1 = s[:mid] + half2 = s[mid:] + draft1 = None + draft2 = None + m = 200 + C = None + while not C and (m/2 < CosetTable.coset_table_max_limit): + m = min(m, CosetTable.coset_table_max_limit) + draft1 = self.coset_enumeration(half1, max_cosets=m, + draft=draft1, incomplete=True) + if draft1.is_complete(): + C = draft1 + half = half1 + else: + draft2 = self.coset_enumeration(half2, max_cosets=m, + draft=draft2, incomplete=True) + if draft2.is_complete(): + C = draft2 + half = half2 + if not C: + m *= 2 + if not C: + return None, None + C.compress() + return half, C + + def most_frequent_generator(self): + gens = self.generators + rels = self.relators + freqs = [sum([r.generator_count(g) for r in rels]) for g in gens] + return gens[freqs.index(max(freqs))] + + def random(self): + import random + r = 
self.free_group.identity + for i in range(random.randint(2,3)): + r = r*random.choice(self.generators)**random.choice([1,-1]) + return r + + def index(self, H, strategy="relator_based"): + """ + Return the index of subgroup ``H`` in group ``self``. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**5, y**4, y*x*y**3*x**3]) + >>> f.index([x]) + 4 + + """ + # TODO: use |G:H| = |G|/|H| (currently H can't be made into a group) + # when we know |G| and |H| + + if H == []: + return self.order() + else: + C = self.coset_enumeration(H, strategy) + return len(C.table) + + def __str__(self): + if self.free_group.rank > 30: + str_form = "" % self.free_group.rank + else: + str_form = "" % str(self.generators) + return str_form + + __repr__ = __str__ + +#============================================================================== +# PERMUTATION GROUP METHODS +#============================================================================== + + def _to_perm_group(self): + ''' + Return an isomorphic permutation group and the isomorphism. + The implementation is dependent on coset enumeration so + will only terminate for finite groups. + + ''' + from sympy.combinatorics import Permutation + from sympy.combinatorics.homomorphisms import homomorphism + if self.order() is S.Infinity: + raise NotImplementedError("Permutation presentation of infinite " + "groups is not implemented") + if self._perm_isomorphism: + T = self._perm_isomorphism + P = T.image() + else: + C = self.coset_table([]) + gens = self.generators + images = [[C[i][2*gens.index(g)] for i in range(len(C))] for g in gens] + images = [Permutation(i) for i in images] + P = PermutationGroup(images) + T = homomorphism(self, P, gens, images, check=False) + self._perm_isomorphism = T + return P, T + + def _perm_group_list(self, method_name, *args): + ''' + Given the name of a `PermutationGroup` method (returning a subgroup + or a list of subgroups) and (optionally) additional arguments it takes, + return a list or a list of lists containing the generators of this (or + these) subgroups in terms of the generators of `self`. + + ''' + P, T = self._to_perm_group() + perm_result = getattr(P, method_name)(*args) + single = False + if isinstance(perm_result, PermutationGroup): + perm_result, single = [perm_result], True + result = [] + for group in perm_result: + gens = group.generators + result.append(T.invert(gens)) + return result[0] if single else result + + def derived_series(self): + ''' + Return the list of lists containing the generators + of the subgroups in the derived series of `self`. + + ''' + return self._perm_group_list('derived_series') + + def lower_central_series(self): + ''' + Return the list of lists containing the generators + of the subgroups in the lower central series of `self`. + + ''' + return self._perm_group_list('lower_central_series') + + def center(self): + ''' + Return the list of generators of the center of `self`. + + ''' + return self._perm_group_list('center') + + + def derived_subgroup(self): + ''' + Return the list of generators of the derived subgroup of `self`. + + ''' + return self._perm_group_list('derived_subgroup') + + + def centralizer(self, other): + ''' + Return the list of generators of the centralizer of `other` + (a list of elements of `self`) in `self`. 
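+
+         The computation goes through the isomorphic permutation group
+         (``_to_perm_group``), so it only terminates for finite groups. A
+         hedged usage sketch (editor's illustration; the chosen presentation
+         of S3 and the exact words returned are illustrative only)::
+
+             F, x, y = free_group("x, y")
+             f = FpGroup(F, [x**3, y**2, (x*y)**2])  # a presentation of S3
+             f.centralizer([x])  # a list of words generating <x>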
+ + ''' + T = self._to_perm_group()[1] + other = T(other) + return self._perm_group_list('centralizer', other) + + def normal_closure(self, other): + ''' + Return the list of generators of the normal closure of `other` + (a list of elements of `self`) in `self`. + + ''' + T = self._to_perm_group()[1] + other = T(other) + return self._perm_group_list('normal_closure', other) + + def _perm_property(self, attr): + ''' + Given an attribute of a `PermutationGroup`, return + its value for a permutation group isomorphic to `self`. + + ''' + P = self._to_perm_group()[0] + return getattr(P, attr) + + @property + def is_abelian(self): + ''' + Check if `self` is abelian. + + ''' + return self._perm_property("is_abelian") + + @property + def is_nilpotent(self): + ''' + Check if `self` is nilpotent. + + ''' + return self._perm_property("is_nilpotent") + + @property + def is_solvable(self): + ''' + Check if `self` is solvable. + + ''' + return self._perm_property("is_solvable") + + @property + def elements(self): + ''' + List the elements of `self`. + + ''' + P, T = self._to_perm_group() + return T.invert(P._elements) + + @property + def is_cyclic(self): + """ + Return ``True`` if group is Cyclic. + + """ + if len(self.generators) <= 1: + return True + try: + P, T = self._to_perm_group() + except NotImplementedError: + raise NotImplementedError("Check for infinite Cyclic group " + "is not implemented") + return P.is_cyclic + + def abelian_invariants(self): + """ + Return Abelian Invariants of a group. + """ + try: + P, T = self._to_perm_group() + except NotImplementedError: + raise NotImplementedError("abelian invariants is not implemented" + "for infinite group") + return P.abelian_invariants() + + def composition_series(self): + """ + Return subnormal series of maximum length for a group. + """ + try: + P, T = self._to_perm_group() + except NotImplementedError: + raise NotImplementedError("composition series is not implemented" + "for infinite group") + return P.composition_series() + + +class FpSubgroup(DefaultPrinting): + ''' + The class implementing a subgroup of an FpGroup or a FreeGroup + (only finite index subgroups are supported at this point). This + is to be used if one wishes to check if an element of the original + group belongs to the subgroup + + ''' + def __init__(self, G, gens, normal=False): + super().__init__() + self.parent = G + self.generators = list({g for g in gens if g != G.identity}) + self._min_words = None #for use in __contains__ + self.C = None + self.normal = normal + + def __contains__(self, g): + + if isinstance(self.parent, FreeGroup): + if self._min_words is None: + # make _min_words - a list of subwords such that + # g is in the subgroup if and only if it can be + # partitioned into these subwords. Infinite families of + # subwords are presented by tuples, e.g. 
(r, w) + # stands for the family of subwords r*w**n*r**-1 + + def _process(w): + # this is to be used before adding new words + # into _min_words; if the word w is not cyclically + # reduced, it will generate an infinite family of + # subwords so should be written as a tuple; + # if it is, w**-1 should be added to the list + # as well + p, r = w.cyclic_reduction(removed=True) + if not r.is_identity: + return [(r, p)] + else: + return [w, w**-1] + + # make the initial list + gens = [] + for w in self.generators: + if self.normal: + w = w.cyclic_reduction() + gens.extend(_process(w)) + + for w1 in gens: + for w2 in gens: + # if w1 and w2 are equal or are inverses, continue + if w1 == w2 or (not isinstance(w1, tuple) + and w1**-1 == w2): + continue + + # if the start of one word is the inverse of the + # end of the other, their multiple should be added + # to _min_words because of cancellation + if isinstance(w1, tuple): + # start, end + s1, s2 = w1[0][0], w1[0][0]**-1 + else: + s1, s2 = w1[0], w1[len(w1)-1] + + if isinstance(w2, tuple): + # start, end + r1, r2 = w2[0][0], w2[0][0]**-1 + else: + r1, r2 = w2[0], w2[len(w1)-1] + + # p1 and p2 are w1 and w2 or, in case when + # w1 or w2 is an infinite family, a representative + p1, p2 = w1, w2 + if isinstance(w1, tuple): + p1 = w1[0]*w1[1]*w1[0]**-1 + if isinstance(w2, tuple): + p2 = w2[0]*w2[1]*w2[0]**-1 + + # add the product of the words to the list is necessary + if r1**-1 == s2 and not (p1*p2).is_identity: + new = _process(p1*p2) + if new not in gens: + gens.extend(new) + + if r2**-1 == s1 and not (p2*p1).is_identity: + new = _process(p2*p1) + if new not in gens: + gens.extend(new) + + self._min_words = gens + + min_words = self._min_words + + def _is_subword(w): + # check if w is a word in _min_words or one of + # the infinite families in it + w, r = w.cyclic_reduction(removed=True) + if r.is_identity or self.normal: + return w in min_words + else: + t = [s[1] for s in min_words if isinstance(s, tuple) + and s[0] == r] + return [s for s in t if w.power_of(s)] != [] + + # store the solution of words for which the result of + # _word_break (below) is known + known = {} + + def _word_break(w): + # check if w can be written as a product of words + # in min_words + if len(w) == 0: + return True + i = 0 + while i < len(w): + i += 1 + prefix = w.subword(0, i) + if not _is_subword(prefix): + continue + rest = w.subword(i, len(w)) + if rest not in known: + known[rest] = _word_break(rest) + if known[rest]: + return True + return False + + if self.normal: + g = g.cyclic_reduction() + return _word_break(g) + else: + if self.C is None: + C = self.parent.coset_enumeration(self.generators) + self.C = C + i = 0 + C = self.C + for j in range(len(g)): + i = C.table[i][C.A_dict[g[j]]] + return i == 0 + + def order(self): + if not self.generators: + return S.One + if isinstance(self.parent, FreeGroup): + return S.Infinity + if self.C is None: + C = self.parent.coset_enumeration(self.generators) + self.C = C + # This is valid because `len(self.C.table)` (the index of the subgroup) + # will always be finite - otherwise coset enumeration doesn't terminate + return self.parent.order()/len(self.C.table) + + def to_FpGroup(self): + if isinstance(self.parent, FreeGroup): + gen_syms = [('x_%d'%i) for i in range(len(self.generators))] + return free_group(', '.join(gen_syms))[0] + return self.parent.subgroup(C=self.C) + + def __str__(self): + if len(self.generators) > 30: + str_form = "" % len(self.generators) + else: + str_form = "" % str(self.generators) + return 
str_form + + __repr__ = __str__ + + +############################################################################### +# LOW INDEX SUBGROUPS # +############################################################################### + +def low_index_subgroups(G, N, Y=()): + """ + Implements the Low Index Subgroups algorithm, i.e find all subgroups of + ``G`` upto a given index ``N``. This implements the method described in + [Sim94]. This procedure involves a backtrack search over incomplete Coset + Tables, rather than over forced coincidences. + + Parameters + ========== + + G: An FpGroup < X|R > + N: positive integer, representing the maximum index value for subgroups + Y: (an optional argument) specifying a list of subgroup generators, such + that each of the resulting subgroup contains the subgroup generated by Y. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, low_index_subgroups + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**2, y**3, (x*y)**4]) + >>> L = low_index_subgroups(f, 4) + >>> for coset_table in L: + ... print(coset_table.table) + [[0, 0, 0, 0]] + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 3, 3]] + [[0, 0, 1, 2], [2, 2, 2, 0], [1, 1, 0, 1]] + [[1, 1, 0, 0], [0, 0, 1, 1]] + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of Computational Group Theory" + Section 5.4 + + .. [2] Marston Conder and Peter Dobcsanyi + "Applications and Adaptions of the Low Index Subgroups Procedure" + + """ + C = CosetTable(G, []) + R = G.relators + # length chosen for the length of the short relators + len_short_rel = 5 + # elements of R2 only checked at the last step for complete + # coset tables + R2 = {rel for rel in R if len(rel) > len_short_rel} + # elements of R1 are used in inner parts of the process to prune + # branches of the search tree, + R1 = {rel.identity_cyclic_reduction() for rel in set(R) - R2} + R1_c_list = C.conjugates(R1) + S = [] + descendant_subgroups(S, C, R1_c_list, C.A[0], R2, N, Y) + return S + + +def descendant_subgroups(S, C, R1_c_list, x, R2, N, Y): + A_dict = C.A_dict + A_dict_inv = C.A_dict_inv + if C.is_complete(): + # if C is complete then it only needs to test + # whether the relators in R2 are satisfied + for w, alpha in product(R2, C.omega): + if not C.scan_check(alpha, w): + return + # relators in R2 are satisfied, append the table to list + S.append(C) + else: + # find the first undefined entry in Coset Table + for alpha, x in product(range(len(C.table)), C.A): + if C.table[alpha][A_dict[x]] is None: + # this is "x" in pseudo-code (using "y" makes it clear) + undefined_coset, undefined_gen = alpha, x + break + # for filling up the undefine entry we try all possible values + # of beta in Omega or beta = n where beta^(undefined_gen^-1) is undefined + reach = C.omega + [C.n] + for beta in reach: + if beta < N: + if beta == C.n or C.table[beta][A_dict_inv[undefined_gen]] is None: + try_descendant(S, C, R1_c_list, R2, N, undefined_coset, \ + undefined_gen, beta, Y) + + +def try_descendant(S, C, R1_c_list, R2, N, alpha, x, beta, Y): + r""" + Solves the problem of trying out each individual possibility + for `\alpha^x. 
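+
+     A copy ``D`` of the current table is made, the definition
+     `\alpha^x = \beta` (together with its inverse entry) is written into
+     it, a new coset being appended first when ``beta == D.n``; the
+     deduction is pushed and processed against the short relators, the
+     scans of the subgroup generators in ``Y`` are checked, and, if ``D``
+     passes ``first_in_class``, the search recurses through
+     ``descendant_subgroups``.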
+ + """ + D = C.copy() + if beta == D.n and beta < N: + D.table.append([None]*len(D.A)) + D.p.append(beta) + D.table[alpha][D.A_dict[x]] = beta + D.table[beta][D.A_dict_inv[x]] = alpha + D.deduction_stack.append((alpha, x)) + if not D.process_deductions_check(R1_c_list[D.A_dict[x]], \ + R1_c_list[D.A_dict_inv[x]]): + return + for w in Y: + if not D.scan_check(0, w): + return + if first_in_class(D, Y): + descendant_subgroups(S, D, R1_c_list, x, R2, N, Y) + + +def first_in_class(C, Y=()): + """ + Checks whether the subgroup ``H=G1`` corresponding to the Coset Table + could possibly be the canonical representative of its conjugacy class. + + Parameters + ========== + + C: CosetTable + + Returns + ======= + + bool: True/False + + If this returns False, then no descendant of C can have that property, and + so we can abandon C. If it returns True, then we need to process further + the node of the search tree corresponding to C, and so we call + ``descendant_subgroups`` recursively on C. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, CosetTable, first_in_class + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**2, y**3, (x*y)**4]) + >>> C = CosetTable(f, []) + >>> C.table = [[0, 0, None, None]] + >>> first_in_class(C) + True + >>> C.table = [[1, 1, 1, None], [0, 0, None, 1]]; C.p = [0, 1] + >>> first_in_class(C) + True + >>> C.table = [[1, 1, 2, 1], [0, 0, 0, None], [None, None, None, 0]] + >>> C.p = [0, 1, 2] + >>> first_in_class(C) + False + >>> C.table = [[1, 1, 1, 2], [0, 0, 2, 0], [2, None, 0, 1]] + >>> first_in_class(C) + False + + # TODO:: Sims points out in [Sim94] that performance can be improved by + # remembering some of the information computed by ``first_in_class``. If + # the ``continue alpha`` statement is executed at line 14, then the same thing + # will happen for that value of alpha in any descendant of the table C, and so + # the values the values of alpha for which this occurs could profitably be + # stored and passed through to the descendants of C. Of course this would + # make the code more complicated. + + # The code below is taken directly from the function on page 208 of [Sim94] + # nu[alpha] + + """ + n = C.n + # lamda is the largest numbered point in Omega_c_alpha which is currently defined + lamda = -1 + # for alpha in Omega_c, nu[alpha] is the point in Omega_c_alpha corresponding to alpha + nu = [None]*n + # for alpha in Omega_c_alpha, mu[alpha] is the point in Omega_c corresponding to alpha + mu = [None]*n + # mutually nu and mu are the mutually-inverse equivalence maps between + # Omega_c_alpha and Omega_c + next_alpha = False + # For each 0!=alpha in [0 .. 
nc-1], we start by constructing the equivalent + # standardized coset table C_alpha corresponding to H_alpha + for alpha in range(1, n): + # reset nu to "None" after previous value of alpha + for beta in range(lamda+1): + nu[mu[beta]] = None + # we only want to reject our current table in favour of a preceding + # table in the ordering in which 1 is replaced by alpha, if the subgroup + # G_alpha corresponding to this preceding table definitely contains the + # given subgroup + for w in Y: + # TODO: this should support input of a list of general words + # not just the words which are in "A" (i.e gen and gen^-1) + if C.table[alpha][C.A_dict[w]] != alpha: + # continue with alpha + next_alpha = True + break + if next_alpha: + next_alpha = False + continue + # try alpha as the new point 0 in Omega_C_alpha + mu[0] = alpha + nu[alpha] = 0 + # compare corresponding entries in C and C_alpha + lamda = 0 + for beta in range(n): + for x in C.A: + gamma = C.table[beta][C.A_dict[x]] + delta = C.table[mu[beta]][C.A_dict[x]] + # if either of the entries is undefined, + # we move with next alpha + if gamma is None or delta is None: + # continue with alpha + next_alpha = True + break + if nu[delta] is None: + # delta becomes the next point in Omega_C_alpha + lamda += 1 + nu[delta] = lamda + mu[lamda] = delta + if nu[delta] < gamma: + return False + if nu[delta] > gamma: + # continue with alpha + next_alpha = True + break + if next_alpha: + next_alpha = False + break + return True + +#======================================================================== +# Simplifying Presentation +#======================================================================== + +def simplify_presentation(*args, change_gens=False): + ''' + For an instance of `FpGroup`, return a simplified isomorphic copy of + the group (e.g. remove redundant generators or relators). Alternatively, + a list of generators and relators can be passed in which case the + simplified lists will be returned. + + By default, the generators of the group are unchanged. If you would + like to remove redundant generators, set the keyword argument + `change_gens = True`. + + ''' + if len(args) == 1: + if not isinstance(args[0], FpGroup): + raise TypeError("The argument must be an instance of FpGroup") + G = args[0] + gens, rels = simplify_presentation(G.generators, G.relators, + change_gens=change_gens) + if gens: + return FpGroup(gens[0].group, rels) + return FpGroup(FreeGroup([]), []) + elif len(args) == 2: + gens, rels = args[0][:], args[1][:] + if not gens: + return gens, rels + identity = gens[0].group.identity + else: + if len(args) == 0: + m = "Not enough arguments" + else: + m = "Too many arguments" + raise RuntimeError(m) + + prev_gens = [] + prev_rels = [] + while not set(prev_rels) == set(rels): + prev_rels = rels + while change_gens and not set(prev_gens) == set(gens): + prev_gens = gens + gens, rels = elimination_technique_1(gens, rels, identity) + rels = _simplify_relators(rels, identity) + + if change_gens: + syms = [g.array_form[0][0] for g in gens] + F = free_group(syms)[0] + identity = F.identity + gens = F.generators + subs = dict(zip(syms, gens)) + for j, r in enumerate(rels): + a = r.array_form + rel = identity + for sym, p in a: + rel = rel*subs[sym]**p + rels[j] = rel + return gens, rels + +def _simplify_relators(rels, identity): + """Relies upon ``_simplification_technique_1`` for its functioning. 
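+
+    It deduplicates the output of ``_simplification_technique_1``, sorts
+    the relators, cyclically reduces each of them and removes the identity
+    if it is present (editor's summary of the code below).
+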
""" + rels = rels[:] + + rels = list(set(_simplification_technique_1(rels))) + rels.sort() + rels = [r.identity_cyclic_reduction() for r in rels] + try: + rels.remove(identity) + except ValueError: + pass + return rels + +# Pg 350, section 2.5.1 from [2] +def elimination_technique_1(gens, rels, identity): + rels = rels[:] + # the shorter relators are examined first so that generators selected for + # elimination will have shorter strings as equivalent + rels.sort() + gens = gens[:] + redundant_gens = {} + redundant_rels = [] + used_gens = set() + # examine each relator in relator list for any generator occurring exactly + # once + for rel in rels: + # don't look for a redundant generator in a relator which + # depends on previously found ones + contained_gens = rel.contains_generators() + if any(g in contained_gens for g in redundant_gens): + continue + contained_gens = list(contained_gens) + contained_gens.sort(reverse = True) + for gen in contained_gens: + if rel.generator_count(gen) == 1 and gen not in used_gens: + k = rel.exponent_sum(gen) + gen_index = rel.index(gen**k) + bk = rel.subword(gen_index + 1, len(rel)) + fw = rel.subword(0, gen_index) + chi = bk*fw + redundant_gens[gen] = chi**(-1*k) + used_gens.update(chi.contains_generators()) + redundant_rels.append(rel) + break + rels = [r for r in rels if r not in redundant_rels] + # eliminate the redundant generators from remaining relators + rels = [r.eliminate_words(redundant_gens, _all = True).identity_cyclic_reduction() for r in rels] + rels = list(set(rels)) + try: + rels.remove(identity) + except ValueError: + pass + gens = [g for g in gens if g not in redundant_gens] + return gens, rels + +def _simplification_technique_1(rels): + """ + All relators are checked to see if they are of the form `gen^n`. If any + such relators are found then all other relators are processed for strings + in the `gen` known order. 
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import _simplification_technique_1 + >>> F, x, y = free_group("x, y") + >>> w1 = [x**2*y**4, x**3] + >>> _simplification_technique_1(w1) + [x**-1*y**4, x**3] + + >>> w2 = [x**2*y**-4*x**5, x**3, x**2*y**8, y**5] + >>> _simplification_technique_1(w2) + [x**-1*y*x**-1, x**3, x**-1*y**-2, y**5] + + >>> w3 = [x**6*y**4, x**4] + >>> _simplification_technique_1(w3) + [x**2*y**4, x**4] + + """ + rels = rels[:] + # dictionary with "gen: n" where gen^n is one of the relators + exps = {} + for i in range(len(rels)): + rel = rels[i] + if rel.number_syllables() == 1: + g = rel[0] + exp = abs(rel.array_form[0][1]) + if rel.array_form[0][1] < 0: + rels[i] = rels[i]**-1 + g = g**-1 + if g in exps: + exp = gcd(exp, exps[g].array_form[0][1]) + exps[g] = g**exp + + one_syllables_words = exps.values() + # decrease some of the exponents in relators, making use of the single + # syllable relators + for i in range(len(rels)): + rel = rels[i] + if rel in one_syllables_words: + continue + rel = rel.eliminate_words(one_syllables_words, _all = True) + # if rels[i] contains g**n where abs(n) is greater than half of the power p + # of g in exps, g**n can be replaced by g**(n-p) (or g**(p-n) if n<0) + for g in rel.contains_generators(): + if g in exps: + exp = exps[g].array_form[0][1] + max_exp = (exp + 1)//2 + rel = rel.eliminate_word(g**(max_exp), g**(max_exp-exp), _all = True) + rel = rel.eliminate_word(g**(-max_exp), g**(-(max_exp-exp)), _all = True) + rels[i] = rel + rels = [r.identity_cyclic_reduction() for r in rels] + return rels + + +############################################################################### +# SUBGROUP PRESENTATIONS # +############################################################################### + +# Pg 175 [1] +def define_schreier_generators(C, homomorphism=False): + ''' + Parameters + ========== + + C -- Coset table. + homomorphism -- When set to True, return a dictionary containing the images + of the presentation generators in the original group. + ''' + y = [] + gamma = 1 + f = C.fp_group + X = f.generators + if homomorphism: + # `_gens` stores the elements of the parent group to + # to which the schreier generators correspond to. 
+ _gens = {} + # compute the schreier Traversal + tau = {} + tau[0] = f.identity + C.P = [[None]*len(C.A) for i in range(C.n)] + for alpha, x in product(C.omega, C.A): + beta = C.table[alpha][C.A_dict[x]] + if beta == gamma: + C.P[alpha][C.A_dict[x]] = "" + C.P[beta][C.A_dict_inv[x]] = "" + gamma += 1 + if homomorphism: + tau[beta] = tau[alpha]*x + elif x in X and C.P[alpha][C.A_dict[x]] is None: + y_alpha_x = '%s_%s' % (x, alpha) + y.append(y_alpha_x) + C.P[alpha][C.A_dict[x]] = y_alpha_x + if homomorphism: + _gens[y_alpha_x] = tau[alpha]*x*tau[beta]**-1 + grp_gens = list(free_group(', '.join(y))) + C._schreier_free_group = grp_gens.pop(0) + C._schreier_generators = grp_gens + if homomorphism: + C._schreier_gen_elem = _gens + # replace all elements of P by, free group elements + for i, j in product(range(len(C.P)), range(len(C.A))): + # if equals "", replace by identity element + if C.P[i][j] == "": + C.P[i][j] = C._schreier_free_group.identity + elif isinstance(C.P[i][j], str): + r = C._schreier_generators[y.index(C.P[i][j])] + C.P[i][j] = r + beta = C.table[i][j] + C.P[beta][j + 1] = r**-1 + +def reidemeister_relators(C): + R = C.fp_group.relators + rels = [rewrite(C, coset, word) for word in R for coset in range(C.n)] + order_1_gens = {i for i in rels if len(i) == 1} + + # remove all the order 1 generators from relators + rels = list(filter(lambda rel: rel not in order_1_gens, rels)) + + # replace order 1 generators by identity element in reidemeister relators + for i in range(len(rels)): + w = rels[i] + w = w.eliminate_words(order_1_gens, _all=True) + rels[i] = w + + C._schreier_generators = [i for i in C._schreier_generators + if not (i in order_1_gens or i**-1 in order_1_gens)] + + # Tietze transformation 1 i.e TT_1 + # remove cyclic conjugate elements from relators + i = 0 + while i < len(rels): + w = rels[i] + j = i + 1 + while j < len(rels): + if w.is_cyclic_conjugate(rels[j]): + del rels[j] + else: + j += 1 + i += 1 + + C._reidemeister_relators = rels + + +def rewrite(C, alpha, w): + """ + Parameters + ========== + + C: CosetTable + alpha: A live coset + w: A word in `A*` + + Returns + ======= + + rho(tau(alpha), w) + + Examples + ======== + + >>> from sympy.combinatorics.fp_groups import FpGroup, CosetTable, define_schreier_generators, rewrite + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**2, y**3, (x*y)**6]) + >>> C = CosetTable(f, []) + >>> C.table = [[1, 1, 2, 3], [0, 0, 4, 5], [4, 4, 3, 0], [5, 5, 0, 2], [2, 2, 5, 1], [3, 3, 1, 4]] + >>> C.p = [0, 1, 2, 3, 4, 5] + >>> define_schreier_generators(C) + >>> rewrite(C, 0, (x*y)**6) + x_4*y_2*x_3*x_1*x_2*y_4*x_5 + + """ + v = C._schreier_free_group.identity + for i in range(len(w)): + x_i = w[i] + v = v*C.P[alpha][C.A_dict[x_i]] + alpha = C.table[alpha][C.A_dict[x_i]] + return v + +# Pg 350, section 2.5.2 from [2] +def elimination_technique_2(C): + """ + This technique eliminates one generator at a time. Heuristically this + seems superior in that we may select for elimination the generator with + shortest equivalent string at each stage. 
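+
+    If a relator can be written as ``fw*gen**k*bk`` with ``k`` equal to
+    ``1`` or ``-1`` and ``gen`` occurring nowhere else in it, then
+    ``gen**k == (bk*fw)**-1``, so ``gen`` is eliminated by substituting
+    ``(bk*fw)**(-k)`` for it in the remaining relators (editor's summary of
+    the code below); the doctest that follows shows the resulting
+    presentation of a subgroup.
+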
+ + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_r, \ + reidemeister_relators, define_schreier_generators, elimination_technique_2 + >>> F, x, y = free_group("x, y") + >>> f = FpGroup(F, [x**3, y**5, (x*y)**2]); H = [x*y, x**-1*y**-1*x*y*x] + >>> C = coset_enumeration_r(f, H) + >>> C.compress(); C.standardize() + >>> define_schreier_generators(C) + >>> reidemeister_relators(C) + >>> elimination_technique_2(C) + ([y_1, y_2], [y_2**-3, y_2*y_1*y_2*y_1*y_2*y_1, y_1**2]) + + """ + rels = C._reidemeister_relators + rels.sort(reverse=True) + gens = C._schreier_generators + for i in range(len(gens) - 1, -1, -1): + rel = rels[i] + for j in range(len(gens) - 1, -1, -1): + gen = gens[j] + if rel.generator_count(gen) == 1: + k = rel.exponent_sum(gen) + gen_index = rel.index(gen**k) + bk = rel.subword(gen_index + 1, len(rel)) + fw = rel.subword(0, gen_index) + rep_by = (bk*fw)**(-1*k) + del rels[i]; del gens[j] + for l in range(len(rels)): + rels[l] = rels[l].eliminate_word(gen, rep_by) + break + C._reidemeister_relators = rels + C._schreier_generators = gens + return C._schreier_generators, C._reidemeister_relators + +def reidemeister_presentation(fp_grp, H, C=None, homomorphism=False): + """ + Parameters + ========== + + fp_group: A finitely presented group, an instance of FpGroup + H: A subgroup whose presentation is to be found, given as a list + of words in generators of `fp_grp` + homomorphism: When set to True, return a homomorphism from the subgroup + to the parent group + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> from sympy.combinatorics.fp_groups import FpGroup, reidemeister_presentation + >>> F, x, y = free_group("x, y") + + Example 5.6 Pg. 177 from [1] + >>> f = FpGroup(F, [x**3, y**5, (x*y)**2]) + >>> H = [x*y, x**-1*y**-1*x*y*x] + >>> reidemeister_presentation(f, H) + ((y_1, y_2), (y_1**2, y_2**3, y_2*y_1*y_2*y_1*y_2*y_1)) + + Example 5.8 Pg. 183 from [1] + >>> f = FpGroup(F, [x**3, y**3, (x*y)**3]) + >>> H = [x*y, x*y**-1] + >>> reidemeister_presentation(f, H) + ((x_0, y_0), (x_0**3, y_0**3, x_0*y_0*x_0*y_0*x_0*y_0)) + + Exercises Q2. Pg 187 from [1] + >>> f = FpGroup(F, [x**2*y**2, y**-1*x*y*x**-3]) + >>> H = [x] + >>> reidemeister_presentation(f, H) + ((x_0,), (x_0**4,)) + + Example 5.9 Pg. 
183 from [1] + >>> f = FpGroup(F, [x**3*y**-3, (x*y)**3, (x*y**-1)**2]) + >>> H = [x] + >>> reidemeister_presentation(f, H) + ((x_0,), (x_0**6,)) + + """ + if not C: + C = coset_enumeration_r(fp_grp, H) + C.compress(); C.standardize() + define_schreier_generators(C, homomorphism=homomorphism) + reidemeister_relators(C) + gens, rels = C._schreier_generators, C._reidemeister_relators + gens, rels = simplify_presentation(gens, rels, change_gens=True) + + C.schreier_generators = tuple(gens) + C.reidemeister_relators = tuple(rels) + + if homomorphism: + _gens = [] + for gen in gens: + _gens.append(C._schreier_gen_elem[str(gen)]) + return C.schreier_generators, C.reidemeister_relators, _gens + + return C.schreier_generators, C.reidemeister_relators + + +FpGroupElement = FreeGroupElement diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/free_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/free_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..9c24abdc480c7803331435ba7453c4e0848ddb07 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/free_groups.py @@ -0,0 +1,1354 @@ +from __future__ import annotations + +from sympy.core import S +from sympy.core.expr import Expr +from sympy.core.symbol import Symbol, symbols as _symbols +from sympy.core.sympify import CantSympify +from sympy.printing.defaults import DefaultPrinting +from sympy.utilities import public +from sympy.utilities.iterables import flatten, is_sequence +from sympy.utilities.magic import pollute +from sympy.utilities.misc import as_int + + +@public +def free_group(symbols): + """Construct a free group returning ``(FreeGroup, (f_0, f_1, ..., f_(n-1))``. + + Parameters + ========== + + symbols : str, Symbol/Expr or sequence of str, Symbol/Expr (may be empty) + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y, z = free_group("x, y, z") + >>> F + + >>> x**2*y**-1 + x**2*y**-1 + >>> type(_) + + + """ + _free_group = FreeGroup(symbols) + return (_free_group,) + tuple(_free_group.generators) + +@public +def xfree_group(symbols): + """Construct a free group returning ``(FreeGroup, (f_0, f_1, ..., f_(n-1)))``. + + Parameters + ========== + + symbols : str, Symbol/Expr or sequence of str, Symbol/Expr (may be empty) + + Examples + ======== + + >>> from sympy.combinatorics.free_groups import xfree_group + >>> F, (x, y, z) = xfree_group("x, y, z") + >>> F + + >>> y**2*x**-2*z**-1 + y**2*x**-2*z**-1 + >>> type(_) + + + """ + _free_group = FreeGroup(symbols) + return (_free_group, _free_group.generators) + +@public +def vfree_group(symbols): + """Construct a free group and inject ``f_0, f_1, ..., f_(n-1)`` as symbols + into the global namespace. 
+ + Parameters + ========== + + symbols : str, Symbol/Expr or sequence of str, Symbol/Expr (may be empty) + + Examples + ======== + + >>> from sympy.combinatorics.free_groups import vfree_group + >>> vfree_group("x, y, z") + + >>> x**2*y**-2*z # noqa: F821 + x**2*y**-2*z + >>> type(_) + + + """ + _free_group = FreeGroup(symbols) + pollute([sym.name for sym in _free_group.symbols], _free_group.generators) + return _free_group + + +def _parse_symbols(symbols): + if not symbols: + return () + if isinstance(symbols, str): + return _symbols(symbols, seq=True) + elif isinstance(symbols, (Expr, FreeGroupElement)): + return (symbols,) + elif is_sequence(symbols): + if all(isinstance(s, str) for s in symbols): + return _symbols(symbols) + elif all(isinstance(s, Expr) for s in symbols): + return symbols + raise ValueError("The type of `symbols` must be one of the following: " + "a str, Symbol/Expr or a sequence of " + "one of these types") + + +############################################################################## +# FREE GROUP # +############################################################################## + +_free_group_cache: dict[int, FreeGroup] = {} + +class FreeGroup(DefaultPrinting): + """ + Free group with finite or infinite number of generators. Its input API + is that of a str, Symbol/Expr or a sequence of one of + these types (which may be empty) + + See Also + ======== + + sympy.polys.rings.PolyRing + + References + ========== + + .. [1] https://www.gap-system.org/Manuals/doc/ref/chap37.html + + .. [2] https://en.wikipedia.org/wiki/Free_group + + """ + is_associative = True + is_group = True + is_FreeGroup = True + is_PermutationGroup = False + relators: list[Expr] = [] + + def __new__(cls, symbols): + symbols = tuple(_parse_symbols(symbols)) + rank = len(symbols) + _hash = hash((cls.__name__, symbols, rank)) + obj = _free_group_cache.get(_hash) + + if obj is None: + obj = object.__new__(cls) + obj._hash = _hash + obj._rank = rank + # dtype method is used to create new instances of FreeGroupElement + obj.dtype = type("FreeGroupElement", (FreeGroupElement,), {"group": obj}) + obj.symbols = symbols + obj.generators = obj._generators() + obj._gens_set = set(obj.generators) + for symbol, generator in zip(obj.symbols, obj.generators): + if isinstance(symbol, Symbol): + name = symbol.name + if hasattr(obj, name): + setattr(obj, name, generator) + + _free_group_cache[_hash] = obj + + return obj + + def _generators(group): + """Returns the generators of the FreeGroup. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y, z = free_group("x, y, z") + >>> F.generators + (x, y, z) + + """ + gens = [] + for sym in group.symbols: + elm = ((sym, 1),) + gens.append(group.dtype(elm)) + return tuple(gens) + + def clone(self, symbols=None): + return self.__class__(symbols or self.symbols) + + def __contains__(self, i): + """Return True if ``i`` is contained in FreeGroup.""" + if not isinstance(i, FreeGroupElement): + return False + group = i.group + return self == group + + def __hash__(self): + return self._hash + + def __len__(self): + return self.rank + + def __str__(self): + if self.rank > 30: + str_form = "" % self.rank + else: + str_form = "" + return str_form + + __repr__ = __str__ + + def __getitem__(self, index): + symbols = self.symbols[index] + return self.clone(symbols=symbols) + + def __eq__(self, other): + """No ``FreeGroup`` is equal to any "other" ``FreeGroup``. 
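+
+        Equality is identity: ``self == other`` holds only when both names
+        refer to the same cached ``FreeGroup`` instance. Since ``__new__``
+        looks groups up in ``_free_group_cache`` by their symbols,
+        constructing a free group on the same symbols twice returns the
+        same object (editor's note):
+
+            >>> from sympy.combinatorics import free_group
+            >>> free_group("x, y")[0] == free_group("x, y")[0]
+            True
+            >>> free_group("x, y")[0] == free_group("x, z")[0]
+            False
+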
+ """ + return self is other + + def index(self, gen): + """Return the index of the generator `gen` from ``(f_0, ..., f_(n-1))``. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> F.index(y) + 1 + >>> F.index(x) + 0 + + """ + if isinstance(gen, self.dtype): + return self.generators.index(gen) + else: + raise ValueError("expected a generator of Free Group %s, got %s" % (self, gen)) + + def order(self): + """Return the order of the free group. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> F.order() + oo + + >>> free_group("")[0].order() + 1 + + """ + if self.rank == 0: + return S.One + else: + return S.Infinity + + @property + def elements(self): + """ + Return the elements of the free group. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> (z,) = free_group("") + >>> z.elements + {} + + """ + if self.rank == 0: + # A set containing Identity element of `FreeGroup` self is returned + return {self.identity} + else: + raise ValueError("Group contains infinitely many elements" + ", hence cannot be represented") + + @property + def rank(self): + r""" + In group theory, the `rank` of a group `G`, denoted `G.rank`, + can refer to the smallest cardinality of a generating set + for G, that is + + \operatorname{rank}(G)=\min\{ |X|: X\subseteq G, \left\langle X\right\rangle =G\}. + + """ + return self._rank + + @property + def is_abelian(self): + """Returns if the group is Abelian. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> f.is_abelian + False + + """ + return self.rank in (0, 1) + + @property + def identity(self): + """Returns the identity element of free group.""" + return self.dtype() + + def contains(self, g): + """Tests if Free Group element ``g`` belong to self, ``G``. + + In mathematical terms any linear combination of generators + of a Free Group is contained in it. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> f.contains(x**3*y**2) + True + + """ + if not isinstance(g, FreeGroupElement): + return False + elif self != g.group: + return False + else: + return True + + def center(self): + """Returns the center of the free group `self`.""" + return {self.identity} + + +############################################################################ +# FreeGroupElement # +############################################################################ + + +class FreeGroupElement(CantSympify, DefaultPrinting, tuple): + """Used to create elements of FreeGroup. It cannot be used directly to + create a free group element. It is called by the `dtype` method of the + `FreeGroup` class. + + """ + is_assoc_word = True + + def new(self, init): + return self.__class__(init) + + _hash = None + + def __hash__(self): + _hash = self._hash + if _hash is None: + self._hash = _hash = hash((self.group, frozenset(tuple(self)))) + return _hash + + def copy(self): + return self.new(self) + + @property + def is_identity(self): + if self.array_form == (): + return True + else: + return False + + @property + def array_form(self): + """ + SymPy provides two different internal kinds of representation + of associative words. The first one is called the `array_form` + which is a tuple containing `tuples` as its elements, where the + size of each tuple is two. 
At the first position the tuple + contains the `symbol-generator`, while at the second position + of tuple contains the exponent of that generator at the position. + Since elements (i.e. words) do not commute, the indexing of tuple + makes that property to stay. + + The structure in ``array_form`` of ``FreeGroupElement`` is of form: + + ``( ( symbol_of_gen, exponent ), ( , ), ... ( , ) )`` + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> (x*z).array_form + ((x, 1), (z, 1)) + >>> (x**2*z*y*x**2).array_form + ((x, 2), (z, 1), (y, 1), (x, 2)) + + See Also + ======== + + letter_repr + + """ + return tuple(self) + + @property + def letter_form(self): + """ + The letter representation of a ``FreeGroupElement`` is a tuple + of generator symbols, with each entry corresponding to a group + generator. Inverses of the generators are represented by + negative generator symbols. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b, c, d = free_group("a b c d") + >>> (a**3).letter_form + (a, a, a) + >>> (a**2*d**-2*a*b**-4).letter_form + (a, a, -d, -d, a, -b, -b, -b, -b) + >>> (a**-2*b**3*d).letter_form + (-a, -a, b, b, b, d) + + See Also + ======== + + array_form + + """ + return tuple(flatten([(i,)*j if j > 0 else (-i,)*(-j) + for i, j in self.array_form])) + + def __getitem__(self, i): + group = self.group + r = self.letter_form[i] + if r.is_Symbol: + return group.dtype(((r, 1),)) + else: + return group.dtype(((-r, -1),)) + + def index(self, gen): + if len(gen) != 1: + raise ValueError() + return (self.letter_form).index(gen.letter_form[0]) + + @property + def letter_form_elm(self): + """ + """ + group = self.group + r = self.letter_form + return [group.dtype(((elm,1),)) if elm.is_Symbol \ + else group.dtype(((-elm,-1),)) for elm in r] + + @property + def ext_rep(self): + """This is called the External Representation of ``FreeGroupElement`` + """ + return tuple(flatten(self.array_form)) + + def __contains__(self, gen): + return gen.array_form[0][0] in tuple([r[0] for r in self.array_form]) + + def __str__(self): + if self.is_identity: + return "" + + str_form = "" + array_form = self.array_form + for i in range(len(array_form)): + if i == len(array_form) - 1: + if array_form[i][1] == 1: + str_form += str(array_form[i][0]) + else: + str_form += str(array_form[i][0]) + \ + "**" + str(array_form[i][1]) + else: + if array_form[i][1] == 1: + str_form += str(array_form[i][0]) + "*" + else: + str_form += str(array_form[i][0]) + \ + "**" + str(array_form[i][1]) + "*" + return str_form + + __repr__ = __str__ + + def __pow__(self, n): + n = as_int(n) + group = self.group + if n == 0: + return group.identity + + if n < 0: + n = -n + return (self.inverse())**n + + result = self + for i in range(n - 1): + result = result*self + # this method can be improved instead of just returning the + # multiplication of elements + return result + + def __mul__(self, other): + """Returns the product of elements belonging to the same ``FreeGroup``. 
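+
+        Free reduction is applied only at the junction of the two factors
+        (via ``zero_mul_simp``); both operands are assumed to already be in
+        reduced form (editor's note). For instance, ``(x*y**-1)*(y*x)``
+        cancels the adjacent ``y**-1``/``y`` pair and merges the outer
+        letters, giving ``x**2``.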
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> x*y**2*y**-4 + x*y**-2 + >>> z*y**-2 + z*y**-2 + >>> x**2*y*y**-1*x**-2 + + + """ + group = self.group + if not isinstance(other, group.dtype): + raise TypeError("only FreeGroup elements of same FreeGroup can " + "be multiplied") + if self.is_identity: + return other + if other.is_identity: + return self + r = list(self.array_form + other.array_form) + zero_mul_simp(r, len(self.array_form) - 1) + return group.dtype(tuple(r)) + + def __truediv__(self, other): + group = self.group + if not isinstance(other, group.dtype): + raise TypeError("only FreeGroup elements of same FreeGroup can " + "be multiplied") + return self*(other.inverse()) + + def __rtruediv__(self, other): + group = self.group + if not isinstance(other, group.dtype): + raise TypeError("only FreeGroup elements of same FreeGroup can " + "be multiplied") + return other*(self.inverse()) + + def __add__(self, other): + return NotImplemented + + def inverse(self): + """ + Returns the inverse of a ``FreeGroupElement`` element + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> x.inverse() + x**-1 + >>> (x*y).inverse() + y**-1*x**-1 + + """ + group = self.group + r = tuple([(i, -j) for i, j in self.array_form[::-1]]) + return group.dtype(r) + + def order(self): + """Find the order of a ``FreeGroupElement``. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y = free_group("x y") + >>> (x**2*y*y**-1*x**-2).order() + 1 + + """ + if self.is_identity: + return S.One + else: + return S.Infinity + + def commutator(self, other): + """ + Return the commutator of `self` and `x`: ``~x*~self*x*self`` + + """ + group = self.group + if not isinstance(other, group.dtype): + raise ValueError("commutator of only FreeGroupElement of the same " + "FreeGroup exists") + else: + return self.inverse()*other.inverse()*self*other + + def eliminate_words(self, words, _all=False, inverse=True): + ''' + Replace each subword from the dictionary `words` by words[subword]. + If words is a list, replace the words by the identity. + + ''' + again = True + new = self + if isinstance(words, dict): + while again: + again = False + for sub in words: + prev = new + new = new.eliminate_word(sub, words[sub], _all=_all, inverse=inverse) + if new != prev: + again = True + else: + while again: + again = False + for sub in words: + prev = new + new = new.eliminate_word(sub, _all=_all, inverse=inverse) + if new != prev: + again = True + return new + + def eliminate_word(self, gen, by=None, _all=False, inverse=True): + """ + For an associative word `self`, a subword `gen`, and an associative + word `by` (identity by default), return the associative word obtained by + replacing each occurrence of `gen` in `self` by `by`. If `_all = True`, + the occurrences of `gen` that may appear after the first substitution will + also be replaced and so on until no occurrences are found. This might not + always terminate (e.g. `(x).eliminate_word(x, x**2, _all=True)`). 
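# --- Illustrative sketch (not part of the original free_groups.py) ---
# eliminate_words applies several substitutions repeatedly until the word
# stops changing; passing a list erases the listed subwords (maps them to the
# identity).  Only methods defined in this class are used.
from sympy.combinatorics import free_group

F, x, y = free_group("x, y")
w = x**2 * y * x**2
assert w.eliminate_words({x**2: y}) == y**3   # both x**2 blocks become y
assert w.eliminate_words([y]) == x**4         # every y is erased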
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y = free_group("x y") + >>> w = x**5*y*x**2*y**-4*x + >>> w.eliminate_word( x, x**2 ) + x**10*y*x**4*y**-4*x**2 + >>> w.eliminate_word( x, y**-1 ) + y**-11 + >>> w.eliminate_word(x**5) + y*x**2*y**-4*x + >>> w.eliminate_word(x*y, y) + x**4*y*x**2*y**-4*x + + See Also + ======== + substituted_word + + """ + if by is None: + by = self.group.identity + if self.is_independent(gen) or gen == by: + return self + if gen == self: + return by + if gen**-1 == by: + _all = False + word = self + l = len(gen) + + try: + i = word.subword_index(gen) + k = 1 + except ValueError: + if not inverse: + return word + try: + i = word.subword_index(gen**-1) + k = -1 + except ValueError: + return word + + word = word.subword(0, i)*by**k*word.subword(i+l, len(word)).eliminate_word(gen, by) + + if _all: + return word.eliminate_word(gen, by, _all=True, inverse=inverse) + else: + return word + + def __len__(self): + """ + For an associative word `self`, returns the number of letters in it. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> w = a**5*b*a**2*b**-4*a + >>> len(w) + 13 + >>> len(a**17) + 17 + >>> len(w**0) + 0 + + """ + return sum(abs(j) for (i, j) in self) + + def __eq__(self, other): + """ + Two associative words are equal if they are words over the + same alphabet and if they are sequences of the same letters. + This is equivalent to saying that the external representations + of the words are equal. + There is no "universal" empty word, every alphabet has its own + empty word. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, swapnil0, swapnil1 = free_group("swapnil0 swapnil1") + >>> f + + >>> g, swap0, swap1 = free_group("swap0 swap1") + >>> g + + + >>> swapnil0 == swapnil1 + False + >>> swapnil0*swapnil1 == swapnil1/swapnil1*swapnil0*swapnil1 + True + >>> swapnil0*swapnil1 == swapnil1*swapnil0 + False + >>> swapnil1**0 == swap0**0 + False + + """ + group = self.group + if not isinstance(other, group.dtype): + return False + return tuple.__eq__(self, other) + + def __lt__(self, other): + """ + The ordering of associative words is defined by length and + lexicography (this ordering is called short-lex ordering), that + is, shorter words are smaller than longer words, and words of the + same length are compared w.r.t. the lexicographical ordering induced + by the ordering of generators. Generators are sorted according + to the order in which they were created. If the generators are + invertible then each generator `g` is larger than its inverse `g^{-1}`, + and `g^{-1}` is larger than every generator that is smaller than `g`. 
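# --- Illustrative sketch (not part of the original free_groups.py) ---
# The comparison operators implement short-lex order: shorter words come
# first, ties are broken by the creation order of the generators.
from sympy.combinatorics import free_group

F, a, b = free_group("a b")
words = [b*a, a**3, b, a*b]
assert sorted(words) == [b, a*b, b*a, a**3]   # length first, then lexicographic
assert a < b and b < a**2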
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> b < a + False + >>> a < a.inverse() + False + + """ + group = self.group + if not isinstance(other, group.dtype): + raise TypeError("only FreeGroup elements of same FreeGroup can " + "be compared") + l = len(self) + m = len(other) + # implement lenlex order + if l < m: + return True + elif l > m: + return False + for i in range(l): + a = self[i].array_form[0] + b = other[i].array_form[0] + p = group.symbols.index(a[0]) + q = group.symbols.index(b[0]) + if p < q: + return True + elif p > q: + return False + elif a[1] < b[1]: + return True + elif a[1] > b[1]: + return False + return False + + def __le__(self, other): + return (self == other or self < other) + + def __gt__(self, other): + """ + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, x, y, z = free_group("x y z") + >>> y**2 > x**2 + True + >>> y*z > z*y + False + >>> x > x.inverse() + True + + """ + group = self.group + if not isinstance(other, group.dtype): + raise TypeError("only FreeGroup elements of same FreeGroup can " + "be compared") + return not self <= other + + def __ge__(self, other): + return not self < other + + def exponent_sum(self, gen): + """ + For an associative word `self` and a generator or inverse of generator + `gen`, ``exponent_sum`` returns the number of times `gen` appears in + `self` minus the number of times its inverse appears in `self`. If + neither `gen` nor its inverse occur in `self` then 0 is returned. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> w = x**2*y**3 + >>> w.exponent_sum(x) + 2 + >>> w.exponent_sum(x**-1) + -2 + >>> w = x**2*y**4*x**-3 + >>> w.exponent_sum(x) + -1 + + See Also + ======== + + generator_count + + """ + if len(gen) != 1: + raise ValueError("gen must be a generator or inverse of a generator") + s = gen.array_form[0] + return s[1]*sum([i[1] for i in self.array_form if i[0] == s[0]]) + + def generator_count(self, gen): + """ + For an associative word `self` and a generator `gen`, + ``generator_count`` returns the multiplicity of generator + `gen` in `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> w = x**2*y**3 + >>> w.generator_count(x) + 2 + >>> w = x**2*y**4*x**-3 + >>> w.generator_count(x) + 5 + + See Also + ======== + + exponent_sum + + """ + if len(gen) != 1 or gen.array_form[0][1] < 0: + raise ValueError("gen must be a generator") + s = gen.array_form[0] + return s[1]*sum([abs(i[1]) for i in self.array_form if i[0] == s[0]]) + + def subword(self, from_i, to_j, strict=True): + """ + For an associative word `self` and two positive integers `from_i` and + `to_j`, `subword` returns the subword of `self` that begins at position + `from_i` and ends at `to_j - 1`, indexing is done with origin 0. 
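# --- Illustrative sketch (not part of the original free_groups.py) ---
# With strict=False the bounds are clamped to the word instead of raising,
# and exponent_sum/generator_count differ in how inverse letters are counted.
from sympy.combinatorics import free_group

F, a, b = free_group("a b")
w = a**2 * b**-3 * a
assert w.subword(-5, 100, strict=False) == w   # bounds clamped to [0, len(w)]
assert w.exponent_sum(b) == -3                 # signed count of b
assert w.generator_count(b) == 3               # unsigned multiplicity of b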
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> w = a**5*b*a**2*b**-4*a + >>> w.subword(2, 6) + a**3*b + + """ + group = self.group + if not strict: + from_i = max(from_i, 0) + to_j = min(len(self), to_j) + if from_i < 0 or to_j > len(self): + raise ValueError("`from_i`, `to_j` must be positive and no greater than " + "the length of associative word") + if to_j <= from_i: + return group.identity + else: + letter_form = self.letter_form[from_i: to_j] + array_form = letter_form_to_array_form(letter_form, group) + return group.dtype(array_form) + + def subword_index(self, word, start = 0): + ''' + Find the index of `word` in `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> w = a**2*b*a*b**3 + >>> w.subword_index(a*b*a*b) + 1 + + ''' + l = len(word) + self_lf = self.letter_form + word_lf = word.letter_form + index = None + for i in range(start,len(self_lf)-l+1): + if self_lf[i:i+l] == word_lf: + index = i + break + if index is not None: + return index + else: + raise ValueError("The given word is not a subword of self") + + def is_dependent(self, word): + """ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> (x**4*y**-3).is_dependent(x**4*y**-2) + True + >>> (x**2*y**-1).is_dependent(x*y) + False + >>> (x*y**2*x*y**2).is_dependent(x*y**2) + True + >>> (x**12).is_dependent(x**-4) + True + + See Also + ======== + + is_independent + + """ + try: + return self.subword_index(word) is not None + except ValueError: + pass + try: + return self.subword_index(word**-1) is not None + except ValueError: + return False + + def is_independent(self, word): + """ + + See Also + ======== + + is_dependent + + """ + return not self.is_dependent(word) + + def contains_generators(self): + """ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y, z = free_group("x, y, z") + >>> (x**2*y**-1).contains_generators() + {x, y} + >>> (x**3*z).contains_generators() + {x, z} + + """ + group = self.group + gens = set() + for syllable in self.array_form: + gens.add(group.dtype(((syllable[0], 1),))) + return set(gens) + + def cyclic_subword(self, from_i, to_j): + group = self.group + l = len(self) + letter_form = self.letter_form + period1 = int(from_i/l) + if from_i >= l: + from_i -= l*period1 + to_j -= l*period1 + diff = to_j - from_i + word = letter_form[from_i: to_j] + period2 = int(to_j/l) - 1 + word += letter_form*period2 + letter_form[:diff-l+from_i-l*period2] + word = letter_form_to_array_form(word, group) + return group.dtype(word) + + def cyclic_conjugates(self): + """Returns a words which are cyclic to the word `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> w = x*y*x*y*x + >>> w.cyclic_conjugates() + {x*y*x**2*y, x**2*y*x*y, y*x*y*x**2, y*x**2*y*x, x*y*x*y*x} + >>> s = x*y*x**2*y*x + >>> s.cyclic_conjugates() + {x**2*y*x**2*y, y*x**2*y*x**2, x*y*x**2*y*x} + + References + ========== + + .. [1] https://planetmath.org/cyclicpermutation + + """ + return {self.cyclic_subword(i, i+len(self)) for i in range(len(self))} + + def is_cyclic_conjugate(self, w): + """ + Checks whether words ``self``, ``w`` are cyclic conjugates. 
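# --- Illustrative sketch (not part of the original free_groups.py) ---
# Two words are cyclic conjugates exactly when one occurs among the cyclic
# rotations of the other, which is what cyclic_conjugates enumerates.
from sympy.combinatorics import free_group

F, x, y = free_group("x, y")
w = x * y * x**2
assert y * x**3 in w.cyclic_conjugates()   # one rotation of w
assert w.is_cyclic_conjugate(y * x**3)
assert not w.is_cyclic_conjugate(x * y**3)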
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> w1 = x**2*y**5 + >>> w2 = x*y**5*x + >>> w1.is_cyclic_conjugate(w2) + True + >>> w3 = x**-1*y**5*x**-1 + >>> w3.is_cyclic_conjugate(w2) + False + + """ + l1 = len(self) + l2 = len(w) + if l1 != l2: + return False + w1 = self.identity_cyclic_reduction() + w2 = w.identity_cyclic_reduction() + letter1 = w1.letter_form + letter2 = w2.letter_form + str1 = ' '.join(map(str, letter1)) + str2 = ' '.join(map(str, letter2)) + if len(str1) != len(str2): + return False + + return str1 in str2 + ' ' + str2 + + def number_syllables(self): + """Returns the number of syllables of the associative word `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, swapnil0, swapnil1 = free_group("swapnil0 swapnil1") + >>> (swapnil1**3*swapnil0*swapnil1**-1).number_syllables() + 3 + + """ + return len(self.array_form) + + def exponent_syllable(self, i): + """ + Returns the exponent of the `i`-th syllable of the associative word + `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> w = a**5*b*a**2*b**-4*a + >>> w.exponent_syllable( 2 ) + 2 + + """ + return self.array_form[i][1] + + def generator_syllable(self, i): + """ + Returns the symbol of the generator that is involved in the + i-th syllable of the associative word `self`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a b") + >>> w = a**5*b*a**2*b**-4*a + >>> w.generator_syllable( 3 ) + b + + """ + return self.array_form[i][0] + + def sub_syllables(self, from_i, to_j): + """ + `sub_syllables` returns the subword of the associative word `self` that + consists of syllables from positions `from_to` to `to_j`, where + `from_to` and `to_j` must be positive integers and indexing is done + with origin 0. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> f, a, b = free_group("a, b") + >>> w = a**5*b*a**2*b**-4*a + >>> w.sub_syllables(1, 2) + b + >>> w.sub_syllables(3, 3) + + + """ + if not isinstance(from_i, int) or not isinstance(to_j, int): + raise ValueError("both arguments should be integers") + group = self.group + if to_j <= from_i: + return group.identity + else: + r = tuple(self.array_form[from_i: to_j]) + return group.dtype(r) + + def substituted_word(self, from_i, to_j, by): + """ + Returns the associative word obtained by replacing the subword of + `self` that begins at position `from_i` and ends at position `to_j - 1` + by the associative word `by`. `from_i` and `to_j` must be positive + integers, indexing is done with origin 0. In other words, + `w.substituted_word(w, from_i, to_j, by)` is the product of the three + words: `w.subword(0, from_i)`, `by`, and + `w.subword(to_j len(w))`. + + See Also + ======== + + eliminate_word + + """ + lw = len(self) + if from_i >= to_j or from_i > lw or to_j > lw: + raise ValueError("values should be within bounds") + + # otherwise there are four possibilities + + # first if from=1 and to=lw then + if from_i == 0 and to_j == lw: + return by + elif from_i == 0: # second if from_i=1 (and to_j < lw) then + return by*self.subword(to_j, lw) + elif to_j == lw: # third if to_j=1 (and from_i > 1) then + return self.subword(0, from_i)*by + else: # finally + return self.subword(0, from_i)*by*self.subword(to_j, lw) + + def is_cyclically_reduced(self): + r"""Returns whether the word is cyclically reduced or not. 
+ A word is cyclically reduced if by forming the cycle of the + word, the word is not reduced, i.e a word w = `a_1 ... a_n` + is called cyclically reduced if `a_1 \ne a_n^{-1}`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> (x**2*y**-1*x**-1).is_cyclically_reduced() + False + >>> (y*x**2*y**2).is_cyclically_reduced() + True + + """ + if not self: + return True + return self[0] != self[-1]**-1 + + def identity_cyclic_reduction(self): + """Return a unique cyclically reduced version of the word. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> (x**2*y**2*x**-1).identity_cyclic_reduction() + x*y**2 + >>> (x**-3*y**-1*x**5).identity_cyclic_reduction() + x**2*y**-1 + + References + ========== + + .. [1] https://planetmath.org/cyclicallyreduced + + """ + word = self.copy() + group = self.group + while not word.is_cyclically_reduced(): + exp1 = word.exponent_syllable(0) + exp2 = word.exponent_syllable(-1) + r = exp1 + exp2 + if r == 0: + rep = word.array_form[1: word.number_syllables() - 1] + else: + rep = ((word.generator_syllable(0), exp1 + exp2),) + \ + word.array_form[1: word.number_syllables() - 1] + word = group.dtype(rep) + return word + + def cyclic_reduction(self, removed=False): + """Return a cyclically reduced version of the word. Unlike + `identity_cyclic_reduction`, this will not cyclically permute + the reduced word - just remove the "unreduced" bits on either + side of it. Compare the examples with those of + `identity_cyclic_reduction`. + + When `removed` is `True`, return a tuple `(word, r)` where + self `r` is such that before the reduction the word was either + `r*word*r**-1`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> (x**2*y**2*x**-1).cyclic_reduction() + x*y**2 + >>> (x**-3*y**-1*x**5).cyclic_reduction() + y**-1*x**2 + >>> (x**-3*y**-1*x**5).cyclic_reduction(removed=True) + (y**-1*x**2, x**-3) + + """ + word = self.copy() + g = self.group.identity + while not word.is_cyclically_reduced(): + exp1 = abs(word.exponent_syllable(0)) + exp2 = abs(word.exponent_syllable(-1)) + exp = min(exp1, exp2) + start = word[0]**abs(exp) + end = word[-1]**abs(exp) + word = start**-1*word*end**-1 + g = g*start + if removed: + return word, g + return word + + def power_of(self, other): + ''' + Check if `self == other**n` for some integer n. 
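# --- Illustrative sketch (not part of the original free_groups.py) ---
# power_of also recognises negative powers, and cyclic_reduction(removed=True)
# hands back the conjugating piece that was stripped off.
from sympy.combinatorics import free_group

F, x, y = free_group("x, y")
assert ((x*y)**-3).power_of(x*y)                 # self == (x*y)**(-3)
word, r = (x**-2 * y**3 * x**2).cyclic_reduction(removed=True)
assert word == y**3 and r == x**-2
assert r * word * r**-1 == x**-2 * y**3 * x**2   # reassembles the original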
+ + Examples + ======== + + >>> from sympy.combinatorics import free_group + >>> F, x, y = free_group("x, y") + >>> ((x*y)**2).power_of(x*y) + True + >>> (x**-3*y**-2*x**3).power_of(x**-3*y*x**3) + True + + ''' + if self.is_identity: + return True + + l = len(other) + if l == 1: + # self has to be a power of one generator + gens = self.contains_generators() + s = other in gens or other**-1 in gens + return len(gens) == 1 and s + + # if self is not cyclically reduced and it is a power of other, + # other isn't cyclically reduced and the parts removed during + # their reduction must be equal + reduced, r1 = self.cyclic_reduction(removed=True) + if not r1.is_identity: + other, r2 = other.cyclic_reduction(removed=True) + if r1 == r2: + return reduced.power_of(other) + return False + + if len(self) < l or len(self) % l: + return False + + prefix = self.subword(0, l) + if prefix == other or prefix**-1 == other: + rest = self.subword(l, len(self)) + return rest.power_of(other) + return False + + +def letter_form_to_array_form(array_form, group): + """ + This method converts a list given with possible repetitions of elements in + it. It returns a new list such that repetitions of consecutive elements is + removed and replace with a tuple element of size two such that the first + index contains `value` and the second index contains the number of + consecutive repetitions of `value`. + + """ + a = list(array_form[:]) + new_array = [] + n = 1 + symbols = group.symbols + for i in range(len(a)): + if i == len(a) - 1: + if a[i] == a[i - 1]: + if (-a[i]) in symbols: + new_array.append((-a[i], -n)) + else: + new_array.append((a[i], n)) + else: + if (-a[i]) in symbols: + new_array.append((-a[i], -1)) + else: + new_array.append((a[i], 1)) + return new_array + elif a[i] == a[i + 1]: + n += 1 + else: + if (-a[i]) in symbols: + new_array.append((-a[i], -n)) + else: + new_array.append((a[i], n)) + n = 1 + + +def zero_mul_simp(l, index): + """Used to combine two reduced words.""" + while index >=0 and index < len(l) - 1 and l[index][0] == l[index + 1][0]: + exp = l[index][1] + l[index + 1][1] + base = l[index][0] + l[index] = (base, exp) + del l[index + 1] + if l[index][1] == 0: + del l[index] + index -= 1 diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/galois.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/galois.py new file mode 100644 index 0000000000000000000000000000000000000000..0666884676b367dd3fa4b49535cfbaee72b2eac9 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/galois.py @@ -0,0 +1,611 @@ +r""" +Construct transitive subgroups of symmetric groups, useful in Galois theory. + +Besides constructing instances of the :py:class:`~.PermutationGroup` class to +represent the transitive subgroups of $S_n$ for small $n$, this module provides +*names* for these groups. + +In some applications, it may be preferable to know the name of a group, +rather than receive an instance of the :py:class:`~.PermutationGroup` +class, and then have to do extra work to determine which group it is, by +checking various properties. + +Names are instances of ``Enum`` classes defined in this module. With a name in +hand, the name's ``get_perm_group`` method can then be used to retrieve a +:py:class:`~.PermutationGroup`. + +The names used for groups in this module are taken from [1]. + +References +========== + +.. [1] Cohen, H. *A Course in Computational Algebraic Number Theory*. 
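# --- Illustrative sketch (not part of the original galois.py) ---
# A group *name* is an Enum member; get_perm_group() turns it into a concrete
# PermutationGroup.  Uses only the classes defined below in this module.
from sympy.combinatorics.galois import S4TransitiveSubgroups

G = S4TransitiveSubgroups.D4.get_perm_group()
assert G.order() == 8 and G.degree == 4 and G.is_transitive()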
+ +""" + +from collections import defaultdict +from enum import Enum +import itertools + +from sympy.combinatorics.named_groups import ( + SymmetricGroup, AlternatingGroup, CyclicGroup, DihedralGroup, + set_symmetric_group_properties, set_alternating_group_properties, +) +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.combinatorics.permutations import Permutation + + +class S1TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S1. + """ + S1 = "S1" + + def get_perm_group(self): + return SymmetricGroup(1) + + +class S2TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S2. + """ + S2 = "S2" + + def get_perm_group(self): + return SymmetricGroup(2) + + +class S3TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S3. + """ + A3 = "A3" + S3 = "S3" + + def get_perm_group(self): + if self == S3TransitiveSubgroups.A3: + return AlternatingGroup(3) + elif self == S3TransitiveSubgroups.S3: + return SymmetricGroup(3) + + +class S4TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S4. + """ + C4 = "C4" + V = "V" + D4 = "D4" + A4 = "A4" + S4 = "S4" + + def get_perm_group(self): + if self == S4TransitiveSubgroups.C4: + return CyclicGroup(4) + elif self == S4TransitiveSubgroups.V: + return four_group() + elif self == S4TransitiveSubgroups.D4: + return DihedralGroup(4) + elif self == S4TransitiveSubgroups.A4: + return AlternatingGroup(4) + elif self == S4TransitiveSubgroups.S4: + return SymmetricGroup(4) + + +class S5TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S5. + """ + C5 = "C5" + D5 = "D5" + M20 = "M20" + A5 = "A5" + S5 = "S5" + + def get_perm_group(self): + if self == S5TransitiveSubgroups.C5: + return CyclicGroup(5) + elif self == S5TransitiveSubgroups.D5: + return DihedralGroup(5) + elif self == S5TransitiveSubgroups.M20: + return M20() + elif self == S5TransitiveSubgroups.A5: + return AlternatingGroup(5) + elif self == S5TransitiveSubgroups.S5: + return SymmetricGroup(5) + + +class S6TransitiveSubgroups(Enum): + """ + Names for the transitive subgroups of S6. + """ + C6 = "C6" + S3 = "S3" + D6 = "D6" + A4 = "A4" + G18 = "G18" + A4xC2 = "A4 x C2" + S4m = "S4-" + S4p = "S4+" + G36m = "G36-" + G36p = "G36+" + S4xC2 = "S4 x C2" + PSL2F5 = "PSL2(F5)" + G72 = "G72" + PGL2F5 = "PGL2(F5)" + A6 = "A6" + S6 = "S6" + + def get_perm_group(self): + if self == S6TransitiveSubgroups.C6: + return CyclicGroup(6) + elif self == S6TransitiveSubgroups.S3: + return S3_in_S6() + elif self == S6TransitiveSubgroups.D6: + return DihedralGroup(6) + elif self == S6TransitiveSubgroups.A4: + return A4_in_S6() + elif self == S6TransitiveSubgroups.G18: + return G18() + elif self == S6TransitiveSubgroups.A4xC2: + return A4xC2() + elif self == S6TransitiveSubgroups.S4m: + return S4m() + elif self == S6TransitiveSubgroups.S4p: + return S4p() + elif self == S6TransitiveSubgroups.G36m: + return G36m() + elif self == S6TransitiveSubgroups.G36p: + return G36p() + elif self == S6TransitiveSubgroups.S4xC2: + return S4xC2() + elif self == S6TransitiveSubgroups.PSL2F5: + return PSL2F5() + elif self == S6TransitiveSubgroups.G72: + return G72() + elif self == S6TransitiveSubgroups.PGL2F5: + return PGL2F5() + elif self == S6TransitiveSubgroups.A6: + return AlternatingGroup(6) + elif self == S6TransitiveSubgroups.S6: + return SymmetricGroup(6) + + +def four_group(): + """ + Return a representation of the Klein four-group as a transitive subgroup + of S4. 
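# --- Illustrative sketch (not part of the original galois.py) ---
# The group returned here is the Klein four-group V: order 4, abelian,
# transitive on the 4 points, and with every non-identity element of order 2
# (which is what distinguishes it from C4).
from sympy.combinatorics.galois import four_group

V = four_group()
assert V.order() == 4 and V.is_abelian and V.is_transitive()
assert all(g.order() in (1, 2) for g in V.elements)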
+ """ + return PermutationGroup( + Permutation(0, 1)(2, 3), + Permutation(0, 2)(1, 3) + ) + + +def M20(): + """ + Return a representation of the metacyclic group M20, a transitive subgroup + of S5 that is one of the possible Galois groups for polys of degree 5. + + Notes + ===== + + See [1], Page 323. + + """ + G = PermutationGroup(Permutation(0, 1, 2, 3, 4), Permutation(1, 2, 4, 3)) + G._degree = 5 + G._order = 20 + G._is_transitive = True + G._is_sym = False + G._is_alt = False + G._is_cyclic = False + G._is_dihedral = False + return G + + +def S3_in_S6(): + """ + Return a representation of S3 as a transitive subgroup of S6. + + Notes + ===== + + The representation is found by viewing the group as the symmetries of a + triangular prism. + + """ + G = PermutationGroup(Permutation(0, 1, 2)(3, 4, 5), Permutation(0, 3)(2, 4)(1, 5)) + set_symmetric_group_properties(G, 3, 6) + return G + + +def A4_in_S6(): + """ + Return a representation of A4 as a transitive subgroup of S6. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + G = PermutationGroup(Permutation(0, 4, 5)(1, 3, 2), Permutation(0, 1, 2)(3, 5, 4)) + set_alternating_group_properties(G, 4, 6) + return G + + +def S4m(): + """ + Return a representation of the S4- transitive subgroup of S6. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + G = PermutationGroup(Permutation(1, 4, 5, 3), Permutation(0, 4)(1, 5)(2, 3)) + set_symmetric_group_properties(G, 4, 6) + return G + + +def S4p(): + """ + Return a representation of the S4+ transitive subgroup of S6. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + G = PermutationGroup(Permutation(0, 2, 4, 1)(3, 5), Permutation(0, 3)(4, 5)) + set_symmetric_group_properties(G, 4, 6) + return G + + +def A4xC2(): + """ + Return a representation of the (A4 x C2) transitive subgroup of S6. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + return PermutationGroup( + Permutation(0, 4, 5)(1, 3, 2), Permutation(0, 1, 2)(3, 5, 4), + Permutation(5)(2, 4)) + + +def S4xC2(): + """ + Return a representation of the (S4 x C2) transitive subgroup of S6. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + return PermutationGroup( + Permutation(1, 4, 5, 3), Permutation(0, 4)(1, 5)(2, 3), + Permutation(1, 4)(3, 5)) + + +def G18(): + """ + Return a representation of the group G18, a transitive subgroup of S6 + isomorphic to the semidirect product of C3^2 with C2. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + return PermutationGroup( + Permutation(5)(0, 1, 2), Permutation(3, 4, 5), + Permutation(0, 4)(1, 5)(2, 3)) + + +def G36m(): + """ + Return a representation of the group G36-, a transitive subgroup of S6 + isomorphic to the semidirect product of C3^2 with C2^2. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + return PermutationGroup( + Permutation(5)(0, 1, 2), Permutation(3, 4, 5), + Permutation(1, 2)(3, 5), Permutation(0, 4)(1, 5)(2, 3)) + + +def G36p(): + """ + Return a representation of the group G36+, a transitive subgroup of S6 + isomorphic to the semidirect product of C3^2 with C4. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. 
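# --- Illustrative sketch (not part of the original galois.py) ---
# Quick sanity checks on some of the hand-coded transitive subgroups of S6
# defined in this module; S4- lies outside A6 while S4+ lies inside it.
from sympy.combinatorics.galois import S4m, S4p, G18
from sympy.combinatorics.named_groups import AlternatingGroup

A6 = AlternatingGroup(6)
assert S4m().order() == 24 and not S4m().is_subgroup(A6)
assert S4p().order() == 24 and S4p().is_subgroup(A6)
assert G18().order() == 18 and G18().is_transitive()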
+ + """ + return PermutationGroup( + Permutation(5)(0, 1, 2), Permutation(3, 4, 5), + Permutation(0, 5, 2, 3)(1, 4)) + + +def G72(): + """ + Return a representation of the group G72, a transitive subgroup of S6 + isomorphic to the semidirect product of C3^2 with D4. + + Notes + ===== + + See [1], Page 325. + + """ + return PermutationGroup( + Permutation(5)(0, 1, 2), + Permutation(0, 4, 1, 3)(2, 5), Permutation(0, 3)(1, 4)(2, 5)) + + +def PSL2F5(): + r""" + Return a representation of the group $PSL_2(\mathbb{F}_5)$, as a transitive + subgroup of S6, isomorphic to $A_5$. + + Notes + ===== + + This was computed using :py:func:`~.find_transitive_subgroups_of_S6`. + + """ + G = PermutationGroup( + Permutation(0, 4, 5)(1, 3, 2), Permutation(0, 4, 3, 1, 5)) + set_alternating_group_properties(G, 5, 6) + return G + + +def PGL2F5(): + r""" + Return a representation of the group $PGL_2(\mathbb{F}_5)$, as a transitive + subgroup of S6, isomorphic to $S_5$. + + Notes + ===== + + See [1], Page 325. + + """ + G = PermutationGroup( + Permutation(0, 1, 2, 3, 4), Permutation(0, 5)(1, 2)(3, 4)) + set_symmetric_group_properties(G, 5, 6) + return G + + +def find_transitive_subgroups_of_S6(*targets, print_report=False): + r""" + Search for certain transitive subgroups of $S_6$. + + The symmetric group $S_6$ has 16 different transitive subgroups, up to + conjugacy. Some are more easily constructed than others. For example, the + dihedral group $D_6$ is immediately found, but it is not at all obvious how + to realize $S_4$ or $S_5$ *transitively* within $S_6$. + + In some cases there are well-known constructions that can be used. For + example, $S_5$ is isomorphic to $PGL_2(\mathbb{F}_5)$, which acts in a + natural way on the projective line $P^1(\mathbb{F}_5)$, a set of order 6. + + In absence of such special constructions however, we can simply search for + generators. For example, transitive instances of $A_4$ and $S_4$ can be + found within $S_6$ in this way. + + Once we are engaged in such searches, it may then be easier (if less + elegant) to find even those groups like $S_5$ that do have special + constructions, by mere search. + + This function locates generators for transitive instances in $S_6$ of the + following subgroups: + + * $A_4$ + * $S_4^-$ ($S_4$ not contained within $A_6$) + * $S_4^+$ ($S_4$ contained within $A_6$) + * $A_4 \times C_2$ + * $S_4 \times C_2$ + * $G_{18} = C_3^2 \rtimes C_2$ + * $G_{36}^- = C_3^2 \rtimes C_2^2$ + * $G_{36}^+ = C_3^2 \rtimes C_4$ + * $G_{72} = C_3^2 \rtimes D_4$ + * $A_5$ + * $S_5$ + + Note: Each of these groups also has a dedicated function in this module + that returns the group immediately, using generators that were found by + this search procedure. + + The search procedure serves as a record of how these generators were + found. Also, due to randomness in the generation of the elements of + permutation groups, it can be called again, in order to (probably) get + different generators for the same groups. + + Parameters + ========== + + targets : list of :py:class:`~.S6TransitiveSubgroups` values + The groups you want to find. + + print_report : bool (default False) + If True, print to stdout the generators found for each group. + + Returns + ======= + + dict + mapping each name in *targets* to the :py:class:`~.PermutationGroup` + that was found + + References + ========== + + .. [2] https://en.wikipedia.org/wiki/Projective_linear_group#Exceptional_isomorphisms + .. 
[3] https://en.wikipedia.org/wiki/Automorphisms_of_the_symmetric_and_alternating_groups#PGL(2,5) + + """ + def elts_by_order(G): + """Sort the elements of a group by their order. """ + elts = defaultdict(list) + for g in G.elements: + elts[g.order()].append(g) + return elts + + def order_profile(G, name=None): + """Determine how many elements a group has, of each order. """ + elts = elts_by_order(G) + profile = {o:len(e) for o, e in elts.items()} + if name: + print(f'{name}: ' + ' '.join(f'{len(profile[r])}@{r}' for r in sorted(profile.keys()))) + return profile + + S6 = SymmetricGroup(6) + A6 = AlternatingGroup(6) + S6_by_order = elts_by_order(S6) + + def search(existing_gens, needed_gen_orders, order, alt=None, profile=None, anti_profile=None): + """ + Find a transitive subgroup of S6. + + Parameters + ========== + + existing_gens : list of Permutation + Optionally empty list of generators that must be in the group. + + needed_gen_orders : list of positive int + Nonempty list of the orders of the additional generators that are + to be found. + + order: int + The order of the group being sought. + + alt: bool, None + If True, require the group to be contained in A6. + If False, require the group not to be contained in A6. + + profile : dict + If given, the group's order profile must equal this. + + anti_profile : dict + If given, the group's order profile must *not* equal this. + + """ + for gens in itertools.product(*[S6_by_order[n] for n in needed_gen_orders]): + if len(set(gens)) < len(gens): + continue + G = PermutationGroup(existing_gens + list(gens)) + if G.order() == order and G.is_transitive(): + if alt is not None and G.is_subgroup(A6) != alt: + continue + if profile and order_profile(G) != profile: + continue + if anti_profile and order_profile(G) == anti_profile: + continue + return G + + def match_known_group(G, alt=None): + needed = [g.order() for g in G.generators] + return search([], needed, G.order(), alt=alt, profile=order_profile(G)) + + found = {} + + def finish_up(name, G): + found[name] = G + if print_report: + print("=" * 40) + print(f"{name}:") + print(G.generators) + + if S6TransitiveSubgroups.A4 in targets or S6TransitiveSubgroups.A4xC2 in targets: + A4_in_S6 = match_known_group(AlternatingGroup(4)) + finish_up(S6TransitiveSubgroups.A4, A4_in_S6) + + if S6TransitiveSubgroups.S4m in targets or S6TransitiveSubgroups.S4xC2 in targets: + S4m_in_S6 = match_known_group(SymmetricGroup(4), alt=False) + finish_up(S6TransitiveSubgroups.S4m, S4m_in_S6) + + if S6TransitiveSubgroups.S4p in targets: + S4p_in_S6 = match_known_group(SymmetricGroup(4), alt=True) + finish_up(S6TransitiveSubgroups.S4p, S4p_in_S6) + + if S6TransitiveSubgroups.A4xC2 in targets: + A4xC2_in_S6 = search(A4_in_S6.generators, [2], 24, anti_profile=order_profile(SymmetricGroup(4))) + finish_up(S6TransitiveSubgroups.A4xC2, A4xC2_in_S6) + + if S6TransitiveSubgroups.S4xC2 in targets: + S4xC2_in_S6 = search(S4m_in_S6.generators, [2], 48) + finish_up(S6TransitiveSubgroups.S4xC2, S4xC2_in_S6) + + # For the normal factor N = C3^2 in any of the G_n subgroups, we take one + # obvious instance of C3^2 in S6: + N_gens = [Permutation(5)(0, 1, 2), Permutation(5)(3, 4, 5)] + + if S6TransitiveSubgroups.G18 in targets: + G18_in_S6 = search(N_gens, [2], 18) + finish_up(S6TransitiveSubgroups.G18, G18_in_S6) + + if S6TransitiveSubgroups.G36m in targets: + G36m_in_S6 = search(N_gens, [2, 2], 36, alt=False) + finish_up(S6TransitiveSubgroups.G36m, G36m_in_S6) + + if S6TransitiveSubgroups.G36p in targets: + G36p_in_S6 = 
search(N_gens, [4], 36, alt=True) + finish_up(S6TransitiveSubgroups.G36p, G36p_in_S6) + + if S6TransitiveSubgroups.G72 in targets: + G72_in_S6 = search(N_gens, [4, 2], 72) + finish_up(S6TransitiveSubgroups.G72, G72_in_S6) + + # The PSL2(F5) and PGL2(F5) subgroups are isomorphic to A5 and S5, resp. + + if S6TransitiveSubgroups.PSL2F5 in targets: + PSL2F5_in_S6 = match_known_group(AlternatingGroup(5)) + finish_up(S6TransitiveSubgroups.PSL2F5, PSL2F5_in_S6) + + if S6TransitiveSubgroups.PGL2F5 in targets: + PGL2F5_in_S6 = match_known_group(SymmetricGroup(5)) + finish_up(S6TransitiveSubgroups.PGL2F5, PGL2F5_in_S6) + + # There is little need to "search" for any of the groups C6, S3, D6, A6, + # or S6, since they all have obvious realizations within S6. However, we + # support them here just in case a random representation is desired. + + if S6TransitiveSubgroups.C6 in targets: + C6 = match_known_group(CyclicGroup(6)) + finish_up(S6TransitiveSubgroups.C6, C6) + + if S6TransitiveSubgroups.S3 in targets: + S3 = match_known_group(SymmetricGroup(3)) + finish_up(S6TransitiveSubgroups.S3, S3) + + if S6TransitiveSubgroups.D6 in targets: + D6 = match_known_group(DihedralGroup(6)) + finish_up(S6TransitiveSubgroups.D6, D6) + + if S6TransitiveSubgroups.A6 in targets: + A6 = match_known_group(A6) + finish_up(S6TransitiveSubgroups.A6, A6) + + if S6TransitiveSubgroups.S6 in targets: + S6 = match_known_group(S6) + finish_up(S6TransitiveSubgroups.S6, S6) + + return found diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/generators.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/generators.py new file mode 100644 index 0000000000000000000000000000000000000000..9f136502d4e082e6c2554e7fb294d0036c5b0034 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/generators.py @@ -0,0 +1,302 @@ +from sympy.combinatorics.permutations import Permutation +from sympy.core.symbol import symbols +from sympy.matrices import Matrix +from sympy.utilities.iterables import variations, rotate_left + + +def symmetric(n): + """ + Generates the symmetric group of order n, Sn. + + Examples + ======== + + >>> from sympy.combinatorics.generators import symmetric + >>> list(symmetric(3)) + [(2), (1 2), (2)(0 1), (0 1 2), (0 2 1), (0 2)] + """ + for perm in variations(range(n), n): + yield Permutation(perm) + + +def cyclic(n): + """ + Generates the cyclic group of order n, Cn. + + Examples + ======== + + >>> from sympy.combinatorics.generators import cyclic + >>> list(cyclic(5)) + [(4), (0 1 2 3 4), (0 2 4 1 3), + (0 3 1 4 2), (0 4 3 2 1)] + + See Also + ======== + + dihedral + """ + gen = list(range(n)) + for i in range(n): + yield Permutation(gen) + gen = rotate_left(gen, 1) + + +def alternating(n): + """ + Generates the alternating group of order n, An. + + Examples + ======== + + >>> from sympy.combinatorics.generators import alternating + >>> list(alternating(3)) + [(2), (0 1 2), (0 2 1)] + """ + for perm in variations(range(n), n): + p = Permutation(perm) + if p.is_even: + yield p + + +def dihedral(n): + """ + Generates the dihedral group of order 2n, Dn. + + The result is given as a subgroup of Sn, except for the special cases n=1 + (the group S2) and n=2 (the Klein 4-group) where that's not possible + and embeddings in S2 and S4 respectively are given. 
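# --- Illustrative sketch (not part of the original generators.py) ---
# Each generator function yields the whole group element by element, so the
# counts below are simply the group orders.
from sympy.combinatorics.generators import symmetric, cyclic, alternating, dihedral

assert sum(1 for _ in symmetric(4)) == 24
assert sum(1 for _ in alternating(4)) == 12
assert sum(1 for _ in cyclic(6)) == 6
assert sum(1 for _ in dihedral(5)) == 10   # 2n elements for n >= 3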
+ + Examples + ======== + + >>> from sympy.combinatorics.generators import dihedral + >>> list(dihedral(3)) + [(2), (0 2), (0 1 2), (1 2), (0 2 1), (2)(0 1)] + + See Also + ======== + + cyclic + """ + if n == 1: + yield Permutation([0, 1]) + yield Permutation([1, 0]) + elif n == 2: + yield Permutation([0, 1, 2, 3]) + yield Permutation([1, 0, 3, 2]) + yield Permutation([2, 3, 0, 1]) + yield Permutation([3, 2, 1, 0]) + else: + gen = list(range(n)) + for i in range(n): + yield Permutation(gen) + yield Permutation(gen[::-1]) + gen = rotate_left(gen, 1) + + +def rubik_cube_generators(): + """Return the permutations of the 3x3 Rubik's cube, see + https://www.gap-system.org/Doc/Examples/rubik.html + """ + a = [ + [(1, 3, 8, 6), (2, 5, 7, 4), (9, 33, 25, 17), (10, 34, 26, 18), + (11, 35, 27, 19)], + [(9, 11, 16, 14), (10, 13, 15, 12), (1, 17, 41, 40), (4, 20, 44, 37), + (6, 22, 46, 35)], + [(17, 19, 24, 22), (18, 21, 23, 20), (6, 25, 43, 16), (7, 28, 42, 13), + (8, 30, 41, 11)], + [(25, 27, 32, 30), (26, 29, 31, 28), (3, 38, 43, 19), (5, 36, 45, 21), + (8, 33, 48, 24)], + [(33, 35, 40, 38), (34, 37, 39, 36), (3, 9, 46, 32), (2, 12, 47, 29), + (1, 14, 48, 27)], + [(41, 43, 48, 46), (42, 45, 47, 44), (14, 22, 30, 38), + (15, 23, 31, 39), (16, 24, 32, 40)] + ] + return [Permutation([[i - 1 for i in xi] for xi in x], size=48) for x in a] + + +def rubik(n): + """Return permutations for an nxn Rubik's cube. + + Permutations returned are for rotation of each of the slice + from the face up to the last face for each of the 3 sides (in this order): + front, right and bottom. Hence, the first n - 1 permutations are for the + slices from the front. + """ + + if n < 2: + raise ValueError('dimension of cube must be > 1') + + # 1-based reference to rows and columns in Matrix + def getr(f, i): + return faces[f].col(n - i) + + def getl(f, i): + return faces[f].col(i - 1) + + def getu(f, i): + return faces[f].row(i - 1) + + def getd(f, i): + return faces[f].row(n - i) + + def setr(f, i, s): + faces[f][:, n - i] = Matrix(n, 1, s) + + def setl(f, i, s): + faces[f][:, i - 1] = Matrix(n, 1, s) + + def setu(f, i, s): + faces[f][i - 1, :] = Matrix(1, n, s) + + def setd(f, i, s): + faces[f][n - i, :] = Matrix(1, n, s) + + # motion of a single face + def cw(F, r=1): + for _ in range(r): + face = faces[F] + rv = [] + for c in range(n): + for r in range(n - 1, -1, -1): + rv.append(face[r, c]) + faces[F] = Matrix(n, n, rv) + + def ccw(F): + cw(F, 3) + + # motion of plane i from the F side; + # fcw(0) moves the F face, fcw(1) moves the plane + # just behind the front face, etc... 
+ def fcw(i, r=1): + for _ in range(r): + if i == 0: + cw(F) + i += 1 + temp = getr(L, i) + setr(L, i, list(getu(D, i))) + setu(D, i, list(reversed(getl(R, i)))) + setl(R, i, list(getd(U, i))) + setd(U, i, list(reversed(temp))) + i -= 1 + + def fccw(i): + fcw(i, 3) + + # motion of the entire cube from the F side + def FCW(r=1): + for _ in range(r): + cw(F) + ccw(B) + cw(U) + t = faces[U] + cw(L) + faces[U] = faces[L] + cw(D) + faces[L] = faces[D] + cw(R) + faces[D] = faces[R] + faces[R] = t + + def FCCW(): + FCW(3) + + # motion of the entire cube from the U side + def UCW(r=1): + for _ in range(r): + cw(U) + ccw(D) + t = faces[F] + faces[F] = faces[R] + faces[R] = faces[B] + faces[B] = faces[L] + faces[L] = t + + def UCCW(): + UCW(3) + + # defining the permutations for the cube + + U, F, R, B, L, D = names = symbols('U, F, R, B, L, D') + + # the faces are represented by nxn matrices + faces = {} + count = 0 + for fi in range(6): + f = [] + for a in range(n**2): + f.append(count) + count += 1 + faces[names[fi]] = Matrix(n, n, f) + + # this will either return the value of the current permutation + # (show != 1) or else append the permutation to the group, g + def perm(show=0): + # add perm to the list of perms + p = [] + for f in names: + p.extend(faces[f]) + if show: + return p + g.append(Permutation(p)) + + g = [] # container for the group's permutations + I = list(range(6*n**2)) # the identity permutation used for checking + + # define permutations corresponding to cw rotations of the planes + # up TO the last plane from that direction; by not including the + # last plane, the orientation of the cube is maintained. + + # F slices + for i in range(n - 1): + fcw(i) + perm() + fccw(i) # restore + assert perm(1) == I + + # R slices + # bring R to front + UCW() + for i in range(n - 1): + fcw(i) + # put it back in place + UCCW() + # record + perm() + # restore + # bring face to front + UCW() + fccw(i) + # restore + UCCW() + assert perm(1) == I + + # D slices + # bring up bottom + FCW() + UCCW() + FCCW() + for i in range(n - 1): + # turn strip + fcw(i) + # put bottom back on the bottom + FCW() + UCW() + FCCW() + # record + perm() + # restore + # bring up bottom + FCW() + UCCW() + FCCW() + # turn strip + fccw(i) + # put bottom back on the bottom + FCW() + UCW() + FCCW() + assert perm(1) == I + + return g diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_constructs.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_constructs.py new file mode 100644 index 0000000000000000000000000000000000000000..a5c16ec254191646b26eee869763e2926e187da5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_constructs.py @@ -0,0 +1,61 @@ +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.combinatorics.permutations import Permutation +from sympy.utilities.iterables import uniq + +_af_new = Permutation._af_new + + +def DirectProduct(*groups): + """ + Returns the direct product of several groups as a permutation group. + + Explanation + =========== + + This is implemented much like the __mul__ procedure for taking the direct + product of two permutation groups, but the idea of shifting the + generators is realized in the case of an arbitrary number of groups. + A call to DirectProduct(G1, G2, ..., Gn) is generally expected to be faster + than a call to G1*G2*...*Gn (and thus the need for this algorithm). 
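# --- Illustrative sketch (not part of the original group_constructs.py) ---
# DirectProduct of several groups agrees with repeated use of the binary
# direct product (PermutationGroup.__mul__ mentioned above); it is just built
# in one pass over shifted generators.
from sympy.combinatorics.group_constructs import DirectProduct
from sympy.combinatorics.named_groups import CyclicGroup

C3, C5 = CyclicGroup(3), CyclicGroup(5)
G = DirectProduct(C3, C5)
assert G.order() == 15 == (C3 * C5).order()
assert G.degree == 3 + 5 and G.is_abelian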
+ + Examples + ======== + + >>> from sympy.combinatorics.group_constructs import DirectProduct + >>> from sympy.combinatorics.named_groups import CyclicGroup + >>> C = CyclicGroup(4) + >>> G = DirectProduct(C, C, C) + >>> G.order() + 64 + + See Also + ======== + + sympy.combinatorics.perm_groups.PermutationGroup.__mul__ + + """ + degrees = [] + gens_count = [] + total_degree = 0 + total_gens = 0 + for group in groups: + current_deg = group.degree + current_num_gens = len(group.generators) + degrees.append(current_deg) + total_degree += current_deg + gens_count.append(current_num_gens) + total_gens += current_num_gens + array_gens = [] + for i in range(total_gens): + array_gens.append(list(range(total_degree))) + current_gen = 0 + current_deg = 0 + for i in range(len(gens_count)): + for j in range(current_gen, current_gen + gens_count[i]): + gen = ((groups[i].generators)[j - current_gen]).array_form + array_gens[j][current_deg:current_deg + degrees[i]] = \ + [x + current_deg for x in gen] + current_gen += gens_count[i] + current_deg += degrees[i] + perm_gens = list(uniq([_af_new(list(a)) for a in array_gens])) + return PermutationGroup(perm_gens, dups=False) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_numbers.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_numbers.py new file mode 100644 index 0000000000000000000000000000000000000000..4433d76d29636a974159ed67a53423d5702972d1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/group_numbers.py @@ -0,0 +1,118 @@ +from sympy.core import Integer, Pow, Mod +from sympy import factorint + + +def is_nilpotent_number(n): + """ + Check whether `n` is a nilpotent number. A number `n` is said to be + nilpotent if and only if every finite group of order `n` is nilpotent. + For more information see [1]_. + + Examples + ======== + + >>> from sympy.combinatorics.group_numbers import is_nilpotent_number + >>> from sympy import randprime + >>> is_nilpotent_number(21) + False + >>> is_nilpotent_number(randprime(1, 30)**12) + True + + References + ========== + + .. [1] Pakianathan, J., Shankar, K., *Nilpotent Numbers*, + The American Mathematical Monthly, 107(7), 631-634. + + + """ + if n <= 0 or int(n) != n: + raise ValueError("n must be a positive integer, not %i" % n) + + n = Integer(n) + prime_factors = list(factorint(n).items()) + is_nilpotent = True + for p_j, a_j in prime_factors: + for p_i, a_i in prime_factors: + if any([Mod(Pow(p_i, k), p_j) == 1 for k in range(1, a_i + 1)]): + is_nilpotent = False + break + if not is_nilpotent: + break + + return is_nilpotent + + +def is_abelian_number(n): + """ + Check whether `n` is an abelian number. A number `n` is said to be abelian + if and only if every finite group of order `n` is abelian. For more + information see [1]_. + + Examples + ======== + + >>> from sympy.combinatorics.group_numbers import is_abelian_number + >>> from sympy import randprime + >>> is_abelian_number(4) + True + >>> is_abelian_number(randprime(1, 2000)**2) + True + >>> is_abelian_number(60) + False + + References + ========== + + .. [1] Pakianathan, J., Shankar, K., *Nilpotent Numbers*, + The American Mathematical Monthly, 107(7), 631-634. 
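# --- Illustrative sketch (not part of the original group_numbers.py) ---
# Every prime power is a nilpotent number, 6 is not (S3 has order 6 and is
# not nilpotent), and 45 = 3**2 * 5 is even an abelian number.
from sympy.combinatorics.group_numbers import is_nilpotent_number, is_abelian_number

assert is_nilpotent_number(8)       # 2**3
assert not is_nilpotent_number(6)   # 3 == 1 (mod 2) blocks nilpotency
assert is_abelian_number(45)        # both prime exponents are below 3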
+ + + """ + if n <= 0 or int(n) != n: + raise ValueError("n must be a positive integer, not %i" % n) + + n = Integer(n) + if not is_nilpotent_number(n): + return False + + prime_factors = list(factorint(n).items()) + is_abelian = all(a_i < 3 for p_i, a_i in prime_factors) + return is_abelian + + +def is_cyclic_number(n): + """ + Check whether `n` is a cyclic number. A number `n` is said to be cyclic + if and only if every finite group of order `n` is cyclic. For more + information see [1]_. + + Examples + ======== + + >>> from sympy.combinatorics.group_numbers import is_cyclic_number + >>> from sympy import randprime + >>> is_cyclic_number(15) + True + >>> is_cyclic_number(randprime(1, 2000)**2) + False + >>> is_cyclic_number(4) + False + + References + ========== + + .. [1] Pakianathan, J., Shankar, K., *Nilpotent Numbers*, + The American Mathematical Monthly, 107(7), 631-634. + + """ + if n <= 0 or int(n) != n: + raise ValueError("n must be a positive integer, not %i" % n) + + n = Integer(n) + if not is_nilpotent_number(n): + return False + + prime_factors = list(factorint(n).items()) + is_cyclic = all(a_i < 2 for p_i, a_i in prime_factors) + return is_cyclic diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/homomorphisms.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/homomorphisms.py new file mode 100644 index 0000000000000000000000000000000000000000..95b43c501300617e44a251388f46f2a2f1e9fca1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/homomorphisms.py @@ -0,0 +1,549 @@ +import itertools +from sympy.combinatorics.fp_groups import FpGroup, FpSubgroup, simplify_presentation +from sympy.combinatorics.free_groups import FreeGroup +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.core.numbers import igcd +from sympy.ntheory.factor_ import totient +from sympy.core.singleton import S + +class GroupHomomorphism: + ''' + A class representing group homomorphisms. Instantiate using `homomorphism()`. + + References + ========== + + .. [1] Holt, D., Eick, B. and O'Brien, E. (2005). Handbook of computational group theory. + + ''' + + def __init__(self, domain, codomain, images): + self.domain = domain + self.codomain = codomain + self.images = images + self._inverses = None + self._kernel = None + self._image = None + + def _invs(self): + ''' + Return a dictionary with `{gen: inverse}` where `gen` is a rewriting + generator of `codomain` (e.g. strong generator for permutation groups) + and `inverse` is an element of its preimage + + ''' + image = self.image() + inverses = {} + for k in list(self.images.keys()): + v = self.images[k] + if not (v in inverses + or v.is_identity): + inverses[v] = k + if isinstance(self.codomain, PermutationGroup): + gens = image.strong_gens + else: + gens = image.generators + for g in gens: + if g in inverses or g.is_identity: + continue + w = self.domain.identity + if isinstance(self.codomain, PermutationGroup): + parts = image._strong_gens_slp[g][::-1] + else: + parts = g + for s in parts: + if s in inverses: + w = w*inverses[s] + else: + w = w*inverses[s**-1]**-1 + inverses[g] = w + + return inverses + + def invert(self, g): + ''' + Return an element of the preimage of ``g`` or of each element + of ``g`` if ``g`` is a list. + + Explanation + =========== + + If the codomain is an FpGroup, the inverse for equal + elements might not always be the same unless the FpGroup's + rewriting system is confluent. However, making a system + confluent can be time-consuming. 
If it's important, try + `self.codomain.make_confluent()` first. + + ''' + from sympy.combinatorics import Permutation + from sympy.combinatorics.free_groups import FreeGroupElement + if isinstance(g, (Permutation, FreeGroupElement)): + if isinstance(self.codomain, FpGroup): + g = self.codomain.reduce(g) + if self._inverses is None: + self._inverses = self._invs() + image = self.image() + w = self.domain.identity + if isinstance(self.codomain, PermutationGroup): + gens = image.generator_product(g)[::-1] + else: + gens = g + # the following can't be "for s in gens:" + # because that would be equivalent to + # "for s in gens.array_form:" when g is + # a FreeGroupElement. On the other hand, + # when you call gens by index, the generator + # (or inverse) at position i is returned. + for i in range(len(gens)): + s = gens[i] + if s.is_identity: + continue + if s in self._inverses: + w = w*self._inverses[s] + else: + w = w*self._inverses[s**-1]**-1 + return w + elif isinstance(g, list): + return [self.invert(e) for e in g] + + def kernel(self): + ''' + Compute the kernel of `self`. + + ''' + if self._kernel is None: + self._kernel = self._compute_kernel() + return self._kernel + + def _compute_kernel(self): + G = self.domain + G_order = G.order() + if G_order is S.Infinity: + raise NotImplementedError( + "Kernel computation is not implemented for infinite groups") + gens = [] + if isinstance(G, PermutationGroup): + K = PermutationGroup(G.identity) + else: + K = FpSubgroup(G, gens, normal=True) + i = self.image().order() + while K.order()*i != G_order: + r = G.random() + k = r*self.invert(self(r))**-1 + if k not in K: + gens.append(k) + if isinstance(G, PermutationGroup): + K = PermutationGroup(gens) + else: + K = FpSubgroup(G, gens, normal=True) + return K + + def image(self): + ''' + Compute the image of `self`. + + ''' + if self._image is None: + values = list(set(self.images.values())) + if isinstance(self.codomain, PermutationGroup): + self._image = self.codomain.subgroup(values) + else: + self._image = FpSubgroup(self.codomain, values) + return self._image + + def _apply(self, elem): + ''' + Apply `self` to `elem`. + + ''' + if elem not in self.domain: + if isinstance(elem, (list, tuple)): + return [self._apply(e) for e in elem] + raise ValueError("The supplied element does not belong to the domain") + if elem.is_identity: + return self.codomain.identity + else: + images = self.images + value = self.codomain.identity + if isinstance(self.domain, PermutationGroup): + gens = self.domain.generator_product(elem, original=True) + for g in gens: + if g in self.images: + value = images[g]*value + else: + value = images[g**-1]**-1*value + else: + i = 0 + for _, p in elem.array_form: + if p < 0: + g = elem[i]**-1 + else: + g = elem[i] + value = value*images[g]**p + i += abs(p) + return value + + def __call__(self, elem): + return self._apply(elem) + + def is_injective(self): + ''' + Check if the homomorphism is injective + + ''' + return self.kernel().order() == 1 + + def is_surjective(self): + ''' + Check if the homomorphism is surjective + + ''' + im = self.image().order() + oth = self.codomain.order() + if im is S.Infinity and oth is S.Infinity: + return None + else: + return im == oth + + def is_isomorphism(self): + ''' + Check if `self` is an isomorphism. + + ''' + return self.is_injective() and self.is_surjective() + + def is_trivial(self): + ''' + Check is `self` is a trivial homomorphism, i.e. all elements + are mapped to the identity. 
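# --- Illustrative sketch (not part of the original homomorphisms.py) ---
# The quotient map C4 -> C2 built from generator images; its kernel has
# order 2 and the map is onto but not injective.
from sympy.combinatorics.homomorphisms import homomorphism
from sympy.combinatorics.named_groups import CyclicGroup

G, H = CyclicGroup(4), CyclicGroup(2)
phi = homomorphism(G, H, G.generators, H.generators)
assert phi.is_surjective() and not phi.is_injective()
assert phi.kernel().order() == 2 and phi.image().order() == 2
assert not phi.is_trivial()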
+ + ''' + return self.image().order() == 1 + + def compose(self, other): + ''' + Return the composition of `self` and `other`, i.e. + the homomorphism phi such that for all g in the domain + of `other`, phi(g) = self(other(g)) + + ''' + if not other.image().is_subgroup(self.domain): + raise ValueError("The image of `other` must be a subgroup of " + "the domain of `self`") + images = {g: self(other(g)) for g in other.images} + return GroupHomomorphism(other.domain, self.codomain, images) + + def restrict_to(self, H): + ''' + Return the restriction of the homomorphism to the subgroup `H` + of the domain. + + ''' + if not isinstance(H, PermutationGroup) or not H.is_subgroup(self.domain): + raise ValueError("Given H is not a subgroup of the domain") + domain = H + images = {g: self(g) for g in H.generators} + return GroupHomomorphism(domain, self.codomain, images) + + def invert_subgroup(self, H): + ''' + Return the subgroup of the domain that is the inverse image + of the subgroup ``H`` of the homomorphism image + + ''' + if not H.is_subgroup(self.image()): + raise ValueError("Given H is not a subgroup of the image") + gens = [] + P = PermutationGroup(self.image().identity) + for h in H.generators: + h_i = self.invert(h) + if h_i not in P: + gens.append(h_i) + P = PermutationGroup(gens) + for k in self.kernel().generators: + if k*h_i not in P: + gens.append(k*h_i) + P = PermutationGroup(gens) + return P + +def homomorphism(domain, codomain, gens, images=(), check=True): + ''' + Create (if possible) a group homomorphism from the group ``domain`` + to the group ``codomain`` defined by the images of the domain's + generators ``gens``. ``gens`` and ``images`` can be either lists or tuples + of equal sizes. If ``gens`` is a proper subset of the group's generators, + the unspecified generators will be mapped to the identity. If the + images are not specified, a trivial homomorphism will be created. + + If the given images of the generators do not define a homomorphism, + an exception is raised. + + If ``check`` is ``False``, do not check whether the given images actually + define a homomorphism. + + ''' + if not isinstance(domain, (PermutationGroup, FpGroup, FreeGroup)): + raise TypeError("The domain must be a group") + if not isinstance(codomain, (PermutationGroup, FpGroup, FreeGroup)): + raise TypeError("The codomain must be a group") + + generators = domain.generators + if not all(g in generators for g in gens): + raise ValueError("The supplied generators must be a subset of the domain's generators") + if not all(g in codomain for g in images): + raise ValueError("The images must be elements of the codomain") + + if images and len(images) != len(gens): + raise ValueError("The number of images must be equal to the number of generators") + + gens = list(gens) + images = list(images) + + images.extend([codomain.identity]*(len(generators)-len(images))) + gens.extend([g for g in generators if g not in gens]) + images = dict(zip(gens,images)) + + if check and not _check_homomorphism(domain, codomain, images): + raise ValueError("The given images do not define a homomorphism") + return GroupHomomorphism(domain, codomain, images) + +def _check_homomorphism(domain, codomain, images): + """ + Check that a given mapping of generators to images defines a homomorphism. + + Parameters + ========== + domain : PermutationGroup, FpGroup, FreeGroup + codomain : PermutationGroup, FpGroup, FreeGroup + images : dict + The set of keys must be equal to domain.generators. + The values must be elements of the codomain. 
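# --- Illustrative sketch (not part of the original homomorphisms.py) ---
# The sign map S3 -> C2 written down with homomorphism(); its kernel is A3
# (order 3), and restricting it to A3 gives the trivial homomorphism.
from sympy.combinatorics.homomorphisms import homomorphism
from sympy.combinatorics.named_groups import SymmetricGroup, CyclicGroup, AlternatingGroup

S3, C2 = SymmetricGroup(3), CyclicGroup(2)
sign = homomorphism(S3, C2, S3.generators,
                    [C2.identity if g.is_even else C2.generators[0]
                     for g in S3.generators])
assert sign.kernel().order() == 3                            # the kernel is A3
assert sign.restrict_to(AlternatingGroup(3)).is_trivial()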
+ + """ + pres = domain if hasattr(domain, 'relators') else domain.presentation() + rels = pres.relators + gens = pres.generators + symbols = [g.ext_rep[0] for g in gens] + symbols_to_domain_generators = dict(zip(symbols, domain.generators)) + identity = codomain.identity + + def _image(r): + w = identity + for symbol, power in r.array_form: + g = symbols_to_domain_generators[symbol] + w *= images[g]**power + return w + + for r in rels: + if isinstance(codomain, FpGroup): + s = codomain.equals(_image(r), identity) + if s is None: + # only try to make the rewriting system + # confluent when it can't determine the + # truth of equality otherwise + success = codomain.make_confluent() + s = codomain.equals(_image(r), identity) + if s is None and not success: + raise RuntimeError("Can't determine if the images " + "define a homomorphism. Try increasing " + "the maximum number of rewriting rules " + "(group._rewriting_system.set_max(new_value); " + "the current value is stored in group._rewriting" + "_system.maxeqns)") + else: + s = _image(r).is_identity + if not s: + return False + return True + +def orbit_homomorphism(group, omega): + ''' + Return the homomorphism induced by the action of the permutation + group ``group`` on the set ``omega`` that is closed under the action. + + ''' + from sympy.combinatorics import Permutation + from sympy.combinatorics.named_groups import SymmetricGroup + codomain = SymmetricGroup(len(omega)) + identity = codomain.identity + omega = list(omega) + images = {g: identity*Permutation([omega.index(o^g) for o in omega]) for g in group.generators} + group._schreier_sims(base=omega) + H = GroupHomomorphism(group, codomain, images) + if len(group.basic_stabilizers) > len(omega): + H._kernel = group.basic_stabilizers[len(omega)] + else: + H._kernel = PermutationGroup([group.identity]) + return H + +def block_homomorphism(group, blocks): + ''' + Return the homomorphism induced by the action of the permutation + group ``group`` on the block system ``blocks``. The latter should be + of the same form as returned by the ``minimal_block`` method for + permutation groups, namely a list of length ``group.degree`` where + the i-th entry is a representative of the block i belongs to. + + ''' + from sympy.combinatorics import Permutation + from sympy.combinatorics.named_groups import SymmetricGroup + + n = len(blocks) + + # number the blocks; m is the total number, + # b is such that b[i] is the number of the block i belongs to, + # p is the list of length m such that p[i] is the representative + # of the i-th block + m = 0 + p = [] + b = [None]*n + for i in range(n): + if blocks[i] == i: + p.append(i) + b[i] = m + m += 1 + for i in range(n): + b[i] = b[blocks[i]] + + codomain = SymmetricGroup(m) + # the list corresponding to the identity permutation in codomain + identity = range(m) + images = {g: Permutation([b[p[i]^g] for i in identity]) for g in group.generators} + H = GroupHomomorphism(group, codomain, images) + return H + +def group_isomorphism(G, H, isomorphism=True): + ''' + Compute an isomorphism between 2 given groups. + + Parameters + ========== + + G : A finite ``FpGroup`` or a ``PermutationGroup``. + First group. + + H : A finite ``FpGroup`` or a ``PermutationGroup`` + Second group. + + isomorphism : bool + This is used to avoid the computation of homomorphism + when the user only wants to check if there exists + an isomorphism between the groups. + + Returns + ======= + + If isomorphism = False -- Returns a boolean. 
+ If isomorphism = True -- Returns a boolean and an isomorphism between `G` and `H`. + + Examples + ======== + + >>> from sympy.combinatorics import free_group, Permutation + >>> from sympy.combinatorics.perm_groups import PermutationGroup + >>> from sympy.combinatorics.fp_groups import FpGroup + >>> from sympy.combinatorics.homomorphisms import group_isomorphism + >>> from sympy.combinatorics.named_groups import DihedralGroup, AlternatingGroup + + >>> D = DihedralGroup(8) + >>> p = Permutation(0, 1, 2, 3, 4, 5, 6, 7) + >>> P = PermutationGroup(p) + >>> group_isomorphism(D, P) + (False, None) + + >>> F, a, b = free_group("a, b") + >>> G = FpGroup(F, [a**3, b**3, (a*b)**2]) + >>> H = AlternatingGroup(4) + >>> (check, T) = group_isomorphism(G, H) + >>> check + True + >>> T(b*a*b**-1*a**-1*b**-1) + (0 2 3) + + Notes + ===== + + Uses the approach suggested by Robert Tarjan to compute the isomorphism between two groups. + First, the generators of ``G`` are mapped to the elements of ``H`` and + we check if the mapping induces an isomorphism. + + ''' + if not isinstance(G, (PermutationGroup, FpGroup)): + raise TypeError("The group must be a PermutationGroup or an FpGroup") + if not isinstance(H, (PermutationGroup, FpGroup)): + raise TypeError("The group must be a PermutationGroup or an FpGroup") + + if isinstance(G, FpGroup) and isinstance(H, FpGroup): + G = simplify_presentation(G) + H = simplify_presentation(H) + # Two infinite FpGroups with the same generators are isomorphic + # when the relators are same but are ordered differently. + if G.generators == H.generators and (G.relators).sort() == (H.relators).sort(): + if not isomorphism: + return True + return (True, homomorphism(G, H, G.generators, H.generators)) + + # `_H` is the permutation group isomorphic to `H`. + _H = H + g_order = G.order() + h_order = H.order() + + if g_order is S.Infinity: + raise NotImplementedError("Isomorphism methods are not implemented for infinite groups.") + + if isinstance(H, FpGroup): + if h_order is S.Infinity: + raise NotImplementedError("Isomorphism methods are not implemented for infinite groups.") + _H, h_isomorphism = H._to_perm_group() + + if (g_order != h_order) or (G.is_abelian != H.is_abelian): + if not isomorphism: + return False + return (False, None) + + if not isomorphism: + # Two groups of the same cyclic numbered order + # are isomorphic to each other. + n = g_order + if (igcd(n, totient(n))) == 1: + return True + + # Match the generators of `G` with subsets of `_H` + gens = list(G.generators) + for subset in itertools.permutations(_H, len(gens)): + images = list(subset) + images.extend([_H.identity]*(len(G.generators)-len(images))) + _images = dict(zip(gens,images)) + if _check_homomorphism(G, _H, _images): + if isinstance(H, FpGroup): + images = h_isomorphism.invert(images) + T = homomorphism(G, H, G.generators, images, check=False) + if T.is_isomorphism(): + # It is a valid isomorphism + if not isomorphism: + return True + return (True, T) + + if not isomorphism: + return False + return (False, None) + +def is_isomorphic(G, H): + ''' + Check if the groups are isomorphic to each other + + Parameters + ========== + + G : A finite ``FpGroup`` or a ``PermutationGroup`` + First group. + + H : A finite ``FpGroup`` or a ``PermutationGroup`` + Second group. 
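A brief sketch (illustrative only; the chosen named groups are arbitrary) of ``is_isomorphic`` relying on the cheap checks in ``group_isomorphism`` above: the first answer follows because 15 is a cyclic number (``igcd(15, totient(15)) == 1``), the second because the two groups differ in abelianness.

>>> from sympy.combinatorics.named_groups import AbelianGroup, CyclicGroup, DihedralGroup
>>> from sympy.combinatorics.homomorphisms import is_isomorphic
>>> is_isomorphic(AbelianGroup(3, 5), CyclicGroup(15))
True
>>> is_isomorphic(CyclicGroup(6), DihedralGroup(3))
False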
+ + Returns + ======= + + boolean + ''' + return group_isomorphism(G, H, isomorphism=False) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/named_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/named_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..59f10c40ef716e3b644e00f936323e9f6936eb88 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/named_groups.py @@ -0,0 +1,332 @@ +from sympy.combinatorics.group_constructs import DirectProduct +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.combinatorics.permutations import Permutation + +_af_new = Permutation._af_new + + +def AbelianGroup(*cyclic_orders): + """ + Returns the direct product of cyclic groups with the given orders. + + Explanation + =========== + + According to the structure theorem for finite abelian groups ([1]), + every finite abelian group can be written as the direct product of + finitely many cyclic groups. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AbelianGroup + >>> AbelianGroup(3, 4) + PermutationGroup([ + (6)(0 1 2), + (3 4 5 6)]) + >>> _.is_group + True + + See Also + ======== + + DirectProduct + + References + ========== + + .. [1] https://groupprops.subwiki.org/wiki/Structure_theorem_for_finitely_generated_abelian_groups + + """ + groups = [] + degree = 0 + order = 1 + for size in cyclic_orders: + degree += size + order *= size + groups.append(CyclicGroup(size)) + G = DirectProduct(*groups) + G._is_abelian = True + G._degree = degree + G._order = order + + return G + + +def AlternatingGroup(n): + """ + Generates the alternating group on ``n`` elements as a permutation group. + + Explanation + =========== + + For ``n > 2``, the generators taken are ``(0 1 2), (0 1 2 ... n-1)`` for + ``n`` odd + and ``(0 1 2), (1 2 ... n-1)`` for ``n`` even (See [1], p.31, ex.6.9.). + After the group is generated, some of its basic properties are set. + The cases ``n = 1, 2`` are handled separately. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> G = AlternatingGroup(4) + >>> G.is_group + True + >>> a = list(G.generate_dimino()) + >>> len(a) + 12 + >>> all(perm.is_even for perm in a) + True + + See Also + ======== + + SymmetricGroup, CyclicGroup, DihedralGroup + + References + ========== + + .. [1] Armstrong, M. "Groups and Symmetry" + + """ + # small cases are special + if n in (1, 2): + return PermutationGroup([Permutation([0])]) + + a = list(range(n)) + a[0], a[1], a[2] = a[1], a[2], a[0] + gen1 = a + if n % 2: + a = list(range(1, n)) + a.append(0) + gen2 = a + else: + a = list(range(2, n)) + a.append(1) + a.insert(0, 0) + gen2 = a + gens = [gen1, gen2] + if gen1 == gen2: + gens = gens[:1] + G = PermutationGroup([_af_new(a) for a in gens], dups=False) + + set_alternating_group_properties(G, n, n) + G._is_alt = True + return G + + +def set_alternating_group_properties(G, n, degree): + """Set known properties of an alternating group. """ + if n < 4: + G._is_abelian = True + G._is_nilpotent = True + else: + G._is_abelian = False + G._is_nilpotent = False + if n < 5: + G._is_solvable = True + else: + G._is_solvable = False + G._degree = degree + G._is_transitive = True + G._is_dihedral = False + + +def CyclicGroup(n): + """ + Generates the cyclic group of order ``n`` as a permutation group. + + Explanation + =========== + + The generator taken is the ``n``-cycle ``(0 1 2 ... n-1)`` + (in cycle notation). 
After the group is generated, some of its basic + properties are set. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import CyclicGroup + >>> G = CyclicGroup(6) + >>> G.is_group + True + >>> G.order() + 6 + >>> list(G.generate_schreier_sims(af=True)) + [[0, 1, 2, 3, 4, 5], [1, 2, 3, 4, 5, 0], [2, 3, 4, 5, 0, 1], + [3, 4, 5, 0, 1, 2], [4, 5, 0, 1, 2, 3], [5, 0, 1, 2, 3, 4]] + + See Also + ======== + + SymmetricGroup, DihedralGroup, AlternatingGroup + + """ + a = list(range(1, n)) + a.append(0) + gen = _af_new(a) + G = PermutationGroup([gen]) + + G._is_abelian = True + G._is_nilpotent = True + G._is_solvable = True + G._degree = n + G._is_transitive = True + G._order = n + G._is_dihedral = (n == 2) + return G + + +def DihedralGroup(n): + r""" + Generates the dihedral group `D_n` as a permutation group. + + Explanation + =========== + + The dihedral group `D_n` is the group of symmetries of the regular + ``n``-gon. The generators taken are the ``n``-cycle ``a = (0 1 2 ... n-1)`` + (a rotation of the ``n``-gon) and ``b = (0 n-1)(1 n-2)...`` + (a reflection of the ``n``-gon) in cycle rotation. It is easy to see that + these satisfy ``a**n = b**2 = 1`` and ``bab = ~a`` so they indeed generate + `D_n` (See [1]). After the group is generated, some of its basic properties + are set. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> G = DihedralGroup(5) + >>> G.is_group + True + >>> a = list(G.generate_dimino()) + >>> [perm.cyclic_form for perm in a] + [[], [[0, 1, 2, 3, 4]], [[0, 2, 4, 1, 3]], + [[0, 3, 1, 4, 2]], [[0, 4, 3, 2, 1]], [[0, 4], [1, 3]], + [[1, 4], [2, 3]], [[0, 1], [2, 4]], [[0, 2], [3, 4]], + [[0, 3], [1, 2]]] + + See Also + ======== + + SymmetricGroup, CyclicGroup, AlternatingGroup + + References + ========== + + .. [1] https://en.wikipedia.org/wiki/Dihedral_group + + """ + # small cases are special + if n == 1: + return PermutationGroup([Permutation([1, 0])]) + if n == 2: + return PermutationGroup([Permutation([1, 0, 3, 2]), + Permutation([2, 3, 0, 1]), Permutation([3, 2, 1, 0])]) + + a = list(range(1, n)) + a.append(0) + gen1 = _af_new(a) + a = list(range(n)) + a.reverse() + gen2 = _af_new(a) + G = PermutationGroup([gen1, gen2]) + # if n is a power of 2, group is nilpotent + if n & (n-1) == 0: + G._is_nilpotent = True + else: + G._is_nilpotent = False + G._is_dihedral = True + G._is_abelian = False + G._is_solvable = True + G._degree = n + G._is_transitive = True + G._order = 2*n + return G + + +def SymmetricGroup(n): + """ + Generates the symmetric group on ``n`` elements as a permutation group. + + Explanation + =========== + + The generators taken are the ``n``-cycle + ``(0 1 2 ... n-1)`` and the transposition ``(0 1)`` (in cycle notation). + (See [1]). After the group is generated, some of its basic properties + are set. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> G = SymmetricGroup(4) + >>> G.is_group + True + >>> G.order() + 24 + >>> list(G.generate_schreier_sims(af=True)) + [[0, 1, 2, 3], [1, 2, 3, 0], [2, 3, 0, 1], [3, 1, 2, 0], [0, 2, 3, 1], + [1, 3, 0, 2], [2, 0, 1, 3], [3, 2, 0, 1], [0, 3, 1, 2], [1, 0, 2, 3], + [2, 1, 3, 0], [3, 0, 1, 2], [0, 1, 3, 2], [1, 2, 0, 3], [2, 3, 1, 0], + [3, 1, 0, 2], [0, 2, 1, 3], [1, 3, 2, 0], [2, 0, 3, 1], [3, 2, 1, 0], + [0, 3, 2, 1], [1, 0, 3, 2], [2, 1, 0, 3], [3, 0, 2, 1]] + + See Also + ======== + + CyclicGroup, DihedralGroup, AlternatingGroup + + References + ========== + + .. 
[1] https://en.wikipedia.org/wiki/Symmetric_group#Generators_and_relations + + """ + if n == 1: + G = PermutationGroup([Permutation([0])]) + elif n == 2: + G = PermutationGroup([Permutation([1, 0])]) + else: + a = list(range(1, n)) + a.append(0) + gen1 = _af_new(a) + a = list(range(n)) + a[0], a[1] = a[1], a[0] + gen2 = _af_new(a) + G = PermutationGroup([gen1, gen2]) + set_symmetric_group_properties(G, n, n) + G._is_sym = True + return G + + +def set_symmetric_group_properties(G, n, degree): + """Set known properties of a symmetric group. """ + if n < 3: + G._is_abelian = True + G._is_nilpotent = True + else: + G._is_abelian = False + G._is_nilpotent = False + if n < 5: + G._is_solvable = True + else: + G._is_solvable = False + G._degree = degree + G._is_transitive = True + G._is_dihedral = (n in [2, 3]) # cf Landau's func and Stirling's approx + + +def RubikGroup(n): + """Return a group of Rubik's cube generators + + >>> from sympy.combinatorics.named_groups import RubikGroup + >>> RubikGroup(2).is_group + True + """ + from sympy.combinatorics.generators import rubik + if n <= 1: + raise ValueError("Invalid cube. n has to be greater than 1") + return PermutationGroup(rubik(n)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/perm_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/perm_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..25cf5f5edbc84c268461f4079ff123c9483e36c1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/perm_groups.py @@ -0,0 +1,5472 @@ +from math import factorial as _factorial, log, prod +from itertools import chain, islice, product + + +from sympy.combinatorics import Permutation +from sympy.combinatorics.permutations import (_af_commutes_with, _af_invert, + _af_rmul, _af_rmuln, _af_pow, Cycle) +from sympy.combinatorics.util import (_check_cycles_alt_sym, + _distribute_gens_by_base, _orbits_transversals_from_bsgs, + _handle_precomputed_bsgs, _base_ordering, _strong_gens_from_distr, + _strip, _strip_af) +from sympy.core import Basic +from sympy.core.random import _randrange, randrange, choice +from sympy.core.symbol import Symbol +from sympy.core.sympify import _sympify +from sympy.functions.combinatorial.factorials import factorial +from sympy.ntheory import primefactors, sieve +from sympy.ntheory.factor_ import (factorint, multiplicity) +from sympy.ntheory.primetest import isprime +from sympy.utilities.iterables import has_variety, is_sequence, uniq + +rmul = Permutation.rmul_with_af +_af_new = Permutation._af_new + + +class PermutationGroup(Basic): + r"""The class defining a Permutation group. + + Explanation + =========== + + ``PermutationGroup([p1, p2, ..., pn])`` returns the permutation group + generated by the list of permutations. This group can be supplied + to Polyhedron if one desires to decorate the elements to which the + indices of the permutation refer. 
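As a sketch (not part of the diffed source) of the cached flags assigned by ``set_symmetric_group_properties`` and ``set_alternating_group_properties`` above, the solvability markers can be read back through the public ``is_solvable`` property:

>>> from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup
>>> SymmetricGroup(4).is_solvable
True
>>> SymmetricGroup(5).is_solvable
False
>>> AlternatingGroup(5).is_solvable
False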
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> from sympy.combinatorics import Polyhedron + + The permutations corresponding to motion of the front, right and + bottom face of a $2 \times 2$ Rubik's cube are defined: + + >>> F = Permutation(2, 19, 21, 8)(3, 17, 20, 10)(4, 6, 7, 5) + >>> R = Permutation(1, 5, 21, 14)(3, 7, 23, 12)(8, 10, 11, 9) + >>> D = Permutation(6, 18, 14, 10)(7, 19, 15, 11)(20, 22, 23, 21) + + These are passed as permutations to PermutationGroup: + + >>> G = PermutationGroup(F, R, D) + >>> G.order() + 3674160 + + The group can be supplied to a Polyhedron in order to track the + objects being moved. An example involving the $2 \times 2$ Rubik's cube is + given there, but here is a simple demonstration: + + >>> a = Permutation(2, 1) + >>> b = Permutation(1, 0) + >>> G = PermutationGroup(a, b) + >>> P = Polyhedron(list('ABC'), pgroup=G) + >>> P.corners + (A, B, C) + >>> P.rotate(0) # apply permutation 0 + >>> P.corners + (A, C, B) + >>> P.reset() + >>> P.corners + (A, B, C) + + Or one can make a permutation as a product of selected permutations + and apply them to an iterable directly: + + >>> P10 = G.make_perm([0, 1]) + >>> P10('ABC') + ['C', 'A', 'B'] + + See Also + ======== + + sympy.combinatorics.polyhedron.Polyhedron, + sympy.combinatorics.permutations.Permutation + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of Computational Group Theory" + + .. [2] Seress, A. + "Permutation Group Algorithms" + + .. [3] https://en.wikipedia.org/wiki/Schreier_vector + + .. [4] https://en.wikipedia.org/wiki/Nielsen_transformation#Product_replacement_algorithm + + .. [5] Frank Celler, Charles R.Leedham-Green, Scott H.Murray, + Alice C.Niemeyer, and E.A.O'Brien. "Generating Random + Elements of a Finite Group" + + .. [6] https://en.wikipedia.org/wiki/Block_%28permutation_group_theory%29 + + .. [7] https://algorithmist.com/wiki/Union_find + + .. [8] https://en.wikipedia.org/wiki/Multiply_transitive_group#Multiply_transitive_groups + + .. [9] https://en.wikipedia.org/wiki/Center_%28group_theory%29 + + .. [10] https://en.wikipedia.org/wiki/Centralizer_and_normalizer + + .. [11] https://groupprops.subwiki.org/wiki/Derived_subgroup + + .. [12] https://en.wikipedia.org/wiki/Nilpotent_group + + .. [13] https://www.math.colostate.edu/~hulpke/CGT/cgtnotes.pdf + + .. [14] https://docs.gap-system.org/doc/ref/manual.pdf + + """ + is_group = True + + def __new__(cls, *args, dups=True, **kwargs): + """The default constructor. Accepts Cycle and Permutation forms. + Removes duplicates unless ``dups`` keyword is ``False``. 
+ """ + if not args: + args = [Permutation()] + else: + args = list(args[0] if is_sequence(args[0]) else args) + if not args: + args = [Permutation()] + if any(isinstance(a, Cycle) for a in args): + args = [Permutation(a) for a in args] + if has_variety(a.size for a in args): + degree = kwargs.pop('degree', None) + if degree is None: + degree = max(a.size for a in args) + for i in range(len(args)): + if args[i].size != degree: + args[i] = Permutation(args[i], size=degree) + if dups: + args = list(uniq([_af_new(list(a)) for a in args])) + if len(args) > 1: + args = [g for g in args if not g.is_identity] + return Basic.__new__(cls, *args, **kwargs) + + def __init__(self, *args, **kwargs): + self._generators = list(self.args) + self._order = None + self._center = [] + self._is_abelian = None + self._is_transitive = None + self._is_sym = None + self._is_alt = None + self._is_primitive = None + self._is_nilpotent = None + self._is_solvable = None + self._is_trivial = None + self._transitivity_degree = None + self._max_div = None + self._is_perfect = None + self._is_cyclic = None + self._is_dihedral = None + self._r = len(self._generators) + self._degree = self._generators[0].size + + # these attributes are assigned after running schreier_sims + self._base = [] + self._strong_gens = [] + self._strong_gens_slp = [] + self._basic_orbits = [] + self._transversals = [] + self._transversal_slp = [] + + # these attributes are assigned after running _random_pr_init + self._random_gens = [] + + # finite presentation of the group as an instance of `FpGroup` + self._fp_presentation = None + + def __getitem__(self, i): + return self._generators[i] + + def __contains__(self, i): + """Return ``True`` if *i* is contained in PermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> p = Permutation(1, 2, 3) + >>> Permutation(3) in PermutationGroup(p) + True + + """ + if not isinstance(i, Permutation): + raise TypeError("A PermutationGroup contains only Permutations as " + "elements, not elements of type %s" % type(i)) + return self.contains(i) + + def __len__(self): + return len(self._generators) + + def equals(self, other): + """Return ``True`` if PermutationGroup generated by elements in the + group are same i.e they represent the same PermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> p = Permutation(0, 1, 2, 3, 4, 5) + >>> G = PermutationGroup([p, p**2]) + >>> H = PermutationGroup([p**2, p]) + >>> G.generators == H.generators + False + >>> G.equals(H) + True + + """ + if not isinstance(other, PermutationGroup): + return False + + set_self_gens = set(self.generators) + set_other_gens = set(other.generators) + + # before reaching the general case there are also certain + # optimisation and obvious cases requiring less or no actual + # computation. + if set_self_gens == set_other_gens: + return True + + # in the most general case it will check that each generator of + # one group belongs to the other PermutationGroup and vice-versa + for gen1 in set_self_gens: + if not other.contains(gen1): + return False + for gen2 in set_other_gens: + if not self.contains(gen2): + return False + return True + + def __mul__(self, other): + """ + Return the direct product of two permutation groups as a permutation + group. 
+ + Explanation + =========== + + This implementation realizes the direct product by shifting the index + set for the generators of the second group: so if we have ``G`` acting + on ``n1`` points and ``H`` acting on ``n2`` points, ``G*H`` acts on + ``n1 + n2`` points. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import CyclicGroup + >>> G = CyclicGroup(5) + >>> H = G*G + >>> H + PermutationGroup([ + (9)(0 1 2 3 4), + (5 6 7 8 9)]) + >>> H.order() + 25 + + """ + if isinstance(other, Permutation): + return Coset(other, self, dir='+') + gens1 = [perm._array_form for perm in self.generators] + gens2 = [perm._array_form for perm in other.generators] + n1 = self._degree + n2 = other._degree + start = list(range(n1)) + end = list(range(n1, n1 + n2)) + for i in range(len(gens2)): + gens2[i] = [x + n1 for x in gens2[i]] + gens2 = [start + gen for gen in gens2] + gens1 = [gen + end for gen in gens1] + together = gens1 + gens2 + gens = [_af_new(x) for x in together] + return PermutationGroup(gens) + + def _random_pr_init(self, r, n, _random_prec_n=None): + r"""Initialize random generators for the product replacement algorithm. + + Explanation + =========== + + The implementation uses a modification of the original product + replacement algorithm due to Leedham-Green, as described in [1], + pp. 69-71; also, see [2], pp. 27-29 for a detailed theoretical + analysis of the original product replacement algorithm, and [4]. + + The product replacement algorithm is used for producing random, + uniformly distributed elements of a group `G` with a set of generators + `S`. For the initialization ``_random_pr_init``, a list ``R`` of + `\max\{r, |S|\}` group generators is created as the attribute + ``G._random_gens``, repeating elements of `S` if necessary, and the + identity element of `G` is appended to ``R`` - we shall refer to this + last element as the accumulator. Then the function ``random_pr()`` + is called ``n`` times, randomizing the list ``R`` while preserving + the generation of `G` by ``R``. The function ``random_pr()`` itself + takes two random elements ``g, h`` among all elements of ``R`` but + the accumulator and replaces ``g`` with a randomly chosen element + from `\{gh, g(~h), hg, (~h)g\}`. Then the accumulator is multiplied + by whatever ``g`` was replaced by. The new value of the accumulator is + then returned by ``random_pr()``. + + The elements returned will eventually (for ``n`` large enough) become + uniformly distributed across `G` ([5]). For practical purposes however, + the values ``n = 50, r = 11`` are suggested in [1]. + + Notes + ===== + + THIS FUNCTION HAS SIDE EFFECTS: it changes the attribute + self._random_gens + + See Also + ======== + + random_pr + + """ + deg = self.degree + random_gens = [x._array_form for x in self.generators] + k = len(random_gens) + if k < r: + for i in range(k, r): + random_gens.append(random_gens[i - k]) + acc = list(range(deg)) + random_gens.append(acc) + self._random_gens = random_gens + + # handle randomized input for testing purposes + if _random_prec_n is None: + for i in range(n): + self.random_pr() + else: + for i in range(n): + self.random_pr(_random_prec=_random_prec_n[i]) + + def _union_find_merge(self, first, second, ranks, parents, not_rep): + """Merges two classes in a union-find data structure. + + Explanation + =========== + + Used in the implementation of Atkinson's algorithm as suggested in [1], + pp. 83-87. The class merging process uses union by rank as an + optimization. 
([7]) + + Notes + ===== + + THIS FUNCTION HAS SIDE EFFECTS: the list of class representatives, + ``parents``, the list of class sizes, ``ranks``, and the list of + elements that are not representatives, ``not_rep``, are changed due to + class merging. + + See Also + ======== + + minimal_block, _union_find_rep + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of computational group theory" + + .. [7] https://algorithmist.com/wiki/Union_find + + """ + rep_first = self._union_find_rep(first, parents) + rep_second = self._union_find_rep(second, parents) + if rep_first != rep_second: + # union by rank + if ranks[rep_first] >= ranks[rep_second]: + new_1, new_2 = rep_first, rep_second + else: + new_1, new_2 = rep_second, rep_first + total_rank = ranks[new_1] + ranks[new_2] + if total_rank > self.max_div: + return -1 + parents[new_2] = new_1 + ranks[new_1] = total_rank + not_rep.append(new_2) + return 1 + return 0 + + def _union_find_rep(self, num, parents): + """Find representative of a class in a union-find data structure. + + Explanation + =========== + + Used in the implementation of Atkinson's algorithm as suggested in [1], + pp. 83-87. After the representative of the class to which ``num`` + belongs is found, path compression is performed as an optimization + ([7]). + + Notes + ===== + + THIS FUNCTION HAS SIDE EFFECTS: the list of class representatives, + ``parents``, is altered due to path compression. + + See Also + ======== + + minimal_block, _union_find_merge + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of computational group theory" + + .. [7] https://algorithmist.com/wiki/Union_find + + """ + rep, parent = num, parents[num] + while parent != rep: + rep = parent + parent = parents[rep] + # path compression + temp, parent = num, parents[num] + while parent != rep: + parents[temp] = rep + temp = parent + parent = parents[temp] + return rep + + @property + def base(self): + r"""Return a base from the Schreier-Sims algorithm. + + Explanation + =========== + + For a permutation group `G`, a base is a sequence of points + `B = (b_1, b_2, \dots, b_k)` such that no element of `G` apart + from the identity fixes all the points in `B`. The concepts of + a base and strong generating set and their applications are + discussed in depth in [1], pp. 87-89 and [2], pp. 55-57. + + An alternative way to think of `B` is that it gives the + indices of the stabilizer cosets that contain more than the + identity permutation. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> G = PermutationGroup([Permutation(0, 1, 3)(2, 4)]) + >>> G.base + [0, 2] + + See Also + ======== + + strong_gens, basic_transversals, basic_orbits, basic_stabilizers + + """ + if self._base == []: + self.schreier_sims() + return self._base + + def baseswap(self, base, strong_gens, pos, randomized=False, + transversals=None, basic_orbits=None, strong_gens_distr=None): + r"""Swap two consecutive base points in base and strong generating set. + + Explanation + =========== + + If a base for a group `G` is given by `(b_1, b_2, \dots, b_k)`, this + function returns a base `(b_1, b_2, \dots, b_{i+1}, b_i, \dots, b_k)`, + where `i` is given by ``pos``, and a strong generating set relative + to that base. The original base and strong generating set are not + modified. + + The randomized version (default) is of Las Vegas type. + + Parameters + ========== + + base, strong_gens + The base and strong generating set. 
+ pos + The position at which swapping is performed. + randomized + A switch between randomized and deterministic version. + transversals + The transversals for the basic orbits, if known. + basic_orbits + The basic orbits, if known. + strong_gens_distr + The strong generators distributed by basic stabilizers, if known. + + Returns + ======= + + (base, strong_gens) + ``base`` is the new base, and ``strong_gens`` is a generating set + relative to it. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> from sympy.combinatorics.perm_groups import PermutationGroup + >>> S = SymmetricGroup(4) + >>> S.schreier_sims() + >>> S.base + [0, 1, 2] + >>> base, gens = S.baseswap(S.base, S.strong_gens, 1, randomized=False) + >>> base, gens + ([0, 2, 1], + [(0 1 2 3), (3)(0 1), (1 3 2), + (2 3), (1 3)]) + + check that base, gens is a BSGS + + >>> S1 = PermutationGroup(gens) + >>> _verify_bsgs(S1, base, gens) + True + + See Also + ======== + + schreier_sims + + Notes + ===== + + The deterministic version of the algorithm is discussed in + [1], pp. 102-103; the randomized version is discussed in [1], p.103, and + [2], p.98. It is of Las Vegas type. + Notice that [1] contains a mistake in the pseudocode and + discussion of BASESWAP: on line 3 of the pseudocode, + `|\beta_{i+1}^{\left\langle T\right\rangle}|` should be replaced by + `|\beta_{i}^{\left\langle T\right\rangle}|`, and the same for the + discussion of the algorithm. + + """ + # construct the basic orbits, generators for the stabilizer chain + # and transversal elements from whatever was provided + transversals, basic_orbits, strong_gens_distr = \ + _handle_precomputed_bsgs(base, strong_gens, transversals, + basic_orbits, strong_gens_distr) + base_len = len(base) + degree = self.degree + # size of orbit of base[pos] under the stabilizer we seek to insert + # in the stabilizer chain at position pos + 1 + size = len(basic_orbits[pos])*len(basic_orbits[pos + 1]) \ + //len(_orbit(degree, strong_gens_distr[pos], base[pos + 1])) + # initialize the wanted stabilizer by a subgroup + if pos + 2 > base_len - 1: + T = [] + else: + T = strong_gens_distr[pos + 2][:] + # randomized version + if randomized is True: + stab_pos = PermutationGroup(strong_gens_distr[pos]) + schreier_vector = stab_pos.schreier_vector(base[pos + 1]) + # add random elements of the stabilizer until they generate it + while len(_orbit(degree, T, base[pos])) != size: + new = stab_pos.random_stab(base[pos + 1], + schreier_vector=schreier_vector) + T.append(new) + # deterministic version + else: + Gamma = set(basic_orbits[pos]) + Gamma.remove(base[pos]) + if base[pos + 1] in Gamma: + Gamma.remove(base[pos + 1]) + # add elements of the stabilizer until they generate it by + # ruling out member of the basic orbit of base[pos] along the way + while len(_orbit(degree, T, base[pos])) != size: + gamma = next(iter(Gamma)) + x = transversals[pos][gamma] + temp = x._array_form.index(base[pos + 1]) # (~x)(base[pos + 1]) + if temp not in basic_orbits[pos + 1]: + Gamma = Gamma - _orbit(degree, T, gamma) + else: + y = transversals[pos + 1][temp] + el = rmul(x, y) + if el(base[pos]) not in _orbit(degree, T, base[pos]): + T.append(el) + Gamma = Gamma - _orbit(degree, T, base[pos]) + # build the new base and strong generating set + strong_gens_new_distr = strong_gens_distr[:] + strong_gens_new_distr[pos + 1] = T + base_new = base[:] + base_new[pos], base_new[pos + 1] = base_new[pos + 1], base_new[pos] + 
strong_gens_new = _strong_gens_from_distr(strong_gens_new_distr) + for gen in T: + if gen not in strong_gens_new: + strong_gens_new.append(gen) + return base_new, strong_gens_new + + @property + def basic_orbits(self): + r""" + Return the basic orbits relative to a base and strong generating set. + + Explanation + =========== + + If `(b_1, b_2, \dots, b_k)` is a base for a group `G`, and + `G^{(i)} = G_{b_1, b_2, \dots, b_{i-1}}` is the ``i``-th basic stabilizer + (so that `G^{(1)} = G`), the ``i``-th basic orbit relative to this base + is the orbit of `b_i` under `G^{(i)}`. See [1], pp. 87-89 for more + information. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> S = SymmetricGroup(4) + >>> S.basic_orbits + [[0, 1, 2, 3], [1, 2, 3], [2, 3]] + + See Also + ======== + + base, strong_gens, basic_transversals, basic_stabilizers + + """ + if self._basic_orbits == []: + self.schreier_sims() + return self._basic_orbits + + @property + def basic_stabilizers(self): + r""" + Return a chain of stabilizers relative to a base and strong generating + set. + + Explanation + =========== + + The ``i``-th basic stabilizer `G^{(i)}` relative to a base + `(b_1, b_2, \dots, b_k)` is `G_{b_1, b_2, \dots, b_{i-1}}`. For more + information, see [1], pp. 87-89. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> A = AlternatingGroup(4) + >>> A.schreier_sims() + >>> A.base + [0, 1] + >>> for g in A.basic_stabilizers: + ... print(g) + ... + PermutationGroup([ + (3)(0 1 2), + (1 2 3)]) + PermutationGroup([ + (1 2 3)]) + + See Also + ======== + + base, strong_gens, basic_orbits, basic_transversals + + """ + + if self._transversals == []: + self.schreier_sims() + strong_gens = self._strong_gens + base = self._base + if not base: # e.g. if self is trivial + return [] + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + basic_stabilizers = [] + for gens in strong_gens_distr: + basic_stabilizers.append(PermutationGroup(gens)) + return basic_stabilizers + + @property + def basic_transversals(self): + """ + Return basic transversals relative to a base and strong generating set. + + Explanation + =========== + + The basic transversals are transversals of the basic orbits. They + are provided as a list of dictionaries, each dictionary having + keys - the elements of one of the basic orbits, and values - the + corresponding transversal elements. See [1], pp. 87-89 for more + information. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> A = AlternatingGroup(4) + >>> A.basic_transversals + [{0: (3), 1: (3)(0 1 2), 2: (3)(0 2 1), 3: (0 3 1)}, {1: (3), 2: (1 2 3), 3: (1 3 2)}] + + See Also + ======== + + strong_gens, base, basic_orbits, basic_stabilizers + + """ + + if self._transversals == []: + self.schreier_sims() + return self._transversals + + def composition_series(self): + r""" + Return the composition series for a group as a list + of permutation groups. + + Explanation + =========== + + The composition series for a group `G` is defined as a + subnormal series `G = H_0 > H_1 > H_2 \ldots` A composition + series is a subnormal series such that each factor group + `H(i+1) / H(i)` is simple. + A subnormal series is a composition series only if it is of + maximum length. + + The algorithm works as follows: + Starting with the derived series the idea is to fill + the gap between `G = der[i]` and `H = der[i+1]` for each + `i` independently. 
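As an illustrative aside (not from the original file), the BSGS data exposed by ``base``, ``basic_orbits`` and ``basic_transversals`` above satisfy the usual orbit-stabilizer bookkeeping: the product of the basic orbit (equivalently, transversal) lengths equals the group order.

>>> from math import prod
>>> from sympy.combinatorics.named_groups import SymmetricGroup
>>> S = SymmetricGroup(4)
>>> [len(orbit) for orbit in S.basic_orbits]
[4, 3, 2]
>>> prod(len(t) for t in S.basic_transversals) == S.order()
True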
Since, all subgroups of the abelian group + `G/H` are normal so, first step is to take the generators + `g` of `G` and add them to generators of `H` one by one. + + The factor groups formed are not simple in general. Each + group is obtained from the previous one by adding one + generator `g`, if the previous group is denoted by `H` + then the next group `K` is generated by `g` and `H`. + The factor group `K/H` is cyclic and it's order is + `K.order()//G.order()`. The series is then extended between + `K` and `H` by groups generated by powers of `g` and `H`. + The series formed is then prepended to the already existing + series. + + Examples + ======== + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> from sympy.combinatorics.named_groups import CyclicGroup + >>> S = SymmetricGroup(12) + >>> G = S.sylow_subgroup(2) + >>> C = G.composition_series() + >>> [H.order() for H in C] + [1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1] + >>> G = S.sylow_subgroup(3) + >>> C = G.composition_series() + >>> [H.order() for H in C] + [243, 81, 27, 9, 3, 1] + >>> G = CyclicGroup(12) + >>> C = G.composition_series() + >>> [H.order() for H in C] + [12, 6, 3, 1] + + """ + der = self.derived_series() + if not all(g.is_identity for g in der[-1].generators): + raise NotImplementedError('Group should be solvable') + series = [] + + for i in range(len(der)-1): + H = der[i+1] + up_seg = [] + for g in der[i].generators: + K = PermutationGroup([g] + H.generators) + order = K.order() // H.order() + down_seg = [] + for p, e in factorint(order).items(): + for _ in range(e): + down_seg.append(PermutationGroup([g] + H.generators)) + g = g**p + up_seg = down_seg + up_seg + H = K + up_seg[0] = der[i] + series.extend(up_seg) + series.append(der[-1]) + return series + + def coset_transversal(self, H): + """Return a transversal of the right cosets of self by its subgroup H + using the second method described in [1], Subsection 4.6.7 + + """ + + if not H.is_subgroup(self): + raise ValueError("The argument must be a subgroup") + + if H.order() == 1: + return self._elements + + self._schreier_sims(base=H.base) # make G.base an extension of H.base + + base = self.base + base_ordering = _base_ordering(base, self.degree) + identity = Permutation(self.degree - 1) + + transversals = self.basic_transversals[:] + # transversals is a list of dictionaries. Get rid of the keys + # so that it is a list of lists and sort each list in + # the increasing order of base[l]^x + for l, t in enumerate(transversals): + transversals[l] = sorted(t.values(), + key = lambda x: base_ordering[base[l]^x]) + + orbits = H.basic_orbits + h_stabs = H.basic_stabilizers + g_stabs = self.basic_stabilizers + + indices = [x.order()//y.order() for x, y in zip(g_stabs, h_stabs)] + + # T^(l) should be a right transversal of H^(l) in G^(l) for + # 1<=l<=len(base). 
While H^(l) is the trivial group, T^(l) + # contains all the elements of G^(l) so we might just as well + # start with l = len(h_stabs)-1 + if len(g_stabs) > len(h_stabs): + T = g_stabs[len(h_stabs)]._elements + else: + T = [identity] + l = len(h_stabs)-1 + t_len = len(T) + while l > -1: + T_next = [] + for u in transversals[l]: + if u == identity: + continue + b = base_ordering[base[l]^u] + for t in T: + p = t*u + if all(base_ordering[h^p] >= b for h in orbits[l]): + T_next.append(p) + if t_len + len(T_next) == indices[l]: + break + if t_len + len(T_next) == indices[l]: + break + T += T_next + t_len += len(T_next) + l -= 1 + T.remove(identity) + T = [identity] + T + return T + + def _coset_representative(self, g, H): + """Return the representative of Hg from the transversal that + would be computed by ``self.coset_transversal(H)``. + + """ + if H.order() == 1: + return g + # The base of self must be an extension of H.base. + if not(self.base[:len(H.base)] == H.base): + self._schreier_sims(base=H.base) + orbits = H.basic_orbits[:] + h_transversals = [list(_.values()) for _ in H.basic_transversals] + transversals = [list(_.values()) for _ in self.basic_transversals] + base = self.base + base_ordering = _base_ordering(base, self.degree) + def step(l, x): + gamma = sorted(orbits[l], key = lambda y: base_ordering[y^x])[0] + i = [base[l]^h for h in h_transversals[l]].index(gamma) + x = h_transversals[l][i]*x + if l < len(orbits)-1: + for u in transversals[l]: + if base[l]^u == base[l]^x: + break + x = step(l+1, x*u**-1)*u + return x + return step(0, g) + + def coset_table(self, H): + """Return the standardised (right) coset table of self in H as + a list of lists. + """ + # Maybe this should be made to return an instance of CosetTable + # from fp_groups.py but the class would need to be changed first + # to be compatible with PermutationGroups + + if not H.is_subgroup(self): + raise ValueError("The argument must be a subgroup") + T = self.coset_transversal(H) + n = len(T) + + A = list(chain.from_iterable((gen, gen**-1) + for gen in self.generators)) + + table = [] + for i in range(n): + row = [self._coset_representative(T[i]*x, H) for x in A] + row = [T.index(r) for r in row] + table.append(row) + + # standardize (this is the same as the algorithm used in coset_table) + # If CosetTable is made compatible with PermutationGroups, this + # should be replaced by table.standardize() + A = range(len(A)) + gamma = 1 + for alpha, a in product(range(n), A): + beta = table[alpha][a] + if beta >= gamma: + if beta > gamma: + for x in A: + z = table[gamma][x] + table[gamma][x] = table[beta][x] + table[beta][x] = z + for i in range(n): + if table[i][x] == beta: + table[i][x] = gamma + elif table[i][x] == gamma: + table[i][x] = beta + gamma += 1 + if gamma >= n-1: + return table + + def center(self): + r""" + Return the center of a permutation group. + + Explanation + =========== + + The center for a group `G` is defined as + `Z(G) = \{z\in G | \forall g\in G, zg = gz \}`, + the set of elements of `G` that commute with all elements of `G`. + It is equal to the centralizer of `G` inside `G`, and is naturally a + subgroup of `G` ([9]). 
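A minimal sketch (not part of the diff) exercising ``coset_transversal`` defined earlier in this file: the returned list holds one representative per right coset, starting with the identity, so its length is the index of the subgroup.

>>> from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup
>>> S = SymmetricGroup(4)
>>> A = AlternatingGroup(4)
>>> T = S.coset_transversal(A)
>>> len(T) == S.order() // A.order()
True
>>> T[0].is_identity
True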
+ + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(4) + >>> G = D.center() + >>> G.order() + 2 + + See Also + ======== + + centralizer + + Notes + ===== + + This is a naive implementation that is a straightforward application + of ``.centralizer()`` + + """ + return self.centralizer(self) + + def centralizer(self, other): + r""" + Return the centralizer of a group/set/element. + + Explanation + =========== + + The centralizer of a set of permutations ``S`` inside + a group ``G`` is the set of elements of ``G`` that commute with all + elements of ``S``:: + + `C_G(S) = \{ g \in G | gs = sg \forall s \in S\}` ([10]) + + Usually, ``S`` is a subset of ``G``, but if ``G`` is a proper subgroup of + the full symmetric group, we allow for ``S`` to have elements outside + ``G``. + + It is naturally a subgroup of ``G``; the centralizer of a permutation + group is equal to the centralizer of any set of generators for that + group, since any element commuting with the generators commutes with + any product of the generators. + + Parameters + ========== + + other + a permutation group/list of permutations/single permutation + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... CyclicGroup) + >>> S = SymmetricGroup(6) + >>> C = CyclicGroup(6) + >>> H = S.centralizer(C) + >>> H.is_subgroup(C) + True + + See Also + ======== + + subgroup_search + + Notes + ===== + + The implementation is an application of ``.subgroup_search()`` with + tests using a specific base for the group ``G``. + + """ + if hasattr(other, 'generators'): + if other.is_trivial or self.is_trivial: + return self + degree = self.degree + identity = _af_new(list(range(degree))) + orbits = other.orbits() + num_orbits = len(orbits) + orbits.sort(key=lambda x: -len(x)) + long_base = [] + orbit_reps = [None]*num_orbits + orbit_reps_indices = [None]*num_orbits + orbit_descr = [None]*degree + for i in range(num_orbits): + orbit = list(orbits[i]) + orbit_reps[i] = orbit[0] + orbit_reps_indices[i] = len(long_base) + for point in orbit: + orbit_descr[point] = i + long_base = long_base + orbit + base, strong_gens = self.schreier_sims_incremental(base=long_base) + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + i = 0 + for i in range(len(base)): + if strong_gens_distr[i] == [identity]: + break + base = base[:i] + base_len = i + for j in range(num_orbits): + if base[base_len - 1] in orbits[j]: + break + rel_orbits = orbits[: j + 1] + num_rel_orbits = len(rel_orbits) + transversals = [None]*num_rel_orbits + for j in range(num_rel_orbits): + rep = orbit_reps[j] + transversals[j] = dict( + other.orbit_transversal(rep, pairs=True)) + trivial_test = lambda x: True + tests = [None]*base_len + for l in range(base_len): + if base[l] in orbit_reps: + tests[l] = trivial_test + else: + def test(computed_words, l=l): + g = computed_words[l] + rep_orb_index = orbit_descr[base[l]] + rep = orbit_reps[rep_orb_index] + im = g._array_form[base[l]] + im_rep = g._array_form[rep] + tr_el = transversals[rep_orb_index][base[l]] + # using the definition of transversal, + # base[l]^g = rep^(tr_el*g); + # if g belongs to the centralizer, then + # base[l]^g = (rep^g)^tr_el + return im == tr_el._array_form[im_rep] + tests[l] = test + + def prop(g): + return [rmul(g, gen) for gen in other.generators] == \ + [rmul(gen, g) for gen in other.generators] + return self.subgroup_search(prop, base=base, + strong_gens=strong_gens, tests=tests) + elif hasattr(other, 
'__getitem__'): + gens = list(other) + return self.centralizer(PermutationGroup(gens)) + elif hasattr(other, 'array_form'): + return self.centralizer(PermutationGroup([other])) + + def commutator(self, G, H): + """ + Return the commutator of two subgroups. + + Explanation + =========== + + For a permutation group ``K`` and subgroups ``G``, ``H``, the + commutator of ``G`` and ``H`` is defined as the group generated + by all the commutators `[g, h] = hgh^{-1}g^{-1}` for ``g`` in ``G`` and + ``h`` in ``H``. It is naturally a subgroup of ``K`` ([1], p.27). + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... AlternatingGroup) + >>> S = SymmetricGroup(5) + >>> A = AlternatingGroup(5) + >>> G = S.commutator(S, A) + >>> G.is_subgroup(A) + True + + See Also + ======== + + derived_subgroup + + Notes + ===== + + The commutator of two subgroups `H, G` is equal to the normal closure + of the commutators of all the generators, i.e. `hgh^{-1}g^{-1}` for `h` + a generator of `H` and `g` a generator of `G` ([1], p.28) + + """ + ggens = G.generators + hgens = H.generators + commutators = [] + for ggen in ggens: + for hgen in hgens: + commutator = rmul(hgen, ggen, ~hgen, ~ggen) + if commutator not in commutators: + commutators.append(commutator) + res = self.normal_closure(commutators) + return res + + def coset_factor(self, g, factor_index=False): + """Return ``G``'s (self's) coset factorization of ``g`` + + Explanation + =========== + + If ``g`` is an element of ``G`` then it can be written as the product + of permutations drawn from the Schreier-Sims coset decomposition, + + The permutations returned in ``f`` are those for which + the product gives ``g``: ``g = f[n]*...f[1]*f[0]`` where ``n = len(B)`` + and ``B = G.base``. f[i] is one of the permutations in + ``self._basic_orbits[i]``. + + If factor_index==True, + returns a tuple ``[b[0],..,b[n]]``, where ``b[i]`` + belongs to ``self._basic_orbits[i]`` + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation(0, 1, 3, 7, 6, 4)(2, 5) + >>> b = Permutation(0, 1, 3, 2)(4, 5, 7, 6) + >>> G = PermutationGroup([a, b]) + + Define g: + + >>> g = Permutation(7)(1, 2, 4)(3, 6, 5) + + Confirm that it is an element of G: + + >>> G.contains(g) + True + + Thus, it can be written as a product of factors (up to + 3) drawn from u. 
See below that a factor from u1 and u2 + and the Identity permutation have been used: + + >>> f = G.coset_factor(g) + >>> f[2]*f[1]*f[0] == g + True + >>> f1 = G.coset_factor(g, True); f1 + [0, 4, 4] + >>> tr = G.basic_transversals + >>> f[0] == tr[0][f1[0]] + True + + If g is not an element of G then [] is returned: + + >>> c = Permutation(5, 6, 7) + >>> G.coset_factor(c) + [] + + See Also + ======== + + sympy.combinatorics.util._strip + + """ + if isinstance(g, (Cycle, Permutation)): + g = g.list() + if len(g) != self._degree: + # this could either adjust the size or return [] immediately + # but we don't choose between the two and just signal a possible + # error + raise ValueError('g should be the same size as permutations of G') + I = list(range(self._degree)) + basic_orbits = self.basic_orbits + transversals = self._transversals + factors = [] + base = self.base + h = g + for i in range(len(base)): + beta = h[base[i]] + if beta == base[i]: + factors.append(beta) + continue + if beta not in basic_orbits[i]: + return [] + u = transversals[i][beta]._array_form + h = _af_rmul(_af_invert(u), h) + factors.append(beta) + if h != I: + return [] + if factor_index: + return factors + tr = self.basic_transversals + factors = [tr[i][factors[i]] for i in range(len(base))] + return factors + + def generator_product(self, g, original=False): + r''' + Return a list of strong generators `[s1, \dots, sn]` + s.t `g = sn \times \dots \times s1`. If ``original=True``, make the + list contain only the original group generators + + ''' + product = [] + if g.is_identity: + return [] + if g in self.strong_gens: + if not original or g in self.generators: + return [g] + else: + slp = self._strong_gens_slp[g] + for s in slp: + product.extend(self.generator_product(s, original=True)) + return product + elif g**-1 in self.strong_gens: + g = g**-1 + if not original or g in self.generators: + return [g**-1] + else: + slp = self._strong_gens_slp[g] + for s in slp: + product.extend(self.generator_product(s, original=True)) + l = len(product) + product = [product[l-i-1]**-1 for i in range(l)] + return product + + f = self.coset_factor(g, True) + for i, j in enumerate(f): + slp = self._transversal_slp[i][j] + for s in slp: + if not original: + product.append(self.strong_gens[s]) + else: + s = self.strong_gens[s] + product.extend(self.generator_product(s, original=True)) + return product + + def coset_rank(self, g): + """rank using Schreier-Sims representation. + + Explanation + =========== + + The coset rank of ``g`` is the ordering number in which + it appears in the lexicographic listing according to the + coset decomposition + + The ordering is the same as in G.generate(method='coset'). + If ``g`` does not belong to the group it returns None. 
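A small sanity check, offered only as a sketch: since ``coset_rank`` numbers the elements in the ``generate(method='coset')`` ordering, ranging over the whole group produces each rank in ``0 .. order()-1`` exactly once.

>>> from sympy.combinatorics.named_groups import SymmetricGroup
>>> S = SymmetricGroup(3)
>>> sorted(S.coset_rank(g) for g in S.generate())
[0, 1, 2, 3, 4, 5]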
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation(0, 1, 3, 7, 6, 4)(2, 5) + >>> b = Permutation(0, 1, 3, 2)(4, 5, 7, 6) + >>> G = PermutationGroup([a, b]) + >>> c = Permutation(7)(2, 4)(3, 5) + >>> G.coset_rank(c) + 16 + >>> G.coset_unrank(16) + (7)(2 4)(3 5) + + See Also + ======== + + coset_factor + + """ + factors = self.coset_factor(g, True) + if not factors: + return None + rank = 0 + b = 1 + transversals = self._transversals + base = self._base + basic_orbits = self._basic_orbits + for i in range(len(base)): + k = factors[i] + j = basic_orbits[i].index(k) + rank += b*j + b = b*len(transversals[i]) + return rank + + def coset_unrank(self, rank, af=False): + """unrank using Schreier-Sims representation + + coset_unrank is the inverse operation of coset_rank + if 0 <= rank < order; otherwise it returns None. + + """ + if rank < 0 or rank >= self.order(): + return None + base = self.base + transversals = self.basic_transversals + basic_orbits = self.basic_orbits + m = len(base) + v = [0]*m + for i in range(m): + rank, c = divmod(rank, len(transversals[i])) + v[i] = basic_orbits[i][c] + a = [transversals[i][v[i]]._array_form for i in range(m)] + h = _af_rmuln(*a) + if af: + return h + else: + return _af_new(h) + + @property + def degree(self): + """Returns the size of the permutations in the group. + + Explanation + =========== + + The number of permutations comprising the group is given by + ``len(group)``; the number of permutations that can be generated + by the group is given by ``group.order()``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a]) + >>> G.degree + 3 + >>> len(G) + 1 + >>> G.order() + 2 + >>> list(G.generate()) + [(2), (2)(0 1)] + + See Also + ======== + + order + """ + return self._degree + + @property + def identity(self): + ''' + Return the identity element of the permutation group. + + ''' + return _af_new(list(range(self.degree))) + + @property + def elements(self): + """Returns all the elements of the permutation group as a set + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> p = PermutationGroup(Permutation(1, 3), Permutation(1, 2)) + >>> p.elements + {(1 2 3), (1 3 2), (1 3), (2 3), (3), (3)(1 2)} + + """ + return set(self._elements) + + @property + def _elements(self): + """Returns all the elements of the permutation group as a list + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> p = PermutationGroup(Permutation(1, 3), Permutation(1, 2)) + >>> p._elements + [(3), (3)(1 2), (1 3), (2 3), (1 2 3), (1 3 2)] + + """ + return list(islice(self.generate(), None)) + + def derived_series(self): + r"""Return the derived series for the group. + + Explanation + =========== + + The derived series for a group `G` is defined as + `G = G_0 > G_1 > G_2 > \ldots` where `G_i = [G_{i-1}, G_{i-1}]`, + i.e. `G_i` is the derived subgroup of `G_{i-1}`, for + `i\in\mathbb{N}`. When we have `G_k = G_{k-1}` for some + `k\in\mathbb{N}`, the series terminates. + + Returns + ======= + + A list of permutation groups containing the members of the derived + series in the order `G = G_0, G_1, G_2, \ldots`. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... 
AlternatingGroup, DihedralGroup) + >>> A = AlternatingGroup(5) + >>> len(A.derived_series()) + 1 + >>> S = SymmetricGroup(4) + >>> len(S.derived_series()) + 4 + >>> S.derived_series()[1].is_subgroup(AlternatingGroup(4)) + True + >>> S.derived_series()[2].is_subgroup(DihedralGroup(2)) + True + + See Also + ======== + + derived_subgroup + + """ + res = [self] + current = self + nxt = self.derived_subgroup() + while not current.is_subgroup(nxt): + res.append(nxt) + current = nxt + nxt = nxt.derived_subgroup() + return res + + def derived_subgroup(self): + r"""Compute the derived subgroup. + + Explanation + =========== + + The derived subgroup, or commutator subgroup is the subgroup generated + by all commutators `[g, h] = hgh^{-1}g^{-1}` for `g, h\in G` ; it is + equal to the normal closure of the set of commutators of the generators + ([1], p.28, [11]). + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([1, 0, 2, 4, 3]) + >>> b = Permutation([0, 1, 3, 2, 4]) + >>> G = PermutationGroup([a, b]) + >>> C = G.derived_subgroup() + >>> list(C.generate(af=True)) + [[0, 1, 2, 3, 4], [0, 1, 3, 4, 2], [0, 1, 4, 2, 3]] + + See Also + ======== + + derived_series + + """ + r = self._r + gens = [p._array_form for p in self.generators] + set_commutators = set() + degree = self._degree + rng = list(range(degree)) + for i in range(r): + for j in range(r): + p1 = gens[i] + p2 = gens[j] + c = list(range(degree)) + for k in rng: + c[p2[p1[k]]] = p1[p2[k]] + ct = tuple(c) + if ct not in set_commutators: + set_commutators.add(ct) + cms = [_af_new(p) for p in set_commutators] + G2 = self.normal_closure(cms) + return G2 + + def generate(self, method="coset", af=False): + """Return iterator to generate the elements of the group. + + Explanation + =========== + + Iteration is done with one of these methods:: + + method='coset' using the Schreier-Sims coset representation + method='dimino' using the Dimino method + + If ``af = True`` it yields the array form of the permutations + + Examples + ======== + + >>> from sympy.combinatorics import PermutationGroup + >>> from sympy.combinatorics.polyhedron import tetrahedron + + The permutation group given in the tetrahedron object is also + true groups: + + >>> G = tetrahedron.pgroup + >>> G.is_group + True + + Also the group generated by the permutations in the tetrahedron + pgroup -- even the first two -- is a proper group: + + >>> H = PermutationGroup(G[0], G[1]) + >>> J = PermutationGroup(list(H.generate())); J + PermutationGroup([ + (0 1)(2 3), + (1 2 3), + (1 3 2), + (0 3 1), + (0 2 3), + (0 3)(1 2), + (0 1 3), + (3)(0 2 1), + (0 3 2), + (3)(0 1 2), + (0 2)(1 3)]) + >>> _.is_group + True + """ + if method == "coset": + return self.generate_schreier_sims(af) + elif method == "dimino": + return self.generate_dimino(af) + else: + raise NotImplementedError('No generation defined for %s' % method) + + def generate_dimino(self, af=False): + """Yield group elements using Dimino's algorithm. + + If ``af == True`` it yields the array form of the permutations. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1, 3]) + >>> b = Permutation([0, 2, 3, 1]) + >>> g = PermutationGroup([a, b]) + >>> list(g.generate_dimino(af=True)) + [[0, 1, 2, 3], [0, 2, 1, 3], [0, 2, 3, 1], + [0, 1, 3, 2], [0, 3, 2, 1], [0, 3, 1, 2]] + + References + ========== + + .. 
[1] The Implementation of Various Algorithms for Permutation Groups in + the Computer Algebra System: AXIOM, N.J. Doye, M.Sc. Thesis + + """ + idn = list(range(self.degree)) + order = 0 + element_list = [idn] + set_element_list = {tuple(idn)} + if af: + yield idn + else: + yield _af_new(idn) + gens = [p._array_form for p in self.generators] + + for i in range(len(gens)): + # D elements of the subgroup G_i generated by gens[:i] + D = element_list[:] + N = [idn] + while N: + A = N + N = [] + for a in A: + for g in gens[:i + 1]: + ag = _af_rmul(a, g) + if tuple(ag) not in set_element_list: + # produce G_i*g + for d in D: + order += 1 + ap = _af_rmul(d, ag) + if af: + yield ap + else: + p = _af_new(ap) + yield p + element_list.append(ap) + set_element_list.add(tuple(ap)) + N.append(ap) + self._order = len(element_list) + + def generate_schreier_sims(self, af=False): + """Yield group elements using the Schreier-Sims representation + in coset_rank order + + If ``af = True`` it yields the array form of the permutations + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1, 3]) + >>> b = Permutation([0, 2, 3, 1]) + >>> g = PermutationGroup([a, b]) + >>> list(g.generate_schreier_sims(af=True)) + [[0, 1, 2, 3], [0, 2, 1, 3], [0, 3, 2, 1], + [0, 1, 3, 2], [0, 2, 3, 1], [0, 3, 1, 2]] + """ + + n = self._degree + u = self.basic_transversals + basic_orbits = self._basic_orbits + if len(u) == 0: + for x in self.generators: + if af: + yield x._array_form + else: + yield x + return + if len(u) == 1: + for i in basic_orbits[0]: + if af: + yield u[0][i]._array_form + else: + yield u[0][i] + return + + u = list(reversed(u)) + basic_orbits = basic_orbits[::-1] + # stg stack of group elements + stg = [list(range(n))] + posmax = [len(x) for x in u] + n1 = len(posmax) - 1 + pos = [0]*n1 + h = 0 + while 1: + # backtrack when finished iterating over coset + if pos[h] >= posmax[h]: + if h == 0: + return + pos[h] = 0 + h -= 1 + stg.pop() + continue + p = _af_rmul(u[h][basic_orbits[h][pos[h]]]._array_form, stg[-1]) + pos[h] += 1 + stg.append(p) + h += 1 + if h == n1: + if af: + for i in basic_orbits[-1]: + p = _af_rmul(u[-1][i]._array_form, stg[-1]) + yield p + else: + for i in basic_orbits[-1]: + p = _af_rmul(u[-1][i]._array_form, stg[-1]) + p1 = _af_new(p) + yield p1 + stg.pop() + h -= 1 + + @property + def generators(self): + """Returns the generators of the group. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.generators + [(1 2), (2)(0 1)] + + """ + return self._generators + + def contains(self, g, strict=True): + """Test if permutation ``g`` belong to self, ``G``. + + Explanation + =========== + + If ``g`` is an element of ``G`` it can be written as a product + of factors drawn from the cosets of ``G``'s stabilizers. To see + if ``g`` is one of the actual generators defining the group use + ``G.has(g)``. + + If ``strict`` is not ``True``, ``g`` will be resized, if necessary, + to match the size of permutations in ``self``. 
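+
+        As an added sanity check, every element yielded by ``generate``
+        should test as a member, while a permutation of a different
+        degree is rejected under the default ``strict=True``:
+
+        >>> from sympy.combinatorics import Permutation, PermutationGroup
+        >>> G = PermutationGroup(Permutation([1, 2, 0]), Permutation([1, 0, 2]))
+        >>> all(G.contains(p) for p in G.generate())
+        True
+        >>> G.contains(Permutation([1, 0, 3, 2]))
+        False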
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + + >>> a = Permutation(1, 2) + >>> b = Permutation(2, 3, 1) + >>> G = PermutationGroup(a, b, degree=5) + >>> G.contains(G[0]) # trivial check + True + >>> elem = Permutation([[2, 3]], size=5) + >>> G.contains(elem) + True + >>> G.contains(Permutation(4)(0, 1, 2, 3)) + False + + If strict is False, a permutation will be resized, if + necessary: + + >>> H = PermutationGroup(Permutation(5)) + >>> H.contains(Permutation(3)) + False + >>> H.contains(Permutation(3), strict=False) + True + + To test if a given permutation is present in the group: + + >>> elem in G.generators + False + >>> G.has(elem) + False + + See Also + ======== + + coset_factor, sympy.core.basic.Basic.has, __contains__ + + """ + if not isinstance(g, Permutation): + return False + if g.size != self.degree: + if strict: + return False + g = Permutation(g, size=self.degree) + if g in self.generators: + return True + return bool(self.coset_factor(g.array_form, True)) + + @property + def is_perfect(self): + """Return ``True`` if the group is perfect. + A group is perfect if it equals to its derived subgroup. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation(1,2,3)(4,5) + >>> b = Permutation(1,2,3,4,5) + >>> G = PermutationGroup([a, b]) + >>> G.is_perfect + False + + """ + if self._is_perfect is None: + self._is_perfect = self.equals(self.derived_subgroup()) + return self._is_perfect + + @property + def is_abelian(self): + """Test if the group is Abelian. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.is_abelian + False + >>> a = Permutation([0, 2, 1]) + >>> G = PermutationGroup([a]) + >>> G.is_abelian + True + + """ + if self._is_abelian is not None: + return self._is_abelian + + self._is_abelian = True + gens = [p._array_form for p in self.generators] + for x in gens: + for y in gens: + if y <= x: + continue + if not _af_commutes_with(x, y): + self._is_abelian = False + return False + return True + + def abelian_invariants(self): + """ + Returns the abelian invariants for the given group. + Let ``G`` be a nontrivial finite abelian group. Then G is isomorphic to + the direct product of finitely many nontrivial cyclic groups of + prime-power order. + + Explanation + =========== + + The prime-powers that occur as the orders of the factors are uniquely + determined by G. More precisely, the primes that occur in the orders of the + factors in any such decomposition of ``G`` are exactly the primes that divide + ``|G|`` and for any such prime ``p``, if the orders of the factors that are + p-groups in one such decomposition of ``G`` are ``p^{t_1} >= p^{t_2} >= ... p^{t_r}``, + then the orders of the factors that are p-groups in any such decomposition of ``G`` + are ``p^{t_1} >= p^{t_2} >= ... p^{t_r}``. + + The uniquely determined integers ``p^{t_1} >= p^{t_2} >= ... p^{t_r}``, taken + for all primes that divide ``|G|`` are called the invariants of the nontrivial + group ``G`` as suggested in ([14], p. 542). + + Notes + ===== + + We adopt the convention that the invariants of a trivial group are []. 
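+
+        Note also that the prime powers are kept whole rather than split
+        into primes; for the direct product of cyclic groups of orders
+        2 and 4 one expects:
+
+        >>> from sympy.combinatorics.named_groups import AbelianGroup
+        >>> AbelianGroup(2, 4).abelian_invariants()
+        [2, 4]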
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.abelian_invariants() + [2] + >>> from sympy.combinatorics import CyclicGroup + >>> G = CyclicGroup(7) + >>> G.abelian_invariants() + [7] + + """ + if self.is_trivial: + return [] + gns = self.generators + inv = [] + G = self + H = G.derived_subgroup() + Hgens = H.generators + for p in primefactors(G.order()): + ranks = [] + while True: + pows = [] + for g in gns: + elm = g**p + if not H.contains(elm): + pows.append(elm) + K = PermutationGroup(Hgens + pows) if pows else H + r = G.order()//K.order() + G = K + gns = pows + if r == 1: + break + ranks.append(multiplicity(p, r)) + + if ranks: + pows = [1]*ranks[0] + for i in ranks: + for j in range(i): + pows[j] = pows[j]*p + inv.extend(pows) + inv.sort() + return inv + + def is_elementary(self, p): + """Return ``True`` if the group is elementary abelian. An elementary + abelian group is a finite abelian group, where every nontrivial + element has order `p`, where `p` is a prime. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1]) + >>> G = PermutationGroup([a]) + >>> G.is_elementary(2) + True + >>> a = Permutation([0, 2, 1, 3]) + >>> b = Permutation([3, 1, 2, 0]) + >>> G = PermutationGroup([a, b]) + >>> G.is_elementary(2) + True + >>> G.is_elementary(3) + False + + """ + return self.is_abelian and all(g.order() == p for g in self.generators) + + def _eval_is_alt_sym_naive(self, only_sym=False, only_alt=False): + """A naive test using the group order.""" + if only_sym and only_alt: + raise ValueError( + "Both {} and {} cannot be set to True" + .format(only_sym, only_alt)) + + n = self.degree + sym_order = _factorial(n) + order = self.order() + + if order == sym_order: + self._is_sym = True + self._is_alt = False + if only_alt: + return False + return True + + elif 2*order == sym_order: + self._is_sym = False + self._is_alt = True + if only_sym: + return False + return True + + return False + + def _eval_is_alt_sym_monte_carlo(self, eps=0.05, perms=None): + """A test using monte-carlo algorithm. + + Parameters + ========== + + eps : float, optional + The criterion for the incorrect ``False`` return. + + perms : list[Permutation], optional + If explicitly given, it tests over the given candidates + for testing. + + If ``None``, it randomly computes ``N_eps`` and chooses + ``N_eps`` sample of the permutation from the group. + + See Also + ======== + + _check_cycles_alt_sym + """ + if perms is None: + n = self.degree + if n < 17: + c_n = 0.34 + else: + c_n = 0.57 + d_n = (c_n*log(2))/log(n) + N_eps = int(-log(eps)/d_n) + + perms = (self.random_pr() for i in range(N_eps)) + return self._eval_is_alt_sym_monte_carlo(perms=perms) + + for perm in perms: + if _check_cycles_alt_sym(perm): + return True + return False + + def is_alt_sym(self, eps=0.05, _random_prec=None): + r"""Monte Carlo test for the symmetric/alternating group for degrees + >= 8. + + Explanation + =========== + + More specifically, it is one-sided Monte Carlo with the + answer True (i.e., G is symmetric/alternating) guaranteed to be + correct, and the answer False being incorrect with probability eps. + + For degree < 8, the order of the group is checked so the test + is deterministic. 
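+
+        For instance, for degree less than 8 the answer is exact:
+
+        >>> from sympy.combinatorics.named_groups import AlternatingGroup
+        >>> AlternatingGroup(7).is_alt_sym()
+        True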
+ + Notes + ===== + + The algorithm itself uses some nontrivial results from group theory and + number theory: + 1) If a transitive group ``G`` of degree ``n`` contains an element + with a cycle of length ``n/2 < p < n-2`` for ``p`` a prime, ``G`` is the + symmetric or alternating group ([1], pp. 81-82) + 2) The proportion of elements in the symmetric/alternating group having + the property described in 1) is approximately `\log(2)/\log(n)` + ([1], p.82; [2], pp. 226-227). + The helper function ``_check_cycles_alt_sym`` is used to + go over the cycles in a permutation and look for ones satisfying 1). + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(10) + >>> D.is_alt_sym() + False + + See Also + ======== + + _check_cycles_alt_sym + + """ + if _random_prec is not None: + N_eps = _random_prec['N_eps'] + perms= (_random_prec[i] for i in range(N_eps)) + return self._eval_is_alt_sym_monte_carlo(perms=perms) + + if self._is_sym or self._is_alt: + return True + if self._is_sym is False and self._is_alt is False: + return False + + n = self.degree + if n < 8: + return self._eval_is_alt_sym_naive() + elif self.is_transitive(): + return self._eval_is_alt_sym_monte_carlo(eps=eps) + + self._is_sym, self._is_alt = False, False + return False + + @property + def is_nilpotent(self): + """Test if the group is nilpotent. + + Explanation + =========== + + A group `G` is nilpotent if it has a central series of finite length. + Alternatively, `G` is nilpotent if its lower central series terminates + with the trivial group. Every nilpotent group is also solvable + ([1], p.29, [12]). + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... CyclicGroup) + >>> C = CyclicGroup(6) + >>> C.is_nilpotent + True + >>> S = SymmetricGroup(5) + >>> S.is_nilpotent + False + + See Also + ======== + + lower_central_series, is_solvable + + """ + if self._is_nilpotent is None: + lcs = self.lower_central_series() + terminator = lcs[len(lcs) - 1] + gens = terminator.generators + degree = self.degree + identity = _af_new(list(range(degree))) + if all(g == identity for g in gens): + self._is_solvable = True + self._is_nilpotent = True + return True + else: + self._is_nilpotent = False + return False + else: + return self._is_nilpotent + + def is_normal(self, gr, strict=True): + """Test if ``G=self`` is a normal subgroup of ``gr``. + + Explanation + =========== + + G is normal in gr if + for each g2 in G, g1 in gr, ``g = g1*g2*g1**-1`` belongs to G + It is sufficient to check this for each g1 in gr.generators and + g2 in G.generators. 
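+
+        For example, the alternating group is normal in the symmetric
+        group of the same degree:
+
+        >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
+        ...     AlternatingGroup)
+        >>> AlternatingGroup(4).is_normal(SymmetricGroup(4))
+        True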
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([1, 2, 0]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G1 = PermutationGroup([a, Permutation([2, 0, 1])]) + >>> G1.is_normal(G) + True + + """ + if not self.is_subgroup(gr, strict=strict): + return False + d_self = self.degree + d_gr = gr.degree + if self.is_trivial and (d_self == d_gr or not strict): + return True + if self._is_abelian: + return True + new_self = self.copy() + if not strict and d_self != d_gr: + if d_self < d_gr: + new_self = PermGroup(new_self.generators + [Permutation(d_gr - 1)]) + else: + gr = PermGroup(gr.generators + [Permutation(d_self - 1)]) + gens2 = [p._array_form for p in new_self.generators] + gens1 = [p._array_form for p in gr.generators] + for g1 in gens1: + for g2 in gens2: + p = _af_rmuln(g1, g2, _af_invert(g1)) + if not new_self.coset_factor(p, True): + return False + return True + + def is_primitive(self, randomized=True): + r"""Test if a group is primitive. + + Explanation + =========== + + A permutation group ``G`` acting on a set ``S`` is called primitive if + ``S`` contains no nontrivial block under the action of ``G`` + (a block is nontrivial if its cardinality is more than ``1``). + + Notes + ===== + + The algorithm is described in [1], p.83, and uses the function + minimal_block to search for blocks of the form `\{0, k\}` for ``k`` + ranging over representatives for the orbits of `G_0`, the stabilizer of + ``0``. This algorithm has complexity `O(n^2)` where ``n`` is the degree + of the group, and will perform badly if `G_0` is small. + + There are two implementations offered: one finds `G_0` + deterministically using the function ``stabilizer``, and the other + (default) produces random elements of `G_0` using ``random_stab``, + hoping that they generate a subgroup of `G_0` with not too many more + orbits than `G_0` (this is suggested in [1], p.83). Behavior is changed + by the ``randomized`` flag. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(10) + >>> D.is_primitive() + False + + See Also + ======== + + minimal_block, random_stab + + """ + if self._is_primitive is not None: + return self._is_primitive + + if self.is_transitive() is False: + return False + + if randomized: + random_stab_gens = [] + v = self.schreier_vector(0) + for _ in range(len(self)): + random_stab_gens.append(self.random_stab(0, v)) + stab = PermutationGroup(random_stab_gens) + else: + stab = self.stabilizer(0) + orbits = stab.orbits() + for orb in orbits: + x = orb.pop() + if x != 0 and any(e != 0 for e in self.minimal_block([0, x])): + self._is_primitive = False + return False + self._is_primitive = True + return True + + def minimal_blocks(self, randomized=True): + ''' + For a transitive group, return the list of all minimal + block systems. If a group is intransitive, return `False`. 
+ + Examples + ======== + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> DihedralGroup(6).minimal_blocks() + [[0, 1, 0, 1, 0, 1], [0, 1, 2, 0, 1, 2]] + >>> G = PermutationGroup(Permutation(1,2,5)) + >>> G.minimal_blocks() + False + + See Also + ======== + + minimal_block, is_transitive, is_primitive + + ''' + def _number_blocks(blocks): + # number the blocks of a block system + # in order and return the number of + # blocks and the tuple with the + # reordering + n = len(blocks) + appeared = {} + m = 0 + b = [None]*n + for i in range(n): + if blocks[i] not in appeared: + appeared[blocks[i]] = m + b[i] = m + m += 1 + else: + b[i] = appeared[blocks[i]] + return tuple(b), m + + if not self.is_transitive(): + return False + blocks = [] + num_blocks = [] + rep_blocks = [] + if randomized: + random_stab_gens = [] + v = self.schreier_vector(0) + for i in range(len(self)): + random_stab_gens.append(self.random_stab(0, v)) + stab = PermutationGroup(random_stab_gens) + else: + stab = self.stabilizer(0) + orbits = stab.orbits() + for orb in orbits: + x = orb.pop() + if x != 0: + block = self.minimal_block([0, x]) + num_block, _ = _number_blocks(block) + # a representative block (containing 0) + rep = {j for j in range(self.degree) if num_block[j] == 0} + # check if the system is minimal with + # respect to the already discovere ones + minimal = True + blocks_remove_mask = [False] * len(blocks) + for i, r in enumerate(rep_blocks): + if len(r) > len(rep) and rep.issubset(r): + # i-th block system is not minimal + blocks_remove_mask[i] = True + elif len(r) < len(rep) and r.issubset(rep): + # the system being checked is not minimal + minimal = False + break + # remove non-minimal representative blocks + blocks = [b for i, b in enumerate(blocks) if not blocks_remove_mask[i]] + num_blocks = [n for i, n in enumerate(num_blocks) if not blocks_remove_mask[i]] + rep_blocks = [r for i, r in enumerate(rep_blocks) if not blocks_remove_mask[i]] + + if minimal and num_block not in num_blocks: + blocks.append(block) + num_blocks.append(num_block) + rep_blocks.append(rep) + return blocks + + @property + def is_solvable(self): + """Test if the group is solvable. + + ``G`` is solvable if its derived series terminates with the trivial + group ([1], p.29). + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> S = SymmetricGroup(3) + >>> S.is_solvable + True + + See Also + ======== + + is_nilpotent, derived_series + + """ + if self._is_solvable is None: + if self.order() % 2 != 0: + return True + ds = self.derived_series() + terminator = ds[len(ds) - 1] + gens = terminator.generators + degree = self.degree + identity = _af_new(list(range(degree))) + if all(g == identity for g in gens): + self._is_solvable = True + return True + else: + self._is_solvable = False + return False + else: + return self._is_solvable + + def is_subgroup(self, G, strict=True): + """Return ``True`` if all elements of ``self`` belong to ``G``. + + If ``strict`` is ``False`` then if ``self``'s degree is smaller + than ``G``'s, the elements will be resized to have the same degree. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> from sympy.combinatorics import SymmetricGroup, CyclicGroup + + Testing is strict by default: the degree of each group must be the + same: + + >>> p = Permutation(0, 1, 2, 3, 4, 5) + >>> G1 = PermutationGroup([Permutation(0, 1, 2), Permutation(0, 1)]) + >>> G2 = PermutationGroup([Permutation(0, 2), Permutation(0, 1, 2)]) + >>> G3 = PermutationGroup([p, p**2]) + >>> assert G1.order() == G2.order() == G3.order() == 6 + >>> G1.is_subgroup(G2) + True + >>> G1.is_subgroup(G3) + False + >>> G3.is_subgroup(PermutationGroup(G3[1])) + False + >>> G3.is_subgroup(PermutationGroup(G3[0])) + True + + To ignore the size, set ``strict`` to ``False``: + + >>> S3 = SymmetricGroup(3) + >>> S5 = SymmetricGroup(5) + >>> S3.is_subgroup(S5, strict=False) + True + >>> C7 = CyclicGroup(7) + >>> G = S5*C7 + >>> S5.is_subgroup(G, False) + True + >>> C7.is_subgroup(G, 0) + False + + """ + if isinstance(G, SymmetricPermutationGroup): + if self.degree != G.degree: + return False + return True + if not isinstance(G, PermutationGroup): + return False + if self == G or self.generators[0]==Permutation(): + return True + if G.order() % self.order() != 0: + return False + if self.degree == G.degree or \ + (self.degree < G.degree and not strict): + gens = self.generators + else: + return False + return all(G.contains(g, strict=strict) for g in gens) + + @property + def is_polycyclic(self): + """Return ``True`` if a group is polycyclic. A group is polycyclic if + it has a subnormal series with cyclic factors. For finite groups, + this is the same as if the group is solvable. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1, 3]) + >>> b = Permutation([2, 0, 1, 3]) + >>> G = PermutationGroup([a, b]) + >>> G.is_polycyclic + True + + """ + return self.is_solvable + + def is_transitive(self, strict=True): + """Test if the group is transitive. + + Explanation + =========== + + A group is transitive if it has a single orbit. + + If ``strict`` is ``False`` the group is transitive if it has + a single orbit of length different from 1. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1, 3]) + >>> b = Permutation([2, 0, 1, 3]) + >>> G1 = PermutationGroup([a, b]) + >>> G1.is_transitive() + False + >>> G1.is_transitive(strict=False) + True + >>> c = Permutation([2, 3, 0, 1]) + >>> G2 = PermutationGroup([a, c]) + >>> G2.is_transitive() + True + >>> d = Permutation([1, 0, 2, 3]) + >>> e = Permutation([0, 1, 3, 2]) + >>> G3 = PermutationGroup([d, e]) + >>> G3.is_transitive() or G3.is_transitive(strict=False) + False + + """ + if self._is_transitive: # strict or not, if True then True + return self._is_transitive + if strict: + if self._is_transitive is not None: # we only store strict=True + return self._is_transitive + + ans = len(self.orbit(0)) == self.degree + self._is_transitive = ans + return ans + + got_orb = False + for x in self.orbits(): + if len(x) > 1: + if got_orb: + return False + got_orb = True + return got_orb + + @property + def is_trivial(self): + """Test if the group is the trivial group. + + This is true if the group contains only the identity permutation. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> G = PermutationGroup([Permutation([0, 1, 2])]) + >>> G.is_trivial + True + + """ + if self._is_trivial is None: + self._is_trivial = len(self) == 1 and self[0].is_Identity + return self._is_trivial + + def lower_central_series(self): + r"""Return the lower central series for the group. + + The lower central series for a group `G` is the series + `G = G_0 > G_1 > G_2 > \ldots` where + `G_k = [G, G_{k-1}]`, i.e. every term after the first is equal to the + commutator of `G` and the previous term in `G1` ([1], p.29). + + Returns + ======= + + A list of permutation groups in the order `G = G_0, G_1, G_2, \ldots` + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (AlternatingGroup, + ... DihedralGroup) + >>> A = AlternatingGroup(4) + >>> len(A.lower_central_series()) + 2 + >>> A.lower_central_series()[1].is_subgroup(DihedralGroup(2)) + True + + See Also + ======== + + commutator, derived_series + + """ + res = [self] + current = self + nxt = self.commutator(self, current) + while not current.is_subgroup(nxt): + res.append(nxt) + current = nxt + nxt = self.commutator(self, current) + return res + + @property + def max_div(self): + """Maximum proper divisor of the degree of a permutation group. + + Explanation + =========== + + Obviously, this is the degree divided by its minimal proper divisor + (larger than ``1``, if one exists). As it is guaranteed to be prime, + the ``sieve`` from ``sympy.ntheory`` is used. + This function is also used as an optimization tool for the functions + ``minimal_block`` and ``_union_find_merge``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> G = PermutationGroup([Permutation([0, 2, 1, 3])]) + >>> G.max_div + 2 + + See Also + ======== + + minimal_block, _union_find_merge + + """ + if self._max_div is not None: + return self._max_div + n = self.degree + if n == 1: + return 1 + for x in sieve: + if n % x == 0: + d = n//x + self._max_div = d + return d + + def minimal_block(self, points): + r"""For a transitive group, finds the block system generated by + ``points``. + + Explanation + =========== + + If a group ``G`` acts on a set ``S``, a nonempty subset ``B`` of ``S`` + is called a block under the action of ``G`` if for all ``g`` in ``G`` + we have ``gB = B`` (``g`` fixes ``B``) or ``gB`` and ``B`` have no + common points (``g`` moves ``B`` entirely). ([1], p.23; [6]). + + The distinct translates ``gB`` of a block ``B`` for ``g`` in ``G`` + partition the set ``S`` and this set of translates is known as a block + system. Moreover, we obviously have that all blocks in the partition + have the same size, hence the block size divides ``|S|`` ([1], p.23). + A ``G``-congruence is an equivalence relation ``~`` on the set ``S`` + such that ``a ~ b`` implies ``g(a) ~ g(b)`` for all ``g`` in ``G``. + For a transitive group, the equivalence classes of a ``G``-congruence + and the blocks of a block system are the same thing ([1], p.23). + + The algorithm below checks the group for transitivity, and then finds + the ``G``-congruence generated by the pairs ``(p_0, p_1), (p_0, p_2), + ..., (p_0,p_{k-1})`` which is the same as finding the maximal block + system (i.e., the one with minimum block size) such that + ``p_0, ..., p_{k-1}`` are in the same block ([1], p.83). 
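+
+        For instance, in the cyclic group of order 6 the congruence
+        generated by ``0 ~ 2`` also forces ``2 ~ 4``, ``1 ~ 3`` and
+        ``3 ~ 5``, so the blocks should be ``{0, 2, 4}`` and ``{1, 3, 5}``:
+
+        >>> from sympy.combinatorics.named_groups import CyclicGroup
+        >>> CyclicGroup(6).minimal_block([0, 2])
+        [0, 1, 0, 1, 0, 1]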
+ + It is an implementation of Atkinson's algorithm, as suggested in [1], + and manipulates an equivalence relation on the set ``S`` using a + union-find data structure. The running time is just above + `O(|points||S|)`. ([1], pp. 83-87; [7]). + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(10) + >>> D.minimal_block([0, 5]) + [0, 1, 2, 3, 4, 0, 1, 2, 3, 4] + >>> D.minimal_block([0, 1]) + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + + See Also + ======== + + _union_find_rep, _union_find_merge, is_transitive, is_primitive + + """ + if not self.is_transitive(): + return False + n = self.degree + gens = self.generators + # initialize the list of equivalence class representatives + parents = list(range(n)) + ranks = [1]*n + not_rep = [] + k = len(points) + # the block size must divide the degree of the group + if k > self.max_div: + return [0]*n + for i in range(k - 1): + parents[points[i + 1]] = points[0] + not_rep.append(points[i + 1]) + ranks[points[0]] = k + i = 0 + len_not_rep = k - 1 + while i < len_not_rep: + gamma = not_rep[i] + i += 1 + for gen in gens: + # find has side effects: performs path compression on the list + # of representatives + delta = self._union_find_rep(gamma, parents) + # union has side effects: performs union by rank on the list + # of representatives + temp = self._union_find_merge(gen(gamma), gen(delta), ranks, + parents, not_rep) + if temp == -1: + return [0]*n + len_not_rep += temp + for i in range(n): + # force path compression to get the final state of the equivalence + # relation + self._union_find_rep(i, parents) + + # rewrite result so that block representatives are minimal + new_reps = {} + return [new_reps.setdefault(r, i) for i, r in enumerate(parents)] + + def conjugacy_class(self, x): + r"""Return the conjugacy class of an element in the group. + + Explanation + =========== + + The conjugacy class of an element ``g`` in a group ``G`` is the set of + elements ``x`` in ``G`` that are conjugate with ``g``, i.e. for which + + ``g = xax^{-1}`` + + for some ``a`` in ``G``. + + Note that conjugacy is an equivalence relation, and therefore that + conjugacy classes are partitions of ``G``. For a list of all the + conjugacy classes of the group, use the conjugacy_classes() method. + + In a permutation group, each conjugacy class corresponds to a particular + `cycle structure': for example, in ``S_3``, the conjugacy classes are: + + * the identity class, ``{()}`` + * all transpositions, ``{(1 2), (1 3), (2 3)}`` + * all 3-cycles, ``{(1 2 3), (1 3 2)}`` + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, SymmetricGroup + >>> S3 = SymmetricGroup(3) + >>> S3.conjugacy_class(Permutation(0, 1, 2)) + {(0 1 2), (0 2 1)} + + Notes + ===== + + This procedure computes the conjugacy class directly by finding the + orbit of the element under conjugation in G. This algorithm is only + feasible for permutation groups of relatively small order, but is like + the orbit() function itself in that respect. + """ + # Ref: "Computing the conjugacy classes of finite groups"; Butler, G. + # Groups '93 Galway/St Andrews; edited by Campbell, C. M. 
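+        # Breadth-first search over the conjugation orbit of x: conjugate
+        # every element found in the previous round by each generator and
+        # keep anything not seen before, until a round adds nothing new.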
+ new_class = {x} + last_iteration = new_class + + while len(last_iteration) > 0: + this_iteration = set() + + for y in last_iteration: + for s in self.generators: + conjugated = s * y * (~s) + if conjugated not in new_class: + this_iteration.add(conjugated) + + new_class.update(last_iteration) + last_iteration = this_iteration + + return new_class + + + def conjugacy_classes(self): + r"""Return the conjugacy classes of the group. + + Explanation + =========== + + As described in the documentation for the .conjugacy_class() function, + conjugacy is an equivalence relation on a group G which partitions the + set of elements. This method returns a list of all these conjugacy + classes of G. + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> SymmetricGroup(3).conjugacy_classes() + [{(2)}, {(0 1 2), (0 2 1)}, {(0 2), (1 2), (2)(0 1)}] + + """ + identity = _af_new(list(range(self.degree))) + known_elements = {identity} + classes = [known_elements.copy()] + + for x in self.generate(): + if x not in known_elements: + new_class = self.conjugacy_class(x) + classes.append(new_class) + known_elements.update(new_class) + + return classes + + def normal_closure(self, other, k=10): + r"""Return the normal closure of a subgroup/set of permutations. + + Explanation + =========== + + If ``S`` is a subset of a group ``G``, the normal closure of ``A`` in ``G`` + is defined as the intersection of all normal subgroups of ``G`` that + contain ``A`` ([1], p.14). Alternatively, it is the group generated by + the conjugates ``x^{-1}yx`` for ``x`` a generator of ``G`` and ``y`` a + generator of the subgroup ``\left\langle S\right\rangle`` generated by + ``S`` (for some chosen generating set for ``\left\langle S\right\rangle``) + ([1], p.73). + + Parameters + ========== + + other + a subgroup/list of permutations/single permutation + k + an implementation-specific parameter that determines the number + of conjugates that are adjoined to ``other`` at once + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... CyclicGroup, AlternatingGroup) + >>> S = SymmetricGroup(5) + >>> C = CyclicGroup(5) + >>> G = S.normal_closure(C) + >>> G.order() + 60 + >>> G.is_subgroup(AlternatingGroup(5)) + True + + See Also + ======== + + commutator, derived_subgroup, random_pr + + Notes + ===== + + The algorithm is described in [1], pp. 73-74; it makes use of the + generation of random elements for permutation groups by the product + replacement algorithm. 
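+
+        As a further illustration, the conjugates of a transposition are
+        all of the transpositions, and these generate the full symmetric
+        group, so the normal closure of a transposition in the symmetric
+        group should be the whole group:
+
+        >>> from sympy.combinatorics import Permutation
+        >>> from sympy.combinatorics.named_groups import SymmetricGroup
+        >>> S4 = SymmetricGroup(4)
+        >>> S4.normal_closure(Permutation(3)(0, 1)).order()
+        24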
+ + """ + if hasattr(other, 'generators'): + degree = self.degree + identity = _af_new(list(range(degree))) + + if all(g == identity for g in other.generators): + return other + Z = PermutationGroup(other.generators[:]) + base, strong_gens = Z.schreier_sims_incremental() + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + basic_orbits, basic_transversals = \ + _orbits_transversals_from_bsgs(base, strong_gens_distr) + + self._random_pr_init(r=10, n=20) + + _loop = True + while _loop: + Z._random_pr_init(r=10, n=10) + for _ in range(k): + g = self.random_pr() + h = Z.random_pr() + conj = h^g + res = _strip(conj, base, basic_orbits, basic_transversals) + if res[0] != identity or res[1] != len(base) + 1: + gens = Z.generators + gens.append(conj) + Z = PermutationGroup(gens) + strong_gens.append(conj) + temp_base, temp_strong_gens = \ + Z.schreier_sims_incremental(base, strong_gens) + base, strong_gens = temp_base, temp_strong_gens + strong_gens_distr = \ + _distribute_gens_by_base(base, strong_gens) + basic_orbits, basic_transversals = \ + _orbits_transversals_from_bsgs(base, + strong_gens_distr) + _loop = False + for g in self.generators: + for h in Z.generators: + conj = h^g + res = _strip(conj, base, basic_orbits, + basic_transversals) + if res[0] != identity or res[1] != len(base) + 1: + _loop = True + break + if _loop: + break + return Z + elif hasattr(other, '__getitem__'): + return self.normal_closure(PermutationGroup(other)) + elif hasattr(other, 'array_form'): + return self.normal_closure(PermutationGroup([other])) + + def orbit(self, alpha, action='tuples'): + r"""Compute the orbit of alpha `\{g(\alpha) | g \in G\}` as a set. + + Explanation + =========== + + The time complexity of the algorithm used here is `O(|Orb|*r)` where + `|Orb|` is the size of the orbit and ``r`` is the number of generators of + the group. For a more detailed analysis, see [1], p.78, [2], pp. 19-21. + Here alpha can be a single point, or a list of points. + + If alpha is a single point, the ordinary orbit is computed. + if alpha is a list of points, there are three available options: + + 'union' - computes the union of the orbits of the points in the list + 'tuples' - computes the orbit of the list interpreted as an ordered + tuple under the group action ( i.e., g((1,2,3)) = (g(1), g(2), g(3)) ) + 'sets' - computes the orbit of the list interpreted as a sets + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([1, 2, 0, 4, 5, 6, 3]) + >>> G = PermutationGroup([a]) + >>> G.orbit(0) + {0, 1, 2} + >>> G.orbit([0, 4], 'union') + {0, 1, 2, 3, 4, 5, 6} + + See Also + ======== + + orbit_transversal + + """ + return _orbit(self.degree, self.generators, alpha, action) + + def orbit_rep(self, alpha, beta, schreier_vector=None): + """Return a group element which sends ``alpha`` to ``beta``. + + Explanation + =========== + + If ``beta`` is not in the orbit of ``alpha``, the function returns + ``False``. This implementation makes use of the schreier vector. 
+ For a proof of correctness, see [1], p.80 + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> G = AlternatingGroup(5) + >>> G.orbit_rep(0, 4) + (0 4 1 2 3) + + See Also + ======== + + schreier_vector + + """ + if schreier_vector is None: + schreier_vector = self.schreier_vector(alpha) + if schreier_vector[beta] is None: + return False + k = schreier_vector[beta] + gens = [x._array_form for x in self.generators] + a = [] + while k != -1: + a.append(gens[k]) + beta = gens[k].index(beta) # beta = (~gens[k])(beta) + k = schreier_vector[beta] + if a: + return _af_new(_af_rmuln(*a)) + else: + return _af_new(list(range(self._degree))) + + def orbit_transversal(self, alpha, pairs=False): + r"""Computes a transversal for the orbit of ``alpha`` as a set. + + Explanation + =========== + + For a permutation group `G`, a transversal for the orbit + `Orb = \{g(\alpha) | g \in G\}` is a set + `\{g_\beta | g_\beta(\alpha) = \beta\}` for `\beta \in Orb`. + Note that there may be more than one possible transversal. + If ``pairs`` is set to ``True``, it returns the list of pairs + `(\beta, g_\beta)`. For a proof of correctness, see [1], p.79 + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> G = DihedralGroup(6) + >>> G.orbit_transversal(0) + [(5), (0 1 2 3 4 5), (0 5)(1 4)(2 3), (0 2 4)(1 3 5), (5)(0 4)(1 3), (0 3)(1 4)(2 5)] + + See Also + ======== + + orbit + + """ + return _orbit_transversal(self._degree, self.generators, alpha, pairs) + + def orbits(self, rep=False): + """Return the orbits of ``self``, ordered according to lowest element + in each orbit. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation(1, 5)(2, 3)(4, 0, 6) + >>> b = Permutation(1, 5)(3, 4)(2, 6, 0) + >>> G = PermutationGroup([a, b]) + >>> G.orbits() + [{0, 2, 3, 4, 6}, {1, 5}] + """ + return _orbits(self._degree, self._generators) + + def order(self): + """Return the order of the group: the number of permutations that + can be generated from elements of the group. + + The number of permutations comprising the group is given by + ``len(group)``; the length of each permutation in the group is + given by ``group.size``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + + >>> a = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a]) + >>> G.degree + 3 + >>> len(G) + 1 + >>> G.order() + 2 + >>> list(G.generate()) + [(2), (2)(0 1)] + + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.order() + 6 + + See Also + ======== + + degree + + """ + if self._order is not None: + return self._order + if self._is_sym: + n = self._degree + self._order = factorial(n) + return self._order + if self._is_alt: + n = self._degree + self._order = factorial(n)/2 + return self._order + + m = prod([len(x) for x in self.basic_transversals]) + self._order = m + return m + + def index(self, H): + """ + Returns the index of a permutation group. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation(1,2,3) + >>> b =Permutation(3) + >>> G = PermutationGroup([a]) + >>> H = PermutationGroup([b]) + >>> G.index(H) + 3 + + """ + if H.is_subgroup(self): + return self.order()//H.order() + + @property + def is_symmetric(self): + """Return ``True`` if the group is symmetric. 
+ + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> g = SymmetricGroup(5) + >>> g.is_symmetric + True + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> g = PermutationGroup( + ... Permutation(0, 1, 2, 3, 4), + ... Permutation(2, 3)) + >>> g.is_symmetric + True + + Notes + ===== + + This uses a naive test involving the computation of the full + group order. + If you need more quicker taxonomy for large groups, you can use + :meth:`PermutationGroup.is_alt_sym`. + However, :meth:`PermutationGroup.is_alt_sym` may not be accurate + and is not able to distinguish between an alternating group and + a symmetric group. + + See Also + ======== + + is_alt_sym + """ + _is_sym = self._is_sym + if _is_sym is not None: + return _is_sym + + n = self.degree + if n >= 8: + if self.is_transitive(): + _is_alt_sym = self._eval_is_alt_sym_monte_carlo() + if _is_alt_sym: + if any(g.is_odd for g in self.generators): + self._is_sym, self._is_alt = True, False + return True + + self._is_sym, self._is_alt = False, True + return False + + return self._eval_is_alt_sym_naive(only_sym=True) + + self._is_sym, self._is_alt = False, False + return False + + return self._eval_is_alt_sym_naive(only_sym=True) + + + @property + def is_alternating(self): + """Return ``True`` if the group is alternating. + + Examples + ======== + + >>> from sympy.combinatorics import AlternatingGroup + >>> g = AlternatingGroup(5) + >>> g.is_alternating + True + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> g = PermutationGroup( + ... Permutation(0, 1, 2, 3, 4), + ... Permutation(2, 3, 4)) + >>> g.is_alternating + True + + Notes + ===== + + This uses a naive test involving the computation of the full + group order. + If you need more quicker taxonomy for large groups, you can use + :meth:`PermutationGroup.is_alt_sym`. + However, :meth:`PermutationGroup.is_alt_sym` may not be accurate + and is not able to distinguish between an alternating group and + a symmetric group. + + See Also + ======== + + is_alt_sym + """ + _is_alt = self._is_alt + if _is_alt is not None: + return _is_alt + + n = self.degree + if n >= 8: + if self.is_transitive(): + _is_alt_sym = self._eval_is_alt_sym_monte_carlo() + if _is_alt_sym: + if all(g.is_even for g in self.generators): + self._is_sym, self._is_alt = False, True + return True + + self._is_sym, self._is_alt = True, False + return False + + return self._eval_is_alt_sym_naive(only_alt=True) + + self._is_sym, self._is_alt = False, False + return False + + return self._eval_is_alt_sym_naive(only_alt=True) + + @classmethod + def _distinct_primes_lemma(cls, primes): + """Subroutine to test if there is only one cyclic group for the + order.""" + primes = sorted(primes) + l = len(primes) + for i in range(l): + for j in range(i+1, l): + if primes[j] % primes[i] == 1: + return None + return True + + @property + def is_cyclic(self): + r""" + Return ``True`` if the group is Cyclic. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AbelianGroup + >>> G = AbelianGroup(3, 4) + >>> G.is_cyclic + True + >>> G = AbelianGroup(4, 4) + >>> G.is_cyclic + False + + Notes + ===== + + If the order of a group $n$ can be factored into the distinct + primes $p_1, p_2, \dots , p_s$ and if + + .. math:: + \forall i, j \in \{1, 2, \dots, s \}: + p_i \not \equiv 1 \pmod {p_j} + + holds true, there is only one group of the order $n$ which + is a cyclic group [1]_. 
This is a generalization of the lemma + that the group of order $15, 35, \dots$ are cyclic. + + And also, these additional lemmas can be used to test if a + group is cyclic if the order of the group is already found. + + - If the group is abelian and the order of the group is + square-free, the group is cyclic. + - If the order of the group is less than $6$ and is not $4$, the + group is cyclic. + - If the order of the group is prime, the group is cyclic. + + References + ========== + + .. [1] 1978: John S. Rose: A Course on Group Theory, + Introduction to Finite Group Theory: 1.4 + """ + if self._is_cyclic is not None: + return self._is_cyclic + + if len(self.generators) == 1: + self._is_cyclic = True + self._is_abelian = True + return True + + if self._is_abelian is False: + self._is_cyclic = False + return False + + order = self.order() + + if order < 6: + self._is_abelian = True + if order != 4: + self._is_cyclic = True + return True + + factors = factorint(order) + if all(v == 1 for v in factors.values()): + if self._is_abelian: + self._is_cyclic = True + return True + + primes = list(factors.keys()) + if PermutationGroup._distinct_primes_lemma(primes) is True: + self._is_cyclic = True + self._is_abelian = True + return True + + if not self.is_abelian: + self._is_cyclic = False + return False + + self._is_cyclic = all( + any(g**(order//p) != self.identity for g in self.generators) + for p, e in factors.items() if e > 1 + ) + return self._is_cyclic + + @property + def is_dihedral(self): + r""" + Return ``True`` if the group is dihedral. + + Examples + ======== + + >>> from sympy.combinatorics.perm_groups import PermutationGroup + >>> from sympy.combinatorics.permutations import Permutation + >>> from sympy.combinatorics.named_groups import SymmetricGroup, CyclicGroup + >>> G = PermutationGroup(Permutation(1, 6)(2, 5)(3, 4), Permutation(0, 1, 2, 3, 4, 5, 6)) + >>> G.is_dihedral + True + >>> G = SymmetricGroup(3) + >>> G.is_dihedral + True + >>> G = CyclicGroup(6) + >>> G.is_dihedral + False + + References + ========== + + .. [Di1] https://math.stackexchange.com/a/827273 + .. [Di2] https://kconrad.math.uconn.edu/blurbs/grouptheory/dihedral.pdf + .. [Di3] https://kconrad.math.uconn.edu/blurbs/grouptheory/dihedral2.pdf + .. [Di4] https://en.wikipedia.org/wiki/Dihedral_group + """ + if self._is_dihedral is not None: + return self._is_dihedral + + order = self.order() + + if order % 2 == 1: + self._is_dihedral = False + return False + if order == 2: + self._is_dihedral = True + return True + if order == 4: + # The dihedral group of order 4 is the Klein 4-group. + self._is_dihedral = not self.is_cyclic + return self._is_dihedral + if self.is_abelian: + # The only abelian dihedral groups are the ones of orders 2 and 4. + self._is_dihedral = False + return False + + # Now we know the group is of even order >= 6, and nonabelian. + n = order // 2 + + # Handle special cases where there are exactly two generators. 
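+        # (A pair of involutions generates a dihedral group, as does an
+        # element of order n together with an involution inverting it, so
+        # in these cases no enumeration of group elements is needed.)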
+ gens = self.generators + if len(gens) == 2: + x, y = gens + a, b = x.order(), y.order() + # Make a >= b + if a < b: + x, y, a, b = y, x, b, a + # Using Theorem 2.1 of [Di3]: + if a == 2 == b: + self._is_dihedral = True + return True + # Using Theorem 1.1 of [Di3]: + if a == n and b == 2 and y*x*y == ~x: + self._is_dihedral = True + return True + + # Proceed with algorithm of [Di1] + # Find elements of orders 2 and n + order_2, order_n = [], [] + for p in self.elements: + k = p.order() + if k == 2: + order_2.append(p) + elif k == n: + order_n.append(p) + + if len(order_2) != n + 1 - (n % 2): + self._is_dihedral = False + return False + + if not order_n: + self._is_dihedral = False + return False + + x = order_n[0] + # Want an element y of order 2 that is not a power of x + # (i.e. that is not the 180-deg rotation, when n is even). + y = order_2[0] + if n % 2 == 0 and y == x**(n//2): + y = order_2[1] + + self._is_dihedral = (y*x*y == ~x) + return self._is_dihedral + + def pointwise_stabilizer(self, points, incremental=True): + r"""Return the pointwise stabilizer for a set of points. + + Explanation + =========== + + For a permutation group `G` and a set of points + `\{p_1, p_2,\ldots, p_k\}`, the pointwise stabilizer of + `p_1, p_2, \ldots, p_k` is defined as + `G_{p_1,\ldots, p_k} = + \{g\in G | g(p_i) = p_i \forall i\in\{1, 2,\ldots,k\}\}` ([1],p20). + It is a subgroup of `G`. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> S = SymmetricGroup(7) + >>> Stab = S.pointwise_stabilizer([2, 3, 5]) + >>> Stab.is_subgroup(S.stabilizer(2).stabilizer(3).stabilizer(5)) + True + + See Also + ======== + + stabilizer, schreier_sims_incremental + + Notes + ===== + + When incremental == True, + rather than the obvious implementation using successive calls to + ``.stabilizer()``, this uses the incremental Schreier-Sims algorithm + to obtain a base with starting segment - the given points. + + """ + if incremental: + base, strong_gens = self.schreier_sims_incremental(base=points) + stab_gens = [] + degree = self.degree + for gen in strong_gens: + if [gen(point) for point in points] == points: + stab_gens.append(gen) + if not stab_gens: + stab_gens = _af_new(list(range(degree))) + return PermutationGroup(stab_gens) + else: + gens = self._generators + degree = self.degree + for x in points: + gens = _stabilizer(degree, gens, x) + return PermutationGroup(gens) + + def make_perm(self, n, seed=None): + """ + Multiply ``n`` randomly selected permutations from + pgroup together, starting with the identity + permutation. If ``n`` is a list of integers, those + integers will be used to select the permutations and they + will be applied in L to R order: make_perm((A, B, C)) will + give CBA(I) where I is the identity permutation. + + ``seed`` is used to set the seed for the random selection + of permutations from pgroup. If this is a list of integers, + the corresponding permutations from pgroup will be selected + in the order give. This is mainly used for testing purposes. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a, b = [Permutation([1, 0, 3, 2]), Permutation([1, 3, 0, 2])] + >>> G = PermutationGroup([a, b]) + >>> G.make_perm(1, [0]) + (0 1)(2 3) + >>> G.make_perm(3, [0, 1, 0]) + (0 2 3 1) + >>> G.make_perm([0, 1, 0]) + (0 2 3 1) + + See Also + ======== + + random + """ + if is_sequence(n): + if seed is not None: + raise ValueError('If n is a sequence, seed should be None') + n, seed = len(n), n + else: + try: + n = int(n) + except TypeError: + raise ValueError('n must be an integer or a sequence.') + randomrange = _randrange(seed) + + # start with the identity permutation + result = Permutation(list(range(self.degree))) + m = len(self) + for _ in range(n): + p = self[randomrange(m)] + result = rmul(result, p) + return result + + def random(self, af=False): + """Return a random group element + """ + rank = randrange(self.order()) + return self.coset_unrank(rank, af) + + def random_pr(self, gen_count=11, iterations=50, _random_prec=None): + """Return a random group element using product replacement. + + Explanation + =========== + + For the details of the product replacement algorithm, see + ``_random_pr_init`` In ``random_pr`` the actual 'product replacement' + is performed. Notice that if the attribute ``_random_gens`` + is empty, it needs to be initialized by ``_random_pr_init``. + + See Also + ======== + + _random_pr_init + + """ + if self._random_gens == []: + self._random_pr_init(gen_count, iterations) + random_gens = self._random_gens + r = len(random_gens) - 1 + + # handle randomized input for testing purposes + if _random_prec is None: + s = randrange(r) + t = randrange(r - 1) + if t == s: + t = r - 1 + x = choice([1, 2]) + e = choice([-1, 1]) + else: + s = _random_prec['s'] + t = _random_prec['t'] + if t == s: + t = r - 1 + x = _random_prec['x'] + e = _random_prec['e'] + + if x == 1: + random_gens[s] = _af_rmul(random_gens[s], _af_pow(random_gens[t], e)) + random_gens[r] = _af_rmul(random_gens[r], random_gens[s]) + else: + random_gens[s] = _af_rmul(_af_pow(random_gens[t], e), random_gens[s]) + random_gens[r] = _af_rmul(random_gens[s], random_gens[r]) + return _af_new(random_gens[r]) + + def random_stab(self, alpha, schreier_vector=None, _random_prec=None): + """Random element from the stabilizer of ``alpha``. + + The schreier vector for ``alpha`` is an optional argument used + for speeding up repeated calls. The algorithm is described in [1], p.81 + + See Also + ======== + + random_pr, orbit_rep + + """ + if schreier_vector is None: + schreier_vector = self.schreier_vector(alpha) + if _random_prec is None: + rand = self.random_pr() + else: + rand = _random_prec['rand'] + beta = rand(alpha) + h = self.orbit_rep(alpha, beta, schreier_vector) + return rmul(~h, rand) + + def schreier_sims(self): + """Schreier-Sims algorithm. + + Explanation + =========== + + It computes the generators of the chain of stabilizers + `G > G_{b_1} > .. > G_{b1,..,b_r} > 1` + in which `G_{b_1,..,b_i}` stabilizes `b_1,..,b_i`, + and the corresponding ``s`` cosets. + An element of the group can be written as the product + `h_1*..*h_s`. + + We use the incremental Schreier-Sims algorithm. 
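+
+        One consequence of having a base and strong generating set is
+        that the sizes of the basic transversals multiply to the group
+        order, which gives a quick consistency check:
+
+        >>> from math import prod
+        >>> from sympy.combinatorics.named_groups import SymmetricGroup
+        >>> S = SymmetricGroup(4)
+        >>> S.schreier_sims()
+        >>> prod(len(t) for t in S.basic_transversals) == S.order()
+        True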
+ + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.schreier_sims() + >>> G.basic_transversals + [{0: (2)(0 1), 1: (2), 2: (1 2)}, + {0: (2), 2: (0 2)}] + """ + if self._transversals: + return + self._schreier_sims() + return + + def _schreier_sims(self, base=None): + schreier = self.schreier_sims_incremental(base=base, slp_dict=True) + base, strong_gens = schreier[:2] + self._base = base + self._strong_gens = strong_gens + self._strong_gens_slp = schreier[2] + if not base: + self._transversals = [] + self._basic_orbits = [] + return + + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + basic_orbits, transversals, slps = _orbits_transversals_from_bsgs(base,\ + strong_gens_distr, slp=True) + + # rewrite the indices stored in slps in terms of strong_gens + for i, slp in enumerate(slps): + gens = strong_gens_distr[i] + for k in slp: + slp[k] = [strong_gens.index(gens[s]) for s in slp[k]] + + self._transversals = transversals + self._basic_orbits = [sorted(x) for x in basic_orbits] + self._transversal_slp = slps + + def schreier_sims_incremental(self, base=None, gens=None, slp_dict=False): + """Extend a sequence of points and generating set to a base and strong + generating set. + + Parameters + ========== + + base + The sequence of points to be extended to a base. Optional + parameter with default value ``[]``. + gens + The generating set to be extended to a strong generating set + relative to the base obtained. Optional parameter with default + value ``self.generators``. + + slp_dict + If `True`, return a dictionary `{g: gens}` for each strong + generator `g` where `gens` is a list of strong generators + coming before `g` in `strong_gens`, such that the product + of the elements of `gens` is equal to `g`. + + Returns + ======= + + (base, strong_gens) + ``base`` is the base obtained, and ``strong_gens`` is the strong + generating set relative to it. The original parameters ``base``, + ``gens`` remain unchanged. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> A = AlternatingGroup(7) + >>> base = [2, 3] + >>> seq = [2, 3] + >>> base, strong_gens = A.schreier_sims_incremental(base=seq) + >>> _verify_bsgs(A, base, strong_gens) + True + >>> base[:2] + [2, 3] + + Notes + ===== + + This version of the Schreier-Sims algorithm runs in polynomial time. + There are certain assumptions in the implementation - if the trivial + group is provided, ``base`` and ``gens`` are returned immediately, + as any sequence of points is a base for the trivial group. If the + identity is present in the generators ``gens``, it is removed as + it is a redundant generator. + The implementation is described in [1], pp. 90-93. 
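+
+        The strong generating set that is returned still generates the
+        original group, which gives another simple check:
+
+        >>> from sympy.combinatorics import PermutationGroup
+        >>> from sympy.combinatorics.named_groups import SymmetricGroup
+        >>> S = SymmetricGroup(5)
+        >>> base, strong_gens = S.schreier_sims_incremental()
+        >>> PermutationGroup(strong_gens).order() == S.order()
+        True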
+ + See Also + ======== + + schreier_sims, schreier_sims_random + + """ + if base is None: + base = [] + if gens is None: + gens = self.generators[:] + degree = self.degree + id_af = list(range(degree)) + # handle the trivial group + if len(gens) == 1 and gens[0].is_Identity: + if slp_dict: + return base, gens, {gens[0]: [gens[0]]} + return base, gens + # prevent side effects + _base, _gens = base[:], gens[:] + # remove the identity as a generator + _gens = [x for x in _gens if not x.is_Identity] + # make sure no generator fixes all base points + for gen in _gens: + if all(x == gen._array_form[x] for x in _base): + for new in id_af: + if gen._array_form[new] != new: + break + else: + assert None # can this ever happen? + _base.append(new) + # distribute generators according to basic stabilizers + strong_gens_distr = _distribute_gens_by_base(_base, _gens) + strong_gens_slp = [] + # initialize the basic stabilizers, basic orbits and basic transversals + orbs = {} + transversals = {} + slps = {} + base_len = len(_base) + for i in range(base_len): + transversals[i], slps[i] = _orbit_transversal(degree, strong_gens_distr[i], + _base[i], pairs=True, af=True, slp=True) + transversals[i] = dict(transversals[i]) + orbs[i] = list(transversals[i].keys()) + # main loop: amend the stabilizer chain until we have generators + # for all stabilizers + i = base_len - 1 + while i >= 0: + # this flag is used to continue with the main loop from inside + # a nested loop + continue_i = False + # test the generators for being a strong generating set + db = {} + for beta, u_beta in list(transversals[i].items()): + for j, gen in enumerate(strong_gens_distr[i]): + gb = gen._array_form[beta] + u1 = transversals[i][gb] + g1 = _af_rmul(gen._array_form, u_beta) + slp = [(i, g) for g in slps[i][beta]] + slp = [(i, j)] + slp + if g1 != u1: + # test if the schreier generator is in the i+1-th + # would-be basic stabilizer + y = True + try: + u1_inv = db[gb] + except KeyError: + u1_inv = db[gb] = _af_invert(u1) + schreier_gen = _af_rmul(u1_inv, g1) + u1_inv_slp = slps[i][gb][:] + u1_inv_slp.reverse() + u1_inv_slp = [(i, (g,)) for g in u1_inv_slp] + slp = u1_inv_slp + slp + h, j, slp = _strip_af(schreier_gen, _base, orbs, transversals, i, slp=slp, slps=slps) + if j <= base_len: + # new strong generator h at level j + y = False + elif h: + # h fixes all base points + y = False + moved = 0 + while h[moved] == moved: + moved += 1 + _base.append(moved) + base_len += 1 + strong_gens_distr.append([]) + if y is False: + # if a new strong generator is found, update the + # data structures and start over + h = _af_new(h) + strong_gens_slp.append((h, slp)) + for l in range(i + 1, j): + strong_gens_distr[l].append(h) + transversals[l], slps[l] =\ + _orbit_transversal(degree, strong_gens_distr[l], + _base[l], pairs=True, af=True, slp=True) + transversals[l] = dict(transversals[l]) + orbs[l] = list(transversals[l].keys()) + i = j - 1 + # continue main loop using the flag + continue_i = True + if continue_i is True: + break + if continue_i is True: + break + if continue_i is True: + continue + i -= 1 + + strong_gens = _gens[:] + + if slp_dict: + # create the list of the strong generators strong_gens and + # rewrite the indices of strong_gens_slp in terms of the + # elements of strong_gens + for k, slp in strong_gens_slp: + strong_gens.append(k) + for i in range(len(slp)): + s = slp[i] + if isinstance(s[1], tuple): + slp[i] = strong_gens_distr[s[0]][s[1][0]]**-1 + else: + slp[i] = strong_gens_distr[s[0]][s[1]] + strong_gens_slp = 
dict(strong_gens_slp) + # add the original generators + for g in _gens: + strong_gens_slp[g] = [g] + return (_base, strong_gens, strong_gens_slp) + + strong_gens.extend([k for k, _ in strong_gens_slp]) + return _base, strong_gens + + def schreier_sims_random(self, base=None, gens=None, consec_succ=10, + _random_prec=None): + r"""Randomized Schreier-Sims algorithm. + + Explanation + =========== + + The randomized Schreier-Sims algorithm takes the sequence ``base`` + and the generating set ``gens``, and extends ``base`` to a base, and + ``gens`` to a strong generating set relative to that base with + probability of a wrong answer at most `2^{-consec\_succ}`, + provided the random generators are sufficiently random. + + Parameters + ========== + + base + The sequence to be extended to a base. + gens + The generating set to be extended to a strong generating set. + consec_succ + The parameter defining the probability of a wrong answer. + _random_prec + An internal parameter used for testing purposes. + + Returns + ======= + + (base, strong_gens) + ``base`` is the base and ``strong_gens`` is the strong generating + set relative to it. + + Examples + ======== + + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> S = SymmetricGroup(5) + >>> base, strong_gens = S.schreier_sims_random(consec_succ=5) + >>> _verify_bsgs(S, base, strong_gens) #doctest: +SKIP + True + + Notes + ===== + + The algorithm is described in detail in [1], pp. 97-98. It extends + the orbits ``orbs`` and the permutation groups ``stabs`` to + basic orbits and basic stabilizers for the base and strong generating + set produced in the end. + The idea of the extension process + is to "sift" random group elements through the stabilizer chain + and amend the stabilizers/orbits along the way when a sift + is not successful. + The helper function ``_strip`` is used to attempt + to decompose a random group element according to the current + state of the stabilizer chain and report whether the element was + fully decomposed (successful sift) or not (unsuccessful sift). In + the latter case, the level at which the sift failed is reported and + used to amend ``stabs``, ``base``, ``gens`` and ``orbs`` accordingly. + The halting condition is for ``consec_succ`` consecutive successful + sifts to pass. This makes sure that the current ``base`` and ``gens`` + form a BSGS with probability at least `1 - 1/\text{consec\_succ}`. 
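+
+        As a rough sanity check (skipped, like the verification above, because
+        the algorithm is randomized), the returned strong generating set always
+        contains the input generators, so it generates the same group:
+
+        >>> from sympy.combinatorics import PermutationGroup
+        >>> PermutationGroup(strong_gens).order() == S.order() # doctest: +SKIP
+        True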
+ + See Also + ======== + + schreier_sims + + """ + if base is None: + base = [] + if gens is None: + gens = self.generators + base_len = len(base) + n = self.degree + # make sure no generator fixes all base points + for gen in gens: + if all(gen(x) == x for x in base): + new = 0 + while gen._array_form[new] == new: + new += 1 + base.append(new) + base_len += 1 + # distribute generators according to basic stabilizers + strong_gens_distr = _distribute_gens_by_base(base, gens) + # initialize the basic stabilizers, basic transversals and basic orbits + transversals = {} + orbs = {} + for i in range(base_len): + transversals[i] = dict(_orbit_transversal(n, strong_gens_distr[i], + base[i], pairs=True)) + orbs[i] = list(transversals[i].keys()) + # initialize the number of consecutive elements sifted + c = 0 + # start sifting random elements while the number of consecutive sifts + # is less than consec_succ + while c < consec_succ: + if _random_prec is None: + g = self.random_pr() + else: + g = _random_prec['g'].pop() + h, j = _strip(g, base, orbs, transversals) + y = True + # determine whether a new base point is needed + if j <= base_len: + y = False + elif not h.is_Identity: + y = False + moved = 0 + while h(moved) == moved: + moved += 1 + base.append(moved) + base_len += 1 + strong_gens_distr.append([]) + # if the element doesn't sift, amend the strong generators and + # associated stabilizers and orbits + if y is False: + for l in range(1, j): + strong_gens_distr[l].append(h) + transversals[l] = dict(_orbit_transversal(n, + strong_gens_distr[l], base[l], pairs=True)) + orbs[l] = list(transversals[l].keys()) + c = 0 + else: + c += 1 + # build the strong generating set + strong_gens = strong_gens_distr[0][:] + for gen in strong_gens_distr[1]: + if gen not in strong_gens: + strong_gens.append(gen) + return base, strong_gens + + def schreier_vector(self, alpha): + """Computes the schreier vector for ``alpha``. + + Explanation + =========== + + The Schreier vector efficiently stores information + about the orbit of ``alpha``. It can later be used to quickly obtain + elements of the group that send ``alpha`` to a particular element + in the orbit. Notice that the Schreier vector depends on the order + in which the group generators are listed. For a definition, see [3]. + Since list indices start from zero, we adopt the convention to use + "None" instead of 0 to signify that an element does not belong + to the orbit. + For the algorithm and its correctness, see [2], pp.78-80. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([2, 4, 6, 3, 1, 5, 0]) + >>> b = Permutation([0, 1, 3, 5, 4, 6, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.schreier_vector(0) + [-1, None, 0, 1, None, 1, 0] + + See Also + ======== + + orbit + + """ + n = self.degree + v = [None]*n + v[alpha] = -1 + orb = [alpha] + used = [False]*n + used[alpha] = True + gens = self.generators + r = len(gens) + for b in orb: + for i in range(r): + temp = gens[i]._array_form[b] + if used[temp] is False: + orb.append(temp) + used[temp] = True + v[temp] = i + return v + + def stabilizer(self, alpha): + r"""Return the stabilizer subgroup of ``alpha``. + + Explanation + =========== + + The stabilizer of `\alpha` is the group `G_\alpha = + \{g \in G | g(\alpha) = \alpha\}`. + For a proof of correctness, see [1], p.79. 
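+        By the orbit-stabilizer theorem, `|G| = |G_\alpha| \cdot |\mathrm{Orb}(\alpha)|`,
+        so the index of the stabilizer in `G` equals the size of the orbit of
+        `\alpha`.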
+ + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> G = DihedralGroup(6) + >>> G.stabilizer(5) + PermutationGroup([ + (5)(0 4)(1 3)]) + + See Also + ======== + + orbit + + """ + return PermGroup(_stabilizer(self._degree, self._generators, alpha)) + + @property + def strong_gens(self): + r"""Return a strong generating set from the Schreier-Sims algorithm. + + Explanation + =========== + + A generating set `S = \{g_1, g_2, \dots, g_t\}` for a permutation group + `G` is a strong generating set relative to the sequence of points + (referred to as a "base") `(b_1, b_2, \dots, b_k)` if, for + `1 \leq i \leq k` we have that the intersection of the pointwise + stabilizer `G^{(i+1)} := G_{b_1, b_2, \dots, b_i}` with `S` generates + the pointwise stabilizer `G^{(i+1)}`. The concepts of a base and + strong generating set and their applications are discussed in depth + in [1], pp. 87-89 and [2], pp. 55-57. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(4) + >>> D.strong_gens + [(0 1 2 3), (0 3)(1 2), (1 3)] + >>> D.base + [0, 1] + + See Also + ======== + + base, basic_transversals, basic_orbits, basic_stabilizers + + """ + if self._strong_gens == []: + self.schreier_sims() + return self._strong_gens + + def subgroup(self, gens): + """ + Return the subgroup generated by `gens` which is a list of + elements of the group + """ + + if not all(g in self for g in gens): + raise ValueError("The group does not contain the supplied generators") + + G = PermutationGroup(gens) + return G + + def subgroup_search(self, prop, base=None, strong_gens=None, tests=None, + init_subgroup=None): + """Find the subgroup of all elements satisfying the property ``prop``. + + Explanation + =========== + + This is done by a depth-first search with respect to base images that + uses several tests to prune the search tree. + + Parameters + ========== + + prop + The property to be used. Has to be callable on group elements + and always return ``True`` or ``False``. It is assumed that + all group elements satisfying ``prop`` indeed form a subgroup. + base + A base for the supergroup. + strong_gens + A strong generating set for the supergroup. + tests + A list of callables of length equal to the length of ``base``. + These are used to rule out group elements by partial base images, + so that ``tests[l](g)`` returns False if the element ``g`` is known + not to satisfy prop base on where g sends the first ``l + 1`` base + points. + init_subgroup + if a subgroup of the sought group is + known in advance, it can be passed to the function as this + parameter. + + Returns + ======= + + res + The subgroup of all elements satisfying ``prop``. The generating + set for this group is guaranteed to be a strong generating set + relative to the base ``base``. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... AlternatingGroup) + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> S = SymmetricGroup(7) + >>> prop_even = lambda x: x.is_even + >>> base, strong_gens = S.schreier_sims_incremental() + >>> G = S.subgroup_search(prop_even, base=base, strong_gens=strong_gens) + >>> G.is_subgroup(AlternatingGroup(7)) + True + >>> _verify_bsgs(G, base, G.generators) + True + + Notes + ===== + + This function is extremely lengthy and complicated and will require + some careful attention. The implementation is described in + [1], pp. 
114-117, and the comments for the code here follow the lines + of the pseudocode in the book for clarity. + + The complexity is exponential in general, since the search process by + itself visits all members of the supergroup. However, there are a lot + of tests which are used to prune the search tree, and users can define + their own tests via the ``tests`` parameter, so in practice, and for + some computations, it's not terrible. + + A crucial part of the procedure is the frequent base change performed + (this is line 11 in the pseudocode) in order to obtain a new basic + stabilizer. The book mentions that this can be done by using + ``.baseswap(...)``; however, the current implementation uses a more + straightforward way to find the next basic stabilizer - calling the + function ``.stabilizer(...)`` on the previous basic stabilizer. + + """ + # initialize BSGS and basic group properties + def get_reps(orbits): + # get the minimal element in the base ordering + return [min(orbit, key = lambda x: base_ordering[x]) \ + for orbit in orbits] + + def update_nu(l): + temp_index = len(basic_orbits[l]) + 1 -\ + len(res_basic_orbits_init_base[l]) + # this corresponds to the element larger than all points + if temp_index >= len(sorted_orbits[l]): + nu[l] = base_ordering[degree] + else: + nu[l] = sorted_orbits[l][temp_index] + + if base is None: + base, strong_gens = self.schreier_sims_incremental() + base_len = len(base) + degree = self.degree + identity = _af_new(list(range(degree))) + base_ordering = _base_ordering(base, degree) + # add an element larger than all points + base_ordering.append(degree) + # add an element smaller than all points + base_ordering.append(-1) + # compute BSGS-related structures + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + basic_orbits, transversals = _orbits_transversals_from_bsgs(base, + strong_gens_distr) + # handle subgroup initialization and tests + if init_subgroup is None: + init_subgroup = PermutationGroup([identity]) + if tests is None: + trivial_test = lambda x: True + tests = [] + for i in range(base_len): + tests.append(trivial_test) + # line 1: more initializations.
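+        # roughly: ``res`` starts as ``init_subgroup`` and elements found to
+        # satisfy ``prop`` are collected in ``res_generators``; ``l`` is the
+        # current depth of the search, ``f`` is the level at which the orbit
+        # representatives of the subgroup found so far are maintained,
+        # ``computed_words[l]`` is the product of the transversal elements
+        # chosen on levels 0..l, and ``mu[l]``/``nu[l]`` bound the image of
+        # ``base[l]`` in the minimality tests used for pruning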
+ res = init_subgroup + f = base_len - 1 + l = base_len - 1 + # line 2: set the base for K to the base for G + res_base = base[:] + # line 3: compute BSGS and related structures for K + res_base, res_strong_gens = res.schreier_sims_incremental( + base=res_base) + res_strong_gens_distr = _distribute_gens_by_base(res_base, + res_strong_gens) + res_generators = res.generators + res_basic_orbits_init_base = \ + [_orbit(degree, res_strong_gens_distr[i], res_base[i])\ + for i in range(base_len)] + # initialize orbit representatives + orbit_reps = [None]*base_len + # line 4: orbit representatives for f-th basic stabilizer of K + orbits = _orbits(degree, res_strong_gens_distr[f]) + orbit_reps[f] = get_reps(orbits) + # line 5: remove the base point from the representatives to avoid + # getting the identity element as a generator for K + orbit_reps[f].remove(base[f]) + # line 6: more initializations + c = [0]*base_len + u = [identity]*base_len + sorted_orbits = [None]*base_len + for i in range(base_len): + sorted_orbits[i] = basic_orbits[i][:] + sorted_orbits[i].sort(key=lambda point: base_ordering[point]) + # line 7: initializations + mu = [None]*base_len + nu = [None]*base_len + # this corresponds to the element smaller than all points + mu[l] = degree + 1 + update_nu(l) + # initialize computed words + computed_words = [identity]*base_len + # line 8: main loop + while True: + # apply all the tests + while l < base_len - 1 and \ + computed_words[l](base[l]) in orbit_reps[l] and \ + base_ordering[mu[l]] < \ + base_ordering[computed_words[l](base[l])] < \ + base_ordering[nu[l]] and \ + tests[l](computed_words): + # line 11: change the (partial) base of K + new_point = computed_words[l](base[l]) + res_base[l] = new_point + new_stab_gens = _stabilizer(degree, res_strong_gens_distr[l], + new_point) + res_strong_gens_distr[l + 1] = new_stab_gens + # line 12: calculate minimal orbit representatives for the + # l+1-th basic stabilizer + orbits = _orbits(degree, new_stab_gens) + orbit_reps[l + 1] = get_reps(orbits) + # line 13: amend sorted orbits + l += 1 + temp_orbit = [computed_words[l - 1](point) for point + in basic_orbits[l]] + temp_orbit.sort(key=lambda point: base_ordering[point]) + sorted_orbits[l] = temp_orbit + # lines 14 and 15: update variables used minimality tests + new_mu = degree + 1 + for i in range(l): + if base[l] in res_basic_orbits_init_base[i]: + candidate = computed_words[i](base[i]) + if base_ordering[candidate] > base_ordering[new_mu]: + new_mu = candidate + mu[l] = new_mu + update_nu(l) + # line 16: determine the new transversal element + c[l] = 0 + temp_point = sorted_orbits[l][c[l]] + gamma = computed_words[l - 1]._array_form.index(temp_point) + u[l] = transversals[l][gamma] + # update computed words + computed_words[l] = rmul(computed_words[l - 1], u[l]) + # lines 17 & 18: apply the tests to the group element found + g = computed_words[l] + temp_point = g(base[l]) + if l == base_len - 1 and \ + base_ordering[mu[l]] < \ + base_ordering[temp_point] < base_ordering[nu[l]] and \ + temp_point in orbit_reps[l] and \ + tests[l](computed_words) and \ + prop(g): + # line 19: reset the base of K + res_generators.append(g) + res_base = base[:] + # line 20: recalculate basic orbits (and transversals) + res_strong_gens.append(g) + res_strong_gens_distr = _distribute_gens_by_base(res_base, + res_strong_gens) + res_basic_orbits_init_base = \ + [_orbit(degree, res_strong_gens_distr[i], res_base[i]) \ + for i in range(base_len)] + # line 21: recalculate orbit representatives + # line 22: reset 
the search depth + orbit_reps[f] = get_reps(orbits) + l = f + # line 23: go up the tree until in the first branch not fully + # searched + while l >= 0 and c[l] == len(basic_orbits[l]) - 1: + l = l - 1 + # line 24: if the entire tree is traversed, return K + if l == -1: + return PermutationGroup(res_generators) + # lines 25-27: update orbit representatives + if l < f: + # line 26 + f = l + c[l] = 0 + # line 27 + temp_orbits = _orbits(degree, res_strong_gens_distr[f]) + orbit_reps[f] = get_reps(temp_orbits) + # line 28: update variables used for minimality testing + mu[l] = degree + 1 + temp_index = len(basic_orbits[l]) + 1 - \ + len(res_basic_orbits_init_base[l]) + if temp_index >= len(sorted_orbits[l]): + nu[l] = base_ordering[degree] + else: + nu[l] = sorted_orbits[l][temp_index] + # line 29: set the next element from the current branch and update + # accordingly + c[l] += 1 + if l == 0: + gamma = sorted_orbits[l][c[l]] + else: + gamma = computed_words[l - 1]._array_form.index(sorted_orbits[l][c[l]]) + + u[l] = transversals[l][gamma] + if l == 0: + computed_words[l] = u[l] + else: + computed_words[l] = rmul(computed_words[l - 1], u[l]) + + @property + def transitivity_degree(self): + r"""Compute the degree of transitivity of the group. + + Explanation + =========== + + A permutation group `G` acting on `\Omega = \{0, 1, \dots, n-1\}` is + ``k``-fold transitive, if, for any `k` points + `(a_1, a_2, \dots, a_k) \in \Omega` and any `k` points + `(b_1, b_2, \dots, b_k) \in \Omega` there exists `g \in G` such that + `g(a_1) = b_1, g(a_2) = b_2, \dots, g(a_k) = b_k` + The degree of transitivity of `G` is the maximum ``k`` such that + `G` is ``k``-fold transitive. ([8]) + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> a = Permutation([1, 2, 0]) + >>> b = Permutation([1, 0, 2]) + >>> G = PermutationGroup([a, b]) + >>> G.transitivity_degree + 3 + + See Also + ======== + + is_transitive, orbit + + """ + if self._transitivity_degree is None: + n = self.degree + G = self + # if G is k-transitive, a tuple (a_0,..,a_k) + # can be brought to (b_0,...,b_(k-1), b_k) + # where b_0,...,b_(k-1) are fixed points; + # consider the group G_k which stabilizes b_0,...,b_(k-1) + # if G_k is transitive on the subset excluding b_0,...,b_(k-1) + # then G is (k+1)-transitive + for i in range(n): + orb = G.orbit(i) + if len(orb) != n - i: + self._transitivity_degree = i + return i + G = G.stabilizer(i) + self._transitivity_degree = n + return n + else: + return self._transitivity_degree + + def _p_elements_group(self, p): + ''' + For an abelian p-group, return the subgroup consisting of + all elements of order p (and the identity) + + ''' + gens = self.generators[:] + gens = sorted(gens, key=lambda x: x.order(), reverse=True) + gens_p = [g**(g.order()/p) for g in gens] + gens_r = [] + for i in range(len(gens)): + x = gens[i] + x_order = x.order() + # x_p has order p + x_p = x**(x_order/p) + if i > 0: + P = PermutationGroup(gens_p[:i]) + else: + P = PermutationGroup(self.identity) + if x**(x_order/p) not in P: + gens_r.append(x**(x_order/p)) + else: + # replace x by an element of order (x.order()/p) + # so that gens still generates G + g = P.generator_product(x_p, original=True) + for s in g: + x = x*s**-1 + x_order = x_order/p + # insert x to gens so that the sorting is preserved + del gens[i] + del gens_p[i] + j = i - 1 + while j < len(gens) and gens[j].order() >= x_order: + j += 1 + gens = gens[:j] + [x] + gens[j:] + gens_p = gens_p[:j] + [x] + gens_p[j:] + return 
PermutationGroup(gens_r) + + def _sylow_alt_sym(self, p): + ''' + Return a p-Sylow subgroup of a symmetric or an + alternating group. + + Explanation + =========== + + The algorithm for this is hinted at in [1], Chapter 4, + Exercise 4. + + For Sym(n) with n = p^i, the idea is as follows. Partition + the interval [0..n-1] into p equal parts, each of length p^(i-1): + [0..p^(i-1)-1], [p^(i-1)..2*p^(i-1)-1]...[(p-1)*p^(i-1)..p^i-1]. + Find a p-Sylow subgroup of Sym(p^(i-1)) (treated as a subgroup + of ``self``) acting on each of the parts. Call the subgroups + P_1, P_2...P_p. The generators for the subgroups P_2...P_p + can be obtained from those of P_1 by applying a "shifting" + permutation to them, that is, a permutation mapping [0..p^(i-1)-1] + to the second part (the other parts are obtained by using the shift + multiple times). The union of this permutation and the generators + of P_1 is a p-Sylow subgroup of ``self``. + + For n not equal to a power of p, partition + [0..n-1] in accordance with how n would be written in base p. + E.g. for p=2 and n=11, 11 = 2^3 + 2^2 + 1 so the partition + is [[0..7], [8..9], {10}]. To generate a p-Sylow subgroup, + take the union of the generators for each of the parts. + For the above example, {(0 1), (0 2)(1 3), (0 4), (1 5)(2 7)} + from the first part, {(8 9)} from the second part and + nothing from the third. This gives 4 generators in total, and + the subgroup they generate is p-Sylow. + + Alternating groups are treated the same except when p=2. In this + case, (0 1)(s s+1) should be added for an appropriate s (the start + of a part) for each part in the partitions. + + See Also + ======== + + sylow_subgroup, is_alt_sym + + ''' + n = self.degree + gens = [] + identity = Permutation(n-1) + # the case of 2-sylow subgroups of alternating groups + # needs special treatment + alt = p == 2 and all(g.is_even for g in self.generators) + + # find the presentation of n in base p + coeffs = [] + m = n + while m > 0: + coeffs.append(m % p) + m = m // p + + power = len(coeffs)-1 + # for a symmetric group, gens[:i] is the generating + # set for a p-Sylow subgroup on [0..p**(i-1)-1]. For + # alternating groups, the same is given by gens[:2*(i-1)] + for i in range(1, power+1): + if i == 1 and alt: + # (0 1) shouldn't be added for alternating groups + continue + gen = Permutation([(j + p**(i-1)) % p**i for j in range(p**i)]) + gens.append(identity*gen) + if alt: + gen = Permutation(0, 1)*gen*Permutation(0, 1)*gen + gens.append(gen) + + # the first point in the current part (see the algorithm + # description in the docstring) + start = 0 + + while power > 0: + a = coeffs[power] + + # make the permutation shifting the start of the first + # part ([0..p^i-1] for some i) to the current one + for _ in range(a): + shift = Permutation() + if start > 0: + for i in range(p**power): + shift = shift(i, start + i) + + if alt: + gen = Permutation(0, 1)*shift*Permutation(0, 1)*shift + gens.append(gen) + j = 2*(power - 1) + else: + j = power + + for i, gen in enumerate(gens[:j]): + if alt and i % 2 == 1: + continue + # shift the generator to the start of the + # partition part + gen = shift*gen*shift + gens.append(gen) + + start += p**power + power = power-1 + + return gens + + def sylow_subgroup(self, p): + ''' + Return a p-Sylow subgroup of the group. 
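+
+        A p-Sylow subgroup is a subgroup whose order is the largest power of
+        p dividing the order of the group; ``p`` must be prime.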
+ + The algorithm is described in [1], Chapter 4, Section 7 + + Examples + ======== + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> from sympy.combinatorics.named_groups import SymmetricGroup + >>> from sympy.combinatorics.named_groups import AlternatingGroup + + >>> D = DihedralGroup(6) + >>> S = D.sylow_subgroup(2) + >>> S.order() + 4 + >>> G = SymmetricGroup(6) + >>> S = G.sylow_subgroup(5) + >>> S.order() + 5 + + >>> G1 = AlternatingGroup(3) + >>> G2 = AlternatingGroup(5) + >>> G3 = AlternatingGroup(9) + + >>> S1 = G1.sylow_subgroup(3) + >>> S2 = G2.sylow_subgroup(3) + >>> S3 = G3.sylow_subgroup(3) + + >>> len1 = len(S1.lower_central_series()) + >>> len2 = len(S2.lower_central_series()) + >>> len3 = len(S3.lower_central_series()) + + >>> len1 == len2 + True + >>> len1 < len3 + True + + ''' + from sympy.combinatorics.homomorphisms import ( + orbit_homomorphism, block_homomorphism) + + if not isprime(p): + raise ValueError("p must be a prime") + + def is_p_group(G): + # check if the order of G is a power of p + # and return the power + m = G.order() + n = 0 + while m % p == 0: + m = m/p + n += 1 + if m == 1: + return True, n + return False, n + + def _sylow_reduce(mu, nu): + # reduction based on two homomorphisms + # mu and nu with trivially intersecting + # kernels + Q = mu.image().sylow_subgroup(p) + Q = mu.invert_subgroup(Q) + nu = nu.restrict_to(Q) + R = nu.image().sylow_subgroup(p) + return nu.invert_subgroup(R) + + order = self.order() + if order % p != 0: + return PermutationGroup([self.identity]) + p_group, n = is_p_group(self) + if p_group: + return self + + if self.is_alt_sym(): + return PermutationGroup(self._sylow_alt_sym(p)) + + # if there is a non-trivial orbit with size not divisible + # by p, the sylow subgroup is contained in its stabilizer + # (by orbit-stabilizer theorem) + orbits = self.orbits() + non_p_orbits = [o for o in orbits if len(o) % p != 0 and len(o) != 1] + if non_p_orbits: + G = self.stabilizer(list(non_p_orbits[0]).pop()) + return G.sylow_subgroup(p) + + if not self.is_transitive(): + # apply _sylow_reduce to orbit actions + orbits = sorted(orbits, key=len) + omega1 = orbits.pop() + omega2 = orbits[0].union(*orbits) + mu = orbit_homomorphism(self, omega1) + nu = orbit_homomorphism(self, omega2) + return _sylow_reduce(mu, nu) + + blocks = self.minimal_blocks() + if len(blocks) > 1: + # apply _sylow_reduce to block system actions + mu = block_homomorphism(self, blocks[0]) + nu = block_homomorphism(self, blocks[1]) + return _sylow_reduce(mu, nu) + elif len(blocks) == 1: + block = list(blocks)[0] + if any(e != 0 for e in block): + # self is imprimitive + mu = block_homomorphism(self, block) + if not is_p_group(mu.image())[0]: + S = mu.image().sylow_subgroup(p) + return mu.invert_subgroup(S).sylow_subgroup(p) + + # find an element of order p + g = self.random() + g_order = g.order() + while g_order % p != 0 or g_order == 0: + g = self.random() + g_order = g.order() + g = g**(g_order // p) + if order % p**2 != 0: + return PermutationGroup(g) + + C = self.centralizer(g) + while C.order() % p**n != 0: + S = C.sylow_subgroup(p) + s_order = S.order() + Z = S.center() + P = Z._p_elements_group(p) + h = P.random() + C_h = self.centralizer(h) + while C_h.order() % p*s_order != 0: + h = P.random() + C_h = self.centralizer(h) + C = C_h + + return C.sylow_subgroup(p) + + def _block_verify(self, L, alpha): + delta = sorted(self.orbit(alpha)) + # p[i] will be the number of the block + # delta[i] belongs to + p = [-1]*len(delta) + blocks = 
[-1]*len(delta) + + B = [[]] # future list of blocks + u = [0]*len(delta) # u[i] in L s.t. alpha^u[i] = B[0][i] + + t = L.orbit_transversal(alpha, pairs=True) + for a, beta in t: + B[0].append(a) + i_a = delta.index(a) + p[i_a] = 0 + blocks[i_a] = alpha + u[i_a] = beta + + rho = 0 + m = 0 # number of blocks - 1 + + while rho <= m: + beta = B[rho][0] + for g in self.generators: + d = beta^g + i_d = delta.index(d) + sigma = p[i_d] + if sigma < 0: + # define a new block + m += 1 + sigma = m + u[i_d] = u[delta.index(beta)]*g + p[i_d] = sigma + rep = d + blocks[i_d] = rep + newb = [rep] + for gamma in B[rho][1:]: + i_gamma = delta.index(gamma) + d = gamma^g + i_d = delta.index(d) + if p[i_d] < 0: + u[i_d] = u[i_gamma]*g + p[i_d] = sigma + blocks[i_d] = rep + newb.append(d) + else: + # B[rho] is not a block + s = u[i_gamma]*g*u[i_d]**(-1) + return False, s + + B.append(newb) + else: + for h in B[rho][1:]: + if h^g not in B[sigma]: + # B[rho] is not a block + s = u[delta.index(beta)]*g*u[i_d]**(-1) + return False, s + rho += 1 + + return True, blocks + + def _verify(H, K, phi, z, alpha): + ''' + Return a list of relators ``rels`` in generators ``gens`_h` that + are mapped to ``H.generators`` by ``phi`` so that given a finite + presentation of ``K`` on a subset of ``gens_h`` + is a finite presentation of ``H``. + + Explanation + =========== + + ``H`` should be generated by the union of ``K.generators`` and ``z`` + (a single generator), and ``H.stabilizer(alpha) == K``; ``phi`` is a + canonical injection from a free group into a permutation group + containing ``H``. + + The algorithm is described in [1], Chapter 6. + + Examples + ======== + + >>> from sympy.combinatorics import free_group, Permutation, PermutationGroup + >>> from sympy.combinatorics.homomorphisms import homomorphism + >>> from sympy.combinatorics.fp_groups import FpGroup + + >>> H = PermutationGroup(Permutation(0, 2), Permutation (1, 5)) + >>> K = PermutationGroup(Permutation(5)(0, 2)) + >>> F = free_group("x_0 x_1")[0] + >>> gens = F.generators + >>> phi = homomorphism(F, H, F.generators, H.generators) + >>> rels_k = [gens[0]**2] # relators for presentation of K + >>> z= Permutation(1, 5) + >>> check, rels_h = H._verify(K, phi, z, 1) + >>> check + True + >>> rels = rels_k + rels_h + >>> G = FpGroup(F, rels) # presentation of H + >>> G.order() == H.order() + True + + See also + ======== + + strong_presentation, presentation, stabilizer + + ''' + + orbit = H.orbit(alpha) + beta = alpha^(z**-1) + + K_beta = K.stabilizer(beta) + + # orbit representatives of K_beta + gammas = [alpha, beta] + orbits = list({tuple(K_beta.orbit(o)) for o in orbit}) + orbit_reps = [orb[0] for orb in orbits] + for rep in orbit_reps: + if rep not in gammas: + gammas.append(rep) + + # orbit transversal of K + betas = [alpha, beta] + transversal = {alpha: phi.invert(H.identity), beta: phi.invert(z**-1)} + + for s, g in K.orbit_transversal(beta, pairs=True): + if s not in transversal: + transversal[s] = transversal[beta]*phi.invert(g) + + + union = K.orbit(alpha).union(K.orbit(beta)) + while (len(union) < len(orbit)): + for gamma in gammas: + if gamma in union: + r = gamma^z + if r not in union: + betas.append(r) + transversal[r] = transversal[gamma]*phi.invert(z) + for s, g in K.orbit_transversal(r, pairs=True): + if s not in transversal: + transversal[s] = transversal[r]*phi.invert(g) + union = union.union(K.orbit(r)) + break + + # compute relators + rels = [] + + for b in betas: + k_gens = K.stabilizer(b).generators + for y in k_gens: + new_rel = 
transversal[b] + gens = K.generator_product(y, original=True) + for g in gens[::-1]: + new_rel = new_rel*phi.invert(g) + new_rel = new_rel*transversal[b]**-1 + + perm = phi(new_rel) + try: + gens = K.generator_product(perm, original=True) + except ValueError: + return False, perm + for g in gens: + new_rel = new_rel*phi.invert(g)**-1 + if new_rel not in rels: + rels.append(new_rel) + + for gamma in gammas: + new_rel = transversal[gamma]*phi.invert(z)*transversal[gamma^z]**-1 + perm = phi(new_rel) + try: + gens = K.generator_product(perm, original=True) + except ValueError: + return False, perm + for g in gens: + new_rel = new_rel*phi.invert(g)**-1 + if new_rel not in rels: + rels.append(new_rel) + + return True, rels + + def strong_presentation(self): + ''' + Return a strong finite presentation of group. The generators + of the returned group are in the same order as the strong + generators of group. + + The algorithm is based on Sims' Verify algorithm described + in [1], Chapter 6. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> P = DihedralGroup(4) + >>> G = P.strong_presentation() + >>> P.order() == G.order() + True + + See Also + ======== + + presentation, _verify + + ''' + from sympy.combinatorics.fp_groups import (FpGroup, + simplify_presentation) + from sympy.combinatorics.free_groups import free_group + from sympy.combinatorics.homomorphisms import (block_homomorphism, + homomorphism, GroupHomomorphism) + + strong_gens = self.strong_gens[:] + stabs = self.basic_stabilizers[:] + base = self.base[:] + + # injection from a free group on len(strong_gens) + # generators into G + gen_syms = [('x_%d'%i) for i in range(len(strong_gens))] + F = free_group(', '.join(gen_syms))[0] + phi = homomorphism(F, self, F.generators, strong_gens) + + H = PermutationGroup(self.identity) + while stabs: + alpha = base.pop() + K = H + H = stabs.pop() + new_gens = [g for g in H.generators if g not in K] + + if K.order() == 1: + z = new_gens.pop() + rels = [F.generators[-1]**z.order()] + intermediate_gens = [z] + K = PermutationGroup(intermediate_gens) + + # add generators one at a time building up from K to H + while new_gens: + z = new_gens.pop() + intermediate_gens = [z] + intermediate_gens + K_s = PermutationGroup(intermediate_gens) + orbit = K_s.orbit(alpha) + orbit_k = K.orbit(alpha) + + # split into cases based on the orbit of K_s + if orbit_k == orbit: + if z in K: + rel = phi.invert(z) + perm = z + else: + t = K.orbit_rep(alpha, alpha^z) + rel = phi.invert(z)*phi.invert(t)**-1 + perm = z*t**-1 + for g in K.generator_product(perm, original=True): + rel = rel*phi.invert(g)**-1 + new_rels = [rel] + elif len(orbit_k) == 1: + # `success` is always true because `strong_gens` + # and `base` are already a verified BSGS. Later + # this could be changed to start with a randomly + # generated (potential) BSGS, and then new elements + # would have to be appended to it when `success` + # is false. + success, new_rels = K_s._verify(K, phi, z, alpha) + else: + # K.orbit(alpha) should be a block + # under the action of K_s on K_s.orbit(alpha) + check, block = K_s._block_verify(K, alpha) + if check: + # apply _verify to the action of K_s + # on the block system; for convenience, + # add the blocks as additional points + # that K_s should act on + t = block_homomorphism(K_s, block) + m = t.codomain.degree # number of blocks + d = K_s.degree + + # conjugating with p will shift + # permutations in t.image() to + # higher numbers, e.g. 
+ # p*(0 1)*p = (m m+1) + p = Permutation() + for i in range(m): + p *= Permutation(i, i+d) + + t_img = t.images + # combine generators of K_s with their + # action on the block system + images = {g: g*p*t_img[g]*p for g in t_img} + for g in self.strong_gens[:-len(K_s.generators)]: + images[g] = g + K_s_act = PermutationGroup(list(images.values())) + f = GroupHomomorphism(self, K_s_act, images) + + K_act = PermutationGroup([f(g) for g in K.generators]) + success, new_rels = K_s_act._verify(K_act, f.compose(phi), f(z), d) + + for n in new_rels: + if n not in rels: + rels.append(n) + K = K_s + + group = FpGroup(F, rels) + return simplify_presentation(group) + + def presentation(self, eliminate_gens=True): + ''' + Return an `FpGroup` presentation of the group. + + The algorithm is described in [1], Chapter 6.1. + + ''' + from sympy.combinatorics.fp_groups import (FpGroup, + simplify_presentation) + from sympy.combinatorics.coset_table import CosetTable + from sympy.combinatorics.free_groups import free_group + from sympy.combinatorics.homomorphisms import homomorphism + + if self._fp_presentation: + return self._fp_presentation + + def _factor_group_by_rels(G, rels): + if isinstance(G, FpGroup): + rels.extend(G.relators) + return FpGroup(G.free_group, list(set(rels))) + return FpGroup(G, rels) + + gens = self.generators + len_g = len(gens) + + if len_g == 1: + order = gens[0].order() + # handle the trivial group + if order == 1: + return free_group([])[0] + F, x = free_group('x') + return FpGroup(F, [x**order]) + + if self.order() > 20: + half_gens = self.generators[0:(len_g+1)//2] + else: + half_gens = [] + H = PermutationGroup(half_gens) + H_p = H.presentation() + + len_h = len(H_p.generators) + + C = self.coset_table(H) + n = len(C) # subgroup index + + gen_syms = [('x_%d'%i) for i in range(len(gens))] + F = free_group(', '.join(gen_syms))[0] + + # mapping generators of H_p to those of F + images = [F.generators[i] for i in range(len_h)] + R = homomorphism(H_p, F, H_p.generators, images, check=False) + + # rewrite relators + rels = R(H_p.relators) + G_p = FpGroup(F, rels) + + # injective homomorphism from G_p into self + T = homomorphism(G_p, self, G_p.generators, gens) + + C_p = CosetTable(G_p, []) + + C_p.table = [[None]*(2*len_g) for i in range(n)] + + # initiate the coset transversal + transversal = [None]*n + transversal[0] = G_p.identity + + # fill in the coset table as much as possible + for i in range(2*len_h): + C_p.table[0][i] = 0 + + gamma = 1 + for alpha, x in product(range(n), range(2*len_g)): + beta = C[alpha][x] + if beta == gamma: + gen = G_p.generators[x//2]**((-1)**(x % 2)) + transversal[beta] = transversal[alpha]*gen + C_p.table[alpha][x] = beta + C_p.table[beta][x + (-1)**(x % 2)] = alpha + gamma += 1 + if gamma == n: + break + + C_p.p = list(range(n)) + beta = x = 0 + + while not C_p.is_complete(): + # find the first undefined entry + while C_p.table[beta][x] == C[beta][x]: + x = (x + 1) % (2*len_g) + if x == 0: + beta = (beta + 1) % n + + # define a new relator + gen = G_p.generators[x//2]**((-1)**(x % 2)) + new_rel = transversal[beta]*gen*transversal[C[beta][x]]**-1 + perm = T(new_rel) + nxt = G_p.identity + for s in H.generator_product(perm, original=True): + nxt = nxt*T.invert(s)**-1 + new_rel = new_rel*nxt + + # continue coset enumeration + G_p = _factor_group_by_rels(G_p, [new_rel]) + C_p.scan_and_fill(0, new_rel) + C_p = G_p.coset_enumeration([], strategy="coset_table", + draft=C_p, max_cosets=n, incomplete=True) + + self._fp_presentation = 
simplify_presentation(G_p) + return self._fp_presentation + + def polycyclic_group(self): + """ + Return the PolycyclicGroup instance with below parameters: + + Explanation + =========== + + * pc_sequence : Polycyclic sequence is formed by collecting all + the missing generators between the adjacent groups in the + derived series of given permutation group. + + * pc_series : Polycyclic series is formed by adding all the missing + generators of ``der[i+1]`` in ``der[i]``, where ``der`` represents + the derived series. + + * relative_order : A list, computed by the ratio of adjacent groups in + pc_series. + + """ + from sympy.combinatorics.pc_groups import PolycyclicGroup + if not self.is_polycyclic: + raise ValueError("The group must be solvable") + + der = self.derived_series() + pc_series = [] + pc_sequence = [] + relative_order = [] + pc_series.append(der[-1]) + der.reverse() + + for i in range(len(der)-1): + H = der[i] + for g in der[i+1].generators: + if g not in H: + H = PermutationGroup([g] + H.generators) + pc_series.insert(0, H) + pc_sequence.insert(0, g) + + G1 = pc_series[0].order() + G2 = pc_series[1].order() + relative_order.insert(0, G1 // G2) + + return PolycyclicGroup(pc_sequence, pc_series, relative_order, collector=None) + + +def _orbit(degree, generators, alpha, action='tuples'): + r"""Compute the orbit of alpha `\{g(\alpha) | g \in G\}` as a set. + + Explanation + =========== + + The time complexity of the algorithm used here is `O(|Orb|*r)` where + `|Orb|` is the size of the orbit and ``r`` is the number of generators of + the group. For a more detailed analysis, see [1], p.78, [2], pp. 19-21. + Here alpha can be a single point, or a list of points. + + If alpha is a single point, the ordinary orbit is computed. + if alpha is a list of points, there are three available options: + + 'union' - computes the union of the orbits of the points in the list + 'tuples' - computes the orbit of the list interpreted as an ordered + tuple under the group action ( i.e., g((1, 2, 3)) = (g(1), g(2), g(3)) ) + 'sets' - computes the orbit of the list interpreted as a sets + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> from sympy.combinatorics.perm_groups import _orbit + >>> a = Permutation([1, 2, 0, 4, 5, 6, 3]) + >>> G = PermutationGroup([a]) + >>> _orbit(G.degree, G.generators, 0) + {0, 1, 2} + >>> _orbit(G.degree, G.generators, [0, 4], 'union') + {0, 1, 2, 3, 4, 5, 6} + + See Also + ======== + + orbit, orbit_transversal + + """ + if not hasattr(alpha, '__getitem__'): + alpha = [alpha] + + gens = [x._array_form for x in generators] + if len(alpha) == 1 or action == 'union': + orb = alpha + used = [False]*degree + for el in alpha: + used[el] = True + for b in orb: + for gen in gens: + temp = gen[b] + if used[temp] == False: + orb.append(temp) + used[temp] = True + return set(orb) + elif action == 'tuples': + alpha = tuple(alpha) + orb = [alpha] + used = {alpha} + for b in orb: + for gen in gens: + temp = tuple([gen[x] for x in b]) + if temp not in used: + orb.append(temp) + used.add(temp) + return set(orb) + elif action == 'sets': + alpha = frozenset(alpha) + orb = [alpha] + used = {alpha} + for b in orb: + for gen in gens: + temp = frozenset([gen[x] for x in b]) + if temp not in used: + orb.append(temp) + used.add(temp) + return {tuple(x) for x in orb} + + +def _orbits(degree, generators): + """Compute the orbits of G. 
+ + It returns a list of sets, one for each orbit of the group generated by ``generators``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation + >>> from sympy.combinatorics.perm_groups import _orbits + >>> a = Permutation([0, 2, 1]) + >>> b = Permutation([1, 0, 2]) + >>> _orbits(a.size, [a, b]) + [{0, 1, 2}] + """ + + orbs = [] + sorted_I = list(range(degree)) + I = set(sorted_I) + while I: + i = sorted_I[0] + orb = _orbit(degree, generators, i) + orbs.append(orb) + # remove all indices that are in this orbit + I -= orb + sorted_I = [i for i in sorted_I if i not in orb] + return orbs + + +def _orbit_transversal(degree, generators, alpha, pairs, af=False, slp=False): + r"""Computes a transversal for the orbit of ``alpha`` as a set. + + Explanation + =========== + + generators : generators of the group ``G`` + + For a permutation group ``G``, a transversal for the orbit + `Orb = \{g(\alpha) | g \in G\}` is a set + `\{g_\beta | g_\beta(\alpha) = \beta\}` for `\beta \in Orb`. + Note that there may be more than one possible transversal. + If ``pairs`` is set to ``True``, it returns the list of pairs + `(\beta, g_\beta)`. For a proof of correctness, see [1], p.79. + + If ``af`` is ``True``, the transversal elements are given in + array form. + + If `slp` is `True`, a dictionary `{beta: slp_beta}` is returned + for `\beta \in Orb` where `slp_beta` is a list of indices of the + generators in `generators` such that, if `slp_beta = [i_1 \dots i_n]`, + then `g_\beta = generators[i_n] \times \dots \times generators[i_1]`. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> from sympy.combinatorics.perm_groups import _orbit_transversal + >>> G = DihedralGroup(6) + >>> _orbit_transversal(G.degree, G.generators, 0, False) + [(5), (0 1 2 3 4 5), (0 5)(1 4)(2 3), (0 2 4)(1 3 5), (5)(0 4)(1 3), (0 3)(1 4)(2 5)] + """ + + tr = [(alpha, list(range(degree)))] + slp_dict = {alpha: []} + used = [False]*degree + used[alpha] = True + gens = [x._array_form for x in generators] + for x, px in tr: + px_slp = slp_dict[x] + for gen in gens: + temp = gen[x] + if used[temp] == False: + slp_dict[temp] = [gens.index(gen)] + px_slp + tr.append((temp, _af_rmul(gen, px))) + used[temp] = True + if pairs: + if not af: + tr = [(x, _af_new(y)) for x, y in tr] + if not slp: + return tr + return tr, slp_dict + + if af: + tr = [y for _, y in tr] + if not slp: + return tr + return tr, slp_dict + + tr = [_af_new(y) for _, y in tr] + if not slp: + return tr + return tr, slp_dict + + +def _stabilizer(degree, generators, alpha): + r"""Return the stabilizer subgroup of ``alpha``. + + Explanation + =========== + + The stabilizer of `\alpha` is the group `G_\alpha = + \{g \in G | g(\alpha) = \alpha\}`. + For a proof of correctness, see [1], p.79.
+ + degree : degree of G + generators : generators of G + + Examples + ======== + + >>> from sympy.combinatorics.perm_groups import _stabilizer + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> G = DihedralGroup(6) + >>> _stabilizer(G.degree, G.generators, 5) + [(5)(0 4)(1 3), (5)] + + See Also + ======== + + orbit + + """ + orb = [alpha] + table = {alpha: list(range(degree))} + table_inv = {alpha: list(range(degree))} + used = [False]*degree + used[alpha] = True + gens = [x._array_form for x in generators] + stab_gens = [] + for b in orb: + for gen in gens: + temp = gen[b] + if used[temp] is False: + gen_temp = _af_rmul(gen, table[b]) + orb.append(temp) + table[temp] = gen_temp + table_inv[temp] = _af_invert(gen_temp) + used[temp] = True + else: + schreier_gen = _af_rmuln(table_inv[temp], gen, table[b]) + if schreier_gen not in stab_gens: + stab_gens.append(schreier_gen) + return [_af_new(x) for x in stab_gens] + + +PermGroup = PermutationGroup + + +class SymmetricPermutationGroup(Basic): + """ + The class defining the lazy form of SymmetricGroup. + + deg : int + + """ + def __new__(cls, deg): + deg = _sympify(deg) + obj = Basic.__new__(cls, deg) + return obj + + def __init__(self, *args, **kwargs): + self._deg = self.args[0] + self._order = None + + def __contains__(self, i): + """Return ``True`` if *i* is contained in SymmetricPermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, SymmetricPermutationGroup + >>> G = SymmetricPermutationGroup(4) + >>> Permutation(1, 2, 3) in G + True + + """ + if not isinstance(i, Permutation): + raise TypeError("A SymmetricPermutationGroup contains only Permutations as " + "elements, not elements of type %s" % type(i)) + return i.size == self.degree + + def order(self): + """ + Return the order of the SymmetricPermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricPermutationGroup + >>> G = SymmetricPermutationGroup(4) + >>> G.order() + 24 + """ + if self._order is not None: + return self._order + n = self._deg + self._order = factorial(n) + return self._order + + @property + def degree(self): + """ + Return the degree of the SymmetricPermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricPermutationGroup + >>> G = SymmetricPermutationGroup(4) + >>> G.degree + 4 + + """ + return self._deg + + @property + def identity(self): + ''' + Return the identity element of the SymmetricPermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricPermutationGroup + >>> G = SymmetricPermutationGroup(4) + >>> G.identity() + (3) + + ''' + return _af_new(list(range(self._deg))) + + +class Coset(Basic): + """A left coset of a permutation group with respect to an element. + + Parameters + ========== + + g : Permutation + + H : PermutationGroup + + dir : "+" or "-", If not specified by default it will be "+" + here ``dir`` specified the type of coset "+" represent the + right coset and "-" represent the left coset. + + G : PermutationGroup, optional + The group which contains *H* as its subgroup and *g* as its + element. + + If not specified, it would automatically become a symmetric + group ``SymmetricPermutationGroup(g.size)`` and + ``SymmetricPermutationGroup(H.degree)`` if ``g.size`` and ``H.degree`` + are matching.``SymmetricPermutationGroup`` is a lazy form of SymmetricGroup + used for representation purpose. 
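+
+    Examples
+    ========
+
+    A minimal sketch (with permutations chosen only for illustration) of a
+    right coset ``Hg`` inside the ambient symmetric group:
+
+    >>> from sympy.combinatorics import Permutation, PermutationGroup, Coset
+    >>> H = PermutationGroup(Permutation(2)(0, 1))
+    >>> cst = Coset(Permutation(0, 1, 2), H) # dir defaults to "+"
+    >>> cst.is_right_coset
+    True
+    >>> len(cst.as_list()) == H.order()
+    True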
+ + """ + + def __new__(cls, g, H, G=None, dir="+"): + g = _sympify(g) + if not isinstance(g, Permutation): + raise NotImplementedError + + H = _sympify(H) + if not isinstance(H, PermutationGroup): + raise NotImplementedError + + if G is not None: + G = _sympify(G) + if not isinstance(G, (PermutationGroup, SymmetricPermutationGroup)): + raise NotImplementedError + if not H.is_subgroup(G): + raise ValueError("{} must be a subgroup of {}.".format(H, G)) + if g not in G: + raise ValueError("{} must be an element of {}.".format(g, G)) + else: + g_size = g.size + h_degree = H.degree + if g_size != h_degree: + raise ValueError( + "The size of the permutation {} and the degree of " + "the permutation group {} should be matching " + .format(g, H)) + G = SymmetricPermutationGroup(g.size) + + if isinstance(dir, str): + dir = Symbol(dir) + elif not isinstance(dir, Symbol): + raise TypeError("dir must be of type basestring or " + "Symbol, not %s" % type(dir)) + if str(dir) not in ('+', '-'): + raise ValueError("dir must be one of '+' or '-' not %s" % dir) + obj = Basic.__new__(cls, g, H, G, dir) + return obj + + def __init__(self, *args, **kwargs): + self._dir = self.args[3] + + @property + def is_left_coset(self): + """ + Check if the coset is left coset that is ``gH``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup, Coset + >>> a = Permutation(1, 2) + >>> b = Permutation(0, 1) + >>> G = PermutationGroup([a, b]) + >>> cst = Coset(a, G, dir="-") + >>> cst.is_left_coset + True + + """ + return str(self._dir) == '-' + + @property + def is_right_coset(self): + """ + Check if the coset is right coset that is ``Hg``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, PermutationGroup, Coset + >>> a = Permutation(1, 2) + >>> b = Permutation(0, 1) + >>> G = PermutationGroup([a, b]) + >>> cst = Coset(a, G, dir="+") + >>> cst.is_right_coset + True + + """ + return str(self._dir) == '+' + + def as_list(self): + """ + Return all the elements of coset in the form of list. + """ + g = self.args[0] + H = self.args[1] + cst = [] + if str(self._dir) == '+': + for h in H.elements: + cst.append(h*g) + else: + for h in H.elements: + cst.append(g*h) + return cst diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/polyhedron.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/polyhedron.py new file mode 100644 index 0000000000000000000000000000000000000000..a0433bdeafaeef738ef65d3b799ecbc2623b4f81 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/polyhedron.py @@ -0,0 +1,1019 @@ +from sympy.combinatorics import Permutation as Perm +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.core import Basic, Tuple, default_sort_key +from sympy.sets import FiniteSet +from sympy.utilities.iterables import (minlex, unflatten, flatten) +from sympy.utilities.misc import as_int + +rmul = Perm.rmul + + +class Polyhedron(Basic): + """ + Represents the polyhedral symmetry group (PSG). + + Explanation + =========== + + The PSG is one of the symmetry groups of the Platonic solids. + There are three polyhedral groups: the tetrahedral group + of order 12, the octahedral group of order 24, and the + icosahedral group of order 60. + + All doctests have been given in the docstring of the + constructor of the object. + + References + ========== + + .. 
[1] https://mathworld.wolfram.com/PolyhedralGroup.html + + """ + _edges = None + + def __new__(cls, corners, faces=(), pgroup=()): + """ + The constructor of the Polyhedron group object. + + Explanation + =========== + + It takes up to three parameters: the corners, faces, and + allowed transformations. + + The corners/vertices are entered as a list of arbitrary + expressions that are used to identify each vertex. + + The faces are entered as a list of tuples of indices; a tuple + of indices identifies the vertices which define the face. They + should be entered in a cw or ccw order; they will be standardized + by reversal and rotation to give the lowest lexical ordering. + If no faces are given then no edges will be computed. + + >>> from sympy.combinatorics.polyhedron import Polyhedron + >>> Polyhedron(list('abc'), [(1, 2, 0)]).faces + {(0, 1, 2)} + >>> Polyhedron(list('abc'), [(1, 0, 2)]).faces + {(0, 1, 2)} + + The allowed transformations are entered as allowable permutations + of the vertices for the polyhedron. Instances of Permutation + (as with faces) should refer to the supplied vertices by index. + These permutations are stored as a PermutationGroup. + + Examples + ======== + + >>> from sympy.combinatorics.permutations import Permutation + >>> from sympy import init_printing + >>> from sympy.abc import w, x, y, z + >>> init_printing(pretty_print=False, perm_cyclic=False) + + Here we construct the Polyhedron object for a tetrahedron. + + >>> corners = [w, x, y, z] + >>> faces = [(0, 1, 2), (0, 2, 3), (0, 3, 1), (1, 2, 3)] + + Next, allowed transformations of the polyhedron must be given. These + are given as permutations of the vertices. + + Although the vertices of a tetrahedron can be numbered in 24 (4!) + different ways, there are only 12 different orientations for a + physical tetrahedron. The following permutations, applied once or + twice, will generate all 12 of the orientations. (The identity + permutation, Permutation(range(4)), is not included since it does + not change the orientation of the vertices.) + + >>> pgroup = [Permutation([[0, 1, 2], [3]]), \ + Permutation([[0, 1, 3], [2]]), \ + Permutation([[0, 2, 3], [1]]), \ + Permutation([[1, 2, 3], [0]]), \ + Permutation([[0, 1], [2, 3]]), \ + Permutation([[0, 2], [1, 3]]), \ + Permutation([[0, 3], [1, 2]])] + + The Polyhedron is now constructed and demonstrated: + + >>> tetra = Polyhedron(corners, faces, pgroup) + >>> tetra.size + 4 + >>> tetra.edges + {(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)} + >>> tetra.corners + (w, x, y, z) + + It can be rotated with an arbitrary permutation of vertices, e.g. + the following permutation is not in the pgroup: + + >>> tetra.rotate(Permutation([0, 1, 3, 2])) + >>> tetra.corners + (w, x, z, y) + + An allowed permutation of the vertices can be constructed by + repeatedly applying permutations from the pgroup to the vertices. + Here is a demonstration that applying p and p**2 for every p in + pgroup generates all the orientations of a tetrahedron and no others: + + >>> all = ( (w, x, y, z), \ + (x, y, w, z), \ + (y, w, x, z), \ + (w, z, x, y), \ + (z, w, y, x), \ + (w, y, z, x), \ + (y, z, w, x), \ + (x, z, y, w), \ + (z, y, x, w), \ + (y, x, z, w), \ + (x, w, z, y), \ + (z, x, w, y) ) + + >>> got = [] + >>> for p in (pgroup + [p**2 for p in pgroup]): + ... h = Polyhedron(corners) + ... h.rotate(p) + ... got.append(h.corners) + ... 
+ >>> set(got) == set(all) + True + + The make_perm method of a PermutationGroup will randomly pick + permutations, multiply them together, and return the permutation that + can be applied to the polyhedron to give the orientation produced + by those individual permutations. + + Here, 3 permutations are used: + + >>> tetra.pgroup.make_perm(3) # doctest: +SKIP + Permutation([0, 3, 1, 2]) + + To select the permutations that should be used, supply a list + of indices to the permutations in pgroup in the order they should + be applied: + + >>> use = [0, 0, 2] + >>> p002 = tetra.pgroup.make_perm(3, use) + >>> p002 + Permutation([1, 0, 3, 2]) + + + Apply them one at a time: + + >>> tetra.reset() + >>> for i in use: + ... tetra.rotate(pgroup[i]) + ... + >>> tetra.vertices + (x, w, z, y) + >>> sequentially = tetra.vertices + + Apply the composite permutation: + + >>> tetra.reset() + >>> tetra.rotate(p002) + >>> tetra.corners + (x, w, z, y) + >>> tetra.corners in all and tetra.corners == sequentially + True + + Notes + ===== + + Defining permutation groups + --------------------------- + + It is not necessary to enter any permutations, nor is necessary to + enter a complete set of transformations. In fact, for a polyhedron, + all configurations can be constructed from just two permutations. + For example, the orientations of a tetrahedron can be generated from + an axis passing through a vertex and face and another axis passing + through a different vertex or from an axis passing through the + midpoints of two edges opposite of each other. + + For simplicity of presentation, consider a square -- + not a cube -- with vertices 1, 2, 3, and 4: + + 1-----2 We could think of axes of rotation being: + | | 1) through the face + | | 2) from midpoint 1-2 to 3-4 or 1-3 to 2-4 + 3-----4 3) lines 1-4 or 2-3 + + + To determine how to write the permutations, imagine 4 cameras, + one at each corner, labeled A-D: + + A B A B + 1-----2 1-----3 vertex index: + | | | | 1 0 + | | | | 2 1 + 3-----4 2-----4 3 2 + C D C D 4 3 + + original after rotation + along 1-4 + + A diagonal and a face axis will be chosen for the "permutation group" + from which any orientation can be constructed. + + >>> pgroup = [] + + Imagine a clockwise rotation when viewing 1-4 from camera A. The new + orientation is (in camera-order): 1, 3, 2, 4 so the permutation is + given using the *indices* of the vertices as: + + >>> pgroup.append(Permutation((0, 2, 1, 3))) + + Now imagine rotating clockwise when looking down an axis entering the + center of the square as viewed. The new camera-order would be + 3, 1, 4, 2 so the permutation is (using indices): + + >>> pgroup.append(Permutation((2, 0, 3, 1))) + + The square can now be constructed: + ** use real-world labels for the vertices, entering them in + camera order + ** for the faces we use zero-based indices of the vertices + in *edge-order* as the face is traversed; neither the + direction nor the starting point matter -- the faces are + only used to define edges (if so desired). 
+ + >>> square = Polyhedron((1, 2, 3, 4), [(0, 1, 3, 2)], pgroup) + + To rotate the square with a single permutation we can do: + + >>> square.rotate(square.pgroup[0]) + >>> square.corners + (1, 3, 2, 4) + + To use more than one permutation (or to use one permutation more + than once) it is more convenient to use the make_perm method: + + >>> p011 = square.pgroup.make_perm([0, 1, 1]) # diag flip + 2 rotations + >>> square.reset() # return to initial orientation + >>> square.rotate(p011) + >>> square.corners + (4, 2, 3, 1) + + Thinking outside the box + ------------------------ + + Although the Polyhedron object has a direct physical meaning, it + actually has broader application. In the most general sense it is + just a decorated PermutationGroup, allowing one to connect the + permutations to something physical. For example, a Rubik's cube is + not a proper polyhedron, but the Polyhedron class can be used to + represent it in a way that helps to visualize the Rubik's cube. + + >>> from sympy import flatten, unflatten, symbols + >>> from sympy.combinatorics import RubikGroup + >>> facelets = flatten([symbols(s+'1:5') for s in 'UFRBLD']) + >>> def show(): + ... pairs = unflatten(r2.corners, 2) + ... print(pairs[::2]) + ... print(pairs[1::2]) + ... + >>> r2 = Polyhedron(facelets, pgroup=RubikGroup(2)) + >>> show() + [(U1, U2), (F1, F2), (R1, R2), (B1, B2), (L1, L2), (D1, D2)] + [(U3, U4), (F3, F4), (R3, R4), (B3, B4), (L3, L4), (D3, D4)] + >>> r2.rotate(0) # cw rotation of F + >>> show() + [(U1, U2), (F3, F1), (U3, R2), (B1, B2), (L1, D1), (R3, R1)] + [(L4, L2), (F4, F2), (U4, R4), (B3, B4), (L3, D2), (D3, D4)] + + Predefined Polyhedra + ==================== + + For convenience, the vertices and faces are defined for the following + standard solids along with a permutation group for transformations. + When the polyhedron is oriented as indicated below, the vertices in + a given horizontal plane are numbered in ccw direction, starting from + the vertex that will give the lowest indices in a given face. (In the + net of the vertices, indices preceded by "-" indicate replication of + the lhs index in the net.) 
+ + tetrahedron, tetrahedron_faces + ------------------------------ + + 4 vertices (vertex up) net: + + 0 0-0 + 1 2 3-1 + + 4 faces: + + (0, 1, 2) (0, 2, 3) (0, 3, 1) (1, 2, 3) + + cube, cube_faces + ---------------- + + 8 vertices (face up) net: + + 0 1 2 3-0 + 4 5 6 7-4 + + 6 faces: + + (0, 1, 2, 3) + (0, 1, 5, 4) (1, 2, 6, 5) (2, 3, 7, 6) (0, 3, 7, 4) + (4, 5, 6, 7) + + octahedron, octahedron_faces + ---------------------------- + + 6 vertices (vertex up) net: + + 0 0 0-0 + 1 2 3 4-1 + 5 5 5-5 + + 8 faces: + + (0, 1, 2) (0, 2, 3) (0, 3, 4) (0, 1, 4) + (1, 2, 5) (2, 3, 5) (3, 4, 5) (1, 4, 5) + + dodecahedron, dodecahedron_faces + -------------------------------- + + 20 vertices (vertex up) net: + + 0 1 2 3 4 -0 + 5 6 7 8 9 -5 + 14 10 11 12 13-14 + 15 16 17 18 19-15 + + 12 faces: + + (0, 1, 2, 3, 4) (0, 1, 6, 10, 5) (1, 2, 7, 11, 6) + (2, 3, 8, 12, 7) (3, 4, 9, 13, 8) (0, 4, 9, 14, 5) + (5, 10, 16, 15, 14) (6, 10, 16, 17, 11) (7, 11, 17, 18, 12) + (8, 12, 18, 19, 13) (9, 13, 19, 15, 14)(15, 16, 17, 18, 19) + + icosahedron, icosahedron_faces + ------------------------------ + + 12 vertices (face up) net: + + 0 0 0 0 -0 + 1 2 3 4 5 -1 + 6 7 8 9 10 -6 + 11 11 11 11 -11 + + 20 faces: + + (0, 1, 2) (0, 2, 3) (0, 3, 4) + (0, 4, 5) (0, 1, 5) (1, 2, 6) + (2, 3, 7) (3, 4, 8) (4, 5, 9) + (1, 5, 10) (2, 6, 7) (3, 7, 8) + (4, 8, 9) (5, 9, 10) (1, 6, 10) + (6, 7, 11) (7, 8, 11) (8, 9, 11) + (9, 10, 11) (6, 10, 11) + + >>> from sympy.combinatorics.polyhedron import cube + >>> cube.edges + {(0, 1), (0, 3), (0, 4), (1, 2), (1, 5), (2, 3), (2, 6), (3, 7), (4, 5), (4, 7), (5, 6), (6, 7)} + + If you want to use letters or other names for the corners you + can still use the pre-calculated faces: + + >>> corners = list('abcdefgh') + >>> Polyhedron(corners, cube.faces).corners + (a, b, c, d, e, f, g, h) + + References + ========== + + .. [1] www.ocf.berkeley.edu/~wwu/articles/platonicsolids.pdf + + """ + faces = [minlex(f, directed=False, key=default_sort_key) for f in faces] + corners, faces, pgroup = args = \ + [Tuple(*a) for a in (corners, faces, pgroup)] + obj = Basic.__new__(cls, *args) + obj._corners = tuple(corners) # in order given + obj._faces = FiniteSet(*faces) + if pgroup and pgroup[0].size != len(corners): + raise ValueError("Permutation size unequal to number of corners.") + # use the identity permutation if none are given + obj._pgroup = PermutationGroup( + pgroup or [Perm(range(len(corners)))] ) + return obj + + @property + def corners(self): + """ + Get the corners of the Polyhedron. + + The method ``vertices`` is an alias for ``corners``. + + Examples + ======== + + >>> from sympy.combinatorics import Polyhedron + >>> from sympy.abc import a, b, c, d + >>> p = Polyhedron(list('abcd')) + >>> p.corners == p.vertices == (a, b, c, d) + True + + See Also + ======== + + array_form, cyclic_form + """ + return self._corners + vertices = corners + + @property + def array_form(self): + """Return the indices of the corners. + + The indices are given relative to the original position of corners. 
+ + Examples + ======== + + >>> from sympy.combinatorics.polyhedron import tetrahedron + >>> tetrahedron = tetrahedron.copy() + >>> tetrahedron.array_form + [0, 1, 2, 3] + + >>> tetrahedron.rotate(0) + >>> tetrahedron.array_form + [0, 2, 3, 1] + >>> tetrahedron.pgroup[0].array_form + [0, 2, 3, 1] + + See Also + ======== + + corners, cyclic_form + """ + corners = list(self.args[0]) + return [corners.index(c) for c in self.corners] + + @property + def cyclic_form(self): + """Return the indices of the corners in cyclic notation. + + The indices are given relative to the original position of corners. + + See Also + ======== + + corners, array_form + """ + return Perm._af_new(self.array_form).cyclic_form + + @property + def size(self): + """ + Get the number of corners of the Polyhedron. + """ + return len(self._corners) + + @property + def faces(self): + """ + Get the faces of the Polyhedron. + """ + return self._faces + + @property + def pgroup(self): + """ + Get the permutations of the Polyhedron. + """ + return self._pgroup + + @property + def edges(self): + """ + Given the faces of the polyhedra we can get the edges. + + Examples + ======== + + >>> from sympy.combinatorics import Polyhedron + >>> from sympy.abc import a, b, c + >>> corners = (a, b, c) + >>> faces = [(0, 1, 2)] + >>> Polyhedron(corners, faces).edges + {(0, 1), (0, 2), (1, 2)} + + """ + if self._edges is None: + output = set() + for face in self.faces: + for i in range(len(face)): + edge = tuple(sorted([face[i], face[i - 1]])) + output.add(edge) + self._edges = FiniteSet(*output) + return self._edges + + def rotate(self, perm): + """ + Apply a permutation to the polyhedron *in place*. The permutation + may be given as a Permutation instance or an integer indicating + which permutation from pgroup of the Polyhedron should be + applied. + + This is an operation that is analogous to rotation about + an axis by a fixed increment. + + Notes + ===== + + When a Permutation is applied, no check is done to see if that + is a valid permutation for the Polyhedron. For example, a cube + could be given a permutation which effectively swaps only 2 + vertices. A valid permutation (that rotates the object in a + physical way) will be obtained if one only uses + permutations from the ``pgroup`` of the Polyhedron. On the other + hand, allowing arbitrary rotations (applications of permutations) + gives a way to follow named elements rather than indices since + Polyhedron allows vertices to be named while Permutation works + only with indices. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Polyhedron, Permutation + >>> from sympy.combinatorics.polyhedron import cube + >>> cube = cube.copy() + >>> cube.corners + (0, 1, 2, 3, 4, 5, 6, 7) + >>> cube.rotate(0) + >>> cube.corners + (1, 2, 3, 0, 5, 6, 7, 4) + + A non-physical "rotation" that is not prohibited by this method: + + >>> cube.reset() + >>> cube.rotate(Permutation([[1, 2]], size=8)) + >>> cube.corners + (0, 2, 1, 3, 4, 5, 6, 7) + + Polyhedron can be used to follow elements of set that are + identified by letters instead of integers: + + >>> shadow = h5 = Polyhedron(list('abcde')) + >>> p = Permutation([3, 0, 1, 2, 4]) + >>> h5.rotate(p) + >>> h5.corners + (d, a, b, c, e) + >>> _ == shadow.corners + True + >>> copy = h5.copy() + >>> h5.rotate(p) + >>> h5.corners == copy.corners + False + """ + if not isinstance(perm, Perm): + perm = self.pgroup[perm] + # and we know it's valid + else: + if perm.size != self.size: + raise ValueError('Polyhedron and Permutation sizes differ.') + a = perm.array_form + corners = [self.corners[a[i]] for i in range(len(self.corners))] + self._corners = tuple(corners) + + def reset(self): + """Return corners to their original positions. + + Examples + ======== + + >>> from sympy.combinatorics.polyhedron import tetrahedron as T + >>> T = T.copy() + >>> T.corners + (0, 1, 2, 3) + >>> T.rotate(0) + >>> T.corners + (0, 2, 3, 1) + >>> T.reset() + >>> T.corners + (0, 1, 2, 3) + """ + self._corners = self.args[0] + + +def _pgroup_calcs(): + """Return the permutation groups for each of the polyhedra and the face + definitions: tetrahedron, cube, octahedron, dodecahedron, icosahedron, + tetrahedron_faces, cube_faces, octahedron_faces, dodecahedron_faces, + icosahedron_faces + + Explanation + =========== + + (This author did not find and did not know of a better way to do it though + there likely is such a way.) + + Although only 2 permutations are needed for a polyhedron in order to + generate all the possible orientations, a group of permutations is + provided instead. A set of permutations is called a "group" if:: + + a*b = c (for any pair of permutations in the group, a and b, their + product, c, is in the group) + + a*(b*c) = (a*b)*c (for any 3 permutations in the group associativity holds) + + there is an identity permutation, I, such that I*a = a*I for all elements + in the group + + a*b = I (the inverse of each permutation is also in the group) + + None of the polyhedron groups defined follow these definitions of a group. + Instead, they are selected to contain those permutations whose powers + alone will construct all orientations of the polyhedron, i.e. for + permutations ``a``, ``b``, etc... in the group, ``a, a**2, ..., a**o_a``, + ``b, b**2, ..., b**o_b``, etc... (where ``o_i`` is the order of + permutation ``i``) generate all permutations of the polyhedron instead of + mixed products like ``a*b``, ``a*b**2``, etc.... + + Note that for a polyhedron with n vertices, the valid permutations of the + vertices exclude those that do not maintain its faces. e.g. the + permutation BCDE of a square's four corners, ABCD, is a valid + permutation while CBDE is not (because this would twist the square). + + Examples + ======== + + The is_group checks for: closure, the presence of the Identity permutation, + and the presence of the inverse for each of the elements in the group. This + confirms that none of the polyhedra are true groups: + + >>> from sympy.combinatorics.polyhedron import ( + ... 
tetrahedron, cube, octahedron, dodecahedron, icosahedron) + ... + >>> polyhedra = (tetrahedron, cube, octahedron, dodecahedron, icosahedron) + >>> [h.pgroup.is_group for h in polyhedra] + ... + [True, True, True, True, True] + + Although tests in polyhedron's test suite check that powers of the + permutations in the groups generate all permutations of the vertices + of the polyhedron, here we also demonstrate the powers of the given + permutations create a complete group for the tetrahedron: + + >>> from sympy.combinatorics import Permutation, PermutationGroup + >>> for h in polyhedra[:1]: + ... G = h.pgroup + ... perms = set() + ... for g in G: + ... for e in range(g.order()): + ... p = tuple((g**e).array_form) + ... perms.add(p) + ... + ... perms = [Permutation(p) for p in perms] + ... assert PermutationGroup(perms).is_group + + In addition to doing the above, the tests in the suite confirm that the + faces are all present after the application of each permutation. + + References + ========== + + .. [1] https://dogschool.tripod.com/trianglegroup.html + + """ + def _pgroup_of_double(polyh, ordered_faces, pgroup): + n = len(ordered_faces[0]) + # the vertices of the double which sits inside a give polyhedron + # can be found by tracking the faces of the outer polyhedron. + # A map between face and the vertex of the double is made so that + # after rotation the position of the vertices can be located + fmap = dict(zip(ordered_faces, + range(len(ordered_faces)))) + flat_faces = flatten(ordered_faces) + new_pgroup = [] + for i, p in enumerate(pgroup): + h = polyh.copy() + h.rotate(p) + c = h.corners + # reorder corners in the order they should appear when + # enumerating the faces + reorder = unflatten([c[j] for j in flat_faces], n) + # make them canonical + reorder = [tuple(map(as_int, + minlex(f, directed=False))) + for f in reorder] + # map face to vertex: the resulting list of vertices are the + # permutation that we seek for the double + new_pgroup.append(Perm([fmap[f] for f in reorder])) + return new_pgroup + + tetrahedron_faces = [ + (0, 1, 2), (0, 2, 3), (0, 3, 1), # upper 3 + (1, 2, 3), # bottom + ] + + # cw from top + # + _t_pgroup = [ + Perm([[1, 2, 3], [0]]), # cw from top + Perm([[0, 1, 2], [3]]), # cw from front face + Perm([[0, 3, 2], [1]]), # cw from back right face + Perm([[0, 3, 1], [2]]), # cw from back left face + Perm([[0, 1], [2, 3]]), # through front left edge + Perm([[0, 2], [1, 3]]), # through front right edge + Perm([[0, 3], [1, 2]]), # through back edge + ] + + tetrahedron = Polyhedron( + range(4), + tetrahedron_faces, + _t_pgroup) + + cube_faces = [ + (0, 1, 2, 3), # upper + (0, 1, 5, 4), (1, 2, 6, 5), (2, 3, 7, 6), (0, 3, 7, 4), # middle 4 + (4, 5, 6, 7), # lower + ] + + # U, D, F, B, L, R = up, down, front, back, left, right + _c_pgroup = [Perm(p) for p in + [ + [1, 2, 3, 0, 5, 6, 7, 4], # cw from top, U + [4, 0, 3, 7, 5, 1, 2, 6], # cw from F face + [4, 5, 1, 0, 7, 6, 2, 3], # cw from R face + + [1, 0, 4, 5, 2, 3, 7, 6], # cw through UF edge + [6, 2, 1, 5, 7, 3, 0, 4], # cw through UR edge + [6, 7, 3, 2, 5, 4, 0, 1], # cw through UB edge + [3, 7, 4, 0, 2, 6, 5, 1], # cw through UL edge + [4, 7, 6, 5, 0, 3, 2, 1], # cw through FL edge + [6, 5, 4, 7, 2, 1, 0, 3], # cw through FR edge + + [0, 3, 7, 4, 1, 2, 6, 5], # cw through UFL vertex + [5, 1, 0, 4, 6, 2, 3, 7], # cw through UFR vertex + [5, 6, 2, 1, 4, 7, 3, 0], # cw through UBR vertex + [7, 4, 0, 3, 6, 5, 1, 2], # cw through UBL + ]] + + cube = Polyhedron( + range(8), + cube_faces, + _c_pgroup) + + 
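+
+    # The octahedron below is constructed as the dual of the cube: each of
+    # the cube's six faces corresponds to one vertex of the octahedron, so
+    # _pgroup_of_double() (defined above) turns every cube permutation into
+    # the permutation it induces on the cube's faces, i.e. a vertex
+    # permutation of the octahedron.  The same construction is reused
+    # further down to obtain the icosahedron from the dodecahedron.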
octahedron_faces = [ + (0, 1, 2), (0, 2, 3), (0, 3, 4), (0, 1, 4), # top 4 + (1, 2, 5), (2, 3, 5), (3, 4, 5), (1, 4, 5), # bottom 4 + ] + + octahedron = Polyhedron( + range(6), + octahedron_faces, + _pgroup_of_double(cube, cube_faces, _c_pgroup)) + + dodecahedron_faces = [ + (0, 1, 2, 3, 4), # top + (0, 1, 6, 10, 5), (1, 2, 7, 11, 6), (2, 3, 8, 12, 7), # upper 5 + (3, 4, 9, 13, 8), (0, 4, 9, 14, 5), + (5, 10, 16, 15, 14), (6, 10, 16, 17, 11), (7, 11, 17, 18, + 12), # lower 5 + (8, 12, 18, 19, 13), (9, 13, 19, 15, 14), + (15, 16, 17, 18, 19) # bottom + ] + + def _string_to_perm(s): + rv = [Perm(range(20))] + p = None + for si in s: + if si not in '01': + count = int(si) - 1 + else: + count = 1 + if si == '0': + p = _f0 + elif si == '1': + p = _f1 + rv.extend([p]*count) + return Perm.rmul(*rv) + + # top face cw + _f0 = Perm([ + 1, 2, 3, 4, 0, 6, 7, 8, 9, 5, 11, + 12, 13, 14, 10, 16, 17, 18, 19, 15]) + # front face cw + _f1 = Perm([ + 5, 0, 4, 9, 14, 10, 1, 3, 13, 15, + 6, 2, 8, 19, 16, 17, 11, 7, 12, 18]) + # the strings below, like 0104 are shorthand for F0*F1*F0**4 and are + # the remaining 4 face rotations, 15 edge permutations, and the + # 10 vertex rotations. + _dodeca_pgroup = [_f0, _f1] + [_string_to_perm(s) for s in ''' + 0104 140 014 0410 + 010 1403 03104 04103 102 + 120 1304 01303 021302 03130 + 0412041 041204103 04120410 041204104 041204102 + 10 01 1402 0140 04102 0412 1204 1302 0130 03120'''.strip().split()] + + dodecahedron = Polyhedron( + range(20), + dodecahedron_faces, + _dodeca_pgroup) + + icosahedron_faces = [ + (0, 1, 2), (0, 2, 3), (0, 3, 4), (0, 4, 5), (0, 1, 5), + (1, 6, 7), (1, 2, 7), (2, 7, 8), (2, 3, 8), (3, 8, 9), + (3, 4, 9), (4, 9, 10), (4, 5, 10), (5, 6, 10), (1, 5, 6), + (6, 7, 11), (7, 8, 11), (8, 9, 11), (9, 10, 11), (6, 10, 11)] + + icosahedron = Polyhedron( + range(12), + icosahedron_faces, + _pgroup_of_double( + dodecahedron, dodecahedron_faces, _dodeca_pgroup)) + + return (tetrahedron, cube, octahedron, dodecahedron, icosahedron, + tetrahedron_faces, cube_faces, octahedron_faces, + dodecahedron_faces, icosahedron_faces) + +# ----------------------------------------------------------------------- +# Standard Polyhedron groups +# +# These are generated using _pgroup_calcs() above. However to save +# import time we encode them explicitly here. 
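+#
+# A consistency check can be sketched as follows (illustrative only and not
+# executed at import time; it assumes the module has been fully imported):
+#
+#     from sympy.combinatorics.polyhedron import _pgroup_calcs, tetrahedron
+#     recomputed = _pgroup_calcs()[0]
+#     assert recomputed.faces == tetrahedron.faces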
+# ----------------------------------------------------------------------- + +tetrahedron = Polyhedron( + Tuple(0, 1, 2, 3), + Tuple( + Tuple(0, 1, 2), + Tuple(0, 2, 3), + Tuple(0, 1, 3), + Tuple(1, 2, 3)), + Tuple( + Perm(1, 2, 3), + Perm(3)(0, 1, 2), + Perm(0, 3, 2), + Perm(0, 3, 1), + Perm(0, 1)(2, 3), + Perm(0, 2)(1, 3), + Perm(0, 3)(1, 2) + )) + +cube = Polyhedron( + Tuple(0, 1, 2, 3, 4, 5, 6, 7), + Tuple( + Tuple(0, 1, 2, 3), + Tuple(0, 1, 5, 4), + Tuple(1, 2, 6, 5), + Tuple(2, 3, 7, 6), + Tuple(0, 3, 7, 4), + Tuple(4, 5, 6, 7)), + Tuple( + Perm(0, 1, 2, 3)(4, 5, 6, 7), + Perm(0, 4, 5, 1)(2, 3, 7, 6), + Perm(0, 4, 7, 3)(1, 5, 6, 2), + Perm(0, 1)(2, 4)(3, 5)(6, 7), + Perm(0, 6)(1, 2)(3, 5)(4, 7), + Perm(0, 6)(1, 7)(2, 3)(4, 5), + Perm(0, 3)(1, 7)(2, 4)(5, 6), + Perm(0, 4)(1, 7)(2, 6)(3, 5), + Perm(0, 6)(1, 5)(2, 4)(3, 7), + Perm(1, 3, 4)(2, 7, 5), + Perm(7)(0, 5, 2)(3, 4, 6), + Perm(0, 5, 7)(1, 6, 3), + Perm(0, 7, 2)(1, 4, 6))) + +octahedron = Polyhedron( + Tuple(0, 1, 2, 3, 4, 5), + Tuple( + Tuple(0, 1, 2), + Tuple(0, 2, 3), + Tuple(0, 3, 4), + Tuple(0, 1, 4), + Tuple(1, 2, 5), + Tuple(2, 3, 5), + Tuple(3, 4, 5), + Tuple(1, 4, 5)), + Tuple( + Perm(5)(1, 2, 3, 4), + Perm(0, 4, 5, 2), + Perm(0, 1, 5, 3), + Perm(0, 1)(2, 4)(3, 5), + Perm(0, 2)(1, 3)(4, 5), + Perm(0, 3)(1, 5)(2, 4), + Perm(0, 4)(1, 3)(2, 5), + Perm(0, 5)(1, 4)(2, 3), + Perm(0, 5)(1, 2)(3, 4), + Perm(0, 4, 1)(2, 3, 5), + Perm(0, 1, 2)(3, 4, 5), + Perm(0, 2, 3)(1, 5, 4), + Perm(0, 4, 3)(1, 5, 2))) + +dodecahedron = Polyhedron( + Tuple(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19), + Tuple( + Tuple(0, 1, 2, 3, 4), + Tuple(0, 1, 6, 10, 5), + Tuple(1, 2, 7, 11, 6), + Tuple(2, 3, 8, 12, 7), + Tuple(3, 4, 9, 13, 8), + Tuple(0, 4, 9, 14, 5), + Tuple(5, 10, 16, 15, 14), + Tuple(6, 10, 16, 17, 11), + Tuple(7, 11, 17, 18, 12), + Tuple(8, 12, 18, 19, 13), + Tuple(9, 13, 19, 15, 14), + Tuple(15, 16, 17, 18, 19)), + Tuple( + Perm(0, 1, 2, 3, 4)(5, 6, 7, 8, 9)(10, 11, 12, 13, 14)(15, 16, 17, 18, 19), + Perm(0, 5, 10, 6, 1)(2, 4, 14, 16, 11)(3, 9, 15, 17, 7)(8, 13, 19, 18, 12), + Perm(0, 10, 17, 12, 3)(1, 6, 11, 7, 2)(4, 5, 16, 18, 8)(9, 14, 15, 19, 13), + Perm(0, 6, 17, 19, 9)(1, 11, 18, 13, 4)(2, 7, 12, 8, 3)(5, 10, 16, 15, 14), + Perm(0, 2, 12, 19, 14)(1, 7, 18, 15, 5)(3, 8, 13, 9, 4)(6, 11, 17, 16, 10), + Perm(0, 4, 9, 14, 5)(1, 3, 13, 15, 10)(2, 8, 19, 16, 6)(7, 12, 18, 17, 11), + Perm(0, 1)(2, 5)(3, 10)(4, 6)(7, 14)(8, 16)(9, 11)(12, 15)(13, 17)(18, 19), + Perm(0, 7)(1, 2)(3, 6)(4, 11)(5, 12)(8, 10)(9, 17)(13, 16)(14, 18)(15, 19), + Perm(0, 12)(1, 8)(2, 3)(4, 7)(5, 18)(6, 13)(9, 11)(10, 19)(14, 17)(15, 16), + Perm(0, 8)(1, 13)(2, 9)(3, 4)(5, 12)(6, 19)(7, 14)(10, 18)(11, 15)(16, 17), + Perm(0, 4)(1, 9)(2, 14)(3, 5)(6, 13)(7, 15)(8, 10)(11, 19)(12, 16)(17, 18), + Perm(0, 5)(1, 14)(2, 15)(3, 16)(4, 10)(6, 9)(7, 19)(8, 17)(11, 13)(12, 18), + Perm(0, 11)(1, 6)(2, 10)(3, 16)(4, 17)(5, 7)(8, 15)(9, 18)(12, 14)(13, 19), + Perm(0, 18)(1, 12)(2, 7)(3, 11)(4, 17)(5, 19)(6, 8)(9, 16)(10, 13)(14, 15), + Perm(0, 18)(1, 19)(2, 13)(3, 8)(4, 12)(5, 17)(6, 15)(7, 9)(10, 16)(11, 14), + Perm(0, 13)(1, 19)(2, 15)(3, 14)(4, 9)(5, 8)(6, 18)(7, 16)(10, 12)(11, 17), + Perm(0, 16)(1, 15)(2, 19)(3, 18)(4, 17)(5, 10)(6, 14)(7, 13)(8, 12)(9, 11), + Perm(0, 18)(1, 17)(2, 16)(3, 15)(4, 19)(5, 12)(6, 11)(7, 10)(8, 14)(9, 13), + Perm(0, 15)(1, 19)(2, 18)(3, 17)(4, 16)(5, 14)(6, 13)(7, 12)(8, 11)(9, 10), + Perm(0, 17)(1, 16)(2, 15)(3, 19)(4, 18)(5, 11)(6, 10)(7, 14)(8, 13)(9, 12), + Perm(0, 19)(1, 18)(2, 17)(3, 16)(4, 15)(5, 13)(6, 12)(7, 
11)(8, 10)(9, 14), + Perm(1, 4, 5)(2, 9, 10)(3, 14, 6)(7, 13, 16)(8, 15, 11)(12, 19, 17), + Perm(19)(0, 6, 2)(3, 5, 11)(4, 10, 7)(8, 14, 17)(9, 16, 12)(13, 15, 18), + Perm(0, 11, 8)(1, 7, 3)(4, 6, 12)(5, 17, 13)(9, 10, 18)(14, 16, 19), + Perm(0, 7, 13)(1, 12, 9)(2, 8, 4)(5, 11, 19)(6, 18, 14)(10, 17, 15), + Perm(0, 3, 9)(1, 8, 14)(2, 13, 5)(6, 12, 15)(7, 19, 10)(11, 18, 16), + Perm(0, 14, 10)(1, 9, 16)(2, 13, 17)(3, 19, 11)(4, 15, 6)(7, 8, 18), + Perm(0, 16, 7)(1, 10, 11)(2, 5, 17)(3, 14, 18)(4, 15, 12)(8, 9, 19), + Perm(0, 16, 13)(1, 17, 8)(2, 11, 12)(3, 6, 18)(4, 10, 19)(5, 15, 9), + Perm(0, 11, 15)(1, 17, 14)(2, 18, 9)(3, 12, 13)(4, 7, 19)(5, 6, 16), + Perm(0, 8, 15)(1, 12, 16)(2, 18, 10)(3, 19, 5)(4, 13, 14)(6, 7, 17))) + +icosahedron = Polyhedron( + Tuple(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), + Tuple( + Tuple(0, 1, 2), + Tuple(0, 2, 3), + Tuple(0, 3, 4), + Tuple(0, 4, 5), + Tuple(0, 1, 5), + Tuple(1, 6, 7), + Tuple(1, 2, 7), + Tuple(2, 7, 8), + Tuple(2, 3, 8), + Tuple(3, 8, 9), + Tuple(3, 4, 9), + Tuple(4, 9, 10), + Tuple(4, 5, 10), + Tuple(5, 6, 10), + Tuple(1, 5, 6), + Tuple(6, 7, 11), + Tuple(7, 8, 11), + Tuple(8, 9, 11), + Tuple(9, 10, 11), + Tuple(6, 10, 11)), + Tuple( + Perm(11)(1, 2, 3, 4, 5)(6, 7, 8, 9, 10), + Perm(0, 5, 6, 7, 2)(3, 4, 10, 11, 8), + Perm(0, 1, 7, 8, 3)(4, 5, 6, 11, 9), + Perm(0, 2, 8, 9, 4)(1, 7, 11, 10, 5), + Perm(0, 3, 9, 10, 5)(1, 2, 8, 11, 6), + Perm(0, 4, 10, 6, 1)(2, 3, 9, 11, 7), + Perm(0, 1)(2, 5)(3, 6)(4, 7)(8, 10)(9, 11), + Perm(0, 2)(1, 3)(4, 7)(5, 8)(6, 9)(10, 11), + Perm(0, 3)(1, 9)(2, 4)(5, 8)(6, 11)(7, 10), + Perm(0, 4)(1, 9)(2, 10)(3, 5)(6, 8)(7, 11), + Perm(0, 5)(1, 4)(2, 10)(3, 6)(7, 9)(8, 11), + Perm(0, 6)(1, 5)(2, 10)(3, 11)(4, 7)(8, 9), + Perm(0, 7)(1, 2)(3, 6)(4, 11)(5, 8)(9, 10), + Perm(0, 8)(1, 9)(2, 3)(4, 7)(5, 11)(6, 10), + Perm(0, 9)(1, 11)(2, 10)(3, 4)(5, 8)(6, 7), + Perm(0, 10)(1, 9)(2, 11)(3, 6)(4, 5)(7, 8), + Perm(0, 11)(1, 6)(2, 10)(3, 9)(4, 8)(5, 7), + Perm(0, 11)(1, 8)(2, 7)(3, 6)(4, 10)(5, 9), + Perm(0, 11)(1, 10)(2, 9)(3, 8)(4, 7)(5, 6), + Perm(0, 11)(1, 7)(2, 6)(3, 10)(4, 9)(5, 8), + Perm(0, 11)(1, 9)(2, 8)(3, 7)(4, 6)(5, 10), + Perm(0, 5, 1)(2, 4, 6)(3, 10, 7)(8, 9, 11), + Perm(0, 1, 2)(3, 5, 7)(4, 6, 8)(9, 10, 11), + Perm(0, 2, 3)(1, 8, 4)(5, 7, 9)(6, 11, 10), + Perm(0, 3, 4)(1, 8, 10)(2, 9, 5)(6, 7, 11), + Perm(0, 4, 5)(1, 3, 10)(2, 9, 6)(7, 8, 11), + Perm(0, 10, 7)(1, 5, 6)(2, 4, 11)(3, 9, 8), + Perm(0, 6, 8)(1, 7, 2)(3, 5, 11)(4, 10, 9), + Perm(0, 7, 9)(1, 11, 4)(2, 8, 3)(5, 6, 10), + Perm(0, 8, 10)(1, 7, 6)(2, 11, 5)(3, 9, 4), + Perm(0, 9, 6)(1, 3, 11)(2, 8, 7)(4, 10, 5))) + +tetrahedron_faces = [tuple(arg) for arg in tetrahedron.faces] + +cube_faces = [tuple(arg) for arg in cube.faces] + +octahedron_faces = [tuple(arg) for arg in octahedron.faces] + +dodecahedron_faces = [tuple(arg) for arg in dodecahedron.faces] + +icosahedron_faces = [tuple(arg) for arg in icosahedron.faces] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/prufer.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/prufer.py new file mode 100644 index 0000000000000000000000000000000000000000..c1241e055eb1fdf697ab06dae07ee0b322bf4d6d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/prufer.py @@ -0,0 +1,436 @@ +from sympy.core import Basic +from sympy.core.containers import Tuple +from sympy.tensor.array import Array +from sympy.core.sympify import _sympify +from sympy.utilities.iterables import flatten, iterable +from sympy.utilities.misc import as_int + +from collections import 
defaultdict + + +class Prufer(Basic): + """ + The Prufer correspondence is an algorithm that describes the + bijection between labeled trees and the Prufer code. A Prufer + code of a labeled tree is unique up to isomorphism and has + a length of n - 2. + + Prufer sequences were first used by Heinz Prufer to give a + proof of Cayley's formula. + + References + ========== + + .. [1] https://mathworld.wolfram.com/LabeledTree.html + + """ + _prufer_repr = None + _tree_repr = None + _nodes = None + _rank = None + + @property + def prufer_repr(self): + """Returns Prufer sequence for the Prufer object. + + This sequence is found by removing the highest numbered vertex, + recording the node it was attached to, and continuing until only + two vertices remain. The Prufer sequence is the list of recorded nodes. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).prufer_repr + [3, 3, 3, 4] + >>> Prufer([1, 0, 0]).prufer_repr + [1, 0, 0] + + See Also + ======== + + to_prufer + + """ + if self._prufer_repr is None: + self._prufer_repr = self.to_prufer(self._tree_repr[:], self.nodes) + return self._prufer_repr + + @property + def tree_repr(self): + """Returns the tree representation of the Prufer object. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).tree_repr + [[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]] + >>> Prufer([1, 0, 0]).tree_repr + [[1, 2], [0, 1], [0, 3], [0, 4]] + + See Also + ======== + + to_tree + + """ + if self._tree_repr is None: + self._tree_repr = self.to_tree(self._prufer_repr[:]) + return self._tree_repr + + @property + def nodes(self): + """Returns the number of nodes in the tree. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).nodes + 6 + >>> Prufer([1, 0, 0]).nodes + 5 + + """ + return self._nodes + + @property + def rank(self): + """Returns the rank of the Prufer sequence. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> p = Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]) + >>> p.rank + 778 + >>> p.next(1).rank + 779 + >>> p.prev().rank + 777 + + See Also + ======== + + prufer_rank, next, prev, size + + """ + if self._rank is None: + self._rank = self.prufer_rank() + return self._rank + + @property + def size(self): + """Return the number of possible trees of this Prufer object. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer([0]*4).size == Prufer([6]*4).size == 1296 + True + + See Also + ======== + + prufer_rank, rank, next, prev + + """ + return self.prev(self.rank).prev().rank + 1 + + @staticmethod + def to_prufer(tree, n): + """Return the Prufer sequence for a tree given as a list of edges where + ``n`` is the number of nodes in the tree. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> a = Prufer([[0, 1], [0, 2], [0, 3]]) + >>> a.prufer_repr + [0, 0] + >>> Prufer.to_prufer([[0, 1], [0, 2], [0, 3]], 4) + [0, 0] + + See Also + ======== + prufer_repr: returns Prufer sequence of a Prufer object. + + """ + d = defaultdict(int) + L = [] + for edge in tree: + # Increment the value of the corresponding + # node in the degree list as we encounter an + # edge involving it. 
+ d[edge[0]] += 1 + d[edge[1]] += 1 + for i in range(n - 2): + # find the smallest leaf + for x in range(n): + if d[x] == 1: + break + # find the node it was connected to + y = None + for edge in tree: + if x == edge[0]: + y = edge[1] + elif x == edge[1]: + y = edge[0] + if y is not None: + break + # record and update + L.append(y) + for j in (x, y): + d[j] -= 1 + if not d[j]: + d.pop(j) + tree.remove(edge) + return L + + @staticmethod + def to_tree(prufer): + """Return the tree (as a list of edges) of the given Prufer sequence. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> a = Prufer([0, 2], 4) + >>> a.tree_repr + [[0, 1], [0, 2], [2, 3]] + >>> Prufer.to_tree([0, 2]) + [[0, 1], [0, 2], [2, 3]] + + References + ========== + + .. [1] https://hamberg.no/erlend/posts/2010-11-06-prufer-sequence-compact-tree-representation.html + + See Also + ======== + tree_repr: returns tree representation of a Prufer object. + + """ + tree = [] + last = [] + n = len(prufer) + 2 + d = defaultdict(lambda: 1) + for p in prufer: + d[p] += 1 + for i in prufer: + for j in range(n): + # find the smallest leaf (degree = 1) + if d[j] == 1: + break + # (i, j) is the new edge that we append to the tree + # and remove from the degree dictionary + d[i] -= 1 + d[j] -= 1 + tree.append(sorted([i, j])) + last = [i for i in range(n) if d[i] == 1] or [0, 1] + tree.append(last) + + return tree + + @staticmethod + def edges(*runs): + """Return a list of edges and the number of nodes from the given runs + that connect nodes in an integer-labelled tree. + + All node numbers will be shifted so that the minimum node is 0. It is + not a problem if edges are repeated in the runs; only unique edges are + returned. There is no assumption made about what the range of the node + labels should be, but all nodes from the smallest through the largest + must be present. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer.edges([1, 2, 3], [2, 4, 5]) # a T + ([[0, 1], [1, 2], [1, 3], [3, 4]], 5) + + Duplicate edges are removed: + + >>> Prufer.edges([0, 1, 2, 3], [1, 4, 5], [1, 4, 6]) # a K + ([[0, 1], [1, 2], [1, 4], [2, 3], [4, 5], [4, 6]], 7) + + """ + e = set() + nmin = runs[0][0] + for r in runs: + for i in range(len(r) - 1): + a, b = r[i: i + 2] + if b < a: + a, b = b, a + e.add((a, b)) + rv = [] + got = set() + nmin = nmax = None + for ei in e: + for i in ei: + got.add(i) + nmin = min(ei[0], nmin) if nmin is not None else ei[0] + nmax = max(ei[1], nmax) if nmax is not None else ei[1] + rv.append(list(ei)) + missing = set(range(nmin, nmax + 1)) - got + if missing: + missing = [i + nmin for i in missing] + if len(missing) == 1: + msg = 'Node %s is missing.' % missing.pop() + else: + msg = 'Nodes %s are missing.' % sorted(missing) + raise ValueError(msg) + if nmin != 0: + for i, ei in enumerate(rv): + rv[i] = [n - nmin for n in ei] + nmax -= nmin + return sorted(rv), nmax + 1 + + def prufer_rank(self): + """Computes the rank of a Prufer sequence. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> a = Prufer([[0, 1], [0, 2], [0, 3]]) + >>> a.prufer_rank() + 0 + + See Also + ======== + + rank, next, prev, size + + """ + r = 0 + p = 1 + for i in range(self.nodes - 3, -1, -1): + r += p*self.prufer_repr[i] + p *= self.nodes + return r + + @classmethod + def unrank(self, rank, n): + """Finds the unranked Prufer sequence. 
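+
+        In effect the sequence is the base-``n`` expansion of ``rank`` (most
+        significant digit first), padded with zeros to length ``n - 2``.
+        An illustrative check:
+
+        >>> from sympy.combinatorics.prufer import Prufer
+        >>> Prufer.unrank(13, 4)
+        Prufer([3, 1])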
+ + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> Prufer.unrank(0, 4) + Prufer([0, 0]) + + """ + n, rank = as_int(n), as_int(rank) + L = defaultdict(int) + for i in range(n - 3, -1, -1): + L[i] = rank % n + rank = (rank - L[i])//n + return Prufer([L[i] for i in range(len(L))]) + + def __new__(cls, *args, **kw_args): + """The constructor for the Prufer object. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + + A Prufer object can be constructed from a list of edges: + + >>> a = Prufer([[0, 1], [0, 2], [0, 3]]) + >>> a.prufer_repr + [0, 0] + + If the number of nodes is given, no checking of the nodes will + be performed; it will be assumed that nodes 0 through n - 1 are + present: + + >>> Prufer([[0, 1], [0, 2], [0, 3]], 4) + Prufer([[0, 1], [0, 2], [0, 3]], 4) + + A Prufer object can be constructed from a Prufer sequence: + + >>> b = Prufer([1, 3]) + >>> b.tree_repr + [[0, 1], [1, 3], [2, 3]] + + """ + arg0 = Array(args[0]) if args[0] else Tuple() + args = (arg0,) + tuple(_sympify(arg) for arg in args[1:]) + ret_obj = Basic.__new__(cls, *args, **kw_args) + args = [list(args[0])] + if args[0] and iterable(args[0][0]): + if not args[0][0]: + raise ValueError( + 'Prufer expects at least one edge in the tree.') + if len(args) > 1: + nnodes = args[1] + else: + nodes = set(flatten(args[0])) + nnodes = max(nodes) + 1 + if nnodes != len(nodes): + missing = set(range(nnodes)) - nodes + if len(missing) == 1: + msg = 'Node %s is missing.' % missing.pop() + else: + msg = 'Nodes %s are missing.' % sorted(missing) + raise ValueError(msg) + ret_obj._tree_repr = [list(i) for i in args[0]] + ret_obj._nodes = nnodes + else: + ret_obj._prufer_repr = args[0] + ret_obj._nodes = len(ret_obj._prufer_repr) + 2 + return ret_obj + + def next(self, delta=1): + """Generates the Prufer sequence that is delta beyond the current one. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> a = Prufer([[0, 1], [0, 2], [0, 3]]) + >>> b = a.next(1) # == a.next() + >>> b.tree_repr + [[0, 2], [0, 1], [1, 3]] + >>> b.rank + 1 + + See Also + ======== + + prufer_rank, rank, prev, size + + """ + return Prufer.unrank(self.rank + delta, self.nodes) + + def prev(self, delta=1): + """Generates the Prufer sequence that is -delta before the current one. + + Examples + ======== + + >>> from sympy.combinatorics.prufer import Prufer + >>> a = Prufer([[0, 1], [1, 2], [2, 3], [1, 4]]) + >>> a.rank + 36 + >>> b = a.prev() + >>> b + Prufer([1, 2, 0]) + >>> b.rank + 35 + + See Also + ======== + + prufer_rank, rank, next, size + + """ + return Prufer.unrank(self.rank -delta, self.nodes) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem.py new file mode 100644 index 0000000000000000000000000000000000000000..4bacda085f9cb14f2cad14c915c05e5d036366bc --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem.py @@ -0,0 +1,453 @@ +from collections import deque +from sympy.combinatorics.rewritingsystem_fsm import StateMachine + +class RewritingSystem: + ''' + A class implementing rewriting systems for `FpGroup`s. + + References + ========== + .. [1] Epstein, D., Holt, D. and Rees, S. (1991). + The use of Knuth-Bendix methods to solve the word problem in automatic groups. + Journal of Symbolic Computation, 12(4-5), pp.397-414. + + .. 
[2] GAP's Manual on its KBMAG package + https://www.gap-system.org/Manuals/pkg/kbmag-1.5.3/doc/manual.pdf + + ''' + def __init__(self, group): + self.group = group + self.alphabet = group.generators + self._is_confluent = None + + # these values are taken from [2] + self.maxeqns = 32767 # max rules + self.tidyint = 100 # rules before tidying + + # _max_exceeded is True if maxeqns is exceeded + # at any point + self._max_exceeded = False + + # Reduction automaton + self.reduction_automaton = None + self._new_rules = {} + + # dictionary of reductions + self.rules = {} + self.rules_cache = deque([], 50) + self._init_rules() + + + # All the transition symbols in the automaton + generators = list(self.alphabet) + generators += [gen**-1 for gen in generators] + # Create a finite state machine as an instance of the StateMachine object + self.reduction_automaton = StateMachine('Reduction automaton for '+ repr(self.group), generators) + self.construct_automaton() + + def set_max(self, n): + ''' + Set the maximum number of rules that can be defined + + ''' + if n > self.maxeqns: + self._max_exceeded = False + self.maxeqns = n + return + + @property + def is_confluent(self): + ''' + Return `True` if the system is confluent + + ''' + if self._is_confluent is None: + self._is_confluent = self._check_confluence() + return self._is_confluent + + def _init_rules(self): + identity = self.group.free_group.identity + for r in self.group.relators: + self.add_rule(r, identity) + self._remove_redundancies() + return + + def _add_rule(self, r1, r2): + ''' + Add the rule r1 -> r2 with no checking or further + deductions + + ''' + if len(self.rules) + 1 > self.maxeqns: + self._is_confluent = self._check_confluence() + self._max_exceeded = True + raise RuntimeError("Too many rules were defined.") + self.rules[r1] = r2 + # Add the newly added rule to the `new_rules` dictionary. + if self.reduction_automaton: + self._new_rules[r1] = r2 + + def add_rule(self, w1, w2, check=False): + new_keys = set() + + if w1 == w2: + return new_keys + + if w1 < w2: + w1, w2 = w2, w1 + + if (w1, w2) in self.rules_cache: + return new_keys + self.rules_cache.append((w1, w2)) + + s1, s2 = w1, w2 + + # The following is the equivalent of checking + # s1 for overlaps with the implicit reductions + # {g*g**-1 -> } and {g**-1*g -> } + # for any generator g without installing the + # redundant rules that would result from processing + # the overlaps. See [1], Section 3 for details. + + if len(s1) - len(s2) < 3: + if s1 not in self.rules: + new_keys.add(s1) + if not check: + self._add_rule(s1, s2) + if s2**-1 > s1**-1 and s2**-1 not in self.rules: + new_keys.add(s2**-1) + if not check: + self._add_rule(s2**-1, s1**-1) + + # overlaps on the right + while len(s1) - len(s2) > -1: + g = s1[len(s1)-1] + s1 = s1.subword(0, len(s1)-1) + s2 = s2*g**-1 + if len(s1) - len(s2) < 0: + if s2 not in self.rules: + if not check: + self._add_rule(s2, s1) + new_keys.add(s2) + elif len(s1) - len(s2) < 3: + new = self.add_rule(s1, s2, check) + new_keys.update(new) + + # overlaps on the left + while len(w1) - len(w2) > -1: + g = w1[0] + w1 = w1.subword(1, len(w1)) + w2 = g**-1*w2 + if len(w1) - len(w2) < 0: + if w2 not in self.rules: + if not check: + self._add_rule(w2, w1) + new_keys.add(w2) + elif len(w1) - len(w2) < 3: + new = self.add_rule(w1, w2, check) + new_keys.update(new) + + return new_keys + + def _remove_redundancies(self, changes=False): + ''' + Reduce left- and right-hand sides of reduction rules + and remove redundant equations (i.e. 
those for which + lhs == rhs). If `changes` is `True`, return a set + containing the removed keys and a set containing the + added keys + + ''' + removed = set() + added = set() + rules = self.rules.copy() + for r in rules: + v = self.reduce(r, exclude=r) + w = self.reduce(rules[r]) + if v != r: + del self.rules[r] + removed.add(r) + if v > w: + added.add(v) + self.rules[v] = w + elif v < w: + added.add(w) + self.rules[w] = v + else: + self.rules[v] = w + if changes: + return removed, added + return + + def make_confluent(self, check=False): + ''' + Try to make the system confluent using the Knuth-Bendix + completion algorithm + + ''' + if self._max_exceeded: + return self._is_confluent + lhs = list(self.rules.keys()) + + def _overlaps(r1, r2): + len1 = len(r1) + len2 = len(r2) + result = [] + for j in range(1, len1 + len2): + if (r1.subword(len1 - j, len1 + len2 - j, strict=False) + == r2.subword(j - len1, j, strict=False)): + a = r1.subword(0, len1-j, strict=False) + a = a*r2.subword(0, j-len1, strict=False) + b = r2.subword(j-len1, j, strict=False) + c = r2.subword(j, len2, strict=False) + c = c*r1.subword(len1 + len2 - j, len1, strict=False) + result.append(a*b*c) + return result + + def _process_overlap(w, r1, r2, check): + s = w.eliminate_word(r1, self.rules[r1]) + s = self.reduce(s) + t = w.eliminate_word(r2, self.rules[r2]) + t = self.reduce(t) + if s != t: + if check: + # system not confluent + return [0] + try: + new_keys = self.add_rule(t, s, check) + return new_keys + except RuntimeError: + return False + return + + added = 0 + i = 0 + while i < len(lhs): + r1 = lhs[i] + i += 1 + # j could be i+1 to not + # check each pair twice but lhs + # is extended in the loop and the new + # elements have to be checked with the + # preceding ones. there is probably a better way + # to handle this + j = 0 + while j < len(lhs): + r2 = lhs[j] + j += 1 + if r1 == r2: + continue + overlaps = _overlaps(r1, r2) + overlaps.extend(_overlaps(r1**-1, r2)) + if not overlaps: + continue + for w in overlaps: + new_keys = _process_overlap(w, r1, r2, check) + if new_keys: + if check: + return False + lhs.extend(new_keys) + added += len(new_keys) + elif new_keys == False: + # too many rules were added so the process + # couldn't complete + return self._is_confluent + + if added > self.tidyint and not check: + # tidy up + r, a = self._remove_redundancies(changes=True) + added = 0 + if r: + # reset i since some elements were removed + i = min([lhs.index(s) for s in r]) + lhs = [l for l in lhs if l not in r] + lhs.extend(a) + if r1 in r: + # r1 was removed as redundant + break + + self._is_confluent = True + if not check: + self._remove_redundancies() + return True + + def _check_confluence(self): + return self.make_confluent(check=True) + + def reduce(self, word, exclude=None): + ''' + Apply reduction rules to `word` excluding the reduction rule + for the lhs equal to `exclude` + + ''' + rules = {r: self.rules[r] for r in self.rules if r != exclude} + # the following is essentially `eliminate_words()` code from the + # `FreeGroupElement` class, the only difference being the first + # "if" statement + again = True + new = word + while again: + again = False + for r in rules: + prev = new + if rules[r]**-1 > r**-1: + new = new.eliminate_word(r, rules[r], _all=True, inverse=False) + else: + new = new.eliminate_word(r, rules[r], _all=True) + if new != prev: + again = True + return new + + def _compute_inverse_rules(self, rules): + ''' + Compute the inverse rules for a given set of rules. 
+ The inverse rules are used in the automaton for word reduction. + + Arguments: + rules (dictionary): Rules for which the inverse rules are to computed. + + Returns: + Dictionary of inverse_rules. + + ''' + inverse_rules = {} + for r in rules: + rule_key_inverse = r**-1 + rule_value_inverse = (rules[r])**-1 + if (rule_value_inverse < rule_key_inverse): + inverse_rules[rule_key_inverse] = rule_value_inverse + else: + inverse_rules[rule_value_inverse] = rule_key_inverse + return inverse_rules + + def construct_automaton(self): + ''' + Construct the automaton based on the set of reduction rules of the system. + + Automata Design: + The accept states of the automaton are the proper prefixes of the left hand side of the rules. + The complete left hand side of the rules are the dead states of the automaton. + + ''' + self._add_to_automaton(self.rules) + + def _add_to_automaton(self, rules): + ''' + Add new states and transitions to the automaton. + + Summary: + States corresponding to the new rules added to the system are computed and added to the automaton. + Transitions in the previously added states are also modified if necessary. + + Arguments: + rules (dictionary) -- Dictionary of the newly added rules. + + ''' + # Automaton variables + automaton_alphabet = [] + proper_prefixes = {} + + # compute the inverses of all the new rules added + all_rules = rules + inverse_rules = self._compute_inverse_rules(all_rules) + all_rules.update(inverse_rules) + + # Keep track of the accept_states. + accept_states = [] + + for rule in all_rules: + # The symbols present in the new rules are the symbols to be verified at each state. + # computes the automaton_alphabet, as the transitions solely depend upon the new states. + automaton_alphabet += rule.letter_form_elm + # Compute the proper prefixes for every rule. + proper_prefixes[rule] = [] + letter_word_array = list(rule.letter_form_elm) + len_letter_word_array = len(letter_word_array) + for i in range (1, len_letter_word_array): + letter_word_array[i] = letter_word_array[i-1]*letter_word_array[i] + # Add accept states. + elem = letter_word_array[i-1] + if elem not in self.reduction_automaton.states: + self.reduction_automaton.add_state(elem, state_type='a') + accept_states.append(elem) + proper_prefixes[rule] = letter_word_array + # Check for overlaps between dead and accept states. + if rule in accept_states: + self.reduction_automaton.states[rule].state_type = 'd' + self.reduction_automaton.states[rule].rh_rule = all_rules[rule] + accept_states.remove(rule) + # Add dead states + if rule not in self.reduction_automaton.states: + self.reduction_automaton.add_state(rule, state_type='d', rh_rule=all_rules[rule]) + + automaton_alphabet = set(automaton_alphabet) + + # Add new transitions for every state. + for state in self.reduction_automaton.states: + current_state_name = state + current_state_type = self.reduction_automaton.states[state].state_type + # Transitions will be modified only when suffixes of the current_state + # belongs to the proper_prefixes of the new rules. + # The rest are ignored if they cannot lead to a dead state after a finite number of transisitons. + if current_state_type == 's': + for letter in automaton_alphabet: + if letter in self.reduction_automaton.states: + self.reduction_automaton.states[state].add_transition(letter, letter) + else: + self.reduction_automaton.states[state].add_transition(letter, current_state_name) + elif current_state_type == 'a': + # Check if the transition to any new state in possible. 
+ for letter in automaton_alphabet: + _next = current_state_name*letter + while len(_next) and _next not in self.reduction_automaton.states: + _next = _next.subword(1, len(_next)) + if not len(_next): + _next = 'start' + self.reduction_automaton.states[state].add_transition(letter, _next) + + # Add transitions for new states. All symbols used in the automaton are considered here. + # Ignore this if `reduction_automaton.automaton_alphabet` = `automaton_alphabet`. + if len(self.reduction_automaton.automaton_alphabet) != len(automaton_alphabet): + for state in accept_states: + current_state_name = state + for letter in self.reduction_automaton.automaton_alphabet: + _next = current_state_name*letter + while len(_next) and _next not in self.reduction_automaton.states: + _next = _next.subword(1, len(_next)) + if not len(_next): + _next = 'start' + self.reduction_automaton.states[state].add_transition(letter, _next) + + def reduce_using_automaton(self, word): + ''' + Reduce a word using an automaton. + + Summary: + All the symbols of the word are stored in an array and are given as the input to the automaton. + If the automaton reaches a dead state that subword is replaced and the automaton is run from the beginning. + The complete word has to be replaced when the word is read and the automaton reaches a dead state. + So, this process is repeated until the word is read completely and the automaton reaches the accept state. + + Arguments: + word (instance of FreeGroupElement) -- Word that needs to be reduced. + + ''' + # Modify the automaton if new rules are found. + if self._new_rules: + self._add_to_automaton(self._new_rules) + self._new_rules = {} + + flag = 1 + while flag: + flag = 0 + current_state = self.reduction_automaton.states['start'] + for i, s in enumerate(word.letter_form_elm): + next_state_name = current_state.transitions[s] + next_state = self.reduction_automaton.states[next_state_name] + if next_state.state_type == 'd': + subst = next_state.rh_rule + word = word.substituted_word(i - len(next_state_name) + 1, i+1, subst) + flag = 1 + break + current_state = next_state + return word diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem_fsm.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem_fsm.py new file mode 100644 index 0000000000000000000000000000000000000000..21916530040ac321180692d1a0811da4ae36a056 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/rewritingsystem_fsm.py @@ -0,0 +1,60 @@ +class State: + ''' + A representation of a state managed by a ``StateMachine``. + + Attributes: + name (instance of FreeGroupElement or string) -- State name which is also assigned to the Machine. + transisitons (OrderedDict) -- Represents all the transitions of the state object. + state_type (string) -- Denotes the type (accept/start/dead) of the state. + rh_rule (instance of FreeGroupElement) -- right hand rule for dead state. + state_machine (instance of StateMachine object) -- The finite state machine that the state belongs to. + ''' + + def __init__(self, name, state_machine, state_type=None, rh_rule=None): + self.name = name + self.transitions = {} + self.state_machine = state_machine + self.state_type = state_type[0] + self.rh_rule = rh_rule + + def add_transition(self, letter, state): + ''' + Add a transition from the current state to a new state. + + Keyword Arguments: + letter -- The alphabet element the current state reads to make the state transition. 
+ state -- This will be an instance of the State object which represents a new state after in the transition after the alphabet is read. + + ''' + self.transitions[letter] = state + +class StateMachine: + ''' + Representation of a finite state machine the manages the states and the transitions of the automaton. + + Attributes: + states (dictionary) -- Collection of all registered `State` objects. + name (str) -- Name of the state machine. + ''' + + def __init__(self, name, automaton_alphabet): + self.name = name + self.automaton_alphabet = automaton_alphabet + self.states = {} # Contains all the states in the machine. + self.add_state('start', state_type='s') + + def add_state(self, state_name, state_type=None, rh_rule=None): + ''' + Instantiate a state object and stores it in the 'states' dictionary. + + Arguments: + state_name (instance of FreeGroupElement or string) -- name of the new states. + state_type (string) -- Denotes the type (accept/start/dead) of the state added. + rh_rule (instance of FreeGroupElement) -- right hand rule for dead state. + + ''' + new_state = State(state_name, self, state_type, rh_rule) + self.states[state_name] = new_state + + def __repr__(self): + return "%s" % (self.name) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/schur_number.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/schur_number.py new file mode 100644 index 0000000000000000000000000000000000000000..83aac98e543d4b54d4e6af17adca6e4f4de1b9ac --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/schur_number.py @@ -0,0 +1,160 @@ +""" +The Schur number S(k) is the largest integer n for which the interval [1,n] +can be partitioned into k sum-free sets.(https://mathworld.wolfram.com/SchurNumber.html) +""" +import math +from sympy.core import S +from sympy.core.basic import Basic +from sympy.core.function import Function +from sympy.core.numbers import Integer + + +class SchurNumber(Function): + r""" + This function creates a SchurNumber object + which is evaluated for `k \le 5` otherwise only + the lower bound information can be retrieved. 
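+
+    For integer `k` beyond the improved bounds known for 6 and 7, the value
+    returned by ``lower_bound()`` follows the recurrence
+    ``3*lower_bound(k - 1) - 1``, as in this illustrative evaluation:
+
+    >>> from sympy.combinatorics.schur_number import SchurNumber
+    >>> SchurNumber(8).lower_bound()
+    5039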
+ + Examples + ======== + + >>> from sympy.combinatorics.schur_number import SchurNumber + + Since S(3) = 13, hence the output is a number + >>> SchurNumber(3) + 13 + + We do not know the Schur number for values greater than 5, hence + only the object is returned + >>> SchurNumber(6) + SchurNumber(6) + + Now, the lower bound information can be retrieved using lower_bound() + method + >>> SchurNumber(6).lower_bound() + 536 + + """ + + @classmethod + def eval(cls, k): + if k.is_Number: + if k is S.Infinity: + return S.Infinity + if k.is_zero: + return S.Zero + if not k.is_integer or k.is_negative: + raise ValueError("k should be a positive integer") + first_known_schur_numbers = {1: 1, 2: 4, 3: 13, 4: 44, 5: 160} + if k <= 5: + return Integer(first_known_schur_numbers[k]) + + def lower_bound(self): + f_ = self.args[0] + # Improved lower bounds known for S(6) and S(7) + if f_ == 6: + return Integer(536) + if f_ == 7: + return Integer(1680) + # For other cases, use general expression + if f_.is_Integer: + return 3*self.func(f_ - 1).lower_bound() - 1 + return (3**f_ - 1)/2 + + +def _schur_subsets_number(n): + + if n is S.Infinity: + raise ValueError("Input must be finite") + if n <= 0: + raise ValueError("n must be a non-zero positive integer.") + elif n <= 3: + min_k = 1 + else: + min_k = math.ceil(math.log(2*n + 1, 3)) + + return Integer(min_k) + + +def schur_partition(n): + """ + + This function returns the partition in the minimum number of sum-free subsets + according to the lower bound given by the Schur Number. + + Parameters + ========== + + n: a number + n is the upper limit of the range [1, n] for which we need to find and + return the minimum number of free subsets according to the lower bound + of schur number + + Returns + ======= + + List of lists + List of the minimum number of sum-free subsets + + Notes + ===== + + It is possible for some n to make the partition into less + subsets since the only known Schur numbers are: + S(1) = 1, S(2) = 4, S(3) = 13, S(4) = 44. + e.g for n = 44 the lower bound from the function above is 5 subsets but it has been proven + that can be done with 4 subsets. 
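+
+    As an illustrative check of that point, the partition produced here for
+    n = 44 uses the 5 subsets suggested by the lower bound:
+
+    >>> from sympy.combinatorics.schur_number import schur_partition
+    >>> len(schur_partition(44))
+    5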
+ + Examples + ======== + + For n = 1, 2, 3 the answer is the set itself + + >>> from sympy.combinatorics.schur_number import schur_partition + >>> schur_partition(2) + [[1, 2]] + + For n > 3, the answer is the minimum number of sum-free subsets: + + >>> schur_partition(5) + [[3, 2], [5], [1, 4]] + + >>> schur_partition(8) + [[3, 2], [6, 5, 8], [1, 4, 7]] + """ + + if isinstance(n, Basic) and not n.is_Number: + raise ValueError("Input value must be a number") + + number_of_subsets = _schur_subsets_number(n) + if n == 1: + sum_free_subsets = [[1]] + elif n == 2: + sum_free_subsets = [[1, 2]] + elif n == 3: + sum_free_subsets = [[1, 2, 3]] + else: + sum_free_subsets = [[1, 4], [2, 3]] + + while len(sum_free_subsets) < number_of_subsets: + sum_free_subsets = _generate_next_list(sum_free_subsets, n) + missed_elements = [3*k + 1 for k in range(len(sum_free_subsets), (n-1)//3 + 1)] + sum_free_subsets[-1] += missed_elements + + return sum_free_subsets + + +def _generate_next_list(current_list, n): + new_list = [] + + for item in current_list: + temp_1 = [number*3 for number in item if number*3 <= n] + temp_2 = [number*3 - 1 for number in item if number*3 - 1 <= n] + new_item = temp_1 + temp_2 + new_list.append(new_item) + + last_list = [3*k + 1 for k in range(len(current_list)+1) if 3*k + 1 <= n] + new_list.append(last_list) + current_list = new_list + + return current_list diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/subsets.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/subsets.py new file mode 100644 index 0000000000000000000000000000000000000000..e540cb2395cb27e04c9d513831cb834a05ec2abd --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/subsets.py @@ -0,0 +1,619 @@ +from itertools import combinations + +from sympy.combinatorics.graycode import GrayCode + + +class Subset(): + """ + Represents a basic subset object. + + Explanation + =========== + + We generate subsets using essentially two techniques, + binary enumeration and lexicographic enumeration. + The Subset class takes two arguments, the first one + describes the initial subset to consider and the second + describes the superset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.next_binary().subset + ['b'] + >>> a.prev_binary().subset + ['c'] + """ + + _rank_binary = None + _rank_lex = None + _rank_graycode = None + _subset = None + _superset = None + + def __new__(cls, subset, superset): + """ + Default constructor. + + It takes the ``subset`` and its ``superset`` as its parameters. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.subset + ['c', 'd'] + >>> a.superset + ['a', 'b', 'c', 'd'] + >>> a.size + 2 + """ + if len(subset) > len(superset): + raise ValueError('Invalid arguments have been provided. The ' + 'superset must be larger than the subset.') + for elem in subset: + if elem not in superset: + raise ValueError('The superset provided is invalid as it does ' + 'not contain the element {}'.format(elem)) + obj = object.__new__(cls) + obj._subset = subset + obj._superset = superset + return obj + + def __eq__(self, other): + """Return a boolean indicating whether a == b on the basis of + whether both objects are of the class Subset and if the values + of the subset and superset attributes are the same. 
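+
+        A short illustrative doctest (both attributes have to agree):
+
+        >>> from sympy.combinatorics import Subset
+        >>> Subset(['a'], ['a', 'b']) == Subset(['a'], ['a', 'b'])
+        True
+        >>> Subset(['a'], ['a', 'b']) == Subset(['a'], ['a', 'b', 'c'])
+        False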
+ """ + if not isinstance(other, Subset): + return NotImplemented + return self.subset == other.subset and self.superset == other.superset + + def iterate_binary(self, k): + """ + This is a helper function. It iterates over the + binary subsets by ``k`` steps. This variable can be + both positive or negative. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.iterate_binary(-2).subset + ['d'] + >>> a = Subset(['a', 'b', 'c'], ['a', 'b', 'c', 'd']) + >>> a.iterate_binary(2).subset + [] + + See Also + ======== + + next_binary, prev_binary + """ + bin_list = Subset.bitlist_from_subset(self.subset, self.superset) + n = (int(''.join(bin_list), 2) + k) % 2**self.superset_size + bits = bin(n)[2:].rjust(self.superset_size, '0') + return Subset.subset_from_bitlist(self.superset, bits) + + def next_binary(self): + """ + Generates the next binary ordered subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.next_binary().subset + ['b'] + >>> a = Subset(['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.next_binary().subset + [] + + See Also + ======== + + prev_binary, iterate_binary + """ + return self.iterate_binary(1) + + def prev_binary(self): + """ + Generates the previous binary ordered subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([], ['a', 'b', 'c', 'd']) + >>> a.prev_binary().subset + ['a', 'b', 'c', 'd'] + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.prev_binary().subset + ['c'] + + See Also + ======== + + next_binary, iterate_binary + """ + return self.iterate_binary(-1) + + def next_lexicographic(self): + """ + Generates the next lexicographically ordered subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.next_lexicographic().subset + ['d'] + >>> a = Subset(['d'], ['a', 'b', 'c', 'd']) + >>> a.next_lexicographic().subset + [] + + See Also + ======== + + prev_lexicographic + """ + i = self.superset_size - 1 + indices = Subset.subset_indices(self.subset, self.superset) + + if i in indices: + if i - 1 in indices: + indices.remove(i - 1) + else: + indices.remove(i) + i = i - 1 + while i >= 0 and i not in indices: + i = i - 1 + if i >= 0: + indices.remove(i) + indices.append(i+1) + else: + while i not in indices and i >= 0: + i = i - 1 + indices.append(i + 1) + + ret_set = [] + super_set = self.superset + for i in indices: + ret_set.append(super_set[i]) + return Subset(ret_set, super_set) + + def prev_lexicographic(self): + """ + Generates the previous lexicographically ordered subset. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([], ['a', 'b', 'c', 'd']) + >>> a.prev_lexicographic().subset + ['d'] + >>> a = Subset(['c','d'], ['a', 'b', 'c', 'd']) + >>> a.prev_lexicographic().subset + ['c'] + + See Also + ======== + + next_lexicographic + """ + i = self.superset_size - 1 + indices = Subset.subset_indices(self.subset, self.superset) + + while i >= 0 and i not in indices: + i = i - 1 + + if i == 0 or i - 1 in indices: + indices.remove(i) + else: + if i >= 0: + indices.remove(i) + indices.append(i - 1) + indices.append(self.superset_size - 1) + + ret_set = [] + super_set = self.superset + for i in indices: + ret_set.append(super_set[i]) + return Subset(ret_set, super_set) + + def iterate_graycode(self, k): + """ + Helper function used for prev_gray and next_gray. + It performs ``k`` step overs to get the respective Gray codes. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([1, 2, 3], [1, 2, 3, 4]) + >>> a.iterate_graycode(3).subset + [1, 4] + >>> a.iterate_graycode(-2).subset + [1, 2, 4] + + See Also + ======== + + next_gray, prev_gray + """ + unranked_code = GrayCode.unrank(self.superset_size, + (self.rank_gray + k) % self.cardinality) + return Subset.subset_from_bitlist(self.superset, + unranked_code) + + def next_gray(self): + """ + Generates the next Gray code ordered subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([1, 2, 3], [1, 2, 3, 4]) + >>> a.next_gray().subset + [1, 3] + + See Also + ======== + + iterate_graycode, prev_gray + """ + return self.iterate_graycode(1) + + def prev_gray(self): + """ + Generates the previous Gray code ordered subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([2, 3, 4], [1, 2, 3, 4, 5]) + >>> a.prev_gray().subset + [2, 3, 4, 5] + + See Also + ======== + + iterate_graycode, next_gray + """ + return self.iterate_graycode(-1) + + @property + def rank_binary(self): + """ + Computes the binary ordered rank. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset([], ['a','b','c','d']) + >>> a.rank_binary + 0 + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.rank_binary + 3 + + See Also + ======== + + iterate_binary, unrank_binary + """ + if self._rank_binary is None: + self._rank_binary = int("".join( + Subset.bitlist_from_subset(self.subset, + self.superset)), 2) + return self._rank_binary + + @property + def rank_lexicographic(self): + """ + Computes the lexicographic ranking of the subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.rank_lexicographic + 14 + >>> a = Subset([2, 4, 5], [1, 2, 3, 4, 5, 6]) + >>> a.rank_lexicographic + 43 + """ + if self._rank_lex is None: + def _ranklex(self, subset_index, i, n): + if subset_index == [] or i > n: + return 0 + if i in subset_index: + subset_index.remove(i) + return 1 + _ranklex(self, subset_index, i + 1, n) + return 2**(n - i - 1) + _ranklex(self, subset_index, i + 1, n) + indices = Subset.subset_indices(self.subset, self.superset) + self._rank_lex = _ranklex(self, indices, 0, self.superset_size) + return self._rank_lex + + @property + def rank_gray(self): + """ + Computes the Gray code ranking of the subset. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c','d'], ['a','b','c','d']) + >>> a.rank_gray + 2 + >>> a = Subset([2, 4, 5], [1, 2, 3, 4, 5, 6]) + >>> a.rank_gray + 27 + + See Also + ======== + + iterate_graycode, unrank_gray + """ + if self._rank_graycode is None: + bits = Subset.bitlist_from_subset(self.subset, self.superset) + self._rank_graycode = GrayCode(len(bits), start=bits).rank + return self._rank_graycode + + @property + def subset(self): + """ + Gets the subset represented by the current instance. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.subset + ['c', 'd'] + + See Also + ======== + + superset, size, superset_size, cardinality + """ + return self._subset + + @property + def size(self): + """ + Gets the size of the subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.size + 2 + + See Also + ======== + + subset, superset, superset_size, cardinality + """ + return len(self.subset) + + @property + def superset(self): + """ + Gets the superset of the subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.superset + ['a', 'b', 'c', 'd'] + + See Also + ======== + + subset, size, superset_size, cardinality + """ + return self._superset + + @property + def superset_size(self): + """ + Returns the size of the superset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.superset_size + 4 + + See Also + ======== + + subset, superset, size, cardinality + """ + return len(self.superset) + + @property + def cardinality(self): + """ + Returns the number of all possible subsets. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + >>> a.cardinality + 16 + + See Also + ======== + + subset, superset, size, superset_size + """ + return 2**(self.superset_size) + + @classmethod + def subset_from_bitlist(self, super_set, bitlist): + """ + Gets the subset defined by the bitlist. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> Subset.subset_from_bitlist(['a', 'b', 'c', 'd'], '0011').subset + ['c', 'd'] + + See Also + ======== + + bitlist_from_subset + """ + if len(super_set) != len(bitlist): + raise ValueError("The sizes of the lists are not equal") + ret_set = [] + for i in range(len(bitlist)): + if bitlist[i] == '1': + ret_set.append(super_set[i]) + return Subset(ret_set, super_set) + + @classmethod + def bitlist_from_subset(self, subset, superset): + """ + Gets the bitlist corresponding to a subset. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> Subset.bitlist_from_subset(['c', 'd'], ['a', 'b', 'c', 'd']) + '0011' + + See Also + ======== + + subset_from_bitlist + """ + bitlist = ['0'] * len(superset) + if isinstance(subset, Subset): + subset = subset.subset + for i in Subset.subset_indices(subset, superset): + bitlist[i] = '1' + return ''.join(bitlist) + + @classmethod + def unrank_binary(self, rank, superset): + """ + Gets the binary ordered subset of the specified rank. 
+ + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> Subset.unrank_binary(4, ['a', 'b', 'c', 'd']).subset + ['b'] + + See Also + ======== + + iterate_binary, rank_binary + """ + bits = bin(rank)[2:].rjust(len(superset), '0') + return Subset.subset_from_bitlist(superset, bits) + + @classmethod + def unrank_gray(self, rank, superset): + """ + Gets the Gray code ordered subset of the specified rank. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> Subset.unrank_gray(4, ['a', 'b', 'c']).subset + ['a', 'b'] + >>> Subset.unrank_gray(0, ['a', 'b', 'c']).subset + [] + + See Also + ======== + + iterate_graycode, rank_gray + """ + graycode_bitlist = GrayCode.unrank(len(superset), rank) + return Subset.subset_from_bitlist(superset, graycode_bitlist) + + @classmethod + def subset_indices(self, subset, superset): + """Return indices of subset in superset in a list; the list is empty + if all elements of ``subset`` are not in ``superset``. + + Examples + ======== + + >>> from sympy.combinatorics import Subset + >>> superset = [1, 3, 2, 5, 4] + >>> Subset.subset_indices([3, 2, 1], superset) + [1, 2, 0] + >>> Subset.subset_indices([1, 6], superset) + [] + >>> Subset.subset_indices([], superset) + [] + + """ + a, b = superset, subset + sb = set(b) + d = {} + for i, ai in enumerate(a): + if ai in sb: + d[ai] = i + sb.remove(ai) + if not sb: + break + else: + return [] + return [d[bi] for bi in b] + + +def ksubsets(superset, k): + """ + Finds the subsets of size ``k`` in lexicographic order. + + This uses the itertools generator. + + Examples + ======== + + >>> from sympy.combinatorics.subsets import ksubsets + >>> list(ksubsets([1, 2, 3], 2)) + [(1, 2), (1, 3), (2, 3)] + >>> list(ksubsets([1, 2, 3, 4, 5], 2)) + [(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 4), \ + (2, 5), (3, 4), (3, 5), (4, 5)] + + See Also + ======== + + Subset + """ + return combinations(superset, k) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tensor_can.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tensor_can.py new file mode 100644 index 0000000000000000000000000000000000000000..f132a3faffb0d0066a5960efa412e4d8f96db792 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tensor_can.py @@ -0,0 +1,1190 @@ +from sympy.combinatorics.permutations import Permutation, _af_rmul, \ + _af_invert, _af_new +from sympy.combinatorics.perm_groups import PermutationGroup, _orbit, \ + _orbit_transversal +from sympy.combinatorics.util import _distribute_gens_by_base, \ + _orbits_transversals_from_bsgs + +""" + References for tensor canonicalization: + + [1] R. Portugal "Algorithmic simplification of tensor expressions", + J. Phys. A 32 (1999) 7779-7789 + + [2] R. Portugal, B.F. Svaiter "Group-theoretic Approach for Symbolic + Tensor Manipulation: I. Free Indices" + arXiv:math-ph/0107031v1 + + [3] L.R.U. Manssur, R. Portugal "Group-theoretic Approach for Symbolic + Tensor Manipulation: II. Dummy Indices" + arXiv:math-ph/0107032v1 + + [4] xperm.c part of XPerm written by J. M. Martin-Garcia + http://www.xact.es/index.html +""" + + +def dummy_sgs(dummies, sym, n): + """ + Return the strong generators for dummy indices. + + Parameters + ========== + + dummies : List of dummy indices. + `dummies[2k], dummies[2k+1]` are paired indices. + In base form, the dummy indices are always in + consecutive positions. 
+ sym : symmetry under interchange of contracted dummies:: + * None no symmetry + * 0 commuting + * 1 anticommuting + + n : number of indices + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import dummy_sgs + >>> dummy_sgs(list(range(2, 8)), 0, 8) + [[0, 1, 3, 2, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 5, 4, 6, 7, 8, 9], + [0, 1, 2, 3, 4, 5, 7, 6, 8, 9], [0, 1, 4, 5, 2, 3, 6, 7, 8, 9], + [0, 1, 2, 3, 6, 7, 4, 5, 8, 9]] + """ + if len(dummies) > n: + raise ValueError("List too large") + res = [] + # exchange of contravariant and covariant indices + if sym is not None: + for j in dummies[::2]: + a = list(range(n + 2)) + if sym == 1: + a[n] = n + 1 + a[n + 1] = n + a[j], a[j + 1] = a[j + 1], a[j] + res.append(a) + # rename dummy indices + for j in dummies[:-3:2]: + a = list(range(n + 2)) + a[j:j + 4] = a[j + 2], a[j + 3], a[j], a[j + 1] + res.append(a) + return res + + +def _min_dummies(dummies, sym, indices): + """ + Return list of minima of the orbits of indices in group of dummies. + See ``double_coset_can_rep`` for the description of ``dummies`` and ``sym``. + ``indices`` is the initial list of dummy indices. + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import _min_dummies + >>> _min_dummies([list(range(2, 8))], [0], list(range(10))) + [0, 1, 2, 2, 2, 2, 2, 2, 8, 9] + """ + num_types = len(sym) + m = [min(dx) if dx else None for dx in dummies] + res = indices[:] + for i in range(num_types): + for c, i in enumerate(indices): + for j in range(num_types): + if i in dummies[j]: + res[c] = m[j] + break + return res + + +def _trace_S(s, j, b, S_cosets): + """ + Return the representative h satisfying s[h[b]] == j + + If there is not such a representative return None + """ + for h in S_cosets[b]: + if s[h[b]] == j: + return h + return None + + +def _trace_D(gj, p_i, Dxtrav): + """ + Return the representative h satisfying h[gj] == p_i + + If there is not such a representative return None + """ + for h in Dxtrav: + if h[gj] == p_i: + return h + return None + + +def _dumx_remove(dumx, dumx_flat, p0): + """ + remove p0 from dumx + """ + res = [] + for dx in dumx: + if p0 not in dx: + res.append(dx) + continue + k = dx.index(p0) + if k % 2 == 0: + p0_paired = dx[k + 1] + else: + p0_paired = dx[k - 1] + dx.remove(p0) + dx.remove(p0_paired) + dumx_flat.remove(p0) + dumx_flat.remove(p0_paired) + res.append(dx) + + +def transversal2coset(size, base, transversal): + a = [] + j = 0 + for i in range(size): + if i in base: + a.append(sorted(transversal[j].values())) + j += 1 + else: + a.append([list(range(size))]) + j = len(a) - 1 + while a[j] == [list(range(size))]: + j -= 1 + return a[:j + 1] + + +def double_coset_can_rep(dummies, sym, b_S, sgens, S_transversals, g): + r""" + Butler-Portugal algorithm for tensor canonicalization with dummy indices. + + Parameters + ========== + + dummies + list of lists of dummy indices, + one list for each type of index; + the dummy indices are put in order contravariant, covariant + [d0, -d0, d1, -d1, ...]. + + sym + list of the symmetries of the index metric for each type. + + possible symmetries of the metrics + * 0 symmetric + * 1 antisymmetric + * None no symmetry + + b_S + base of a minimal slot symmetry BSGS. + + sgens + generators of the slot symmetry BSGS. + + S_transversals + transversals for the slot BSGS. + + g + permutation representing the tensor. + + Returns + ======= + + Return 0 if the tensor is zero, else return the array form of + the permutation representing the canonical form of the tensor. 
+ + Notes + ===== + + A tensor with dummy indices can be represented in a number + of equivalent ways which typically grows exponentially with + the number of indices. To be able to establish if two tensors + with many indices are equal becomes computationally very slow + in absence of an efficient algorithm. + + The Butler-Portugal algorithm [3] is an efficient algorithm to + put tensors in canonical form, solving the above problem. + + Portugal observed that a tensor can be represented by a permutation, + and that the class of tensors equivalent to it under slot and dummy + symmetries is equivalent to the double coset `D*g*S` + (Note: in this documentation we use the conventions for multiplication + of permutations p, q with (p*q)(i) = p[q[i]] which is opposite + to the one used in the Permutation class) + + Using the algorithm by Butler to find a representative of the + double coset one can find a canonical form for the tensor. + + To see this correspondence, + let `g` be a permutation in array form; a tensor with indices `ind` + (the indices including both the contravariant and the covariant ones) + can be written as + + `t = T(ind[g[0]], \dots, ind[g[n-1]])`, + + where `n = len(ind)`; + `g` has size `n + 2`, the last two indices for the sign of the tensor + (trick introduced in [4]). + + A slot symmetry transformation `s` is a permutation acting on the slots + `t \rightarrow T(ind[(g*s)[0]], \dots, ind[(g*s)[n-1]])` + + A dummy symmetry transformation acts on `ind` + `t \rightarrow T(ind[(d*g)[0]], \dots, ind[(d*g)[n-1]])` + + Being interested only in the transformations of the tensor under + these symmetries, one can represent the tensor by `g`, which transforms + as + + `g -> d*g*s`, so it belongs to the coset `D*g*S`, or in other words + to the set of all permutations allowed by the slot and dummy symmetries. + + Let us explain the conventions by an example. + + Given a tensor `T^{d3 d2 d1}{}_{d1 d2 d3}` with the slot symmetries + `T^{a0 a1 a2 a3 a4 a5} = -T^{a2 a1 a0 a3 a4 a5}` + + `T^{a0 a1 a2 a3 a4 a5} = -T^{a4 a1 a2 a3 a0 a5}` + + and symmetric metric, find the tensor equivalent to it which + is the lowest under the ordering of indices: + lexicographic ordering `d1, d2, d3` and then contravariant + before covariant index; that is the canonical form of the tensor. + + The canonical form is `-T^{d1 d2 d3}{}_{d1 d2 d3}` + obtained using `T^{a0 a1 a2 a3 a4 a5} = -T^{a2 a1 a0 a3 a4 a5}`. + + To convert this problem in the input for this function, + use the following ordering of the index names + (- for covariant for short) `d1, -d1, d2, -d2, d3, -d3` + + `T^{d3 d2 d1}{}_{d1 d2 d3}` corresponds to `g = [4, 2, 0, 1, 3, 5, 6, 7]` + where the last two indices are for the sign + + `sgens = [Permutation(0, 2)(6, 7), Permutation(0, 4)(6, 7)]` + + sgens[0] is the slot symmetry `-(0, 2)` + `T^{a0 a1 a2 a3 a4 a5} = -T^{a2 a1 a0 a3 a4 a5}` + + sgens[1] is the slot symmetry `-(0, 4)` + `T^{a0 a1 a2 a3 a4 a5} = -T^{a4 a1 a2 a3 a0 a5}` + + The dummy symmetry group D is generated by the strong base generators + `[(0, 1), (2, 3), (4, 5), (0, 2)(1, 3), (0, 4)(1, 5)]` + where the first three interchange covariant and contravariant + positions of the same index (d1 <-> -d1) and the last two interchange + the dummy indices themselves (d1 <-> d2). 
+ + The dummy symmetry acts from the left + `d = [1, 0, 2, 3, 4, 5, 6, 7]` exchange `d1 \leftrightarrow -d1` + `T^{d3 d2 d1}{}_{d1 d2 d3} == T^{d3 d2}{}_{d1}{}^{d1}{}_{d2 d3}` + + `g=[4, 2, 0, 1, 3, 5, 6, 7] -> [4, 2, 1, 0, 3, 5, 6, 7] = _af_rmul(d, g)` + which differs from `_af_rmul(g, d)`. + + The slot symmetry acts from the right + `s = [2, 1, 0, 3, 4, 5, 7, 6]` exchanges slots 0 and 2 and changes sign + `T^{d3 d2 d1}{}_{d1 d2 d3} == -T^{d1 d2 d3}{}_{d1 d2 d3}` + + `g=[4,2,0,1,3,5,6,7] -> [0, 2, 4, 1, 3, 5, 7, 6] = _af_rmul(g, s)` + + Example in which the tensor is zero, same slot symmetries as above: + `T^{d2}{}_{d1 d3}{}^{d1 d3}{}_{d2}` + + `= -T^{d3}{}_{d1 d3}{}^{d1 d2}{}_{d2}` under slot symmetry `-(0,4)`; + + `= T_{d3 d1}{}^{d3}{}^{d1 d2}{}_{d2}` under slot symmetry `-(0,2)`; + + `= T^{d3}{}_{d1 d3}{}^{d1 d2}{}_{d2}` symmetric metric; + + `= 0` since two of these lines have tensors differ only for the sign. + + The double coset D*g*S consists of permutations `h = d*g*s` corresponding + to equivalent tensors; if there are two `h` which are the same apart + from the sign, return zero; otherwise + choose as representative the tensor with indices + ordered lexicographically according to `[d1, -d1, d2, -d2, d3, -d3]` + that is ``rep = min(D*g*S) = min([d*g*s for d in D for s in S])`` + + The indices are fixed one by one; first choose the lowest index + for slot 0, then the lowest remaining index for slot 1, etc. + Doing this one obtains a chain of stabilizers + + `S \rightarrow S_{b0} \rightarrow S_{b0,b1} \rightarrow \dots` and + `D \rightarrow D_{p0} \rightarrow D_{p0,p1} \rightarrow \dots` + + where ``[b0, b1, ...] = range(b)`` is a base of the symmetric group; + the strong base `b_S` of S is an ordered sublist of it; + therefore it is sufficient to compute once the + strong base generators of S using the Schreier-Sims algorithm; + the stabilizers of the strong base generators are the + strong base generators of the stabilizer subgroup. + + ``dbase = [p0, p1, ...]`` is not in general in lexicographic order, + so that one must recompute the strong base generators each time; + however this is trivial, there is no need to use the Schreier-Sims + algorithm for D. + + The algorithm keeps a TAB of elements `(s_i, d_i, h_i)` + where `h_i = d_i \times g \times s_i` satisfying `h_i[j] = p_j` for `0 \le j < i` + starting from `s_0 = id, d_0 = id, h_0 = g`. + + The equations `h_0[0] = p_0, h_1[1] = p_1, \dots` are solved in this order, + choosing each time the lowest possible value of p_i + + For `j < i` + `d_i*g*s_i*S_{b_0, \dots, b_{i-1}}*b_j = D_{p_0, \dots, p_{i-1}}*p_j` + so that for dx in `D_{p_0,\dots,p_{i-1}}` and sx in + `S_{base[0], \dots, base[i-1]}` one has `dx*d_i*g*s_i*sx*b_j = p_j` + + Search for dx, sx such that this equation holds for `j = i`; + it can be written as `s_i*sx*b_j = J, dx*d_i*g*J = p_j` + `sx*b_j = s_i**-1*J; sx = trace(s_i**-1, S_{b_0,...,b_{i-1}})` + `dx**-1*p_j = d_i*g*J; dx = trace(d_i*g*J, D_{p_0,...,p_{i-1}})` + + `s_{i+1} = s_i*trace(s_i**-1*J, S_{b_0,...,b_{i-1}})` + `d_{i+1} = trace(d_i*g*J, D_{p_0,...,p_{i-1}})**-1*d_i` + `h_{i+1}*b_i = d_{i+1}*g*s_{i+1}*b_i = p_i` + + `h_n*b_j = p_j` for all j, so that `h_n` is the solution. + + Add the found `(s, d, h)` to TAB1. + + At the end of the iteration sort TAB1 with respect to the `h`; + if there are two consecutive `h` in TAB1 which differ only for the + sign, the tensor is zero, so return 0; + if there are two consecutive `h` which are equal, keep only one. 
+ + Then stabilize the slot generators under `i` and the dummy generators + under `p_i`. + + Assign `TAB = TAB1` at the end of the iteration step. + + At the end `TAB` contains a unique `(s, d, h)`, since all the slots + of the tensor `h` have been fixed to have the minimum value according + to the symmetries. The algorithm returns `h`. + + It is important that the slot BSGS has lexicographic minimal base, + otherwise there is an `i` which does not belong to the slot base + for which `p_i` is fixed by the dummy symmetry only, while `i` + is not invariant from the slot stabilizer, so `p_i` is not in + general the minimal value. + + This algorithm differs slightly from the original algorithm [3]: + the canonical form is minimal lexicographically, and + the BSGS has minimal base under lexicographic order. + Equal tensors `h` are eliminated from TAB. + + + Examples + ======== + + >>> from sympy.combinatorics.permutations import Permutation + >>> from sympy.combinatorics.tensor_can import double_coset_can_rep, get_transversals + >>> gens = [Permutation(x) for x in [[2, 1, 0, 3, 4, 5, 7, 6], [4, 1, 2, 3, 0, 5, 7, 6]]] + >>> base = [0, 2] + >>> g = Permutation([4, 2, 0, 1, 3, 5, 6, 7]) + >>> transversals = get_transversals(base, gens) + >>> double_coset_can_rep([list(range(6))], [0], base, gens, transversals, g) + [0, 1, 2, 3, 4, 5, 7, 6] + + >>> g = Permutation([4, 1, 3, 0, 5, 2, 6, 7]) + >>> double_coset_can_rep([list(range(6))], [0], base, gens, transversals, g) + 0 + """ + size = g.size + g = g.array_form + num_dummies = size - 2 + indices = list(range(num_dummies)) + all_metrics_with_sym = not any(_ is None for _ in sym) + num_types = len(sym) + dumx = dummies[:] + dumx_flat = [] + for dx in dumx: + dumx_flat.extend(dx) + b_S = b_S[:] + sgensx = [h._array_form for h in sgens] + if b_S: + S_transversals = transversal2coset(size, b_S, S_transversals) + # strong generating set for D + dsgsx = [] + for i in range(num_types): + dsgsx.extend(dummy_sgs(dumx[i], sym[i], num_dummies)) + idn = list(range(size)) + # TAB = list of entries (s, d, h) where h = _af_rmuln(d,g,s) + # for short, in the following d*g*s means _af_rmuln(d,g,s) + TAB = [(idn, idn, g)] + for i in range(size - 2): + b = i + testb = b in b_S and sgensx + if testb: + sgensx1 = [_af_new(_) for _ in sgensx] + deltab = _orbit(size, sgensx1, b) + else: + deltab = {b} + # p1 = min(IMAGES) = min(Union D_p*h*deltab for h in TAB) + if all_metrics_with_sym: + md = _min_dummies(dumx, sym, indices) + else: + md = [min(_orbit(size, [_af_new( + ddx) for ddx in dsgsx], ii)) for ii in range(size - 2)] + + p_i = min([min([md[h[x]] for x in deltab]) for s, d, h in TAB]) + dsgsx1 = [_af_new(_) for _ in dsgsx] + Dxtrav = _orbit_transversal(size, dsgsx1, p_i, False, af=True) \ + if dsgsx else None + if Dxtrav: + Dxtrav = [_af_invert(x) for x in Dxtrav] + # compute the orbit of p_i + for ii in range(num_types): + if p_i in dumx[ii]: + # the orbit is made by all the indices in dum[ii] + if sym[ii] is not None: + deltap = dumx[ii] + else: + # the orbit is made by all the even indices if p_i + # is even, by all the odd indices if p_i is odd + p_i_index = dumx[ii].index(p_i) % 2 + deltap = dumx[ii][p_i_index::2] + break + else: + deltap = [p_i] + TAB1 = [] + while TAB: + s, d, h = TAB.pop() + if min([md[h[x]] for x in deltab]) != p_i: + continue + deltab1 = [x for x in deltab if md[h[x]] == p_i] + # NEXT = s*deltab1 intersection (d*g)**-1*deltap + dg = _af_rmul(d, g) + dginv = _af_invert(dg) + sdeltab = [s[x] for x in deltab1] + gdeltap = [dginv[x] for x in 
deltap] + NEXT = [x for x in sdeltab if x in gdeltap] + # d, s satisfy + # d*g*s*base[i-1] = p_{i-1}; using the stabilizers + # d*g*s*S_{base[0],...,base[i-1]}*base[i-1] = + # D_{p_0,...,p_{i-1}}*p_{i-1} + # so that to find d1, s1 satisfying d1*g*s1*b = p_i + # one can look for dx in D_{p_0,...,p_{i-1}} and + # sx in S_{base[0],...,base[i-1]} + # d1 = dx*d; s1 = s*sx + # d1*g*s1*b = dx*d*g*s*sx*b = p_i + for j in NEXT: + if testb: + # solve s1*b = j with s1 = s*sx for some element sx + # of the stabilizer of ..., base[i-1] + # sx*b = s**-1*j; sx = _trace_S(s, j,...) + # s1 = s*trace_S(s**-1*j,...) + s1 = _trace_S(s, j, b, S_transversals) + if not s1: + continue + else: + s1 = [s[ix] for ix in s1] + else: + s1 = s + # assert s1[b] == j # invariant + # solve d1*g*j = p_i with d1 = dx*d for some element dg + # of the stabilizer of ..., p_{i-1} + # dx**-1*p_i = d*g*j; dx**-1 = trace_D(d*g*j,...) + # d1 = trace_D(d*g*j,...)**-1*d + # to save an inversion in the inner loop; notice we did + # Dxtrav = [perm_af_invert(x) for x in Dxtrav] out of the loop + if Dxtrav: + d1 = _trace_D(dg[j], p_i, Dxtrav) + if not d1: + continue + else: + if p_i != dg[j]: + continue + d1 = idn + assert d1[dg[j]] == p_i # invariant + d1 = [d1[ix] for ix in d] + h1 = [d1[g[ix]] for ix in s1] + # assert h1[b] == p_i # invariant + TAB1.append((s1, d1, h1)) + + # if TAB contains equal permutations, keep only one of them; + # if TAB contains equal permutations up to the sign, return 0 + TAB1.sort(key=lambda x: x[-1]) + prev = [0] * size + while TAB1: + s, d, h = TAB1.pop() + if h[:-2] == prev[:-2]: + if h[-1] != prev[-1]: + return 0 + else: + TAB.append((s, d, h)) + prev = h + + # stabilize the SGS + sgensx = [h for h in sgensx if h[b] == b] + if b in b_S: + b_S.remove(b) + _dumx_remove(dumx, dumx_flat, p_i) + dsgsx = [] + for i in range(num_types): + dsgsx.extend(dummy_sgs(dumx[i], sym[i], num_dummies)) + return TAB[0][-1] + + +def canonical_free(base, gens, g, num_free): + """ + Canonicalization of a tensor with respect to free indices + choosing the minimum with respect to lexicographical ordering + in the free indices. + + Explanation + =========== + + ``base``, ``gens`` BSGS for slot permutation group + ``g`` permutation representing the tensor + ``num_free`` number of free indices + The indices must be ordered with first the free indices + + See explanation in double_coset_can_rep + The algorithm is a variation of the one given in [2]. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation + >>> from sympy.combinatorics.tensor_can import canonical_free + >>> gens = [[1, 0, 2, 3, 5, 4], [2, 3, 0, 1, 4, 5],[0, 1, 3, 2, 5, 4]] + >>> gens = [Permutation(h) for h in gens] + >>> base = [0, 2] + >>> g = Permutation([2, 1, 0, 3, 4, 5]) + >>> canonical_free(base, gens, g, 4) + [0, 3, 1, 2, 5, 4] + + Consider the product of Riemann tensors + ``T = R^{a}_{d0}^{d1,d2}*R_{d2,d1}^{d0,b}`` + The order of the indices is ``[a, b, d0, -d0, d1, -d1, d2, -d2]`` + The permutation corresponding to the tensor is + ``g = [0, 3, 4, 6, 7, 5, 2, 1, 8, 9]`` + + In particular ``a`` is position ``0``, ``b`` is in position ``9``. + Use the slot symmetries to get `T` is a form which is the minimal + in lexicographic order in the free indices ``a`` and ``b``, e.g. 
+ ``-R^{a}_{d0}^{d1,d2}*R^{b,d0}_{d2,d1}`` corresponding to + ``[0, 3, 4, 6, 1, 2, 7, 5, 9, 8]`` + + >>> from sympy.combinatorics.tensor_can import riemann_bsgs, tensor_gens + >>> base, gens = riemann_bsgs + >>> size, sbase, sgens = tensor_gens(base, gens, [[], []], 0) + >>> g = Permutation([0, 3, 4, 6, 7, 5, 2, 1, 8, 9]) + >>> canonical_free(sbase, [Permutation(h) for h in sgens], g, 2) + [0, 3, 4, 6, 1, 2, 7, 5, 9, 8] + """ + g = g.array_form + size = len(g) + if not base: + return g[:] + + transversals = get_transversals(base, gens) + for x in sorted(g[:-2]): + if x not in base: + base.append(x) + h = g + for i, transv in enumerate(transversals): + h_i = [size]*num_free + # find the element s in transversals[i] such that + # _af_rmul(h, s) has its free elements with the lowest position in h + s = None + for sk in transv.values(): + h1 = _af_rmul(h, sk) + hi = [h1.index(ix) for ix in range(num_free)] + if hi < h_i: + h_i = hi + s = sk + if s: + h = _af_rmul(h, s) + return h + + +def _get_map_slots(size, fixed_slots): + res = list(range(size)) + pos = 0 + for i in range(size): + if i in fixed_slots: + continue + res[i] = pos + pos += 1 + return res + + +def _lift_sgens(size, fixed_slots, free, s): + a = [] + j = k = 0 + fd = list(zip(fixed_slots, free)) + fd = [y for x, y in sorted(fd)] + num_free = len(free) + for i in range(size): + if i in fixed_slots: + a.append(fd[k]) + k += 1 + else: + a.append(s[j] + num_free) + j += 1 + return a + + +def canonicalize(g, dummies, msym, *v): + """ + canonicalize tensor formed by tensors + + Parameters + ========== + + g : permutation representing the tensor + + dummies : list representing the dummy indices + it can be a list of dummy indices of the same type + or a list of lists of dummy indices, one list for each + type of index; + the dummy indices must come after the free indices, + and put in order contravariant, covariant + [d0, -d0, d1,-d1,...] + + msym : symmetry of the metric(s) + it can be an integer or a list; + in the first case it is the symmetry of the dummy index metric; + in the second case it is the list of the symmetries of the + index metric for each type + + v : list, (base_i, gens_i, n_i, sym_i) for tensors of type `i` + + base_i, gens_i : BSGS for tensors of this type. + The BSGS should have minimal base under lexicographic ordering; + if not, an attempt is made do get the minimal BSGS; + in case of failure, + canonicalize_naive is used, which is much slower. + + n_i : number of tensors of type `i`. + + sym_i : symmetry under exchange of component tensors of type `i`. + + Both for msym and sym_i the cases are + * None no symmetry + * 0 commuting + * 1 anticommuting + + Returns + ======= + + 0 if the tensor is zero, else return the array form of + the permutation representing the canonical form of the tensor. + + Algorithm + ========= + + First one uses canonical_free to get the minimum tensor under + lexicographic order, using only the slot symmetries. + If the component tensors have not minimal BSGS, it is attempted + to find it; if the attempt fails canonicalize_naive + is used instead. + + Compute the residual slot symmetry keeping fixed the free indices + using tensor_gens(base, gens, list_free_indices, sym). + + Reduce the problem eliminating the free indices. + + Then use double_coset_can_rep and lift back the result reintroducing + the free indices. 
+ + Examples + ======== + + one type of index with commuting metric; + + `A_{a b}` and `B_{a b}` antisymmetric and commuting + + `T = A_{d0 d1} * B^{d0}{}_{d2} * B^{d2 d1}` + + `ord = [d0,-d0,d1,-d1,d2,-d2]` order of the indices + + g = [1, 3, 0, 5, 4, 2, 6, 7] + + `T_c = 0` + + >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize, bsgs_direct_product + >>> from sympy.combinatorics import Permutation + >>> base2a, gens2a = get_symmetric_group_sgs(2, 1) + >>> t0 = (base2a, gens2a, 1, 0) + >>> t1 = (base2a, gens2a, 2, 0) + >>> g = Permutation([1, 3, 0, 5, 4, 2, 6, 7]) + >>> canonicalize(g, range(6), 0, t0, t1) + 0 + + same as above, but with `B_{a b}` anticommuting + + `T_c = -A^{d0 d1} * B_{d0}{}^{d2} * B_{d1 d2}` + + can = [0,2,1,4,3,5,7,6] + + >>> t1 = (base2a, gens2a, 2, 1) + >>> canonicalize(g, range(6), 0, t0, t1) + [0, 2, 1, 4, 3, 5, 7, 6] + + two types of indices `[a,b,c,d,e,f]` and `[m,n]`, in this order, + both with commuting metric + + `f^{a b c}` antisymmetric, commuting + + `A_{m a}` no symmetry, commuting + + `T = f^c{}_{d a} * f^f{}_{e b} * A_m{}^d * A^{m b} * A_n{}^a * A^{n e}` + + ord = [c,f,a,-a,b,-b,d,-d,e,-e,m,-m,n,-n] + + g = [0,7,3, 1,9,5, 11,6, 10,4, 13,2, 12,8, 14,15] + + The canonical tensor is + `T_c = -f^{c a b} * f^{f d e} * A^m{}_a * A_{m d} * A^n{}_b * A_{n e}` + + can = [0,2,4, 1,6,8, 10,3, 11,7, 12,5, 13,9, 15,14] + + >>> base_f, gens_f = get_symmetric_group_sgs(3, 1) + >>> base1, gens1 = get_symmetric_group_sgs(1) + >>> base_A, gens_A = bsgs_direct_product(base1, gens1, base1, gens1) + >>> t0 = (base_f, gens_f, 2, 0) + >>> t1 = (base_A, gens_A, 4, 0) + >>> dummies = [range(2, 10), range(10, 14)] + >>> g = Permutation([0, 7, 3, 1, 9, 5, 11, 6, 10, 4, 13, 2, 12, 8, 14, 15]) + >>> canonicalize(g, dummies, [0, 0], t0, t1) + [0, 2, 4, 1, 6, 8, 10, 3, 11, 7, 12, 5, 13, 9, 15, 14] + """ + from sympy.combinatorics.testutil import canonicalize_naive + if not isinstance(msym, list): + if msym not in (0, 1, None): + raise ValueError('msym must be 0, 1 or None') + num_types = 1 + else: + num_types = len(msym) + if not all(msymx in (0, 1, None) for msymx in msym): + raise ValueError('msym entries must be 0, 1 or None') + if len(dummies) != num_types: + raise ValueError( + 'dummies and msym must have the same number of elements') + size = g.size + num_tensors = 0 + v1 = [] + for base_i, gens_i, n_i, sym_i in v: + # check that the BSGS is minimal; + # this property is used in double_coset_can_rep; + # if it is not minimal use canonicalize_naive + if not _is_minimal_bsgs(base_i, gens_i): + mbsgs = get_minimal_bsgs(base_i, gens_i) + if not mbsgs: + can = canonicalize_naive(g, dummies, msym, *v) + return can + base_i, gens_i = mbsgs + v1.append((base_i, gens_i, [[]] * n_i, sym_i)) + num_tensors += n_i + + if num_types == 1 and not isinstance(msym, list): + dummies = [dummies] + msym = [msym] + flat_dummies = [] + for dumx in dummies: + flat_dummies.extend(dumx) + + if flat_dummies and flat_dummies != list(range(flat_dummies[0], flat_dummies[-1] + 1)): + raise ValueError('dummies is not valid') + + # slot symmetry of the tensor + size1, sbase, sgens = gens_products(*v1) + if size != size1: + raise ValueError( + 'g has size %d, generators have size %d' % (size, size1)) + free = [i for i in range(size - 2) if i not in flat_dummies] + num_free = len(free) + + # g1 minimal tensor under slot symmetry + g1 = canonical_free(sbase, sgens, g, num_free) + if not flat_dummies: + return g1 + # save the sign of g1 + sign = 0 if g1[-1] == size - 1 else 1 + + # 
the free indices are kept fixed. + # Determine free_i, the list of slots of tensors which are fixed + # since they are occupied by free indices, which are fixed. + start = 0 + for i, (base_i, gens_i, n_i, sym_i) in enumerate(v): + free_i = [] + len_tens = gens_i[0].size - 2 + # for each component tensor get a list od fixed islots + for j in range(n_i): + # get the elements corresponding to the component tensor + h = g1[start:(start + len_tens)] + fr = [] + # get the positions of the fixed elements in h + for k in free: + if k in h: + fr.append(h.index(k)) + free_i.append(fr) + start += len_tens + v1[i] = (base_i, gens_i, free_i, sym_i) + # BSGS of the tensor with fixed free indices + # if tensor_gens fails in gens_product, use canonicalize_naive + size, sbase, sgens = gens_products(*v1) + + # reduce the permutations getting rid of the free indices + pos_free = [g1.index(x) for x in range(num_free)] + size_red = size - num_free + g1_red = [x - num_free for x in g1 if x in flat_dummies] + if sign: + g1_red.extend([size_red - 1, size_red - 2]) + else: + g1_red.extend([size_red - 2, size_red - 1]) + map_slots = _get_map_slots(size, pos_free) + sbase_red = [map_slots[i] for i in sbase if i not in pos_free] + sgens_red = [_af_new([map_slots[i] for i in y._array_form if i not in pos_free]) for y in sgens] + dummies_red = [[x - num_free for x in y] for y in dummies] + transv_red = get_transversals(sbase_red, sgens_red) + g1_red = _af_new(g1_red) + g2 = double_coset_can_rep( + dummies_red, msym, sbase_red, sgens_red, transv_red, g1_red) + if g2 == 0: + return 0 + # lift to the case with the free indices + g3 = _lift_sgens(size, pos_free, free, g2) + return g3 + + +def perm_af_direct_product(gens1, gens2, signed=True): + """ + Direct products of the generators gens1 and gens2. + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import perm_af_direct_product + >>> gens1 = [[1, 0, 2, 3], [0, 1, 3, 2]] + >>> gens2 = [[1, 0]] + >>> perm_af_direct_product(gens1, gens2, False) + [[1, 0, 2, 3, 4, 5], [0, 1, 3, 2, 4, 5], [0, 1, 2, 3, 5, 4]] + >>> gens1 = [[1, 0, 2, 3, 5, 4], [0, 1, 3, 2, 4, 5]] + >>> gens2 = [[1, 0, 2, 3]] + >>> perm_af_direct_product(gens1, gens2, True) + [[1, 0, 2, 3, 4, 5, 7, 6], [0, 1, 3, 2, 4, 5, 6, 7], [0, 1, 2, 3, 5, 4, 6, 7]] + """ + gens1 = [list(x) for x in gens1] + gens2 = [list(x) for x in gens2] + s = 2 if signed else 0 + n1 = len(gens1[0]) - s + n2 = len(gens2[0]) - s + start = list(range(n1)) + end = list(range(n1, n1 + n2)) + if signed: + gens1 = [gen[:-2] + end + [gen[-2] + n2, gen[-1] + n2] + for gen in gens1] + gens2 = [start + [x + n1 for x in gen] for gen in gens2] + else: + gens1 = [gen + end for gen in gens1] + gens2 = [start + [x + n1 for x in gen] for gen in gens2] + + res = gens1 + gens2 + + return res + + +def bsgs_direct_product(base1, gens1, base2, gens2, signed=True): + """ + Direct product of two BSGS. + + Parameters + ========== + + base1 : base of the first BSGS. + + gens1 : strong generating sequence of the first BSGS. + + base2, gens2 : similarly for the second BSGS. + + signed : flag for signed permutations. 
+ + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import (get_symmetric_group_sgs, bsgs_direct_product) + >>> base1, gens1 = get_symmetric_group_sgs(1) + >>> base2, gens2 = get_symmetric_group_sgs(2) + >>> bsgs_direct_product(base1, gens1, base2, gens2) + ([1], [(4)(1 2)]) + """ + s = 2 if signed else 0 + n1 = gens1[0].size - s + base = list(base1) + base += [x + n1 for x in base2] + gens1 = [h._array_form for h in gens1] + gens2 = [h._array_form for h in gens2] + gens = perm_af_direct_product(gens1, gens2, signed) + size = len(gens[0]) + id_af = list(range(size)) + gens = [h for h in gens if h != id_af] + if not gens: + gens = [id_af] + return base, [_af_new(h) for h in gens] + + +def get_symmetric_group_sgs(n, antisym=False): + """ + Return base, gens of the minimal BSGS for (anti)symmetric tensor + + Parameters + ========== + + n : rank of the tensor + antisym : bool + ``antisym = False`` symmetric tensor + ``antisym = True`` antisymmetric tensor + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs + >>> get_symmetric_group_sgs(3) + ([0, 1], [(4)(0 1), (4)(1 2)]) + """ + if n == 1: + return [], [_af_new(list(range(3)))] + gens = [Permutation(n - 1)(i, i + 1)._array_form for i in range(n - 1)] + if antisym == 0: + gens = [x + [n, n + 1] for x in gens] + else: + gens = [x + [n + 1, n] for x in gens] + base = list(range(n - 1)) + return base, [_af_new(h) for h in gens] + +riemann_bsgs = [0, 2], [Permutation(0, 1)(4, 5), Permutation(2, 3)(4, 5), + Permutation(5)(0, 2)(1, 3)] + + +def get_transversals(base, gens): + """ + Return transversals for the group with BSGS base, gens + """ + if not base: + return [] + stabs = _distribute_gens_by_base(base, gens) + orbits, transversals = _orbits_transversals_from_bsgs(base, stabs) + transversals = [{x: h._array_form for x, h in y.items()} for y in + transversals] + return transversals + + +def _is_minimal_bsgs(base, gens): + """ + Check if the BSGS has minimal base under lexigographic order. + + base, gens BSGS + + Examples + ======== + + >>> from sympy.combinatorics import Permutation + >>> from sympy.combinatorics.tensor_can import riemann_bsgs, _is_minimal_bsgs + >>> _is_minimal_bsgs(*riemann_bsgs) + True + >>> riemann_bsgs1 = ([2, 0], ([Permutation(5)(0, 1)(4, 5), Permutation(5)(0, 2)(1, 3)])) + >>> _is_minimal_bsgs(*riemann_bsgs1) + False + """ + base1 = [] + sgs1 = gens[:] + size = gens[0].size + for i in range(size): + if not all(h._array_form[i] == i for h in sgs1): + base1.append(i) + sgs1 = [h for h in sgs1 if h._array_form[i] == i] + return base1 == base + + +def get_minimal_bsgs(base, gens): + """ + Compute a minimal GSGS + + base, gens BSGS + + If base, gens is a minimal BSGS return it; else return a minimal BSGS + if it fails in finding one, it returns None + + TODO: use baseswap in the case in which if it fails in finding a + minimal BSGS + + Examples + ======== + + >>> from sympy.combinatorics import Permutation + >>> from sympy.combinatorics.tensor_can import get_minimal_bsgs + >>> riemann_bsgs1 = ([2, 0], ([Permutation(5)(0, 1)(4, 5), Permutation(5)(0, 2)(1, 3)])) + >>> get_minimal_bsgs(*riemann_bsgs1) + ([0, 2], [(0 1)(4 5), (5)(0 2)(1 3), (2 3)(4 5)]) + """ + G = PermutationGroup(gens) + base, gens = G.schreier_sims_incremental() + if not _is_minimal_bsgs(base, gens): + return None + return base, gens + + +def tensor_gens(base, gens, list_free_indices, sym=0): + """ + Returns size, res_base, res_gens BSGS for n tensors of the + same type. 
+ + Explanation + =========== + + base, gens BSGS for tensors of this type + list_free_indices list of the slots occupied by fixed indices + for each of the tensors + + sym symmetry under commutation of two tensors + sym None no symmetry + sym 0 commuting + sym 1 anticommuting + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import tensor_gens, get_symmetric_group_sgs + + two symmetric tensors with 3 indices without free indices + + >>> base, gens = get_symmetric_group_sgs(3) + >>> tensor_gens(base, gens, [[], []]) + (8, [0, 1, 3, 4], [(7)(0 1), (7)(1 2), (7)(3 4), (7)(4 5), (7)(0 3)(1 4)(2 5)]) + + two symmetric tensors with 3 indices with free indices in slot 1 and 0 + + >>> tensor_gens(base, gens, [[1], [0]]) + (8, [0, 4], [(7)(0 2), (7)(4 5)]) + + four symmetric tensors with 3 indices, two of which with free indices + + """ + def _get_bsgs(G, base, gens, free_indices): + """ + return the BSGS for G.pointwise_stabilizer(free_indices) + """ + if not free_indices: + return base[:], gens[:] + else: + H = G.pointwise_stabilizer(free_indices) + base, sgs = H.schreier_sims_incremental() + return base, sgs + + # if not base there is no slot symmetry for the component tensors + # if list_free_indices.count([]) < 2 there is no commutation symmetry + # so there is no resulting slot symmetry + if not base and list_free_indices.count([]) < 2: + n = len(list_free_indices) + size = gens[0].size + size = n * (size - 2) + 2 + return size, [], [_af_new(list(range(size)))] + + # if any(list_free_indices) one needs to compute the pointwise + # stabilizer, so G is needed + if any(list_free_indices): + G = PermutationGroup(gens) + else: + G = None + + # no_free list of lists of indices for component tensors without fixed + # indices + no_free = [] + size = gens[0].size + id_af = list(range(size)) + num_indices = size - 2 + if not list_free_indices[0]: + no_free.append(list(range(num_indices))) + res_base, res_gens = _get_bsgs(G, base, gens, list_free_indices[0]) + for i in range(1, len(list_free_indices)): + base1, gens1 = _get_bsgs(G, base, gens, list_free_indices[i]) + res_base, res_gens = bsgs_direct_product(res_base, res_gens, + base1, gens1, 1) + if not list_free_indices[i]: + no_free.append(list(range(size - 2, size - 2 + num_indices))) + size += num_indices + nr = size - 2 + res_gens = [h for h in res_gens if h._array_form != id_af] + # if sym there are no commuting tensors stop here + if sym is None or not no_free: + if not res_gens: + res_gens = [_af_new(id_af)] + return size, res_base, res_gens + + # if the component tensors have moinimal BSGS, so is their direct + # product P; the slot symmetry group is S = P*C, where C is the group + # to (anti)commute the component tensors with no free indices + # a stabilizer has the property S_i = P_i*C_i; + # the BSGS of P*C has SGS_P + SGS_C and the base is + # the ordered union of the bases of P and C. + # If P has minimal BSGS, so has S with this base. 
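+    # Added illustration (a sketch of the two-tensor doctest case above, not
+    # part of the original comments): with two commuting symmetric 3-index
+    # tensors and no free indices, no_free = [[0, 1, 2], [3, 4, 5]] and
+    # nr = 6, so the loop below builds a = [3, 4, 5, 0, 1, 2, 6, 7], i.e.
+    # the generator (0 3)(1 4)(2 5) shown in the doctest: it exchanges the
+    # two tensor blocks and, since sym == 0 (commuting), keeps the sign
+    # slots 6 and 7 fixed.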
+ base_comm = [] + for i in range(len(no_free) - 1): + ind1 = no_free[i] + ind2 = no_free[i + 1] + a = list(range(ind1[0])) + a.extend(ind2) + a.extend(ind1) + base_comm.append(ind1[0]) + a.extend(list(range(ind2[-1] + 1, nr))) + if sym == 0: + a.extend([nr, nr + 1]) + else: + a.extend([nr + 1, nr]) + res_gens.append(_af_new(a)) + res_base = list(res_base) + # each base is ordered; order the union of the two bases + for i in base_comm: + if i not in res_base: + res_base.append(i) + res_base.sort() + if not res_gens: + res_gens = [_af_new(id_af)] + + return size, res_base, res_gens + + +def gens_products(*v): + """ + Returns size, res_base, res_gens BSGS for n tensors of different types. + + Explanation + =========== + + v is a sequence of (base_i, gens_i, free_i, sym_i) + where + base_i, gens_i BSGS of tensor of type `i` + free_i list of the fixed slots for each of the tensors + of type `i`; if there are `n_i` tensors of type `i` + and none of them have fixed slots, `free = [[]]*n_i` + sym 0 (1) if the tensors of type `i` (anti)commute among themselves + + Examples + ======== + + >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, gens_products + >>> base, gens = get_symmetric_group_sgs(2) + >>> gens_products((base, gens, [[], []], 0)) + (6, [0, 2], [(5)(0 1), (5)(2 3), (5)(0 2)(1 3)]) + >>> gens_products((base, gens, [[1], []], 0)) + (6, [2], [(5)(2 3)]) + """ + res_size, res_base, res_gens = tensor_gens(*v[0]) + for i in range(1, len(v)): + size, base, gens = tensor_gens(*v[i]) + res_base, res_gens = bsgs_direct_product(res_base, res_gens, base, + gens, 1) + res_size = res_gens[0].size + id_af = list(range(res_size)) + res_gens = [h for h in res_gens if h != id_af] + if not res_gens: + res_gens = [id_af] + return res_size, res_base, res_gens diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/__init__.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_coset_table.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_coset_table.py new file mode 100644 index 0000000000000000000000000000000000000000..ab3f62880445c5deb526797ee0623fe3510bcbc3 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_coset_table.py @@ -0,0 +1,825 @@ +from sympy.combinatorics.fp_groups import FpGroup +from sympy.combinatorics.coset_table import (CosetTable, + coset_enumeration_r, coset_enumeration_c) +from sympy.combinatorics.coset_table import modified_coset_enumeration_r +from sympy.combinatorics.free_groups import free_group + +from sympy.testing.pytest import slow + +""" +References +========== + +[1] Holt, D., Eick, B., O'Brien, E. +"Handbook of Computational Group Theory" + +[2] John J. Cannon; Lucien A. Dimino; George Havas; Jane M. Watson +Mathematics of Computation, Vol. 27, No. 123. (Jul., 1973), pp. 463-490. 
+"Implementation and Analysis of the Todd-Coxeter Algorithm" + +""" + +def test_scan_1(): + # Example 5.1 from [1] + F, x, y = free_group("x, y") + f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + c = CosetTable(f, [x]) + + c.scan_and_fill(0, x) + assert c.table == [[0, 0, None, None]] + assert c.p == [0] + assert c.n == 1 + assert c.omega == [0] + + c.scan_and_fill(0, x**3) + assert c.table == [[0, 0, None, None]] + assert c.p == [0] + assert c.n == 1 + assert c.omega == [0] + + c.scan_and_fill(0, y**3) + assert c.table == [[0, 0, 1, 2], [None, None, 2, 0], [None, None, 0, 1]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(0, x**-1*y**-1*x*y) + assert c.table == [[0, 0, 1, 2], [None, None, 2, 0], [2, 2, 0, 1]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(1, x**3) + assert c.table == [[0, 0, 1, 2], [3, 4, 2, 0], [2, 2, 0, 1], \ + [4, 1, None, None], [1, 3, None, None]] + assert c.p == [0, 1, 2, 3, 4] + assert c.n == 5 + assert c.omega == [0, 1, 2, 3, 4] + + c.scan_and_fill(1, y**3) + assert c.table == [[0, 0, 1, 2], [3, 4, 2, 0], [2, 2, 0, 1], \ + [4, 1, None, None], [1, 3, None, None]] + assert c.p == [0, 1, 2, 3, 4] + assert c.n == 5 + assert c.omega == [0, 1, 2, 3, 4] + + c.scan_and_fill(1, x**-1*y**-1*x*y) + assert c.table == [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1], \ + [None, 1, None, None], [1, 3, None, None]] + assert c.p == [0, 1, 2, 1, 1] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + # Example 5.2 from [1] + f = FpGroup(F, [x**2, y**3, (x*y)**3]) + c = CosetTable(f, [x*y]) + + c.scan_and_fill(0, x*y) + assert c.table == [[1, None, None, 1], [None, 0, 0, None]] + assert c.p == [0, 1] + assert c.n == 2 + assert c.omega == [0, 1] + + c.scan_and_fill(0, x**2) + assert c.table == [[1, 1, None, 1], [0, 0, 0, None]] + assert c.p == [0, 1] + assert c.n == 2 + assert c.omega == [0, 1] + + c.scan_and_fill(0, y**3) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [None, None, 1, 0]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(0, (x*y)**3) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [None, None, 1, 0]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(1, x**2) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [None, None, 1, 0]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(1, y**3) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [None, None, 1, 0]] + assert c.p == [0, 1, 2] + assert c.n == 3 + assert c.omega == [0, 1, 2] + + c.scan_and_fill(1, (x*y)**3) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [3, 4, 1, 0], [None, 2, 4, None], [2, None, None, 3]] + assert c.p == [0, 1, 2, 3, 4] + assert c.n == 5 + assert c.omega == [0, 1, 2, 3, 4] + + c.scan_and_fill(2, x**2) + assert c.table == [[1, 1, 2, 1], [0, 0, 0, 2], [3, 3, 1, 0], [2, 2, 3, 3], [2, None, None, 3]] + assert c.p == [0, 1, 2, 3, 3] + assert c.n == 4 + assert c.omega == [0, 1, 2, 3] + + +@slow +def test_coset_enumeration(): + # this test function contains the combined tests for the two strategies + # i.e. HLT and Felsch strategies. 
+ + # Example 5.1 from [1] + F, x, y = free_group("x, y") + f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + C_r = coset_enumeration_r(f, [x]) + C_r.compress(); C_r.standardize() + C_c = coset_enumeration_c(f, [x]) + C_c.compress(); C_c.standardize() + table1 = [[0, 0, 1, 2], [1, 1, 2, 0], [2, 2, 0, 1]] + assert C_r.table == table1 + assert C_c.table == table1 + + # E1 from [2] Pg. 474 + F, r, s, t = free_group("r, s, t") + E1 = FpGroup(F, [t**-1*r*t*r**-2, r**-1*s*r*s**-2, s**-1*t*s*t**-2]) + C_r = coset_enumeration_r(E1, []) + C_r.compress() + C_c = coset_enumeration_c(E1, []) + C_c.compress() + table2 = [[0, 0, 0, 0, 0, 0]] + assert C_r.table == table2 + # test for issue #11449 + assert C_c.table == table2 + + # Cox group from [2] Pg. 474 + F, a, b = free_group("a, b") + Cox = FpGroup(F, [a**6, b**6, (a*b)**2, (a**2*b**2)**2, (a**3*b**3)**5]) + C_r = coset_enumeration_r(Cox, [a]) + C_r.compress(); C_r.standardize() + C_c = coset_enumeration_c(Cox, [a]) + C_c.compress(); C_c.standardize() + table3 = [[0, 0, 1, 2], + [2, 3, 4, 0], + [5, 1, 0, 6], + [1, 7, 8, 9], + [9, 10, 11, 1], + [12, 2, 9, 13], + [14, 9, 2, 11], + [3, 12, 15, 16], + [16, 17, 18, 3], + [6, 4, 3, 5], + [4, 19, 20, 21], + [21, 22, 6, 4], + [7, 5, 23, 24], + [25, 23, 5, 18], + [19, 6, 22, 26], + [24, 27, 28, 7], + [29, 8, 7, 30], + [8, 31, 32, 33], + [33, 34, 13, 8], + [10, 14, 35, 35], + [35, 36, 37, 10], + [30, 11, 10, 29], + [11, 38, 39, 14], + [13, 39, 38, 12], + [40, 15, 12, 41], + [42, 13, 34, 43], + [44, 35, 14, 45], + [15, 46, 47, 34], + [34, 48, 49, 15], + [50, 16, 21, 51], + [52, 21, 16, 49], + [17, 50, 53, 54], + [54, 55, 56, 17], + [41, 18, 17, 40], + [18, 28, 27, 25], + [26, 20, 19, 19], + [20, 57, 58, 59], + [59, 60, 51, 20], + [22, 52, 61, 23], + [23, 62, 63, 22], + [64, 24, 33, 65], + [48, 33, 24, 61], + [62, 25, 54, 66], + [67, 54, 25, 68], + [57, 26, 59, 69], + [70, 59, 26, 63], + [27, 64, 71, 72], + [72, 73, 68, 27], + [28, 41, 74, 75], + [75, 76, 30, 28], + [31, 29, 77, 78], + [79, 77, 29, 37], + [38, 30, 76, 80], + [78, 81, 82, 31], + [43, 32, 31, 42], + [32, 83, 84, 85], + [85, 86, 65, 32], + [36, 44, 87, 88], + [88, 89, 90, 36], + [45, 37, 36, 44], + [37, 82, 81, 79], + [80, 74, 41, 38], + [39, 42, 91, 92], + [92, 93, 45, 39], + [46, 40, 94, 95], + [96, 94, 40, 56], + [97, 91, 42, 82], + [83, 43, 98, 99], + [100, 98, 43, 47], + [101, 87, 44, 90], + [82, 45, 93, 97], + [95, 102, 103, 46], + [104, 47, 46, 105], + [47, 106, 107, 100], + [61, 108, 109, 48], + [105, 49, 48, 104], + [49, 110, 111, 52], + [51, 111, 110, 50], + [112, 53, 50, 113], + [114, 51, 60, 115], + [116, 61, 52, 117], + [53, 118, 119, 60], + [60, 70, 66, 53], + [55, 67, 120, 121], + [121, 122, 123, 55], + [113, 56, 55, 112], + [56, 103, 102, 96], + [69, 124, 125, 57], + [115, 58, 57, 114], + [58, 126, 127, 128], + [128, 128, 69, 58], + [66, 129, 130, 62], + [117, 63, 62, 116], + [63, 125, 124, 70], + [65, 109, 108, 64], + [131, 71, 64, 132], + [133, 65, 86, 134], + [135, 66, 70, 136], + [68, 130, 129, 67], + [137, 120, 67, 138], + [132, 68, 73, 131], + [139, 69, 128, 140], + [71, 141, 142, 86], + [86, 143, 144, 71], + [145, 72, 75, 146], + [147, 75, 72, 144], + [73, 145, 148, 120], + [120, 149, 150, 73], + [74, 151, 152, 94], + [94, 153, 146, 74], + [76, 147, 154, 77], + [77, 155, 156, 76], + [157, 78, 85, 158], + [143, 85, 78, 154], + [155, 79, 88, 159], + [160, 88, 79, 161], + [151, 80, 92, 162], + [163, 92, 80, 156], + [81, 157, 164, 165], + [165, 166, 161, 81], + [99, 107, 106, 83], + [134, 84, 83, 133], + [84, 167, 168, 169], + 
[169, 170, 158, 84], + [87, 171, 172, 93], + [93, 163, 159, 87], + [89, 160, 173, 174], + [174, 175, 176, 89], + [90, 90, 89, 101], + [91, 177, 178, 98], + [98, 179, 162, 91], + [180, 95, 100, 181], + [179, 100, 95, 152], + [153, 96, 121, 148], + [182, 121, 96, 183], + [177, 97, 165, 184], + [185, 165, 97, 172], + [186, 99, 169, 187], + [188, 169, 99, 178], + [171, 101, 174, 189], + [190, 174, 101, 176], + [102, 180, 191, 192], + [192, 193, 183, 102], + [103, 113, 194, 195], + [195, 196, 105, 103], + [106, 104, 197, 198], + [199, 197, 104, 109], + [110, 105, 196, 200], + [198, 201, 133, 106], + [107, 186, 202, 203], + [203, 204, 181, 107], + [108, 116, 205, 206], + [206, 207, 132, 108], + [109, 133, 201, 199], + [200, 194, 113, 110], + [111, 114, 208, 209], + [209, 210, 117, 111], + [118, 112, 211, 212], + [213, 211, 112, 123], + [214, 208, 114, 125], + [126, 115, 215, 216], + [217, 215, 115, 119], + [218, 205, 116, 130], + [125, 117, 210, 214], + [212, 219, 220, 118], + [136, 119, 118, 135], + [119, 221, 222, 217], + [122, 182, 223, 224], + [224, 225, 226, 122], + [138, 123, 122, 137], + [123, 220, 219, 213], + [124, 139, 227, 228], + [228, 229, 136, 124], + [216, 222, 221, 126], + [140, 127, 126, 139], + [127, 230, 231, 232], + [232, 233, 140, 127], + [129, 135, 234, 235], + [235, 236, 138, 129], + [130, 132, 207, 218], + [141, 131, 237, 238], + [239, 237, 131, 150], + [167, 134, 240, 241], + [242, 240, 134, 142], + [243, 234, 135, 220], + [221, 136, 229, 244], + [149, 137, 245, 246], + [247, 245, 137, 226], + [220, 138, 236, 243], + [244, 227, 139, 221], + [230, 140, 233, 248], + [238, 249, 250, 141], + [251, 142, 141, 252], + [142, 253, 254, 242], + [154, 255, 256, 143], + [252, 144, 143, 251], + [144, 257, 258, 147], + [146, 258, 257, 145], + [259, 148, 145, 260], + [261, 146, 153, 262], + [263, 154, 147, 264], + [148, 265, 266, 153], + [246, 267, 268, 149], + [260, 150, 149, 259], + [150, 250, 249, 239], + [162, 269, 270, 151], + [262, 152, 151, 261], + [152, 271, 272, 179], + [159, 273, 274, 155], + [264, 156, 155, 263], + [156, 270, 269, 163], + [158, 256, 255, 157], + [275, 164, 157, 276], + [277, 158, 170, 278], + [279, 159, 163, 280], + [161, 274, 273, 160], + [281, 173, 160, 282], + [276, 161, 166, 275], + [283, 162, 179, 284], + [164, 285, 286, 170], + [170, 188, 184, 164], + [166, 185, 189, 173], + [173, 287, 288, 166], + [241, 254, 253, 167], + [278, 168, 167, 277], + [168, 289, 290, 291], + [291, 292, 187, 168], + [189, 293, 294, 171], + [280, 172, 171, 279], + [172, 295, 296, 185], + [175, 190, 297, 297], + [297, 298, 299, 175], + [282, 176, 175, 281], + [176, 294, 293, 190], + [184, 296, 295, 177], + [284, 178, 177, 283], + [178, 300, 301, 188], + [181, 272, 271, 180], + [302, 191, 180, 303], + [304, 181, 204, 305], + [183, 266, 265, 182], + [306, 223, 182, 307], + [303, 183, 193, 302], + [308, 184, 188, 309], + [310, 189, 185, 311], + [187, 301, 300, 186], + [305, 202, 186, 304], + [312, 187, 292, 313], + [314, 297, 190, 315], + [191, 316, 317, 204], + [204, 318, 319, 191], + [320, 192, 195, 321], + [322, 195, 192, 319], + [193, 320, 323, 223], + [223, 324, 325, 193], + [194, 326, 327, 211], + [211, 328, 321, 194], + [196, 322, 329, 197], + [197, 330, 331, 196], + [332, 198, 203, 333], + [318, 203, 198, 329], + [330, 199, 206, 334], + [335, 206, 199, 336], + [326, 200, 209, 337], + [338, 209, 200, 331], + [201, 332, 339, 240], + [240, 340, 336, 201], + [202, 341, 342, 292], + [292, 343, 333, 202], + [205, 344, 345, 210], + [210, 338, 334, 205], + [207, 335, 346, 237], + 
[237, 347, 348, 207], + [208, 349, 350, 215], + [215, 351, 337, 208], + [352, 212, 217, 353], + [351, 217, 212, 327], + [328, 213, 224, 323], + [354, 224, 213, 355], + [349, 214, 228, 356], + [357, 228, 214, 345], + [358, 216, 232, 359], + [360, 232, 216, 350], + [344, 218, 235, 361], + [362, 235, 218, 348], + [219, 352, 363, 364], + [364, 365, 355, 219], + [222, 358, 366, 367], + [367, 368, 353, 222], + [225, 354, 369, 370], + [370, 371, 372, 225], + [307, 226, 225, 306], + [226, 268, 267, 247], + [227, 373, 374, 233], + [233, 360, 356, 227], + [229, 357, 361, 234], + [234, 375, 376, 229], + [248, 231, 230, 230], + [231, 377, 378, 379], + [379, 380, 359, 231], + [236, 362, 381, 245], + [245, 382, 383, 236], + [384, 238, 242, 385], + [340, 242, 238, 346], + [347, 239, 246, 381], + [386, 246, 239, 387], + [388, 241, 291, 389], + [343, 291, 241, 339], + [375, 243, 364, 390], + [391, 364, 243, 383], + [373, 244, 367, 392], + [393, 367, 244, 376], + [382, 247, 370, 394], + [395, 370, 247, 396], + [377, 248, 379, 397], + [398, 379, 248, 374], + [249, 384, 399, 400], + [400, 401, 387, 249], + [250, 260, 402, 403], + [403, 404, 252, 250], + [253, 251, 405, 406], + [407, 405, 251, 256], + [257, 252, 404, 408], + [406, 409, 277, 253], + [254, 388, 410, 411], + [411, 412, 385, 254], + [255, 263, 413, 414], + [414, 415, 276, 255], + [256, 277, 409, 407], + [408, 402, 260, 257], + [258, 261, 416, 417], + [417, 418, 264, 258], + [265, 259, 419, 420], + [421, 419, 259, 268], + [422, 416, 261, 270], + [271, 262, 423, 424], + [425, 423, 262, 266], + [426, 413, 263, 274], + [270, 264, 418, 422], + [420, 427, 307, 265], + [266, 303, 428, 425], + [267, 386, 429, 430], + [430, 431, 396, 267], + [268, 307, 427, 421], + [269, 283, 432, 433], + [433, 434, 280, 269], + [424, 428, 303, 271], + [272, 304, 435, 436], + [436, 437, 284, 272], + [273, 279, 438, 439], + [439, 440, 282, 273], + [274, 276, 415, 426], + [285, 275, 441, 442], + [443, 441, 275, 288], + [289, 278, 444, 445], + [446, 444, 278, 286], + [447, 438, 279, 294], + [295, 280, 434, 448], + [287, 281, 449, 450], + [451, 449, 281, 299], + [294, 282, 440, 447], + [448, 432, 283, 295], + [300, 284, 437, 452], + [442, 453, 454, 285], + [309, 286, 285, 308], + [286, 455, 456, 446], + [450, 457, 458, 287], + [311, 288, 287, 310], + [288, 454, 453, 443], + [445, 456, 455, 289], + [313, 290, 289, 312], + [290, 459, 460, 461], + [461, 462, 389, 290], + [293, 310, 463, 464], + [464, 465, 315, 293], + [296, 308, 466, 467], + [467, 468, 311, 296], + [298, 314, 469, 470], + [470, 471, 472, 298], + [315, 299, 298, 314], + [299, 458, 457, 451], + [452, 435, 304, 300], + [301, 312, 473, 474], + [474, 475, 309, 301], + [316, 302, 476, 477], + [478, 476, 302, 325], + [341, 305, 479, 480], + [481, 479, 305, 317], + [324, 306, 482, 483], + [484, 482, 306, 372], + [485, 466, 308, 454], + [455, 309, 475, 486], + [487, 463, 310, 458], + [454, 311, 468, 485], + [486, 473, 312, 455], + [459, 313, 488, 489], + [490, 488, 313, 342], + [491, 469, 314, 472], + [458, 315, 465, 487], + [477, 492, 485, 316], + [463, 317, 316, 468], + [317, 487, 493, 481], + [329, 447, 464, 318], + [468, 319, 318, 463], + [319, 467, 448, 322], + [321, 448, 467, 320], + [475, 323, 320, 466], + [432, 321, 328, 437], + [438, 329, 322, 434], + [323, 474, 452, 328], + [483, 494, 486, 324], + [466, 325, 324, 475], + [325, 485, 492, 478], + [337, 422, 433, 326], + [437, 327, 326, 432], + [327, 436, 424, 351], + [334, 426, 439, 330], + [434, 331, 330, 438], + [331, 433, 422, 338], + [333, 464, 447, 332], + 
[449, 339, 332, 440], + [465, 333, 343, 469], + [413, 334, 338, 418], + [336, 439, 426, 335], + [441, 346, 335, 415], + [440, 336, 340, 449], + [416, 337, 351, 423], + [339, 451, 470, 343], + [346, 443, 450, 340], + [480, 493, 487, 341], + [469, 342, 341, 465], + [342, 491, 495, 490], + [361, 407, 414, 344], + [418, 345, 344, 413], + [345, 417, 408, 357], + [381, 446, 442, 347], + [415, 348, 347, 441], + [348, 414, 407, 362], + [356, 408, 417, 349], + [423, 350, 349, 416], + [350, 425, 420, 360], + [353, 424, 436, 352], + [479, 363, 352, 435], + [428, 353, 368, 476], + [355, 452, 474, 354], + [488, 369, 354, 473], + [435, 355, 365, 479], + [402, 356, 360, 419], + [405, 361, 357, 404], + [359, 420, 425, 358], + [476, 366, 358, 428], + [427, 359, 380, 482], + [444, 381, 362, 409], + [363, 481, 477, 368], + [368, 393, 390, 363], + [365, 391, 394, 369], + [369, 490, 480, 365], + [366, 478, 483, 380], + [380, 398, 392, 366], + [371, 395, 496, 497], + [497, 498, 489, 371], + [473, 372, 371, 488], + [372, 486, 494, 484], + [392, 400, 403, 373], + [419, 374, 373, 402], + [374, 421, 430, 398], + [390, 411, 406, 375], + [404, 376, 375, 405], + [376, 403, 400, 393], + [397, 430, 421, 377], + [482, 378, 377, 427], + [378, 484, 497, 499], + [499, 499, 397, 378], + [394, 461, 445, 382], + [409, 383, 382, 444], + [383, 406, 411, 391], + [385, 450, 443, 384], + [492, 399, 384, 453], + [457, 385, 412, 493], + [387, 442, 446, 386], + [494, 429, 386, 456], + [453, 387, 401, 492], + [389, 470, 451, 388], + [493, 410, 388, 457], + [471, 389, 462, 495], + [412, 390, 393, 399], + [462, 394, 391, 410], + [401, 392, 398, 429], + [396, 445, 461, 395], + [498, 496, 395, 460], + [456, 396, 431, 494], + [431, 397, 499, 496], + [399, 477, 481, 412], + [429, 483, 478, 401], + [410, 480, 490, 462], + [496, 497, 484, 431], + [489, 495, 491, 459], + [495, 460, 459, 471], + [460, 489, 498, 498], + [472, 472, 471, 491]] + + assert C_r.table == table3 + assert C_c.table == table3 + + # Group denoted by B2,4 from [2] Pg. 474 + F, a, b = free_group("a, b") + B_2_4 = FpGroup(F, [a**4, b**4, (a*b)**4, (a**-1*b)**4, (a**2*b)**4, \ + (a*b**2)**4, (a**2*b**2)**4, (a**-1*b*a*b)**4, (a*b**-1*a*b)**4]) + C_r = coset_enumeration_r(B_2_4, [a]) + C_c = coset_enumeration_c(B_2_4, [a]) + index_r = 0 + for i in range(len(C_r.p)): + if C_r.p[i] == i: + index_r += 1 + assert index_r == 1024 + + index_c = 0 + for i in range(len(C_c.p)): + if C_c.p[i] == i: + index_c += 1 + assert index_c == 1024 + + # trivial Macdonald group G(2,2) from [2] Pg. 
480 + M = FpGroup(F, [b**-1*a**-1*b*a*b**-1*a*b*a**-2, a**-1*b**-1*a*b*a**-1*b*a*b**-2]) + C_r = coset_enumeration_r(M, [a]) + C_r.compress(); C_r.standardize() + C_c = coset_enumeration_c(M, [a]) + C_c.compress(); C_c.standardize() + table4 = [[0, 0, 0, 0]] + assert C_r.table == table4 + assert C_c.table == table4 + + +def test_look_ahead(): + # Section 3.2 [Test Example] Example (d) from [2] + F, a, b, c = free_group("a, b, c") + f = FpGroup(F, [a**11, b**5, c**4, (a*c)**3, b**2*c**-1*b**-1*c, a**4*b**-1*a**-1*b]) + H = [c, b, c**2] + table0 = [[1, 2, 0, 0, 0, 0], + [3, 0, 4, 5, 6, 7], + [0, 8, 9, 10, 11, 12], + [5, 1, 10, 13, 14, 15], + [16, 5, 16, 1, 17, 18], + [4, 3, 1, 8, 19, 20], + [12, 21, 22, 23, 24, 1], + [25, 26, 27, 28, 1, 24], + [2, 10, 5, 16, 22, 28], + [10, 13, 13, 2, 29, 30]] + CosetTable.max_stack_size = 10 + C_c = coset_enumeration_c(f, H) + C_c.compress(); C_c.standardize() + assert C_c.table[: 10] == table0 + +def test_modified_methods(): + ''' + Tests for modified coset table methods. + Example 5.7 from [1] Holt, D., Eick, B., O'Brien + "Handbook of Computational Group Theory". + + ''' + F, x, y = free_group("x, y") + f = FpGroup(F, [x**3, y**5, (x*y)**2]) + H = [x*y, x**-1*y**-1*x*y*x] + C = CosetTable(f, H) + C.modified_define(0, x) + identity = C._grp.identity + a_0 = C._grp.generators[0] + a_1 = C._grp.generators[1] + + assert C.P == [[identity, None, None, None], + [None, identity, None, None]] + assert C.table == [[1, None, None, None], + [None, 0, None, None]] + + C.modified_define(1, x) + assert C.table == [[1, None, None, None], + [2, 0, None, None], + [None, 1, None, None]] + assert C.P == [[identity, None, None, None], + [identity, identity, None, None], + [None, identity, None, None]] + + C.modified_scan(0, x**3, C._grp.identity, fill=False) + assert C.P == [[identity, identity, None, None], + [identity, identity, None, None], + [identity, identity, None, None]] + assert C.table == [[1, 2, None, None], + [2, 0, None, None], + [0, 1, None, None]] + + C.modified_scan(0, x*y, C._grp.generators[0], fill=False) + assert C.P == [[identity, identity, None, a_0**-1], + [identity, identity, a_0, None], + [identity, identity, None, None]] + assert C.table == [[1, 2, None, 1], + [2, 0, 0, None], + [0, 1, None, None]] + + C.modified_define(2, y**-1) + assert C.table == [[1, 2, None, 1], + [2, 0, 0, None], + [0, 1, None, 3], + [None, None, 2, None]] + assert C.P == [[identity, identity, None, a_0**-1], + [identity, identity, a_0, None], + [identity, identity, None, identity], + [None, None, identity, None]] + + C.modified_scan(0, x**-1*y**-1*x*y*x, C._grp.generators[1]) + assert C.table == [[1, 2, None, 1], + [2, 0, 0, None], + [0, 1, None, 3], + [3, 3, 2, None]] + assert C.P == [[identity, identity, None, a_0**-1], + [identity, identity, a_0, None], + [identity, identity, None, identity], + [a_1, a_1**-1, identity, None]] + + C.modified_scan(2, (x*y)**2, C._grp.identity) + assert C.table == [[1, 2, 3, 1], + [2, 0, 0, None], + [0, 1, None, 3], + [3, 3, 2, 0]] + assert C.P == [[identity, identity, a_1**-1, a_0**-1], + [identity, identity, a_0, None], + [identity, identity, None, identity], + [a_1, a_1**-1, identity, a_1]] + + C.modified_define(2, y) + assert C.table == [[1, 2, 3, 1], + [2, 0, 0, None], + [0, 1, 4, 3], + [3, 3, 2, 0], + [None, None, None, 2]] + assert C.P == [[identity, identity, a_1**-1, a_0**-1], + [identity, identity, a_0, None], + [identity, identity, identity, identity], + [a_1, a_1**-1, identity, a_1], + [None, None, None, identity]] + + 
C.modified_scan(0, y**5, C._grp.identity) + assert C.table == [[1, 2, 3, 1], [2, 0, 0, 4], [0, 1, 4, 3], [3, 3, 2, 0], [None, None, 1, 2]] + assert C.P == [[identity, identity, a_1**-1, a_0**-1], + [identity, identity, a_0, a_0*a_1**-1], + [identity, identity, identity, identity], + [a_1, a_1**-1, identity, a_1], + [None, None, a_1*a_0**-1, identity]] + + C.modified_scan(1, (x*y)**2, C._grp.identity) + assert C.table == [[1, 2, 3, 1], + [2, 0, 0, 4], + [0, 1, 4, 3], + [3, 3, 2, 0], + [4, 4, 1, 2]] + assert C.P == [[identity, identity, a_1**-1, a_0**-1], + [identity, identity, a_0, a_0*a_1**-1], + [identity, identity, identity, identity], + [a_1, a_1**-1, identity, a_1], + [a_0*a_1**-1, a_1*a_0**-1, a_1*a_0**-1, identity]] + + # Modified coset enumeration test + f = FpGroup(F, [x**3, y**3, x**-1*y**-1*x*y]) + C = coset_enumeration_r(f, [x]) + C_m = modified_coset_enumeration_r(f, [x]) + assert C_m.table == C.table diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_fp_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_fp_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..2f042fc937ac747108e10bcbaba375a8b443e3a5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_fp_groups.py @@ -0,0 +1,252 @@ +from sympy.core.singleton import S +from sympy.combinatorics.fp_groups import (FpGroup, low_index_subgroups, + reidemeister_presentation, FpSubgroup, + simplify_presentation) +from sympy.combinatorics.free_groups import (free_group, FreeGroup) + +from sympy.testing.pytest import slow + +""" +References +========== + +[1] Holt, D., Eick, B., O'Brien, E. +"Handbook of Computational Group Theory" + +[2] John J. Cannon; Lucien A. Dimino; George Havas; Jane M. Watson +Mathematics of Computation, Vol. 27, No. 123. (Jul., 1973), pp. 463-490. +"Implementation and Analysis of the Todd-Coxeter Algorithm" + +[3] PROC. SECOND INTERNAT. CONF. THEORY OF GROUPS, CANBERRA 1973, +pp. 347-356. "A Reidemeister-Schreier program" by George Havas. +http://staff.itee.uq.edu.au/havas/1973cdhw.pdf + +""" + +def test_low_index_subgroups(): + F, x, y = free_group("x, y") + + # Example 5.10 from [1] Pg. 
194 + f = FpGroup(F, [x**2, y**3, (x*y)**4]) + L = low_index_subgroups(f, 4) + t1 = [[[0, 0, 0, 0]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 3, 3]], + [[0, 0, 1, 2], [2, 2, 2, 0], [1, 1, 0, 1]], + [[1, 1, 0, 0], [0, 0, 1, 1]]] + for i in range(len(t1)): + assert L[i].table == t1[i] + + f = FpGroup(F, [x**2, y**3, (x*y)**7]) + L = low_index_subgroups(f, 15) + t2 = [[[0, 0, 0, 0]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], + [4, 4, 5, 3], [6, 6, 3, 4], [5, 5, 6, 6]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], + [6, 6, 5, 3], [5, 5, 3, 4], [4, 4, 6, 6]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], + [6, 6, 5, 3], [7, 7, 3, 4], [4, 4, 8, 9], [5, 5, 10, 11], + [11, 11, 9, 6], [9, 9, 6, 8], [12, 12, 11, 7], [8, 8, 7, 10], + [10, 10, 13, 14], [14, 14, 14, 12], [13, 13, 12, 13]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], + [6, 6, 5, 3], [7, 7, 3, 4], [4, 4, 8, 9], [5, 5, 10, 11], + [11, 11, 9, 6], [12, 12, 6, 8], [10, 10, 11, 7], [8, 8, 7, 10], + [9, 9, 13, 14], [14, 14, 14, 12], [13, 13, 12, 13]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], + [6, 6, 5, 3], [7, 7, 3, 4], [4, 4, 8, 9], [5, 5, 10, 11], + [11, 11, 9, 6], [12, 12, 6, 8], [13, 13, 11, 7], [8, 8, 7, 10], + [9, 9, 12, 12], [10, 10, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 3, 3], [2, 2, 5, 6] + , [7, 7, 6, 4], [8, 8, 4, 5], [5, 5, 8, 9], [6, 6, 9, 7], + [10, 10, 7, 8], [9, 9, 11, 12], [11, 11, 12, 10], [13, 13, 10, 11], + [12, 12, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 3, 3], [2, 2, 5, 6] + , [7, 7, 6, 4], [8, 8, 4, 5], [5, 5, 8, 9], [6, 6, 9, 7], + [10, 10, 7, 8], [9, 9, 11, 12], [13, 13, 12, 10], [12, 12, 10, 11], + [11, 11, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 4, 4] + , [7, 7, 6, 3], [8, 8, 3, 5], [5, 5, 8, 9], [6, 6, 9, 7], + [10, 10, 7, 8], [9, 9, 11, 12], [13, 13, 12, 10], [12, 12, 10, 11], + [11, 11, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [5, 5, 6, 3], [9, 9, 3, 5], [10, 10, 8, 4], [8, 8, 4, 7], + [6, 6, 10, 11], [7, 7, 11, 9], [12, 12, 9, 10], [11, 11, 13, 14], + [14, 14, 14, 12], [13, 13, 12, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [6, 6, 6, 3], [5, 5, 3, 5], [8, 8, 8, 4], [7, 7, 4, 7]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [9, 9, 6, 3], [6, 6, 3, 5], [10, 10, 8, 4], [11, 11, 4, 7], + [5, 5, 10, 12], [7, 7, 12, 9], [8, 8, 11, 11], [13, 13, 9, 10], + [12, 12, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [9, 9, 6, 3], [6, 6, 3, 5], [10, 10, 8, 4], [11, 11, 4, 7], + [5, 5, 12, 11], [7, 7, 10, 10], [8, 8, 9, 12], [13, 13, 11, 9], + [12, 12, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [9, 9, 6, 3], [10, 10, 3, 5], [7, 7, 8, 4], [11, 11, 4, 7], + [5, 5, 9, 9], [6, 6, 11, 12], [8, 8, 12, 10], [13, 13, 10, 11], + [12, 12, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [9, 9, 6, 3], [10, 10, 3, 5], [7, 7, 8, 4], [11, 11, 4, 7], + [5, 5, 12, 11], [6, 6, 10, 10], [8, 8, 9, 12], [13, 13, 11, 9], + [12, 12, 13, 13]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8] + , [9, 9, 6, 3], [10, 10, 3, 5], [11, 11, 8, 4], [12, 12, 4, 7], + [5, 5, 9, 9], [6, 6, 12, 13], [7, 7, 11, 11], [8, 8, 13, 10], + [13, 13, 10, 12]], + [[1, 1, 0, 0], [0, 0, 2, 3], [4, 4, 3, 1], [5, 5, 1, 2], [2, 2, 4, 4] + , [3, 3, 6, 7], 
[7, 7, 7, 5], [6, 6, 5, 6]]] + for i in range(len(t2)): + assert L[i].table == t2[i] + + f = FpGroup(F, [x**2, y**3, (x*y)**7]) + L = low_index_subgroups(f, 10, [x]) + t3 = [[[0, 0, 0, 0]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], [4, 4, 5, 3], + [6, 6, 3, 4], [5, 5, 6, 6]], + [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 4, 5], [6, 6, 5, 3], + [5, 5, 3, 4], [4, 4, 6, 6]], + [[0, 0, 1, 2], [3, 3, 2, 0], [4, 4, 0, 1], [1, 1, 5, 6], [2, 2, 7, 8], + [6, 6, 6, 3], [5, 5, 3, 5], [8, 8, 8, 4], [7, 7, 4, 7]]] + for i in range(len(t3)): + assert L[i].table == t3[i] + + +def test_subgroup_presentations(): + F, x, y = free_group("x, y") + f = FpGroup(F, [x**3, y**5, (x*y)**2]) + H = [x*y, x**-1*y**-1*x*y*x] + p1 = reidemeister_presentation(f, H) + assert str(p1) == "((y_1, y_2), (y_1**2, y_2**3, y_2*y_1*y_2*y_1*y_2*y_1))" + + H = f.subgroup(H) + assert (H.generators, H.relators) == p1 + + f = FpGroup(F, [x**3, y**3, (x*y)**3]) + H = [x*y, x*y**-1] + p2 = reidemeister_presentation(f, H) + assert str(p2) == "((x_0, y_0), (x_0**3, y_0**3, x_0*y_0*x_0*y_0*x_0*y_0))" + + f = FpGroup(F, [x**2*y**2, y**-1*x*y*x**-3]) + H = [x] + p3 = reidemeister_presentation(f, H) + assert str(p3) == "((x_0,), (x_0**4,))" + + f = FpGroup(F, [x**3*y**-3, (x*y)**3, (x*y**-1)**2]) + H = [x] + p4 = reidemeister_presentation(f, H) + assert str(p4) == "((x_0,), (x_0**6,))" + + # this presentation can be improved, the most simplified form + # of presentation is + # See [2] Pg 474 group PSL_2(11) + # This is the group PSL_2(11) + F, a, b, c = free_group("a, b, c") + f = FpGroup(F, [a**11, b**5, c**4, (b*c**2)**2, (a*b*c)**3, (a**4*c**2)**3, b**2*c**-1*b**-1*c, a**4*b**-1*a**-1*b]) + H = [a, b, c**2] + gens, rels = reidemeister_presentation(f, H) + assert str(gens) == "(b_1, c_3)" + assert len(rels) == 18 + + +@slow +def test_order(): + F, x, y = free_group("x, y") + f = FpGroup(F, [x**4, y**2, x*y*x**-1*y]) + assert f.order() == 8 + + f = FpGroup(F, [x*y*x**-1*y**-1, y**2]) + assert f.order() is S.Infinity + + F, a, b, c = free_group("a, b, c") + f = FpGroup(F, [a**250, b**2, c*b*c**-1*b, c**4, c**-1*a**-1*c*a, a**-1*b**-1*a*b]) + assert f.order() == 2000 + + F, x = free_group("x") + f = FpGroup(F, []) + assert f.order() is S.Infinity + + f = FpGroup(free_group('')[0], []) + assert f.order() == 1 + +def test_fp_subgroup(): + def _test_subgroup(K, T, S): + _gens = T(K.generators) + assert all(elem in S for elem in _gens) + assert T.is_injective() + assert T.image().order() == S.order() + F, x, y = free_group("x, y") + f = FpGroup(F, [x**4, y**2, x*y*x**-1*y]) + S = FpSubgroup(f, [x*y]) + assert (x*y)**-3 in S + K, T = f.subgroup([x*y], homomorphism=True) + assert T(K.generators) == [y*x**-1] + _test_subgroup(K, T, S) + + S = FpSubgroup(f, [x**-1*y*x]) + assert x**-1*y**4*x in S + assert x**-1*y**4*x**2 not in S + K, T = f.subgroup([x**-1*y*x], homomorphism=True) + assert T(K.generators[0]**3) == y**3 + _test_subgroup(K, T, S) + + f = FpGroup(F, [x**3, y**5, (x*y)**2]) + H = [x*y, x**-1*y**-1*x*y*x] + K, T = f.subgroup(H, homomorphism=True) + S = FpSubgroup(f, H) + _test_subgroup(K, T, S) + +def test_permutation_methods(): + F, x, y = free_group("x, y") + # DihedralGroup(8) + G = FpGroup(F, [x**2, y**8, x*y*x**-1*y]) + T = G._to_perm_group()[1] + assert T.is_isomorphism() + assert G.center() == [y**4] + + # DiheadralGroup(4) + G = FpGroup(F, [x**2, y**4, x*y*x**-1*y]) + S = FpSubgroup(G, G.normal_closure([x])) + assert x in S + assert y**-1*x*y in S + + # Z_5xZ_4 + G = FpGroup(F, [x*y*x**-1*y**-1, y**5, x**4]) 
+ assert G.is_abelian + assert G.is_solvable + + # AlternatingGroup(5) + G = FpGroup(F, [x**3, y**2, (x*y)**5]) + assert not G.is_solvable + + # AlternatingGroup(4) + G = FpGroup(F, [x**3, y**2, (x*y)**3]) + assert len(G.derived_series()) == 3 + S = FpSubgroup(G, G.derived_subgroup()) + assert S.order() == 4 + + +def test_simplify_presentation(): + # ref #16083 + G = simplify_presentation(FpGroup(FreeGroup([]), [])) + assert not G.generators + assert not G.relators + + +def test_cyclic(): + F, x, y = free_group("x, y") + f = FpGroup(F, [x*y, x**-1*y**-1*x*y*x]) + assert f.is_cyclic + f = FpGroup(F, [x*y, x*y**-1]) + assert f.is_cyclic + f = FpGroup(F, [x**4, y**2, x*y*x**-1*y]) + assert not f.is_cyclic + + +def test_abelian_invariants(): + F, x, y = free_group("x, y") + f = FpGroup(F, [x*y, x**-1*y**-1*x*y*x]) + assert f.abelian_invariants() == [] + f = FpGroup(F, [x*y, x*y**-1]) + assert f.abelian_invariants() == [2] + f = FpGroup(F, [x**4, y**2, x*y*x**-1*y]) + assert f.abelian_invariants() == [2, 4] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_galois.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_galois.py new file mode 100644 index 0000000000000000000000000000000000000000..0d2ac29a846db88444d275b72a85ce3debaeaf05 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_galois.py @@ -0,0 +1,82 @@ +"""Test groups defined by the galois module. """ + +from sympy.combinatorics.galois import ( + S4TransitiveSubgroups, S5TransitiveSubgroups, S6TransitiveSubgroups, + find_transitive_subgroups_of_S6, +) +from sympy.combinatorics.homomorphisms import is_isomorphic +from sympy.combinatorics.named_groups import ( + SymmetricGroup, AlternatingGroup, CyclicGroup, +) + + +def test_four_group(): + G = S4TransitiveSubgroups.V.get_perm_group() + A4 = AlternatingGroup(4) + assert G.is_subgroup(A4) + assert G.degree == 4 + assert G.is_transitive() + assert G.order() == 4 + assert not G.is_cyclic + + +def test_M20(): + G = S5TransitiveSubgroups.M20.get_perm_group() + S5 = SymmetricGroup(5) + A5 = AlternatingGroup(5) + assert G.is_subgroup(S5) + assert not G.is_subgroup(A5) + assert G.degree == 5 + assert G.is_transitive() + assert G.order() == 20 + + +# Setting this True means that for each of the transitive subgroups of S6, +# we run a test not only on the fixed representation, but also on one freshly +# generated by the search procedure. +INCLUDE_SEARCH_REPS = False +S6_randomized = {} +if INCLUDE_SEARCH_REPS: + S6_randomized = find_transitive_subgroups_of_S6(*list(S6TransitiveSubgroups)) + + +def get_versions_of_S6_subgroup(name): + vers = [name.get_perm_group()] + if INCLUDE_SEARCH_REPS: + vers.append(S6_randomized[name]) + return vers + + +def test_S6_transitive_subgroups(): + """ + Test enough characteristics to distinguish all 16 transitive subgroups. 
+ """ + ts = S6TransitiveSubgroups + A6 = AlternatingGroup(6) + for name, alt, order, is_isom, not_isom in [ + (ts.C6, False, 6, CyclicGroup(6), None), + (ts.S3, False, 6, SymmetricGroup(3), None), + (ts.D6, False, 12, None, None), + (ts.A4, True, 12, None, None), + (ts.G18, False, 18, None, None), + (ts.A4xC2, False, 24, None, SymmetricGroup(4)), + (ts.S4m, False, 24, SymmetricGroup(4), None), + (ts.S4p, True, 24, None, None), + (ts.G36m, False, 36, None, None), + (ts.G36p, True, 36, None, None), + (ts.S4xC2, False, 48, None, None), + (ts.PSL2F5, True, 60, None, None), + (ts.G72, False, 72, None, None), + (ts.PGL2F5, False, 120, None, None), + (ts.A6, True, 360, None, None), + (ts.S6, False, 720, None, None), + ]: + for G in get_versions_of_S6_subgroup(name): + assert G.is_transitive() + assert G.degree == 6 + assert G.is_subgroup(A6) is alt + assert G.order() == order + if is_isom: + assert is_isomorphic(G, is_isom) + if not_isom: + assert not is_isomorphic(G, not_isom) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_generators.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_generators.py new file mode 100644 index 0000000000000000000000000000000000000000..795ef8f08f6ec212879f528c6a0c2f0bd73037f0 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_generators.py @@ -0,0 +1,105 @@ +from sympy.combinatorics.generators import symmetric, cyclic, alternating, \ + dihedral, rubik +from sympy.combinatorics.permutations import Permutation +from sympy.testing.pytest import raises + +def test_generators(): + + assert list(cyclic(6)) == [ + Permutation([0, 1, 2, 3, 4, 5]), + Permutation([1, 2, 3, 4, 5, 0]), + Permutation([2, 3, 4, 5, 0, 1]), + Permutation([3, 4, 5, 0, 1, 2]), + Permutation([4, 5, 0, 1, 2, 3]), + Permutation([5, 0, 1, 2, 3, 4])] + + assert list(cyclic(10)) == [ + Permutation([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), + Permutation([1, 2, 3, 4, 5, 6, 7, 8, 9, 0]), + Permutation([2, 3, 4, 5, 6, 7, 8, 9, 0, 1]), + Permutation([3, 4, 5, 6, 7, 8, 9, 0, 1, 2]), + Permutation([4, 5, 6, 7, 8, 9, 0, 1, 2, 3]), + Permutation([5, 6, 7, 8, 9, 0, 1, 2, 3, 4]), + Permutation([6, 7, 8, 9, 0, 1, 2, 3, 4, 5]), + Permutation([7, 8, 9, 0, 1, 2, 3, 4, 5, 6]), + Permutation([8, 9, 0, 1, 2, 3, 4, 5, 6, 7]), + Permutation([9, 0, 1, 2, 3, 4, 5, 6, 7, 8])] + + assert list(alternating(4)) == [ + Permutation([0, 1, 2, 3]), + Permutation([0, 2, 3, 1]), + Permutation([0, 3, 1, 2]), + Permutation([1, 0, 3, 2]), + Permutation([1, 2, 0, 3]), + Permutation([1, 3, 2, 0]), + Permutation([2, 0, 1, 3]), + Permutation([2, 1, 3, 0]), + Permutation([2, 3, 0, 1]), + Permutation([3, 0, 2, 1]), + Permutation([3, 1, 0, 2]), + Permutation([3, 2, 1, 0])] + + assert list(symmetric(3)) == [ + Permutation([0, 1, 2]), + Permutation([0, 2, 1]), + Permutation([1, 0, 2]), + Permutation([1, 2, 0]), + Permutation([2, 0, 1]), + Permutation([2, 1, 0])] + + assert list(symmetric(4)) == [ + Permutation([0, 1, 2, 3]), + Permutation([0, 1, 3, 2]), + Permutation([0, 2, 1, 3]), + Permutation([0, 2, 3, 1]), + Permutation([0, 3, 1, 2]), + Permutation([0, 3, 2, 1]), + Permutation([1, 0, 2, 3]), + Permutation([1, 0, 3, 2]), + Permutation([1, 2, 0, 3]), + Permutation([1, 2, 3, 0]), + Permutation([1, 3, 0, 2]), + Permutation([1, 3, 2, 0]), + Permutation([2, 0, 1, 3]), + Permutation([2, 0, 3, 1]), + Permutation([2, 1, 0, 3]), + Permutation([2, 1, 3, 0]), + Permutation([2, 3, 0, 1]), + Permutation([2, 3, 1, 0]), + Permutation([3, 0, 1, 2]), + Permutation([3, 
0, 2, 1]), + Permutation([3, 1, 0, 2]), + Permutation([3, 1, 2, 0]), + Permutation([3, 2, 0, 1]), + Permutation([3, 2, 1, 0])] + + assert list(dihedral(1)) == [ + Permutation([0, 1]), Permutation([1, 0])] + + assert list(dihedral(2)) == [ + Permutation([0, 1, 2, 3]), + Permutation([1, 0, 3, 2]), + Permutation([2, 3, 0, 1]), + Permutation([3, 2, 1, 0])] + + assert list(dihedral(3)) == [ + Permutation([0, 1, 2]), + Permutation([2, 1, 0]), + Permutation([1, 2, 0]), + Permutation([0, 2, 1]), + Permutation([2, 0, 1]), + Permutation([1, 0, 2])] + + assert list(dihedral(5)) == [ + Permutation([0, 1, 2, 3, 4]), + Permutation([4, 3, 2, 1, 0]), + Permutation([1, 2, 3, 4, 0]), + Permutation([0, 4, 3, 2, 1]), + Permutation([2, 3, 4, 0, 1]), + Permutation([1, 0, 4, 3, 2]), + Permutation([3, 4, 0, 1, 2]), + Permutation([2, 1, 0, 4, 3]), + Permutation([4, 0, 1, 2, 3]), + Permutation([3, 2, 1, 0, 4])] + + raises(ValueError, lambda: rubik(1)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_graycode.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_graycode.py new file mode 100644 index 0000000000000000000000000000000000000000..a754a3c401b07c9c12cb9bdeeefdfc94f6cb8b5c --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_graycode.py @@ -0,0 +1,72 @@ +from sympy.combinatorics.graycode import (GrayCode, bin_to_gray, + random_bitstring, get_subset_from_bitstring, graycode_subsets, + gray_to_bin) +from sympy.testing.pytest import raises + +def test_graycode(): + g = GrayCode(2) + got = [] + for i in g.generate_gray(): + if i.startswith('0'): + g.skip() + got.append(i) + assert got == '00 11 10'.split() + a = GrayCode(6) + assert a.current == '0'*6 + assert a.rank == 0 + assert len(list(a.generate_gray())) == 64 + codes = ['011001', '011011', '011010', + '011110', '011111', '011101', '011100', '010100', '010101', '010111', + '010110', '010010', '010011', '010001', '010000', '110000', '110001', + '110011', '110010', '110110', '110111', '110101', '110100', '111100', + '111101', '111111', '111110', '111010', '111011', '111001', '111000', + '101000', '101001', '101011', '101010', '101110', '101111', '101101', + '101100', '100100', '100101', '100111', '100110', '100010', '100011', + '100001', '100000'] + assert list(a.generate_gray(start='011001')) == codes + assert list( + a.generate_gray(rank=GrayCode(6, start='011001').rank)) == codes + assert a.next().current == '000001' + assert a.next(2).current == '000011' + assert a.next(-1).current == '100000' + + a = GrayCode(5, start='10010') + assert a.rank == 28 + a = GrayCode(6, start='101000') + assert a.rank == 48 + + assert GrayCode(6, rank=4).current == '000110' + assert GrayCode(6, rank=4).rank == 4 + assert [GrayCode(4, start=s).rank for s in + GrayCode(4).generate_gray()] == [0, 1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15] + a = GrayCode(15, rank=15) + assert a.current == '000000000001000' + + assert bin_to_gray('111') == '100' + + a = random_bitstring(5) + assert type(a) is str + assert len(a) == 5 + assert all(i in ['0', '1'] for i in a) + + assert get_subset_from_bitstring( + ['a', 'b', 'c', 'd'], '0011') == ['c', 'd'] + assert get_subset_from_bitstring('abcd', '1001') == ['a', 'd'] + assert list(graycode_subsets(['a', 'b', 'c'])) == \ + [[], ['c'], ['b', 'c'], ['b'], ['a', 'b'], ['a', 'b', 'c'], + ['a', 'c'], ['a']] + + raises(ValueError, lambda: GrayCode(0)) + raises(ValueError, lambda: GrayCode(2.2)) + raises(ValueError, lambda: GrayCode(2, 
start=[1, 1, 0])) + raises(ValueError, lambda: GrayCode(2, rank=2.5)) + raises(ValueError, lambda: get_subset_from_bitstring(['c', 'a', 'c'], '1100')) + raises(ValueError, lambda: list(GrayCode(3).generate_gray(start="1111"))) + + +def test_live_issue_117(): + assert bin_to_gray('0100') == '0110' + assert bin_to_gray('0101') == '0111' + for bits in ('0100', '0101'): + assert gray_to_bin(bin_to_gray(bits)) == bits diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_constructs.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_constructs.py new file mode 100644 index 0000000000000000000000000000000000000000..d0f7d6394bbc2e285650ea95d36be8e2ed5ea69e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_constructs.py @@ -0,0 +1,15 @@ +from sympy.combinatorics.group_constructs import DirectProduct +from sympy.combinatorics.named_groups import CyclicGroup, DihedralGroup + + +def test_direct_product_n(): + C = CyclicGroup(4) + D = DihedralGroup(4) + G = DirectProduct(C, C, C) + assert G.order() == 64 + assert G.degree == 12 + assert len(G.orbits()) == 3 + assert G.is_abelian is True + H = DirectProduct(D, C) + assert H.order() == 32 + assert H.is_abelian is False diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_numbers.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_numbers.py new file mode 100644 index 0000000000000000000000000000000000000000..62de63cbcaadb0fd536ceb87932b8d7495334b15 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_group_numbers.py @@ -0,0 +1,27 @@ +from sympy.combinatorics.group_numbers import (is_nilpotent_number, + is_abelian_number, is_cyclic_number) +from sympy.testing.pytest import raises +from sympy import randprime + + +def test_is_nilpotent_number(): + assert is_nilpotent_number(21) == False + assert is_nilpotent_number(randprime(1, 30)**12) == True + raises(ValueError, lambda: is_nilpotent_number(-5)) + + +def test_is_abelian_number(): + assert is_abelian_number(4) == True + assert is_abelian_number(randprime(1, 2000)**2) == True + assert is_abelian_number(randprime(1000, 100000)) == True + assert is_abelian_number(60) == False + assert is_abelian_number(24) == False + raises(ValueError, lambda: is_abelian_number(-5)) + + +def test_is_cyclic_number(): + assert is_cyclic_number(15) == True + assert is_cyclic_number(randprime(1, 2000)**2) == False + assert is_cyclic_number(randprime(1000, 100000)) == True + assert is_cyclic_number(4) == False + raises(ValueError, lambda: is_cyclic_number(-5)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_homomorphisms.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_homomorphisms.py new file mode 100644 index 0000000000000000000000000000000000000000..0936bbddf46a16dccdfbaebda8d1c675c131f05a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_homomorphisms.py @@ -0,0 +1,114 @@ +from sympy.combinatorics import Permutation +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.combinatorics.homomorphisms import homomorphism, group_isomorphism, is_isomorphic +from sympy.combinatorics.free_groups import free_group +from sympy.combinatorics.fp_groups import FpGroup +from sympy.combinatorics.named_groups import AlternatingGroup, DihedralGroup, CyclicGroup +from sympy.testing.pytest import 
raises + +def test_homomorphism(): + # FpGroup -> PermutationGroup + F, a, b = free_group("a, b") + G = FpGroup(F, [a**3, b**3, (a*b)**2]) + + c = Permutation(3)(0, 1, 2) + d = Permutation(3)(1, 2, 3) + A = AlternatingGroup(4) + T = homomorphism(G, A, [a, b], [c, d]) + assert T(a*b**2*a**-1) == c*d**2*c**-1 + assert T.is_isomorphism() + assert T(T.invert(Permutation(3)(0, 2, 3))) == Permutation(3)(0, 2, 3) + + T = homomorphism(G, AlternatingGroup(4), G.generators) + assert T.is_trivial() + assert T.kernel().order() == G.order() + + E, e = free_group("e") + G = FpGroup(E, [e**8]) + P = PermutationGroup([Permutation(0, 1, 2, 3), Permutation(0, 2)]) + T = homomorphism(G, P, [e], [Permutation(0, 1, 2, 3)]) + assert T.image().order() == 4 + assert T(T.invert(Permutation(0, 2)(1, 3))) == Permutation(0, 2)(1, 3) + + T = homomorphism(E, AlternatingGroup(4), E.generators, [c]) + assert T.invert(c**2) == e**-1 #order(c) == 3 so c**2 == c**-1 + + # FreeGroup -> FreeGroup + T = homomorphism(F, E, [a], [e]) + assert T(a**-2*b**4*a**2).is_identity + + # FreeGroup -> FpGroup + G = FpGroup(F, [a*b*a**-1*b**-1]) + T = homomorphism(F, G, F.generators, G.generators) + assert T.invert(a**-1*b**-1*a**2) == a*b**-1 + + # PermutationGroup -> PermutationGroup + D = DihedralGroup(8) + p = Permutation(0, 1, 2, 3, 4, 5, 6, 7) + P = PermutationGroup(p) + T = homomorphism(P, D, [p], [p]) + assert T.is_injective() + assert not T.is_isomorphism() + assert T.invert(p**3) == p**3 + + T2 = homomorphism(F, P, [F.generators[0]], P.generators) + T = T.compose(T2) + assert T.domain == F + assert T.codomain == D + assert T(a*b) == p + + D3 = DihedralGroup(3) + T = homomorphism(D3, D3, D3.generators, D3.generators) + assert T.is_isomorphism() + + +def test_isomorphisms(): + + F, a, b = free_group("a, b") + E, c, d = free_group("c, d") + # Infinite groups with differently ordered relators. + G = FpGroup(F, [a**2, b**3]) + H = FpGroup(F, [b**3, a**2]) + assert is_isomorphic(G, H) + + # Trivial Case + # FpGroup -> FpGroup + H = FpGroup(F, [a**3, b**3, (a*b)**2]) + F, c, d = free_group("c, d") + G = FpGroup(F, [c**3, d**3, (c*d)**2]) + check, T = group_isomorphism(G, H) + assert check + assert T(c**3*d**2) == a**3*b**2 + + # FpGroup -> PermutationGroup + # FpGroup is converted to the equivalent isomorphic group. + F, a, b = free_group("a, b") + G = FpGroup(F, [a**3, b**3, (a*b)**2]) + H = AlternatingGroup(4) + check, T = group_isomorphism(G, H) + assert check + assert T(b*a*b**-1*a**-1*b**-1) == Permutation(0, 2, 3) + assert T(b*a*b*a**-1*b**-1) == Permutation(0, 3, 2) + + # PermutationGroup -> PermutationGroup + D = DihedralGroup(8) + p = Permutation(0, 1, 2, 3, 4, 5, 6, 7) + P = PermutationGroup(p) + assert not is_isomorphic(D, P) + + A = CyclicGroup(5) + B = CyclicGroup(7) + assert not is_isomorphic(A, B) + + # Two groups of the same prime order are isomorphic to each other. 
+ G = FpGroup(F, [a, b**5]) + H = CyclicGroup(5) + assert G.order() == H.order() + assert is_isomorphic(G, H) + + +def test_check_homomorphism(): + a = Permutation(1,2,3,4) + b = Permutation(1,3) + G = PermutationGroup([a, b]) + raises(ValueError, lambda: homomorphism(G, G, [a], [a])) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_named_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_named_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..59bcb6ef3f020335de76d7a72152a0b58cbc6976 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_named_groups.py @@ -0,0 +1,70 @@ +from sympy.combinatorics.named_groups import (SymmetricGroup, CyclicGroup, + DihedralGroup, AlternatingGroup, + AbelianGroup, RubikGroup) +from sympy.testing.pytest import raises + + +def test_SymmetricGroup(): + G = SymmetricGroup(5) + elements = list(G.generate()) + assert (G.generators[0]).size == 5 + assert len(elements) == 120 + assert G.is_solvable is False + assert G.is_abelian is False + assert G.is_nilpotent is False + assert G.is_transitive() is True + H = SymmetricGroup(1) + assert H.order() == 1 + L = SymmetricGroup(2) + assert L.order() == 2 + + +def test_CyclicGroup(): + G = CyclicGroup(10) + elements = list(G.generate()) + assert len(elements) == 10 + assert (G.derived_subgroup()).order() == 1 + assert G.is_abelian is True + assert G.is_solvable is True + assert G.is_nilpotent is True + H = CyclicGroup(1) + assert H.order() == 1 + L = CyclicGroup(2) + assert L.order() == 2 + + +def test_DihedralGroup(): + G = DihedralGroup(6) + elements = list(G.generate()) + assert len(elements) == 12 + assert G.is_transitive() is True + assert G.is_abelian is False + assert G.is_solvable is True + assert G.is_nilpotent is False + H = DihedralGroup(1) + assert H.order() == 2 + L = DihedralGroup(2) + assert L.order() == 4 + assert L.is_abelian is True + assert L.is_nilpotent is True + + +def test_AlternatingGroup(): + G = AlternatingGroup(5) + elements = list(G.generate()) + assert len(elements) == 60 + assert [perm.is_even for perm in elements] == [True]*60 + H = AlternatingGroup(1) + assert H.order() == 1 + L = AlternatingGroup(2) + assert L.order() == 1 + + +def test_AbelianGroup(): + A = AbelianGroup(3, 3, 3) + assert A.order() == 27 + assert A.is_abelian is True + + +def test_RubikGroup(): + raises(ValueError, lambda: RubikGroup(1)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_partitions.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_partitions.py new file mode 100644 index 0000000000000000000000000000000000000000..32e70e53a53aadbb17c8292bbef8f52d1144d6e0 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_partitions.py @@ -0,0 +1,118 @@ +from sympy.core.sorting import ordered, default_sort_key +from sympy.combinatorics.partitions import (Partition, IntegerPartition, + RGS_enum, RGS_unrank, RGS_rank, + random_integer_partition) +from sympy.testing.pytest import raises +from sympy.utilities.iterables import partitions +from sympy.sets.sets import Set, FiniteSet + + +def test_partition_constructor(): + raises(ValueError, lambda: Partition([1, 1, 2])) + raises(ValueError, lambda: Partition([1, 2, 3], [2, 3, 4])) + raises(ValueError, lambda: Partition(1, 2, 3)) + raises(ValueError, lambda: Partition(*list(range(3)))) + + assert Partition([1, 2, 3], [4, 5]) == Partition([4, 5], [1, 2, 
3]) + assert Partition({1, 2, 3}, {4, 5}) == Partition([1, 2, 3], [4, 5]) + + a = FiniteSet(1, 2, 3) + b = FiniteSet(4, 5) + assert Partition(a, b) == Partition([1, 2, 3], [4, 5]) + assert Partition({a, b}) == Partition(FiniteSet(a, b)) + assert Partition({a, b}) != Partition(a, b) + +def test_partition(): + from sympy.abc import x + + a = Partition([1, 2, 3], [4]) + b = Partition([1, 2], [3, 4]) + c = Partition([x]) + l = [a, b, c] + l.sort(key=default_sort_key) + assert l == [c, a, b] + l.sort(key=lambda w: default_sort_key(w, order='rev-lex')) + assert l == [c, a, b] + + assert (a == b) is False + assert a <= b + assert (a > b) is False + assert a != b + assert a < b + + assert (a + 2).partition == [[1, 2], [3, 4]] + assert (b - 1).partition == [[1, 2, 4], [3]] + + assert (a - 1).partition == [[1, 2, 3, 4]] + assert (a + 1).partition == [[1, 2, 4], [3]] + assert (b + 1).partition == [[1, 2], [3], [4]] + + assert a.rank == 1 + assert b.rank == 3 + + assert a.RGS == (0, 0, 0, 1) + assert b.RGS == (0, 0, 1, 1) + + +def test_integer_partition(): + # no zeros in partition + raises(ValueError, lambda: IntegerPartition(list(range(3)))) + # check fails since 1 + 2 != 100 + raises(ValueError, lambda: IntegerPartition(100, list(range(1, 3)))) + a = IntegerPartition(8, [1, 3, 4]) + b = a.next_lex() + c = IntegerPartition([1, 3, 4]) + d = IntegerPartition(8, {1: 3, 3: 1, 2: 1}) + assert a == c + assert a.integer == d.integer + assert a.conjugate == [3, 2, 2, 1] + assert (a == b) is False + assert a <= b + assert (a > b) is False + assert a != b + + for i in range(1, 11): + next = set() + prev = set() + a = IntegerPartition([i]) + ans = {IntegerPartition(p) for p in partitions(i)} + n = len(ans) + for j in range(n): + next.add(a) + a = a.next_lex() + IntegerPartition(i, a.partition) # check it by giving i + for j in range(n): + prev.add(a) + a = a.prev_lex() + IntegerPartition(i, a.partition) # check it by giving i + assert next == ans + assert prev == ans + + assert IntegerPartition([1, 2, 3]).as_ferrers() == '###\n##\n#' + assert IntegerPartition([1, 1, 3]).as_ferrers('o') == 'ooo\no\no' + assert str(IntegerPartition([1, 1, 3])) == '[3, 1, 1]' + assert IntegerPartition([1, 1, 3]).partition == [3, 1, 1] + + raises(ValueError, lambda: random_integer_partition(-1)) + assert random_integer_partition(1) == [1] + assert random_integer_partition(10, seed=[1, 3, 2, 1, 5, 1] + ) == [5, 2, 1, 1, 1] + + +def test_rgs(): + raises(ValueError, lambda: RGS_unrank(-1, 3)) + raises(ValueError, lambda: RGS_unrank(3, 0)) + raises(ValueError, lambda: RGS_unrank(10, 1)) + + raises(ValueError, lambda: Partition.from_rgs(list(range(3)), list(range(2)))) + raises(ValueError, lambda: Partition.from_rgs(list(range(1, 3)), list(range(2)))) + assert RGS_enum(-1) == 0 + assert RGS_enum(1) == 1 + assert RGS_unrank(7, 5) == [0, 0, 1, 0, 2] + assert RGS_unrank(23, 14) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 2] + assert RGS_rank(RGS_unrank(40, 100)) == 40 + +def test_ordered_partition_9608(): + a = Partition([1, 2, 3], [4]) + b = Partition([1, 2], [3, 4]) + assert list(ordered([a,b], Set._infimum_key)) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_pc_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_pc_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..b0c146279921e1e6499534fe9e33b993348d1503 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_pc_groups.py @@ -0,0 +1,87 @@ +from 
sympy.combinatorics.permutations import Permutation +from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup, DihedralGroup +from sympy.matrices import Matrix + +def test_pc_presentation(): + Groups = [SymmetricGroup(3), SymmetricGroup(4), SymmetricGroup(9).sylow_subgroup(3), + SymmetricGroup(9).sylow_subgroup(2), SymmetricGroup(8).sylow_subgroup(2), DihedralGroup(10)] + + S = SymmetricGroup(125).sylow_subgroup(5) + G = S.derived_series()[2] + Groups.append(G) + + G = SymmetricGroup(25).sylow_subgroup(5) + Groups.append(G) + + S = SymmetricGroup(11**2).sylow_subgroup(11) + G = S.derived_series()[2] + Groups.append(G) + + for G in Groups: + PcGroup = G.polycyclic_group() + collector = PcGroup.collector + pc_presentation = collector.pc_presentation + + pcgs = PcGroup.pcgs + free_group = collector.free_group + free_to_perm = {} + for s, g in zip(free_group.symbols, pcgs): + free_to_perm[s] = g + + for k, v in pc_presentation.items(): + k_array = k.array_form + if v != (): + v_array = v.array_form + + lhs = Permutation() + for gen in k_array: + s = gen[0] + e = gen[1] + lhs = lhs*free_to_perm[s]**e + + if v == (): + assert lhs.is_identity + continue + + rhs = Permutation() + for gen in v_array: + s = gen[0] + e = gen[1] + rhs = rhs*free_to_perm[s]**e + + assert lhs == rhs + + +def test_exponent_vector(): + + Groups = [SymmetricGroup(3), SymmetricGroup(4), SymmetricGroup(9).sylow_subgroup(3), + SymmetricGroup(9).sylow_subgroup(2), SymmetricGroup(8).sylow_subgroup(2)] + + for G in Groups: + PcGroup = G.polycyclic_group() + collector = PcGroup.collector + + pcgs = PcGroup.pcgs + # free_group = collector.free_group + + for gen in G.generators: + exp = collector.exponent_vector(gen) + g = Permutation() + for i in range(len(exp)): + g = g*pcgs[i]**exp[i] if exp[i] else g + assert g == gen + + +def test_induced_pcgs(): + G = [SymmetricGroup(9).sylow_subgroup(3), SymmetricGroup(20).sylow_subgroup(2), AlternatingGroup(4), + DihedralGroup(4), DihedralGroup(10), DihedralGroup(9), SymmetricGroup(3), SymmetricGroup(4)] + + for g in G: + PcGroup = g.polycyclic_group() + collector = PcGroup.collector + gens = list(g.generators) + ipcgs = collector.induced_pcgs(gens) + m = [] + for i in ipcgs: + m.append(collector.exponent_vector(i)) + assert Matrix(m).is_upper diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_perm_groups.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_perm_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..41be445fa806167687e47bdae8a1883ea30e9f0f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_perm_groups.py @@ -0,0 +1,1243 @@ +from sympy.core.containers import Tuple +from sympy.combinatorics.generators import rubik_cube_generators +from sympy.combinatorics.homomorphisms import is_isomorphic +from sympy.combinatorics.named_groups import SymmetricGroup, CyclicGroup,\ + DihedralGroup, AlternatingGroup, AbelianGroup, RubikGroup +from sympy.combinatorics.perm_groups import (PermutationGroup, + _orbit_transversal, Coset, SymmetricPermutationGroup) +from sympy.combinatorics.permutations import Permutation +from sympy.combinatorics.polyhedron import tetrahedron as Tetra, cube +from sympy.combinatorics.testutil import _verify_bsgs, _verify_centralizer,\ + _verify_normal_closure +from sympy.testing.pytest import skip, XFAIL, slow + +rmul = Permutation.rmul + + +def test_has(): + a = Permutation([1, 0]) + G = PermutationGroup([a]) + assert 
G.is_abelian + a = Permutation([2, 0, 1]) + b = Permutation([2, 1, 0]) + G = PermutationGroup([a, b]) + assert not G.is_abelian + + G = PermutationGroup([a]) + assert G.has(a) + assert not G.has(b) + + a = Permutation([2, 0, 1, 3, 4, 5]) + b = Permutation([0, 2, 1, 3, 4]) + assert PermutationGroup(a, b).degree == \ + PermutationGroup(a, b).degree == 6 + + g = PermutationGroup(Permutation(0, 2, 1)) + assert Tuple(1, g).has(g) + + +def test_generate(): + a = Permutation([1, 0]) + g = list(PermutationGroup([a]).generate()) + assert g == [Permutation([0, 1]), Permutation([1, 0])] + assert len(list(PermutationGroup(Permutation((0, 1))).generate())) == 1 + g = PermutationGroup([a]).generate(method='dimino') + assert list(g) == [Permutation([0, 1]), Permutation([1, 0])] + a = Permutation([2, 0, 1]) + b = Permutation([2, 1, 0]) + G = PermutationGroup([a, b]) + g = G.generate() + v1 = [p.array_form for p in list(g)] + v1.sort() + assert v1 == [[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, + 1], [2, 1, 0]] + v2 = list(G.generate(method='dimino', af=True)) + assert v1 == sorted(v2) + a = Permutation([2, 0, 1, 3, 4, 5]) + b = Permutation([2, 1, 3, 4, 5, 0]) + g = PermutationGroup([a, b]).generate(af=True) + assert len(list(g)) == 360 + + +def test_order(): + a = Permutation([2, 0, 1, 3, 4, 5, 6, 7, 8, 9]) + b = Permutation([2, 1, 3, 4, 5, 6, 7, 8, 9, 0]) + g = PermutationGroup([a, b]) + assert g.order() == 1814400 + assert PermutationGroup().order() == 1 + + +def test_equality(): + p_1 = Permutation(0, 1, 3) + p_2 = Permutation(0, 2, 3) + p_3 = Permutation(0, 1, 2) + p_4 = Permutation(0, 1, 3) + g_1 = PermutationGroup(p_1, p_2) + g_2 = PermutationGroup(p_3, p_4) + g_3 = PermutationGroup(p_2, p_1) + g_4 = PermutationGroup(p_1, p_2) + + assert g_1 != g_2 + assert g_1.generators != g_2.generators + assert g_1.equals(g_2) + assert g_1 != g_3 + assert g_1.equals(g_3) + assert g_1 == g_4 + + +def test_stabilizer(): + S = SymmetricGroup(2) + H = S.stabilizer(0) + assert H.generators == [Permutation(1)] + a = Permutation([2, 0, 1, 3, 4, 5]) + b = Permutation([2, 1, 3, 4, 5, 0]) + G = PermutationGroup([a, b]) + G0 = G.stabilizer(0) + assert G0.order() == 60 + + gens_cube = [[1, 3, 5, 7, 0, 2, 4, 6], [1, 3, 0, 2, 5, 7, 4, 6]] + gens = [Permutation(p) for p in gens_cube] + G = PermutationGroup(gens) + G2 = G.stabilizer(2) + assert G2.order() == 6 + G2_1 = G2.stabilizer(1) + v = list(G2_1.generate(af=True)) + assert v == [[0, 1, 2, 3, 4, 5, 6, 7], [3, 1, 2, 0, 7, 5, 6, 4]] + + gens = ( + (1, 2, 0, 4, 5, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19), + (0, 1, 2, 3, 4, 5, 19, 6, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 7, 17, 18), + (0, 1, 2, 3, 4, 5, 6, 7, 9, 18, 16, 11, 12, 13, 14, 15, 8, 17, 10, 19)) + gens = [Permutation(p) for p in gens] + G = PermutationGroup(gens) + G2 = G.stabilizer(2) + assert G2.order() == 181440 + S = SymmetricGroup(3) + assert [G.order() for G in S.basic_stabilizers] == [6, 2] + + +def test_center(): + # the center of the dihedral group D_n is of order 2 for even n + for i in (4, 6, 10): + D = DihedralGroup(i) + assert (D.center()).order() == 2 + # the center of the dihedral group D_n is of order 1 for odd n>2 + for i in (3, 5, 7): + D = DihedralGroup(i) + assert (D.center()).order() == 1 + # the center of an abelian group is the group itself + for i in (2, 3, 5): + for j in (1, 5, 7): + for k in (1, 1, 11): + G = AbelianGroup(i, j, k) + assert G.center().is_subgroup(G) + # the center of a nonabelian simple group is trivial + for i in(1, 5, 9): + A = AlternatingGroup(i) + 
assert (A.center()).order() == 1 + # brute-force verifications + D = DihedralGroup(5) + A = AlternatingGroup(3) + C = CyclicGroup(4) + G.is_subgroup(D*A*C) + assert _verify_centralizer(G, G) + + +def test_centralizer(): + # the centralizer of the trivial group is the entire group + S = SymmetricGroup(2) + assert S.centralizer(Permutation(list(range(2)))).is_subgroup(S) + A = AlternatingGroup(5) + assert A.centralizer(Permutation(list(range(5)))).is_subgroup(A) + # a centralizer in the trivial group is the trivial group itself + triv = PermutationGroup([Permutation([0, 1, 2, 3])]) + D = DihedralGroup(4) + assert triv.centralizer(D).is_subgroup(triv) + # brute-force verifications for centralizers of groups + for i in (4, 5, 6): + S = SymmetricGroup(i) + A = AlternatingGroup(i) + C = CyclicGroup(i) + D = DihedralGroup(i) + for gp in (S, A, C, D): + for gp2 in (S, A, C, D): + if not gp2.is_subgroup(gp): + assert _verify_centralizer(gp, gp2) + # verify the centralizer for all elements of several groups + S = SymmetricGroup(5) + elements = list(S.generate_dimino()) + for element in elements: + assert _verify_centralizer(S, element) + A = AlternatingGroup(5) + elements = list(A.generate_dimino()) + for element in elements: + assert _verify_centralizer(A, element) + D = DihedralGroup(7) + elements = list(D.generate_dimino()) + for element in elements: + assert _verify_centralizer(D, element) + # verify centralizers of small groups within small groups + small = [] + for i in (1, 2, 3): + small.append(SymmetricGroup(i)) + small.append(AlternatingGroup(i)) + small.append(DihedralGroup(i)) + small.append(CyclicGroup(i)) + for gp in small: + for gp2 in small: + if gp.degree == gp2.degree: + assert _verify_centralizer(gp, gp2) + + +def test_coset_rank(): + gens_cube = [[1, 3, 5, 7, 0, 2, 4, 6], [1, 3, 0, 2, 5, 7, 4, 6]] + gens = [Permutation(p) for p in gens_cube] + G = PermutationGroup(gens) + i = 0 + for h in G.generate(af=True): + rk = G.coset_rank(h) + assert rk == i + h1 = G.coset_unrank(rk, af=True) + assert h == h1 + i += 1 + assert G.coset_unrank(48) is None + assert G.coset_unrank(G.coset_rank(gens[0])) == gens[0] + + +def test_coset_factor(): + a = Permutation([0, 2, 1]) + G = PermutationGroup([a]) + c = Permutation([2, 1, 0]) + assert not G.coset_factor(c) + assert G.coset_rank(c) is None + + a = Permutation([2, 0, 1, 3, 4, 5]) + b = Permutation([2, 1, 3, 4, 5, 0]) + g = PermutationGroup([a, b]) + assert g.order() == 360 + d = Permutation([1, 0, 2, 3, 4, 5]) + assert not g.coset_factor(d.array_form) + assert not g.contains(d) + assert Permutation(2) in G + c = Permutation([1, 0, 2, 3, 5, 4]) + v = g.coset_factor(c, True) + tr = g.basic_transversals + p = Permutation.rmul(*[tr[i][v[i]] for i in range(len(g.base))]) + assert p == c + v = g.coset_factor(c) + p = Permutation.rmul(*v) + assert p == c + assert g.contains(c) + G = PermutationGroup([Permutation([2, 1, 0])]) + p = Permutation([1, 0, 2]) + assert G.coset_factor(p) == [] + + +def test_orbits(): + a = Permutation([2, 0, 1]) + b = Permutation([2, 1, 0]) + g = PermutationGroup([a, b]) + assert g.orbit(0) == {0, 1, 2} + assert g.orbits() == [{0, 1, 2}] + assert g.is_transitive() and g.is_transitive(strict=False) + assert g.orbit_transversal(0) == \ + [Permutation( + [0, 1, 2]), Permutation([2, 0, 1]), Permutation([1, 2, 0])] + assert g.orbit_transversal(0, True) == \ + [(0, Permutation([0, 1, 2])), (2, Permutation([2, 0, 1])), + (1, Permutation([1, 2, 0]))] + + G = DihedralGroup(6) + transversal, slps = _orbit_transversal(G.degree, 
G.generators, 0, True, slp=True) + for i, t in transversal: + slp = slps[i] + w = G.identity + for s in slp: + w = G.generators[s]*w + assert w == t + + a = Permutation(list(range(1, 100)) + [0]) + G = PermutationGroup([a]) + assert [min(o) for o in G.orbits()] == [0] + G = PermutationGroup(rubik_cube_generators()) + assert [min(o) for o in G.orbits()] == [0, 1] + assert not G.is_transitive() and not G.is_transitive(strict=False) + G = PermutationGroup([Permutation(0, 1, 3), Permutation(3)(0, 1)]) + assert not G.is_transitive() and G.is_transitive(strict=False) + assert PermutationGroup( + Permutation(3)).is_transitive(strict=False) is False + + +def test_is_normal(): + gens_s5 = [Permutation(p) for p in [[1, 2, 3, 4, 0], [2, 1, 4, 0, 3]]] + G1 = PermutationGroup(gens_s5) + assert G1.order() == 120 + gens_a5 = [Permutation(p) for p in [[1, 0, 3, 2, 4], [2, 1, 4, 3, 0]]] + G2 = PermutationGroup(gens_a5) + assert G2.order() == 60 + assert G2.is_normal(G1) + gens3 = [Permutation(p) for p in [[2, 1, 3, 0, 4], [1, 2, 0, 3, 4]]] + G3 = PermutationGroup(gens3) + assert not G3.is_normal(G1) + assert G3.order() == 12 + G4 = G1.normal_closure(G3.generators) + assert G4.order() == 60 + gens5 = [Permutation(p) for p in [[1, 2, 3, 0, 4], [1, 2, 0, 3, 4]]] + G5 = PermutationGroup(gens5) + assert G5.order() == 24 + G6 = G1.normal_closure(G5.generators) + assert G6.order() == 120 + assert G1.is_subgroup(G6) + assert not G1.is_subgroup(G4) + assert G2.is_subgroup(G4) + I5 = PermutationGroup(Permutation(4)) + assert I5.is_normal(G5) + assert I5.is_normal(G6, strict=False) + p1 = Permutation([1, 0, 2, 3, 4]) + p2 = Permutation([0, 1, 2, 4, 3]) + p3 = Permutation([3, 4, 2, 1, 0]) + id_ = Permutation([0, 1, 2, 3, 4]) + H = PermutationGroup([p1, p3]) + H_n1 = PermutationGroup([p1, p2]) + H_n2_1 = PermutationGroup(p1) + H_n2_2 = PermutationGroup(p2) + H_id = PermutationGroup(id_) + assert H_n1.is_normal(H) + assert H_n2_1.is_normal(H_n1) + assert H_n2_2.is_normal(H_n1) + assert H_id.is_normal(H_n2_1) + assert H_id.is_normal(H_n1) + assert H_id.is_normal(H) + assert not H_n2_1.is_normal(H) + assert not H_n2_2.is_normal(H) + + +def test_eq(): + a = [[1, 2, 0, 3, 4, 5], [1, 0, 2, 3, 4, 5], [2, 1, 0, 3, 4, 5], [ + 1, 2, 0, 3, 4, 5]] + a = [Permutation(p) for p in a + [[1, 2, 3, 4, 5, 0]]] + g = Permutation([1, 2, 3, 4, 5, 0]) + G1, G2, G3 = [PermutationGroup(x) for x in [a[:2], a[2:4], [g, g**2]]] + assert G1.order() == G2.order() == G3.order() == 6 + assert G1.is_subgroup(G2) + assert not G1.is_subgroup(G3) + G4 = PermutationGroup([Permutation([0, 1])]) + assert not G1.is_subgroup(G4) + assert G4.is_subgroup(G1, 0) + assert PermutationGroup(g, g).is_subgroup(PermutationGroup(g)) + assert SymmetricGroup(3).is_subgroup(SymmetricGroup(4), 0) + assert SymmetricGroup(3).is_subgroup(SymmetricGroup(3)*CyclicGroup(5), 0) + assert not CyclicGroup(5).is_subgroup(SymmetricGroup(3)*CyclicGroup(5), 0) + assert CyclicGroup(3).is_subgroup(SymmetricGroup(3)*CyclicGroup(5), 0) + + +def test_derived_subgroup(): + a = Permutation([1, 0, 2, 4, 3]) + b = Permutation([0, 1, 3, 2, 4]) + G = PermutationGroup([a, b]) + C = G.derived_subgroup() + assert C.order() == 3 + assert C.is_normal(G) + assert C.is_subgroup(G, 0) + assert not G.is_subgroup(C, 0) + gens_cube = [[1, 3, 5, 7, 0, 2, 4, 6], [1, 3, 0, 2, 5, 7, 4, 6]] + gens = [Permutation(p) for p in gens_cube] + G = PermutationGroup(gens) + C = G.derived_subgroup() + assert C.order() == 12 + + +def test_is_solvable(): + a = Permutation([1, 2, 0]) + b = Permutation([1, 0, 2]) + G = 
PermutationGroup([a, b]) + assert G.is_solvable + G = PermutationGroup([a]) + assert G.is_solvable + a = Permutation([1, 2, 3, 4, 0]) + b = Permutation([1, 0, 2, 3, 4]) + G = PermutationGroup([a, b]) + assert not G.is_solvable + P = SymmetricGroup(10) + S = P.sylow_subgroup(3) + assert S.is_solvable + +def test_rubik1(): + gens = rubik_cube_generators() + gens1 = [gens[-1]] + [p**2 for p in gens[1:]] + G1 = PermutationGroup(gens1) + assert G1.order() == 19508428800 + gens2 = [p**2 for p in gens] + G2 = PermutationGroup(gens2) + assert G2.order() == 663552 + assert G2.is_subgroup(G1, 0) + C1 = G1.derived_subgroup() + assert C1.order() == 4877107200 + assert C1.is_subgroup(G1, 0) + assert not G2.is_subgroup(C1, 0) + + G = RubikGroup(2) + assert G.order() == 3674160 + + +@XFAIL +def test_rubik(): + skip('takes too much time') + G = PermutationGroup(rubik_cube_generators()) + assert G.order() == 43252003274489856000 + G1 = PermutationGroup(G[:3]) + assert G1.order() == 170659735142400 + assert not G1.is_normal(G) + G2 = G.normal_closure(G1.generators) + assert G2.is_subgroup(G) + + +def test_direct_product(): + C = CyclicGroup(4) + D = DihedralGroup(4) + G = C*C*C + assert G.order() == 64 + assert G.degree == 12 + assert len(G.orbits()) == 3 + assert G.is_abelian is True + H = D*C + assert H.order() == 32 + assert H.is_abelian is False + + +def test_orbit_rep(): + G = DihedralGroup(6) + assert G.orbit_rep(1, 3) in [Permutation([2, 3, 4, 5, 0, 1]), + Permutation([4, 3, 2, 1, 0, 5])] + H = CyclicGroup(4)*G + assert H.orbit_rep(1, 5) is False + + +def test_schreier_vector(): + G = CyclicGroup(50) + v = [0]*50 + v[23] = -1 + assert G.schreier_vector(23) == v + H = DihedralGroup(8) + assert H.schreier_vector(2) == [0, 1, -1, 0, 0, 1, 0, 0] + L = SymmetricGroup(4) + assert L.schreier_vector(1) == [1, -1, 0, 0] + + +def test_random_pr(): + D = DihedralGroup(6) + r = 11 + n = 3 + _random_prec_n = {} + _random_prec_n[0] = {'s': 7, 't': 3, 'x': 2, 'e': -1} + _random_prec_n[1] = {'s': 5, 't': 5, 'x': 1, 'e': -1} + _random_prec_n[2] = {'s': 3, 't': 4, 'x': 2, 'e': 1} + D._random_pr_init(r, n, _random_prec_n=_random_prec_n) + assert D._random_gens[11] == [0, 1, 2, 3, 4, 5] + _random_prec = {'s': 2, 't': 9, 'x': 1, 'e': -1} + assert D.random_pr(_random_prec=_random_prec) == \ + Permutation([0, 5, 4, 3, 2, 1]) + + +def test_is_alt_sym(): + G = DihedralGroup(10) + assert G.is_alt_sym() is False + assert G._eval_is_alt_sym_naive() is False + assert G._eval_is_alt_sym_naive(only_alt=True) is False + assert G._eval_is_alt_sym_naive(only_sym=True) is False + + S = SymmetricGroup(10) + assert S._eval_is_alt_sym_naive() is True + assert S._eval_is_alt_sym_naive(only_alt=True) is False + assert S._eval_is_alt_sym_naive(only_sym=True) is True + + N_eps = 10 + _random_prec = {'N_eps': N_eps, + 0: Permutation([[2], [1, 4], [0, 6, 7, 8, 9, 3, 5]]), + 1: Permutation([[1, 8, 7, 6, 3, 5, 2, 9], [0, 4]]), + 2: Permutation([[5, 8], [4, 7], [0, 1, 2, 3, 6, 9]]), + 3: Permutation([[3], [0, 8, 2, 7, 4, 1, 6, 9, 5]]), + 4: Permutation([[8], [4, 7, 9], [3, 6], [0, 5, 1, 2]]), + 5: Permutation([[6], [0, 2, 4, 5, 1, 8, 3, 9, 7]]), + 6: Permutation([[6, 9, 8], [4, 5], [1, 3, 7], [0, 2]]), + 7: Permutation([[4], [0, 2, 9, 1, 3, 8, 6, 5, 7]]), + 8: Permutation([[1, 5, 6, 3], [0, 2, 7, 8, 4, 9]]), + 9: Permutation([[8], [6, 7], [2, 3, 4, 5], [0, 1, 9]])} + assert S.is_alt_sym(_random_prec=_random_prec) is True + + A = AlternatingGroup(10) + assert A._eval_is_alt_sym_naive() is True + assert A._eval_is_alt_sym_naive(only_alt=True) is 
True + assert A._eval_is_alt_sym_naive(only_sym=True) is False + + _random_prec = {'N_eps': N_eps, + 0: Permutation([[1, 6, 4, 2, 7, 8, 5, 9, 3], [0]]), + 1: Permutation([[1], [0, 5, 8, 4, 9, 2, 3, 6, 7]]), + 2: Permutation([[1, 9, 8, 3, 2, 5], [0, 6, 7, 4]]), + 3: Permutation([[6, 8, 9], [4, 5], [1, 3, 7, 2], [0]]), + 4: Permutation([[8], [5], [4], [2, 6, 9, 3], [1], [0, 7]]), + 5: Permutation([[3, 6], [0, 8, 1, 7, 5, 9, 4, 2]]), + 6: Permutation([[5], [2, 9], [1, 8, 3], [0, 4, 7, 6]]), + 7: Permutation([[1, 8, 4, 7, 2, 3], [0, 6, 9, 5]]), + 8: Permutation([[5, 8, 7], [3], [1, 4, 2, 6], [0, 9]]), + 9: Permutation([[4, 9, 6], [3, 8], [1, 2], [0, 5, 7]])} + assert A.is_alt_sym(_random_prec=_random_prec) is False + + G = PermutationGroup( + Permutation(1, 3, size=8)(0, 2, 4, 6), + Permutation(5, 7, size=8)(0, 2, 4, 6)) + assert G.is_alt_sym() is False + + # Tests for monte-carlo c_n parameter setting, and which guarantees + # to give False. + G = DihedralGroup(10) + assert G._eval_is_alt_sym_monte_carlo() is False + G = DihedralGroup(20) + assert G._eval_is_alt_sym_monte_carlo() is False + + # A dry-running test to check if it looks up for the updated cache. + G = DihedralGroup(6) + G.is_alt_sym() + assert G.is_alt_sym() is False + + +def test_minimal_block(): + D = DihedralGroup(6) + block_system = D.minimal_block([0, 3]) + for i in range(3): + assert block_system[i] == block_system[i + 3] + S = SymmetricGroup(6) + assert S.minimal_block([0, 1]) == [0, 0, 0, 0, 0, 0] + + assert Tetra.pgroup.minimal_block([0, 1]) == [0, 0, 0, 0] + + P1 = PermutationGroup(Permutation(1, 5)(2, 4), Permutation(0, 1, 2, 3, 4, 5)) + P2 = PermutationGroup(Permutation(0, 1, 2, 3, 4, 5), Permutation(1, 5)(2, 4)) + assert P1.minimal_block([0, 2]) == [0, 1, 0, 1, 0, 1] + assert P2.minimal_block([0, 2]) == [0, 1, 0, 1, 0, 1] + + +def test_minimal_blocks(): + P = PermutationGroup(Permutation(1, 5)(2, 4), Permutation(0, 1, 2, 3, 4, 5)) + assert P.minimal_blocks() == [[0, 1, 0, 1, 0, 1], [0, 1, 2, 0, 1, 2]] + + P = SymmetricGroup(5) + assert P.minimal_blocks() == [[0]*5] + + P = PermutationGroup(Permutation(0, 3)) + assert P.minimal_blocks() is False + + +def test_max_div(): + S = SymmetricGroup(10) + assert S.max_div == 5 + + +def test_is_primitive(): + S = SymmetricGroup(5) + assert S.is_primitive() is True + C = CyclicGroup(7) + assert C.is_primitive() is True + + a = Permutation(0, 1, 2, size=6) + b = Permutation(3, 4, 5, size=6) + G = PermutationGroup(a, b) + assert G.is_primitive() is False + + +def test_random_stab(): + S = SymmetricGroup(5) + _random_el = Permutation([1, 3, 2, 0, 4]) + _random_prec = {'rand': _random_el} + g = S.random_stab(2, _random_prec=_random_prec) + assert g == Permutation([1, 3, 2, 0, 4]) + h = S.random_stab(1) + assert h(1) == 1 + + +def test_transitivity_degree(): + perm = Permutation([1, 2, 0]) + C = PermutationGroup([perm]) + assert C.transitivity_degree == 1 + gen1 = Permutation([1, 2, 0, 3, 4]) + gen2 = Permutation([1, 2, 3, 4, 0]) + # alternating group of degree 5 + Alt = PermutationGroup([gen1, gen2]) + assert Alt.transitivity_degree == 3 + + +def test_schreier_sims_random(): + assert sorted(Tetra.pgroup.base) == [0, 1] + + S = SymmetricGroup(3) + base = [0, 1] + strong_gens = [Permutation([1, 2, 0]), Permutation([1, 0, 2]), + Permutation([0, 2, 1])] + assert S.schreier_sims_random(base, strong_gens, 5) == (base, strong_gens) + D = DihedralGroup(3) + _random_prec = {'g': [Permutation([2, 0, 1]), Permutation([1, 2, 0]), + Permutation([1, 0, 2])]} + base = [0, 1] + strong_gens = 
[Permutation([1, 2, 0]), Permutation([2, 1, 0]), + Permutation([0, 2, 1])] + assert D.schreier_sims_random([], D.generators, 2, + _random_prec=_random_prec) == (base, strong_gens) + + +def test_baseswap(): + S = SymmetricGroup(4) + S.schreier_sims() + base = S.base + strong_gens = S.strong_gens + assert base == [0, 1, 2] + deterministic = S.baseswap(base, strong_gens, 1, randomized=False) + randomized = S.baseswap(base, strong_gens, 1) + assert deterministic[0] == [0, 2, 1] + assert _verify_bsgs(S, deterministic[0], deterministic[1]) is True + assert randomized[0] == [0, 2, 1] + assert _verify_bsgs(S, randomized[0], randomized[1]) is True + + +def test_schreier_sims_incremental(): + identity = Permutation([0, 1, 2, 3, 4]) + TrivialGroup = PermutationGroup([identity]) + base, strong_gens = TrivialGroup.schreier_sims_incremental(base=[0, 1, 2]) + assert _verify_bsgs(TrivialGroup, base, strong_gens) is True + S = SymmetricGroup(5) + base, strong_gens = S.schreier_sims_incremental(base=[0, 1, 2]) + assert _verify_bsgs(S, base, strong_gens) is True + D = DihedralGroup(2) + base, strong_gens = D.schreier_sims_incremental(base=[1]) + assert _verify_bsgs(D, base, strong_gens) is True + A = AlternatingGroup(7) + gens = A.generators[:] + gen0 = gens[0] + gen1 = gens[1] + gen1 = rmul(gen1, ~gen0) + gen0 = rmul(gen0, gen1) + gen1 = rmul(gen0, gen1) + base, strong_gens = A.schreier_sims_incremental(base=[0, 1], gens=gens) + assert _verify_bsgs(A, base, strong_gens) is True + C = CyclicGroup(11) + gen = C.generators[0] + base, strong_gens = C.schreier_sims_incremental(gens=[gen**3]) + assert _verify_bsgs(C, base, strong_gens) is True + + +def _subgroup_search(i, j, k): + prop_true = lambda x: True + prop_fix_points = lambda x: [x(point) for point in points] == points + prop_comm_g = lambda x: rmul(x, g) == rmul(g, x) + prop_even = lambda x: x.is_even + for i in range(i, j, k): + S = SymmetricGroup(i) + A = AlternatingGroup(i) + C = CyclicGroup(i) + Sym = S.subgroup_search(prop_true) + assert Sym.is_subgroup(S) + Alt = S.subgroup_search(prop_even) + assert Alt.is_subgroup(A) + Sym = S.subgroup_search(prop_true, init_subgroup=C) + assert Sym.is_subgroup(S) + points = [7] + assert S.stabilizer(7).is_subgroup(S.subgroup_search(prop_fix_points)) + points = [3, 4] + assert S.stabilizer(3).stabilizer(4).is_subgroup( + S.subgroup_search(prop_fix_points)) + points = [3, 5] + fix35 = A.subgroup_search(prop_fix_points) + points = [5] + fix5 = A.subgroup_search(prop_fix_points) + assert A.subgroup_search(prop_fix_points, init_subgroup=fix35 + ).is_subgroup(fix5) + base, strong_gens = A.schreier_sims_incremental() + g = A.generators[0] + comm_g = \ + A.subgroup_search(prop_comm_g, base=base, strong_gens=strong_gens) + assert _verify_bsgs(comm_g, base, comm_g.generators) is True + assert [prop_comm_g(gen) is True for gen in comm_g.generators] + + +def test_subgroup_search(): + _subgroup_search(10, 15, 2) + + +@XFAIL +def test_subgroup_search2(): + skip('takes too much time') + _subgroup_search(16, 17, 1) + + +def test_normal_closure(): + # the normal closure of the trivial group is trivial + S = SymmetricGroup(3) + identity = Permutation([0, 1, 2]) + closure = S.normal_closure(identity) + assert closure.is_trivial + # the normal closure of the entire group is the entire group + A = AlternatingGroup(4) + assert A.normal_closure(A).is_subgroup(A) + # brute-force verifications for subgroups + for i in (3, 4, 5): + S = SymmetricGroup(i) + A = AlternatingGroup(i) + D = DihedralGroup(i) + C = CyclicGroup(i) + for gp in (A, 
D, C): + assert _verify_normal_closure(S, gp) + # brute-force verifications for all elements of a group + S = SymmetricGroup(5) + elements = list(S.generate_dimino()) + for element in elements: + assert _verify_normal_closure(S, element) + # small groups + small = [] + for i in (1, 2, 3): + small.append(SymmetricGroup(i)) + small.append(AlternatingGroup(i)) + small.append(DihedralGroup(i)) + small.append(CyclicGroup(i)) + for gp in small: + for gp2 in small: + if gp2.is_subgroup(gp, 0) and gp2.degree == gp.degree: + assert _verify_normal_closure(gp, gp2) + + +def test_derived_series(): + # the derived series of the trivial group consists only of the trivial group + triv = PermutationGroup([Permutation([0, 1, 2])]) + assert triv.derived_series()[0].is_subgroup(triv) + # the derived series for a simple group consists only of the group itself + for i in (5, 6, 7): + A = AlternatingGroup(i) + assert A.derived_series()[0].is_subgroup(A) + # the derived series for S_4 is S_4 > A_4 > K_4 > triv + S = SymmetricGroup(4) + series = S.derived_series() + assert series[1].is_subgroup(AlternatingGroup(4)) + assert series[2].is_subgroup(DihedralGroup(2)) + assert series[3].is_trivial + + +def test_lower_central_series(): + # the lower central series of the trivial group consists of the trivial + # group + triv = PermutationGroup([Permutation([0, 1, 2])]) + assert triv.lower_central_series()[0].is_subgroup(triv) + # the lower central series of a simple group consists of the group itself + for i in (5, 6, 7): + A = AlternatingGroup(i) + assert A.lower_central_series()[0].is_subgroup(A) + # GAP-verified example + S = SymmetricGroup(6) + series = S.lower_central_series() + assert len(series) == 2 + assert series[1].is_subgroup(AlternatingGroup(6)) + + +def test_commutator(): + # the commutator of the trivial group and the trivial group is trivial + S = SymmetricGroup(3) + triv = PermutationGroup([Permutation([0, 1, 2])]) + assert S.commutator(triv, triv).is_subgroup(triv) + # the commutator of the trivial group and any other group is again trivial + A = AlternatingGroup(3) + assert S.commutator(triv, A).is_subgroup(triv) + # the commutator is commutative + for i in (3, 4, 5): + S = SymmetricGroup(i) + A = AlternatingGroup(i) + D = DihedralGroup(i) + assert S.commutator(A, D).is_subgroup(S.commutator(D, A)) + # the commutator of an abelian group is trivial + S = SymmetricGroup(7) + A1 = AbelianGroup(2, 5) + A2 = AbelianGroup(3, 4) + triv = PermutationGroup([Permutation([0, 1, 2, 3, 4, 5, 6])]) + assert S.commutator(A1, A1).is_subgroup(triv) + assert S.commutator(A2, A2).is_subgroup(triv) + # examples calculated by hand + S = SymmetricGroup(3) + A = AlternatingGroup(3) + assert S.commutator(A, S).is_subgroup(A) + + +def test_is_nilpotent(): + # every abelian group is nilpotent + for i in (1, 2, 3): + C = CyclicGroup(i) + Ab = AbelianGroup(i, i + 2) + assert C.is_nilpotent + assert Ab.is_nilpotent + Ab = AbelianGroup(5, 7, 10) + assert Ab.is_nilpotent + # A_5 is not solvable and thus not nilpotent + assert AlternatingGroup(5).is_nilpotent is False + + +def test_is_trivial(): + for i in range(5): + triv = PermutationGroup([Permutation(list(range(i)))]) + assert triv.is_trivial + + +def test_pointwise_stabilizer(): + S = SymmetricGroup(2) + stab = S.pointwise_stabilizer([0]) + assert stab.generators == [Permutation(1)] + S = SymmetricGroup(5) + points = [] + stab = S + for point in (2, 0, 3, 4, 1): + stab = stab.stabilizer(point) + points.append(point) + assert S.pointwise_stabilizer(points).is_subgroup(stab) + + 
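# Editor's aside — an illustrative sketch, not part of the diffed test file above.
# The test_pointwise_stabilizer test just shown relies on pointwise_stabilizer(points)
# agreeing with a chain of one-point stabilizers. A minimal standalone reproduction,
# assuming only a stock SymPy installation:
from sympy.combinatorics.named_groups import SymmetricGroup

S = SymmetricGroup(5)
chained = S
for point in (2, 0, 3, 4, 1):
    chained = chained.stabilizer(point)   # fix one point at a time
pointwise = S.pointwise_stabilizer([2, 0, 3, 4, 1])
# both constructions yield the same (here trivial) subgroup of S_5
assert pointwise.is_subgroup(chained) and chained.is_subgroup(pointwise)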
+def test_make_perm(): + assert cube.pgroup.make_perm(5, seed=list(range(5))) == \ + Permutation([4, 7, 6, 5, 0, 3, 2, 1]) + assert cube.pgroup.make_perm(7, seed=list(range(7))) == \ + Permutation([6, 7, 3, 2, 5, 4, 0, 1]) + + +def test_elements(): + from sympy.sets.sets import FiniteSet + + p = Permutation(2, 3) + assert PermutationGroup(p).elements == {Permutation(3), Permutation(2, 3)} + assert FiniteSet(*PermutationGroup(p).elements) \ + == FiniteSet(Permutation(2, 3), Permutation(3)) + + +def test_is_group(): + assert PermutationGroup(Permutation(1,2), Permutation(2,4)).is_group is True + assert SymmetricGroup(4).is_group is True + + +def test_PermutationGroup(): + assert PermutationGroup() == PermutationGroup(Permutation()) + assert (PermutationGroup() == 0) is False + + +def test_coset_transvesal(): + G = AlternatingGroup(5) + H = PermutationGroup(Permutation(0,1,2),Permutation(1,2)(3,4)) + assert G.coset_transversal(H) == \ + [Permutation(4), Permutation(2, 3, 4), Permutation(2, 4, 3), + Permutation(1, 2, 4), Permutation(4)(1, 2, 3), Permutation(1, 3)(2, 4), + Permutation(0, 1, 2, 3, 4), Permutation(0, 1, 2, 4, 3), + Permutation(0, 1, 3, 2, 4), Permutation(0, 2, 4, 1, 3)] + + +def test_coset_table(): + G = PermutationGroup(Permutation(0,1,2,3), Permutation(0,1,2), + Permutation(0,4,2,7), Permutation(5,6), Permutation(0,7)); + H = PermutationGroup(Permutation(0,1,2,3), Permutation(0,7)) + assert G.coset_table(H) == \ + [[0, 0, 0, 0, 1, 2, 3, 3, 0, 0], [4, 5, 2, 5, 6, 0, 7, 7, 1, 1], + [5, 4, 5, 1, 0, 6, 8, 8, 6, 6], [3, 3, 3, 3, 7, 8, 0, 0, 3, 3], + [2, 1, 4, 4, 4, 4, 9, 9, 4, 4], [1, 2, 1, 2, 5, 5, 10, 10, 5, 5], + [6, 6, 6, 6, 2, 1, 11, 11, 2, 2], [9, 10, 8, 10, 11, 3, 1, 1, 7, 7], + [10, 9, 10, 7, 3, 11, 2, 2, 11, 11], [8, 7, 9, 9, 9, 9, 4, 4, 9, 9], + [7, 8, 7, 8, 10, 10, 5, 5, 10, 10], [11, 11, 11, 11, 8, 7, 6, 6, 8, 8]] + + +def test_subgroup(): + G = PermutationGroup(Permutation(0,1,2), Permutation(0,2,3)) + H = G.subgroup([Permutation(0,1,3)]) + assert H.is_subgroup(G) + + +def test_generator_product(): + G = SymmetricGroup(5) + p = Permutation(0, 2, 3)(1, 4) + gens = G.generator_product(p) + assert all(g in G.strong_gens for g in gens) + w = G.identity + for g in gens: + w = g*w + assert w == p + + +def test_sylow_subgroup(): + P = PermutationGroup(Permutation(1, 5)(2, 4), Permutation(0, 1, 2, 3, 4, 5)) + S = P.sylow_subgroup(2) + assert S.order() == 4 + + P = DihedralGroup(12) + S = P.sylow_subgroup(3) + assert S.order() == 3 + + P = PermutationGroup( + Permutation(1, 5)(2, 4), Permutation(0, 1, 2, 3, 4, 5), Permutation(0, 2)) + S = P.sylow_subgroup(3) + assert S.order() == 9 + S = P.sylow_subgroup(2) + assert S.order() == 8 + + P = SymmetricGroup(10) + S = P.sylow_subgroup(2) + assert S.order() == 256 + S = P.sylow_subgroup(3) + assert S.order() == 81 + S = P.sylow_subgroup(5) + assert S.order() == 25 + + # the length of the lower central series + # of a p-Sylow subgroup of Sym(n) grows with + # the highest exponent exp of p such + # that n >= p**exp + exp = 1 + length = 0 + for i in range(2, 9): + P = SymmetricGroup(i) + S = P.sylow_subgroup(2) + ls = S.lower_central_series() + if i // 2**exp > 0: + # length increases with exponent + assert len(ls) > length + length = len(ls) + exp += 1 + else: + assert len(ls) == length + + G = SymmetricGroup(100) + S = G.sylow_subgroup(3) + assert G.order() % S.order() == 0 + assert G.order()/S.order() % 3 > 0 + + G = AlternatingGroup(100) + S = G.sylow_subgroup(2) + assert G.order() % S.order() == 0 + assert G.order()/S.order() % 2 > 
0 + + G = DihedralGroup(18) + S = G.sylow_subgroup(p=2) + assert S.order() == 4 + + G = DihedralGroup(50) + S = G.sylow_subgroup(p=2) + assert S.order() == 4 + + +@slow +def test_presentation(): + def _test(P): + G = P.presentation() + return G.order() == P.order() + + def _strong_test(P): + G = P.strong_presentation() + chk = len(G.generators) == len(P.strong_gens) + return chk and G.order() == P.order() + + P = PermutationGroup(Permutation(0,1,5,2)(3,7,4,6), Permutation(0,3,5,4)(1,6,2,7)) + assert _test(P) + + P = AlternatingGroup(5) + assert _test(P) + + P = SymmetricGroup(5) + assert _test(P) + + P = PermutationGroup( + [Permutation(0,3,1,2), Permutation(3)(0,1), Permutation(0,1)(2,3)]) + assert _strong_test(P) + + P = DihedralGroup(6) + assert _strong_test(P) + + a = Permutation(0,1)(2,3) + b = Permutation(0,2)(3,1) + c = Permutation(4,5) + P = PermutationGroup(c, a, b) + assert _strong_test(P) + + +def test_polycyclic(): + a = Permutation([0, 1, 2]) + b = Permutation([2, 1, 0]) + G = PermutationGroup([a, b]) + assert G.is_polycyclic is True + + a = Permutation([1, 2, 3, 4, 0]) + b = Permutation([1, 0, 2, 3, 4]) + G = PermutationGroup([a, b]) + assert G.is_polycyclic is False + + +def test_elementary(): + a = Permutation([1, 5, 2, 0, 3, 6, 4]) + G = PermutationGroup([a]) + assert G.is_elementary(7) is False + + a = Permutation(0, 1)(2, 3) + b = Permutation(0, 2)(3, 1) + G = PermutationGroup([a, b]) + assert G.is_elementary(2) is True + c = Permutation(4, 5, 6) + G = PermutationGroup([a, b, c]) + assert G.is_elementary(2) is False + + G = SymmetricGroup(4).sylow_subgroup(2) + assert G.is_elementary(2) is False + H = AlternatingGroup(4).sylow_subgroup(2) + assert H.is_elementary(2) is True + + +def test_perfect(): + G = AlternatingGroup(3) + assert G.is_perfect is False + G = AlternatingGroup(5) + assert G.is_perfect is True + + +def test_index(): + G = PermutationGroup(Permutation(0,1,2), Permutation(0,2,3)) + H = G.subgroup([Permutation(0,1,3)]) + assert G.index(H) == 4 + + +def test_cyclic(): + G = SymmetricGroup(2) + assert G.is_cyclic + G = AbelianGroup(3, 7) + assert G.is_cyclic + G = AbelianGroup(7, 7) + assert not G.is_cyclic + G = AlternatingGroup(3) + assert G.is_cyclic + G = AlternatingGroup(4) + assert not G.is_cyclic + + # Order less than 6 + G = PermutationGroup(Permutation(0, 1, 2), Permutation(0, 2, 1)) + assert G.is_cyclic + G = PermutationGroup( + Permutation(0, 1, 2, 3), + Permutation(0, 2)(1, 3) + ) + assert G.is_cyclic + G = PermutationGroup( + Permutation(3), + Permutation(0, 1)(2, 3), + Permutation(0, 2)(1, 3), + Permutation(0, 3)(1, 2) + ) + assert G.is_cyclic is False + + # Order 15 + G = PermutationGroup( + Permutation(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14), + Permutation(0, 2, 4, 6, 8, 10, 12, 14, 1, 3, 5, 7, 9, 11, 13) + ) + assert G.is_cyclic + + # Distinct prime orders + assert PermutationGroup._distinct_primes_lemma([3, 5]) is True + assert PermutationGroup._distinct_primes_lemma([5, 7]) is True + assert PermutationGroup._distinct_primes_lemma([2, 3]) is None + assert PermutationGroup._distinct_primes_lemma([3, 5, 7]) is None + assert PermutationGroup._distinct_primes_lemma([5, 7, 13]) is True + + G = PermutationGroup( + Permutation(0, 1, 2, 3), + Permutation(0, 2)(1, 3)) + assert G.is_cyclic + assert G._is_abelian + + # Non-abelian and therefore not cyclic + G = PermutationGroup(*SymmetricGroup(3).generators) + assert G.is_cyclic is False + + # Abelian and cyclic + G = PermutationGroup( + Permutation(0, 1, 2, 3), + Permutation(4, 5, 6) + ) + 
assert G.is_cyclic + + # Abelian but not cyclic + G = PermutationGroup( + Permutation(0, 1), + Permutation(2, 3), + Permutation(4, 5, 6) + ) + assert G.is_cyclic is False + + +def test_dihedral(): + G = SymmetricGroup(2) + assert G.is_dihedral + G = SymmetricGroup(3) + assert G.is_dihedral + + G = AbelianGroup(2, 2) + assert G.is_dihedral + G = CyclicGroup(4) + assert not G.is_dihedral + + G = AbelianGroup(3, 5) + assert not G.is_dihedral + G = AbelianGroup(2) + assert G.is_dihedral + G = AbelianGroup(6) + assert not G.is_dihedral + + # D6, generated by two adjacent flips + G = PermutationGroup( + Permutation(1, 5)(2, 4), + Permutation(0, 1)(3, 4)(2, 5)) + assert G.is_dihedral + + # D7, generated by a flip and a rotation + G = PermutationGroup( + Permutation(1, 6)(2, 5)(3, 4), + Permutation(0, 1, 2, 3, 4, 5, 6)) + assert G.is_dihedral + + # S4, presented by three generators, fails due to having exactly 9 + # elements of order 2: + G = PermutationGroup( + Permutation(0, 1), Permutation(0, 2), + Permutation(0, 3)) + assert not G.is_dihedral + + # D7, given by three generators + G = PermutationGroup( + Permutation(1, 6)(2, 5)(3, 4), + Permutation(2, 0)(3, 6)(4, 5), + Permutation(0, 1, 2, 3, 4, 5, 6)) + assert G.is_dihedral + + +def test_abelian_invariants(): + G = AbelianGroup(2, 3, 4) + assert G.abelian_invariants() == [2, 3, 4] + G=PermutationGroup([Permutation(1, 2, 3, 4), Permutation(1, 2), Permutation(5, 6)]) + assert G.abelian_invariants() == [2, 2] + G = AlternatingGroup(7) + assert G.abelian_invariants() == [] + G = AlternatingGroup(4) + assert G.abelian_invariants() == [3] + G = DihedralGroup(4) + assert G.abelian_invariants() == [2, 2] + + G = PermutationGroup([Permutation(1, 2, 3, 4, 5, 6, 7)]) + assert G.abelian_invariants() == [7] + G = DihedralGroup(12) + S = G.sylow_subgroup(3) + assert S.abelian_invariants() == [3] + G = PermutationGroup(Permutation(0, 1, 2), Permutation(0, 2, 3)) + assert G.abelian_invariants() == [3] + G = PermutationGroup([Permutation(0, 1), Permutation(0, 2, 4, 6)(1, 3, 5, 7)]) + assert G.abelian_invariants() == [2, 4] + G = SymmetricGroup(30) + S = G.sylow_subgroup(2) + assert S.abelian_invariants() == [2, 2, 2, 2, 2, 2, 2, 2, 2, 2] + S = G.sylow_subgroup(3) + assert S.abelian_invariants() == [3, 3, 3, 3] + S = G.sylow_subgroup(5) + assert S.abelian_invariants() == [5, 5, 5] + + +def test_composition_series(): + a = Permutation(1, 2, 3) + b = Permutation(1, 2) + G = PermutationGroup([a, b]) + comp_series = G.composition_series() + assert comp_series == G.derived_series() + # The first group in the composition series is always the group itself and + # the last group in the series is the trivial group. 
+ S = SymmetricGroup(4) + assert S.composition_series()[0] == S + assert len(S.composition_series()) == 5 + A = AlternatingGroup(4) + assert A.composition_series()[0] == A + assert len(A.composition_series()) == 4 + + # the composition series for C_8 is C_8 > C_4 > C_2 > triv + G = CyclicGroup(8) + series = G.composition_series() + assert is_isomorphic(series[1], CyclicGroup(4)) + assert is_isomorphic(series[2], CyclicGroup(2)) + assert series[3].is_trivial + + +def test_is_symmetric(): + a = Permutation(0, 1, 2) + b = Permutation(0, 1, size=3) + assert PermutationGroup(a, b).is_symmetric is True + + a = Permutation(0, 2, 1) + b = Permutation(1, 2, size=3) + assert PermutationGroup(a, b).is_symmetric is True + + a = Permutation(0, 1, 2, 3) + b = Permutation(0, 3)(1, 2) + assert PermutationGroup(a, b).is_symmetric is False + +def test_conjugacy_class(): + S = SymmetricGroup(4) + x = Permutation(1, 2, 3) + C = {Permutation(0, 1, 2, size = 4), Permutation(0, 1, 3), + Permutation(0, 2, 1, size = 4), Permutation(0, 2, 3), + Permutation(0, 3, 1), Permutation(0, 3, 2), + Permutation(1, 2, 3), Permutation(1, 3, 2)} + assert S.conjugacy_class(x) == C + +def test_conjugacy_classes(): + S = SymmetricGroup(3) + expected = [{Permutation(size = 3)}, + {Permutation(0, 1, size = 3), Permutation(0, 2), Permutation(1, 2)}, + {Permutation(0, 1, 2), Permutation(0, 2, 1)}] + computed = S.conjugacy_classes() + + assert len(expected) == len(computed) + assert all(e in computed for e in expected) + +def test_coset_class(): + a = Permutation(1, 2) + b = Permutation(0, 1) + G = PermutationGroup([a, b]) + #Creating right coset + rht_coset = G*a + #Checking whether it is left coset or right coset + assert rht_coset.is_right_coset + assert not rht_coset.is_left_coset + #Creating list representation of coset + list_repr = rht_coset.as_list() + expected = [Permutation(0, 2), Permutation(0, 2, 1), Permutation(1, 2), + Permutation(2), Permutation(2)(0, 1), Permutation(0, 1, 2)] + for ele in list_repr: + assert ele in expected + #Creating left coset + left_coset = a*G + #Checking whether it is left coset or right coset + assert not left_coset.is_right_coset + assert left_coset.is_left_coset + #Creating list representation of Coset + list_repr = left_coset.as_list() + expected = [Permutation(2)(0, 1), Permutation(0, 1, 2), Permutation(1, 2), + Permutation(2), Permutation(0, 2), Permutation(0, 2, 1)] + for ele in list_repr: + assert ele in expected + + G = PermutationGroup(Permutation(1, 2, 3, 4), Permutation(2, 3, 4)) + H = PermutationGroup(Permutation(1, 2, 3, 4)) + g = Permutation(1, 3)(2, 4) + rht_coset = Coset(g, H, G, dir='+') + assert rht_coset.is_right_coset + list_repr = rht_coset.as_list() + expected = [Permutation(1, 2, 3, 4), Permutation(4), Permutation(1, 3)(2, 4), + Permutation(1, 4, 3, 2)] + for ele in list_repr: + assert ele in expected + +def test_symmetricpermutationgroup(): + a = SymmetricPermutationGroup(5) + assert a.degree == 5 + assert a.order() == 120 + assert a.identity() == Permutation(4) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_permutations.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_permutations.py new file mode 100644 index 0000000000000000000000000000000000000000..6949d5e781e0cb4e27fc7cdd24862ae22d09cb01 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_permutations.py @@ -0,0 +1,562 @@ +from itertools import permutations + +from sympy.core.expr import unchanged +from 
sympy.core.numbers import Integer +from sympy.core.relational import Eq +from sympy.core.symbol import Symbol +from sympy.core.singleton import S +from sympy.combinatorics.permutations import \ + Permutation, _af_parity, _af_rmul, _af_rmuln, AppliedPermutation, Cycle +from sympy.printing import sstr, srepr, pretty, latex +from sympy.testing.pytest import raises, warns_deprecated_sympy + + +rmul = Permutation.rmul +a = Symbol('a', integer=True) + + +def test_Permutation(): + # don't auto fill 0 + raises(ValueError, lambda: Permutation([1])) + p = Permutation([0, 1, 2, 3]) + # call as bijective + assert [p(i) for i in range(p.size)] == list(p) + # call as operator + assert p(list(range(p.size))) == list(p) + # call as function + assert list(p(1, 2)) == [0, 2, 1, 3] + raises(TypeError, lambda: p(-1)) + raises(TypeError, lambda: p(5)) + # conversion to list + assert list(p) == list(range(4)) + assert Permutation(size=4) == Permutation(3) + assert Permutation(Permutation(3), size=5) == Permutation(4) + # cycle form with size + assert Permutation([[1, 2]], size=4) == Permutation([[1, 2], [0], [3]]) + # random generation + assert Permutation.random(2) in (Permutation([1, 0]), Permutation([0, 1])) + + p = Permutation([2, 5, 1, 6, 3, 0, 4]) + q = Permutation([[1], [0, 3, 5, 6, 2, 4]]) + assert len({p, p}) == 1 + r = Permutation([1, 3, 2, 0, 4, 6, 5]) + ans = Permutation(_af_rmuln(*[w.array_form for w in (p, q, r)])).array_form + assert rmul(p, q, r).array_form == ans + # make sure no other permutation of p, q, r could have given + # that answer + for a, b, c in permutations((p, q, r)): + if (a, b, c) == (p, q, r): + continue + assert rmul(a, b, c).array_form != ans + + assert p.support() == list(range(7)) + assert q.support() == [0, 2, 3, 4, 5, 6] + assert Permutation(p.cyclic_form).array_form == p.array_form + assert p.cardinality == 5040 + assert q.cardinality == 5040 + assert q.cycles == 2 + assert rmul(q, p) == Permutation([4, 6, 1, 2, 5, 3, 0]) + assert rmul(p, q) == Permutation([6, 5, 3, 0, 2, 4, 1]) + assert _af_rmul(p.array_form, q.array_form) == \ + [6, 5, 3, 0, 2, 4, 1] + + assert rmul(Permutation([[1, 2, 3], [0, 4]]), + Permutation([[1, 2, 4], [0], [3]])).cyclic_form == \ + [[0, 4, 2], [1, 3]] + assert q.array_form == [3, 1, 4, 5, 0, 6, 2] + assert q.cyclic_form == [[0, 3, 5, 6, 2, 4]] + assert q.full_cyclic_form == [[0, 3, 5, 6, 2, 4], [1]] + assert p.cyclic_form == [[0, 2, 1, 5], [3, 6, 4]] + t = p.transpositions() + assert t == [(0, 5), (0, 1), (0, 2), (3, 4), (3, 6)] + assert Permutation.rmul(*[Permutation(Cycle(*ti)) for ti in (t)]) + assert Permutation([1, 0]).transpositions() == [(0, 1)] + + assert p**13 == p + assert q**0 == Permutation(list(range(q.size))) + assert q**-2 == ~q**2 + assert q**2 == Permutation([5, 1, 0, 6, 3, 2, 4]) + assert q**3 == q**2*q + assert q**4 == q**2*q**2 + + a = Permutation(1, 3) + b = Permutation(2, 0, 3) + I = Permutation(3) + assert ~a == a**-1 + assert a*~a == I + assert a*b**-1 == a*~b + + ans = Permutation(0, 5, 3, 1, 6)(2, 4) + assert (p + q.rank()).rank() == ans.rank() + assert (p + q.rank())._rank == ans.rank() + assert (q + p.rank()).rank() == ans.rank() + raises(TypeError, lambda: p + Permutation(list(range(10)))) + + assert (p - q.rank()).rank() == Permutation(0, 6, 3, 1, 2, 5, 4).rank() + assert p.rank() - q.rank() < 0 # for coverage: make sure mod is used + assert (q - p.rank()).rank() == Permutation(1, 4, 6, 2)(3, 5).rank() + + assert p*q == Permutation(_af_rmuln(*[list(w) for w in (q, p)])) + assert p*Permutation([]) == p + assert 
Permutation([])*p == p + assert p*Permutation([[0, 1]]) == Permutation([2, 5, 0, 6, 3, 1, 4]) + assert Permutation([[0, 1]])*p == Permutation([5, 2, 1, 6, 3, 0, 4]) + + pq = p ^ q + assert pq == Permutation([5, 6, 0, 4, 1, 2, 3]) + assert pq == rmul(q, p, ~q) + qp = q ^ p + assert qp == Permutation([4, 3, 6, 2, 1, 5, 0]) + assert qp == rmul(p, q, ~p) + raises(ValueError, lambda: p ^ Permutation([])) + + assert p.commutator(q) == Permutation(0, 1, 3, 4, 6, 5, 2) + assert q.commutator(p) == Permutation(0, 2, 5, 6, 4, 3, 1) + assert p.commutator(q) == ~q.commutator(p) + raises(ValueError, lambda: p.commutator(Permutation([]))) + + assert len(p.atoms()) == 7 + assert q.atoms() == {0, 1, 2, 3, 4, 5, 6} + + assert p.inversion_vector() == [2, 4, 1, 3, 1, 0] + assert q.inversion_vector() == [3, 1, 2, 2, 0, 1] + + assert Permutation.from_inversion_vector(p.inversion_vector()) == p + assert Permutation.from_inversion_vector(q.inversion_vector()).array_form\ + == q.array_form + raises(ValueError, lambda: Permutation.from_inversion_vector([0, 2])) + assert Permutation(list(range(500, -1, -1))).inversions() == 125250 + + s = Permutation([0, 4, 1, 3, 2]) + assert s.parity() == 0 + _ = s.cyclic_form # needed to create a value for _cyclic_form + assert len(s._cyclic_form) != s.size and s.parity() == 0 + assert not s.is_odd + assert s.is_even + assert Permutation([0, 1, 4, 3, 2]).parity() == 1 + assert _af_parity([0, 4, 1, 3, 2]) == 0 + assert _af_parity([0, 1, 4, 3, 2]) == 1 + + s = Permutation([0]) + + assert s.is_Singleton + assert Permutation([]).is_Empty + + r = Permutation([3, 2, 1, 0]) + assert (r**2).is_Identity + + assert rmul(~p, p).is_Identity + assert (~p)**13 == Permutation([5, 2, 0, 4, 6, 1, 3]) + assert ~(r**2).is_Identity + assert p.max() == 6 + assert p.min() == 0 + + q = Permutation([[6], [5], [0, 1, 2, 3, 4]]) + + assert q.max() == 4 + assert q.min() == 0 + + p = Permutation([1, 5, 2, 0, 3, 6, 4]) + q = Permutation([[1, 2, 3, 5, 6], [0, 4]]) + + assert p.ascents() == [0, 3, 4] + assert q.ascents() == [1, 2, 4] + assert r.ascents() == [] + + assert p.descents() == [1, 2, 5] + assert q.descents() == [0, 3, 5] + assert Permutation(r.descents()).is_Identity + + assert p.inversions() == 7 + # test the merge-sort with a longer permutation + big = list(p) + list(range(p.max() + 1, p.max() + 130)) + assert Permutation(big).inversions() == 7 + assert p.signature() == -1 + assert q.inversions() == 11 + assert q.signature() == -1 + assert rmul(p, ~p).inversions() == 0 + assert rmul(p, ~p).signature() == 1 + + assert p.order() == 6 + assert q.order() == 10 + assert (p**(p.order())).is_Identity + + assert p.length() == 6 + assert q.length() == 7 + assert r.length() == 4 + + assert p.runs() == [[1, 5], [2], [0, 3, 6], [4]] + assert q.runs() == [[4], [2, 3, 5], [0, 6], [1]] + assert r.runs() == [[3], [2], [1], [0]] + + assert p.index() == 8 + assert q.index() == 8 + assert r.index() == 3 + + assert p.get_precedence_distance(q) == q.get_precedence_distance(p) + assert p.get_adjacency_distance(q) == p.get_adjacency_distance(q) + assert p.get_positional_distance(q) == p.get_positional_distance(q) + p = Permutation([0, 1, 2, 3]) + q = Permutation([3, 2, 1, 0]) + assert p.get_precedence_distance(q) == 6 + assert p.get_adjacency_distance(q) == 3 + assert p.get_positional_distance(q) == 8 + p = Permutation([0, 3, 1, 2, 4]) + q = Permutation.josephus(4, 5, 2) + assert p.get_adjacency_distance(q) == 3 + raises(ValueError, lambda: p.get_adjacency_distance(Permutation([]))) + raises(ValueError, lambda: 
p.get_positional_distance(Permutation([]))) + raises(ValueError, lambda: p.get_precedence_distance(Permutation([]))) + + a = [Permutation.unrank_nonlex(4, i) for i in range(5)] + iden = Permutation([0, 1, 2, 3]) + for i in range(5): + for j in range(i + 1, 5): + assert a[i].commutes_with(a[j]) == \ + (rmul(a[i], a[j]) == rmul(a[j], a[i])) + if a[i].commutes_with(a[j]): + assert a[i].commutator(a[j]) == iden + assert a[j].commutator(a[i]) == iden + + a = Permutation(3) + b = Permutation(0, 6, 3)(1, 2) + assert a.cycle_structure == {1: 4} + assert b.cycle_structure == {2: 1, 3: 1, 1: 2} + # issue 11130 + raises(ValueError, lambda: Permutation(3, size=3)) + raises(ValueError, lambda: Permutation([1, 2, 0, 3], size=3)) + + +def test_Permutation_subclassing(): + # Subclass that adds permutation application on iterables + class CustomPermutation(Permutation): + def __call__(self, *i): + try: + return super().__call__(*i) + except TypeError: + pass + + try: + perm_obj = i[0] + return [self._array_form[j] for j in perm_obj] + except TypeError: + raise TypeError('unrecognized argument') + + def __eq__(self, other): + if isinstance(other, Permutation): + return self._hashable_content() == other._hashable_content() + else: + return super().__eq__(other) + + def __hash__(self): + return super().__hash__() + + p = CustomPermutation([1, 2, 3, 0]) + q = Permutation([1, 2, 3, 0]) + + assert p == q + raises(TypeError, lambda: q([1, 2])) + assert [2, 3] == p([1, 2]) + + assert type(p * q) == CustomPermutation + assert type(q * p) == Permutation # True because q.__mul__(p) is called! + + # Run all tests for the Permutation class also on the subclass + def wrapped_test_Permutation(): + # Monkeypatch the class definition in the globals + globals()['__Perm'] = globals()['Permutation'] + globals()['Permutation'] = CustomPermutation + test_Permutation() + globals()['Permutation'] = globals()['__Perm'] # Restore + del globals()['__Perm'] + + wrapped_test_Permutation() + + +def test_josephus(): + assert Permutation.josephus(4, 6, 1) == Permutation([3, 1, 0, 2, 5, 4]) + assert Permutation.josephus(1, 5, 1).is_Identity + + +def test_ranking(): + assert Permutation.unrank_lex(5, 10).rank() == 10 + p = Permutation.unrank_lex(15, 225) + assert p.rank() == 225 + p1 = p.next_lex() + assert p1.rank() == 226 + assert Permutation.unrank_lex(15, 225).rank() == 225 + assert Permutation.unrank_lex(10, 0).is_Identity + p = Permutation.unrank_lex(4, 23) + assert p.rank() == 23 + assert p.array_form == [3, 2, 1, 0] + assert p.next_lex() is None + + p = Permutation([1, 5, 2, 0, 3, 6, 4]) + q = Permutation([[1, 2, 3, 5, 6], [0, 4]]) + a = [Permutation.unrank_trotterjohnson(4, i).array_form for i in range(5)] + assert a == [[0, 1, 2, 3], [0, 1, 3, 2], [0, 3, 1, 2], [3, 0, 1, + 2], [3, 0, 2, 1] ] + assert [Permutation(pa).rank_trotterjohnson() for pa in a] == list(range(5)) + assert Permutation([0, 1, 2, 3]).next_trotterjohnson() == \ + Permutation([0, 1, 3, 2]) + + assert q.rank_trotterjohnson() == 2283 + assert p.rank_trotterjohnson() == 3389 + assert Permutation([1, 0]).rank_trotterjohnson() == 1 + a = Permutation(list(range(3))) + b = a + l = [] + tj = [] + for i in range(6): + l.append(a) + tj.append(b) + a = a.next_lex() + b = b.next_trotterjohnson() + assert a == b is None + assert {tuple(a) for a in l} == {tuple(a) for a in tj} + + p = Permutation([2, 5, 1, 6, 3, 0, 4]) + q = Permutation([[6], [5], [0, 1, 2, 3, 4]]) + assert p.rank() == 1964 + assert q.rank() == 870 + assert Permutation([]).rank_nonlex() == 0 + prank = 
p.rank_nonlex() + assert prank == 1600 + assert Permutation.unrank_nonlex(7, 1600) == p + qrank = q.rank_nonlex() + assert qrank == 41 + assert Permutation.unrank_nonlex(7, 41) == Permutation(q.array_form) + + a = [Permutation.unrank_nonlex(4, i).array_form for i in range(24)] + assert a == [ + [1, 2, 3, 0], [3, 2, 0, 1], [1, 3, 0, 2], [1, 2, 0, 3], [2, 3, 1, 0], + [2, 0, 3, 1], [3, 0, 1, 2], [2, 0, 1, 3], [1, 3, 2, 0], [3, 0, 2, 1], + [1, 0, 3, 2], [1, 0, 2, 3], [2, 1, 3, 0], [2, 3, 0, 1], [3, 1, 0, 2], + [2, 1, 0, 3], [3, 2, 1, 0], [0, 2, 3, 1], [0, 3, 1, 2], [0, 2, 1, 3], + [3, 1, 2, 0], [0, 3, 2, 1], [0, 1, 3, 2], [0, 1, 2, 3]] + + N = 10 + p1 = Permutation(a[0]) + for i in range(1, N+1): + p1 = p1*Permutation(a[i]) + p2 = Permutation.rmul_with_af(*[Permutation(h) for h in a[N::-1]]) + assert p1 == p2 + + ok = [] + p = Permutation([1, 0]) + for i in range(3): + ok.append(p.array_form) + p = p.next_nonlex() + if p is None: + ok.append(None) + break + assert ok == [[1, 0], [0, 1], None] + assert Permutation([3, 2, 0, 1]).next_nonlex() == Permutation([1, 3, 0, 2]) + assert [Permutation(pa).rank_nonlex() for pa in a] == list(range(24)) + + +def test_mul(): + a, b = [0, 2, 1, 3], [0, 1, 3, 2] + assert _af_rmul(a, b) == [0, 2, 3, 1] + assert _af_rmuln(a, b, list(range(4))) == [0, 2, 3, 1] + assert rmul(Permutation(a), Permutation(b)).array_form == [0, 2, 3, 1] + + a = Permutation([0, 2, 1, 3]) + b = (0, 1, 3, 2) + c = (3, 1, 2, 0) + assert Permutation.rmul(a, b, c) == Permutation([1, 2, 3, 0]) + assert Permutation.rmul(a, c) == Permutation([3, 2, 1, 0]) + raises(TypeError, lambda: Permutation.rmul(b, c)) + + n = 6 + m = 8 + a = [Permutation.unrank_nonlex(n, i).array_form for i in range(m)] + h = list(range(n)) + for i in range(m): + h = _af_rmul(h, a[i]) + h2 = _af_rmuln(*a[:i + 1]) + assert h == h2 + + +def test_args(): + p = Permutation([(0, 3, 1, 2), (4, 5)]) + assert p._cyclic_form is None + assert Permutation(p) == p + assert p.cyclic_form == [[0, 3, 1, 2], [4, 5]] + assert p._array_form == [3, 2, 0, 1, 5, 4] + p = Permutation((0, 3, 1, 2)) + assert p._cyclic_form is None + assert p._array_form == [0, 3, 1, 2] + assert Permutation([0]) == Permutation((0, )) + assert Permutation([[0], [1]]) == Permutation(((0, ), (1, ))) == \ + Permutation(((0, ), [1])) + assert Permutation([[1, 2]]) == Permutation([0, 2, 1]) + assert Permutation([[1], [4, 2]]) == Permutation([0, 1, 4, 3, 2]) + assert Permutation([[1], [4, 2]], size=1) == Permutation([0, 1, 4, 3, 2]) + assert Permutation( + [[1], [4, 2]], size=6) == Permutation([0, 1, 4, 3, 2, 5]) + assert Permutation([[0, 1], [0, 2]]) == Permutation(0, 1, 2) + assert Permutation([], size=3) == Permutation([0, 1, 2]) + assert Permutation(3).list(5) == [0, 1, 2, 3, 4] + assert Permutation(3).list(-1) == [] + assert Permutation(5)(1, 2).list(-1) == [0, 2, 1] + assert Permutation(5)(1, 2).list() == [0, 2, 1, 3, 4, 5] + raises(ValueError, lambda: Permutation([1, 2], [0])) + # enclosing brackets needed + raises(ValueError, lambda: Permutation([[1, 2], 0])) + # enclosing brackets needed on 0 + raises(ValueError, lambda: Permutation([1, 1, 0])) + raises(ValueError, lambda: Permutation([4, 5], size=10)) # where are 0-3? 
+ # but this is ok because cycles imply that only those listed moved + assert Permutation(4, 5) == Permutation([0, 1, 2, 3, 5, 4]) + + +def test_Cycle(): + assert str(Cycle()) == '()' + assert Cycle(Cycle(1,2)) == Cycle(1, 2) + assert Cycle(1,2).copy() == Cycle(1,2) + assert list(Cycle(1, 3, 2)) == [0, 3, 1, 2] + assert Cycle(1, 2)(2, 3) == Cycle(1, 3, 2) + assert Cycle(1, 2)(2, 3)(4, 5) == Cycle(1, 3, 2)(4, 5) + assert Permutation(Cycle(1, 2)(2, 1, 0, 3)).cyclic_form, Cycle(0, 2, 1) + raises(ValueError, lambda: Cycle().list()) + assert Cycle(1, 2).list() == [0, 2, 1] + assert Cycle(1, 2).list(4) == [0, 2, 1, 3] + assert Cycle(3).list(2) == [0, 1] + assert Cycle(3).list(6) == [0, 1, 2, 3, 4, 5] + assert Permutation(Cycle(1, 2), size=4) == \ + Permutation([0, 2, 1, 3]) + assert str(Cycle(1, 2)(4, 5)) == '(1 2)(4 5)' + assert str(Cycle(1, 2)) == '(1 2)' + assert Cycle(Permutation(list(range(3)))) == Cycle() + assert Cycle(1, 2).list() == [0, 2, 1] + assert Cycle(1, 2).list(4) == [0, 2, 1, 3] + assert Cycle().size == 0 + raises(ValueError, lambda: Cycle((1, 2))) + raises(ValueError, lambda: Cycle(1, 2, 1)) + raises(TypeError, lambda: Cycle(1, 2)*{}) + raises(ValueError, lambda: Cycle(4)[a]) + raises(ValueError, lambda: Cycle(2, -4, 3)) + + # check round-trip + p = Permutation([[1, 2], [4, 3]], size=5) + assert Permutation(Cycle(p)) == p + + +def test_from_sequence(): + assert Permutation.from_sequence('SymPy') == Permutation(4)(0, 1, 3) + assert Permutation.from_sequence('SymPy', key=lambda x: x.lower()) == \ + Permutation(4)(0, 2)(1, 3) + + +def test_resize(): + p = Permutation(0, 1, 2) + assert p.resize(5) == Permutation(0, 1, 2, size=5) + assert p.resize(4) == Permutation(0, 1, 2, size=4) + assert p.resize(3) == p + raises(ValueError, lambda: p.resize(2)) + + p = Permutation(0, 1, 2)(3, 4)(5, 6) + assert p.resize(3) == Permutation(0, 1, 2) + raises(ValueError, lambda: p.resize(4)) + + +def test_printing_cyclic(): + p1 = Permutation([0, 2, 1]) + assert repr(p1) == 'Permutation(1, 2)' + assert str(p1) == '(1 2)' + p2 = Permutation() + assert repr(p2) == 'Permutation()' + assert str(p2) == '()' + p3 = Permutation([1, 2, 0, 3]) + assert repr(p3) == 'Permutation(3)(0, 1, 2)' + + +def test_printing_non_cyclic(): + p1 = Permutation([0, 1, 2, 3, 4, 5]) + assert srepr(p1, perm_cyclic=False) == 'Permutation([], size=6)' + assert sstr(p1, perm_cyclic=False) == 'Permutation([], size=6)' + p2 = Permutation([0, 1, 2]) + assert srepr(p2, perm_cyclic=False) == 'Permutation([0, 1, 2])' + assert sstr(p2, perm_cyclic=False) == 'Permutation([0, 1, 2])' + + p3 = Permutation([0, 2, 1]) + assert srepr(p3, perm_cyclic=False) == 'Permutation([0, 2, 1])' + assert sstr(p3, perm_cyclic=False) == 'Permutation([0, 2, 1])' + p4 = Permutation([0, 1, 3, 2, 4, 5, 6, 7]) + assert srepr(p4, perm_cyclic=False) == 'Permutation([0, 1, 3, 2], size=8)' + + +def test_deprecated_print_cyclic(): + p = Permutation(0, 1, 2) + try: + Permutation.print_cyclic = True + with warns_deprecated_sympy(): + assert sstr(p) == '(0 1 2)' + with warns_deprecated_sympy(): + assert srepr(p) == 'Permutation(0, 1, 2)' + with warns_deprecated_sympy(): + assert pretty(p) == '(0 1 2)' + with warns_deprecated_sympy(): + assert latex(p) == r'\left( 0\; 1\; 2\right)' + + Permutation.print_cyclic = False + with warns_deprecated_sympy(): + assert sstr(p) == 'Permutation([1, 2, 0])' + with warns_deprecated_sympy(): + assert srepr(p) == 'Permutation([1, 2, 0])' + with warns_deprecated_sympy(): + assert pretty(p, use_unicode=False) == '/0 1 2\\\n\\1 2 0/' + with 
warns_deprecated_sympy(): + assert latex(p) == \ + r'\begin{pmatrix} 0 & 1 & 2 \\ 1 & 2 & 0 \end{pmatrix}' + finally: + Permutation.print_cyclic = None + + +def test_permutation_equality(): + a = Permutation(0, 1, 2) + b = Permutation(0, 1, 2) + assert Eq(a, b) is S.true + c = Permutation(0, 2, 1) + assert Eq(a, c) is S.false + + d = Permutation(0, 1, 2, size=4) + assert unchanged(Eq, a, d) + e = Permutation(0, 2, 1, size=4) + assert unchanged(Eq, a, e) + + i = Permutation() + assert unchanged(Eq, i, 0) + assert unchanged(Eq, 0, i) + + +def test_issue_17661(): + c1 = Cycle(1,2) + c2 = Cycle(1,2) + assert c1 == c2 + assert repr(c1) == 'Cycle(1, 2)' + assert c1 == c2 + + +def test_permutation_apply(): + x = Symbol('x') + p = Permutation(0, 1, 2) + assert p.apply(0) == 1 + assert isinstance(p.apply(0), Integer) + assert p.apply(x) == AppliedPermutation(p, x) + assert AppliedPermutation(p, x).subs(x, 0) == 1 + + x = Symbol('x', integer=False) + raises(NotImplementedError, lambda: p.apply(x)) + x = Symbol('x', negative=True) + raises(NotImplementedError, lambda: p.apply(x)) + + +def test_AppliedPermutation(): + x = Symbol('x') + p = Permutation(0, 1, 2) + raises(ValueError, lambda: AppliedPermutation((0, 1, 2), x)) + assert AppliedPermutation(p, 1, evaluate=True) == 2 + assert AppliedPermutation(p, 1, evaluate=False).__class__ == \ + AppliedPermutation diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_polyhedron.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_polyhedron.py new file mode 100644 index 0000000000000000000000000000000000000000..abf469bb560eef1f378eff4740a84b80b696035f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_polyhedron.py @@ -0,0 +1,105 @@ +from sympy.core.symbol import symbols +from sympy.sets.sets import FiniteSet +from sympy.combinatorics.polyhedron import (Polyhedron, + tetrahedron, cube as square, octahedron, dodecahedron, icosahedron, + cube_faces) +from sympy.combinatorics.permutations import Permutation +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.testing.pytest import raises + +rmul = Permutation.rmul + + +def test_polyhedron(): + raises(ValueError, lambda: Polyhedron(list('ab'), + pgroup=[Permutation([0])])) + pgroup = [Permutation([[0, 7, 2, 5], [6, 1, 4, 3]]), + Permutation([[0, 7, 1, 6], [5, 2, 4, 3]]), + Permutation([[3, 6, 0, 5], [4, 1, 7, 2]]), + Permutation([[7, 4, 5], [1, 3, 0], [2], [6]]), + Permutation([[1, 3, 2], [7, 6, 5], [4], [0]]), + Permutation([[4, 7, 6], [2, 0, 3], [1], [5]]), + Permutation([[1, 2, 0], [4, 5, 6], [3], [7]]), + Permutation([[4, 2], [0, 6], [3, 7], [1, 5]]), + Permutation([[3, 5], [7, 1], [2, 6], [0, 4]]), + Permutation([[2, 5], [1, 6], [0, 4], [3, 7]]), + Permutation([[4, 3], [7, 0], [5, 1], [6, 2]]), + Permutation([[4, 1], [0, 5], [6, 2], [7, 3]]), + Permutation([[7, 2], [3, 6], [0, 4], [1, 5]]), + Permutation([0, 1, 2, 3, 4, 5, 6, 7])] + corners = tuple(symbols('A:H')) + faces = cube_faces + cube = Polyhedron(corners, faces, pgroup) + + assert cube.edges == FiniteSet(*( + (0, 1), (6, 7), (1, 2), (5, 6), (0, 3), (2, 3), + (4, 7), (4, 5), (3, 7), (1, 5), (0, 4), (2, 6))) + + for i in range(3): # add 180 degree face rotations + cube.rotate(cube.pgroup[i]**2) + + assert cube.corners == corners + + for i in range(3, 7): # add 240 degree axial corner rotations + cube.rotate(cube.pgroup[i]**2) + + assert cube.corners == corners + cube.rotate(1) + raises(ValueError, lambda: cube.rotate(Permutation([0, 
1]))) + assert cube.corners != corners + assert cube.array_form == [7, 6, 4, 5, 3, 2, 0, 1] + assert cube.cyclic_form == [[0, 7, 1, 6], [2, 4, 3, 5]] + cube.reset() + assert cube.corners == corners + + def check(h, size, rpt, target): + + assert len(h.faces) + len(h.vertices) - len(h.edges) == 2 + assert h.size == size + + got = set() + for p in h.pgroup: + # make sure it restores original + P = h.copy() + hit = P.corners + for i in range(rpt): + P.rotate(p) + if P.corners == hit: + break + else: + print('error in permutation', p.array_form) + for i in range(rpt): + P.rotate(p) + got.add(tuple(P.corners)) + c = P.corners + f = [[c[i] for i in f] for f in P.faces] + assert h.faces == Polyhedron(c, f).faces + assert len(got) == target + assert PermutationGroup([Permutation(g) for g in got]).is_group + + for h, size, rpt, target in zip( + (tetrahedron, square, octahedron, dodecahedron, icosahedron), + (4, 8, 6, 20, 12), + (3, 4, 4, 5, 5), + (12, 24, 24, 60, 60)): + check(h, size, rpt, target) + + +def test_pgroups(): + from sympy.combinatorics.polyhedron import (cube, tetrahedron_faces, + octahedron_faces, dodecahedron_faces, icosahedron_faces) + from sympy.combinatorics.polyhedron import _pgroup_calcs + (tetrahedron2, cube2, octahedron2, dodecahedron2, icosahedron2, + tetrahedron_faces2, cube_faces2, octahedron_faces2, + dodecahedron_faces2, icosahedron_faces2) = _pgroup_calcs() + + assert tetrahedron == tetrahedron2 + assert cube == cube2 + assert octahedron == octahedron2 + assert dodecahedron == dodecahedron2 + assert icosahedron == icosahedron2 + assert sorted(map(sorted, tetrahedron_faces)) == sorted(map(sorted, tetrahedron_faces2)) + assert sorted(cube_faces) == sorted(cube_faces2) + assert sorted(octahedron_faces) == sorted(octahedron_faces2) + assert sorted(dodecahedron_faces) == sorted(dodecahedron_faces2) + assert sorted(icosahedron_faces) == sorted(icosahedron_faces2) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_prufer.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_prufer.py new file mode 100644 index 0000000000000000000000000000000000000000..b077c7cf3f023a4c36d7039505e6165ab29f275a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_prufer.py @@ -0,0 +1,74 @@ +from sympy.combinatorics.prufer import Prufer +from sympy.testing.pytest import raises + + +def test_prufer(): + # number of nodes is optional + assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]], 5).nodes == 5 + assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]]).nodes == 5 + + a = Prufer([[0, 1], [0, 2], [0, 3], [0, 4]]) + assert a.rank == 0 + assert a.nodes == 5 + assert a.prufer_repr == [0, 0, 0] + + a = Prufer([[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]]) + assert a.rank == 924 + assert a.nodes == 6 + assert a.tree_repr == [[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]] + assert a.prufer_repr == [4, 1, 4, 0] + + assert Prufer.edges([0, 1, 2, 3], [1, 4, 5], [1, 4, 6]) == \ + ([[0, 1], [1, 2], [1, 4], [2, 3], [4, 5], [4, 6]], 7) + assert Prufer([0]*4).size == Prufer([6]*4).size == 1296 + + # accept iterables but convert to list of lists + tree = [(0, 1), (1, 5), (0, 3), (0, 2), (2, 6), (4, 7), (2, 4)] + tree_lists = [list(t) for t in tree] + assert Prufer(tree).tree_repr == tree_lists + assert sorted(Prufer(set(tree)).tree_repr) == sorted(tree_lists) + + raises(ValueError, lambda: Prufer([[1, 2], [3, 4]])) # 0 is missing + raises(ValueError, lambda: Prufer([[2, 3], [3, 4]])) # 0, 1 are missing + assert Prufer(*Prufer.edges([1, 2], 
[3, 4])).prufer_repr == [1, 3] + raises(ValueError, lambda: Prufer.edges( + [1, 3], [3, 4])) # a broken tree but edges doesn't care + raises(ValueError, lambda: Prufer.edges([1, 2], [5, 6])) + raises(ValueError, lambda: Prufer([[]])) + + a = Prufer([[0, 1], [0, 2], [0, 3]]) + b = a.next() + assert b.tree_repr == [[0, 2], [0, 1], [1, 3]] + assert b.rank == 1 + + +def test_round_trip(): + def doit(t, b): + e, n = Prufer.edges(*t) + t = Prufer(e, n) + a = sorted(t.tree_repr) + b = [i - 1 for i in b] + assert t.prufer_repr == b + assert sorted(Prufer(b).tree_repr) == a + assert Prufer.unrank(t.rank, n).prufer_repr == b + + doit([[1, 2]], []) + doit([[2, 1, 3]], [1]) + doit([[1, 3, 2]], [3]) + doit([[1, 2, 3]], [2]) + doit([[2, 1, 4], [1, 3]], [1, 1]) + doit([[3, 2, 1, 4]], [2, 1]) + doit([[3, 2, 1], [2, 4]], [2, 2]) + doit([[1, 3, 2, 4]], [3, 2]) + doit([[1, 4, 2, 3]], [4, 2]) + doit([[3, 1, 4, 2]], [4, 1]) + doit([[4, 2, 1, 3]], [1, 2]) + doit([[1, 2, 4, 3]], [2, 4]) + doit([[1, 3, 4, 2]], [3, 4]) + doit([[2, 4, 1], [4, 3]], [4, 4]) + doit([[1, 2, 3, 4]], [2, 3]) + doit([[2, 3, 1], [3, 4]], [3, 3]) + doit([[1, 4, 3, 2]], [4, 3]) + doit([[2, 1, 4, 3]], [1, 4]) + doit([[2, 1, 3, 4]], [1, 3]) + doit([[6, 2, 1, 4], [1, 3, 5, 8], [3, 7]], [1, 2, 1, 3, 3, 5]) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_rewriting.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_rewriting.py new file mode 100644 index 0000000000000000000000000000000000000000..97c562bd57a2cd6318fa1dcb13c6f6278c861cca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_rewriting.py @@ -0,0 +1,49 @@ +from sympy.combinatorics.fp_groups import FpGroup +from sympy.combinatorics.free_groups import free_group +from sympy.testing.pytest import raises + + +def test_rewriting(): + F, a, b = free_group("a, b") + G = FpGroup(F, [a*b*a**-1*b**-1]) + a, b = G.generators + R = G._rewriting_system + assert R.is_confluent + + assert G.reduce(b**-1*a) == a*b**-1 + assert G.reduce(b**3*a**4*b**-2*a) == a**5*b + assert G.equals(b**2*a**-1*b, b**4*a**-1*b**-1) + + assert R.reduce_using_automaton(b*a*a**2*b**-1) == a**3 + assert R.reduce_using_automaton(b**3*a**4*b**-2*a) == a**5*b + assert R.reduce_using_automaton(b**-1*a) == a*b**-1 + + G = FpGroup(F, [a**3, b**3, (a*b)**2]) + R = G._rewriting_system + R.make_confluent() + # R._is_confluent should be set to True after + # a successful run of make_confluent + assert R.is_confluent + # but also the system should actually be confluent + assert R._check_confluence() + assert G.reduce(b*a**-1*b**-1*a**3*b**4*a**-1*b**-15) == a**-1*b**-1 + # check for automaton reduction + assert R.reduce_using_automaton(b*a**-1*b**-1*a**3*b**4*a**-1*b**-15) == a**-1*b**-1 + + G = FpGroup(F, [a**2, b**3, (a*b)**4]) + R = G._rewriting_system + assert G.reduce(a**2*b**-2*a**2*b) == b**-1 + assert R.reduce_using_automaton(a**2*b**-2*a**2*b) == b**-1 + assert G.reduce(a**3*b**-2*a**2*b) == a**-1*b**-1 + assert R.reduce_using_automaton(a**3*b**-2*a**2*b) == a**-1*b**-1 + # Check after adding a rule + R.add_rule(a**2, b) + assert R.reduce_using_automaton(a**2*b**-2*a**2*b) == b**-1 + assert R.reduce_using_automaton(a**4*b**-2*a**2*b**3) == b + + R.set_max(15) + raises(RuntimeError, lambda: R.add_rule(a**-3, b)) + R.set_max(20) + R.add_rule(a**-3, b) + + assert R.add_rule(a, a) == set() diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_schur_number.py 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_schur_number.py new file mode 100644 index 0000000000000000000000000000000000000000..e6beb9b11fa993a99b71d89b8485050fc3575b8e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_schur_number.py @@ -0,0 +1,55 @@ +from sympy.core import S, Rational +from sympy.combinatorics.schur_number import schur_partition, SchurNumber +from sympy.core.random import _randint +from sympy.testing.pytest import raises +from sympy.core.symbol import symbols + + +def _sum_free_test(subset): + """ + Checks if subset is sum-free(There are no x,y,z in the subset such that + x + y = z) + """ + for i in subset: + for j in subset: + assert (i + j in subset) is False + + +def test_schur_partition(): + raises(ValueError, lambda: schur_partition(S.Infinity)) + raises(ValueError, lambda: schur_partition(-1)) + raises(ValueError, lambda: schur_partition(0)) + assert schur_partition(2) == [[1, 2]] + + random_number_generator = _randint(1000) + for _ in range(5): + n = random_number_generator(1, 1000) + result = schur_partition(n) + t = 0 + numbers = [] + for item in result: + _sum_free_test(item) + """ + Checks if the occurrence of all numbers is exactly one + """ + t += len(item) + for l in item: + assert (l in numbers) is False + numbers.append(l) + assert n == t + + x = symbols("x") + raises(ValueError, lambda: schur_partition(x)) + +def test_schur_number(): + first_known_schur_numbers = {1: 1, 2: 4, 3: 13, 4: 44, 5: 160} + for k in first_known_schur_numbers: + assert SchurNumber(k) == first_known_schur_numbers[k] + + assert SchurNumber(S.Infinity) == S.Infinity + assert SchurNumber(0) == 0 + raises(ValueError, lambda: SchurNumber(0.5)) + + n = symbols("n") + assert SchurNumber(n).lower_bound() == 3**n/2 - Rational(1, 2) + assert SchurNumber(8).lower_bound() == 5039 diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_subsets.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_subsets.py new file mode 100644 index 0000000000000000000000000000000000000000..1d50076da1c685294c2d2561dcc2a6af629eaf83 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_subsets.py @@ -0,0 +1,63 @@ +from sympy.combinatorics.subsets import Subset, ksubsets +from sympy.testing.pytest import raises + + +def test_subset(): + a = Subset(['c', 'd'], ['a', 'b', 'c', 'd']) + assert a.next_binary() == Subset(['b'], ['a', 'b', 'c', 'd']) + assert a.prev_binary() == Subset(['c'], ['a', 'b', 'c', 'd']) + assert a.next_lexicographic() == Subset(['d'], ['a', 'b', 'c', 'd']) + assert a.prev_lexicographic() == Subset(['c'], ['a', 'b', 'c', 'd']) + assert a.next_gray() == Subset(['c'], ['a', 'b', 'c', 'd']) + assert a.prev_gray() == Subset(['d'], ['a', 'b', 'c', 'd']) + assert a.rank_binary == 3 + assert a.rank_lexicographic == 14 + assert a.rank_gray == 2 + assert a.cardinality == 16 + assert a.size == 2 + assert Subset.bitlist_from_subset(a, ['a', 'b', 'c', 'd']) == '0011' + + a = Subset([2, 5, 7], [1, 2, 3, 4, 5, 6, 7]) + assert a.next_binary() == Subset([2, 5, 6], [1, 2, 3, 4, 5, 6, 7]) + assert a.prev_binary() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7]) + assert a.next_lexicographic() == Subset([2, 6], [1, 2, 3, 4, 5, 6, 7]) + assert a.prev_lexicographic() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]) + assert a.next_gray() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]) + assert a.prev_gray() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7]) + assert 
a.rank_binary == 37 + assert a.rank_lexicographic == 93 + assert a.rank_gray == 57 + assert a.cardinality == 128 + + superset = ['a', 'b', 'c', 'd'] + assert Subset.unrank_binary(4, superset).rank_binary == 4 + assert Subset.unrank_gray(10, superset).rank_gray == 10 + + superset = [1, 2, 3, 4, 5, 6, 7, 8, 9] + assert Subset.unrank_binary(33, superset).rank_binary == 33 + assert Subset.unrank_gray(25, superset).rank_gray == 25 + + a = Subset([], ['a', 'b', 'c', 'd']) + i = 1 + while a.subset != Subset(['d'], ['a', 'b', 'c', 'd']).subset: + a = a.next_lexicographic() + i = i + 1 + assert i == 16 + + i = 1 + while a.subset != Subset([], ['a', 'b', 'c', 'd']).subset: + a = a.prev_lexicographic() + i = i + 1 + assert i == 16 + + raises(ValueError, lambda: Subset(['a', 'b'], ['a'])) + raises(ValueError, lambda: Subset(['a'], ['b', 'c'])) + raises(ValueError, lambda: Subset.subset_from_bitlist(['a', 'b'], '010')) + + assert Subset(['a'], ['a', 'b']) != Subset(['b'], ['a', 'b']) + assert Subset(['a'], ['a', 'b']) != Subset(['a'], ['a', 'c']) + +def test_ksubsets(): + assert list(ksubsets([1, 2, 3], 2)) == [(1, 2), (1, 3), (2, 3)] + assert list(ksubsets([1, 2, 3, 4, 5], 2)) == [(1, 2), (1, 3), (1, 4), + (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5), (4, 5)] diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_tensor_can.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_tensor_can.py new file mode 100644 index 0000000000000000000000000000000000000000..3922419f20b92536426bfaae4b7e94df5db671b5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_tensor_can.py @@ -0,0 +1,560 @@ +from sympy.combinatorics.permutations import Permutation, Perm +from sympy.combinatorics.tensor_can import (perm_af_direct_product, dummy_sgs, + riemann_bsgs, get_symmetric_group_sgs, canonicalize, bsgs_direct_product) +from sympy.combinatorics.testutil import canonicalize_naive, graph_certificate +from sympy.testing.pytest import skip, XFAIL + +def test_perm_af_direct_product(): + gens1 = [[1,0,2,3], [0,1,3,2]] + gens2 = [[1,0]] + assert perm_af_direct_product(gens1, gens2, 0) == [[1, 0, 2, 3, 4, 5], [0, 1, 3, 2, 4, 5], [0, 1, 2, 3, 5, 4]] + gens1 = [[1,0,2,3,5,4], [0,1,3,2,4,5]] + gens2 = [[1,0,2,3]] + assert [[1, 0, 2, 3, 4, 5, 7, 6], [0, 1, 3, 2, 4, 5, 6, 7], [0, 1, 2, 3, 5, 4, 6, 7]] + +def test_dummy_sgs(): + a = dummy_sgs([1,2], 0, 4) + assert a == [[0,2,1,3,4,5]] + a = dummy_sgs([2,3,4,5], 0, 8) + assert a == [x._array_form for x in [Perm(9)(2,3), Perm(9)(4,5), + Perm(9)(2,4)(3,5)]] + + a = dummy_sgs([2,3,4,5], 1, 8) + assert a == [x._array_form for x in [Perm(2,3)(8,9), Perm(4,5)(8,9), + Perm(9)(2,4)(3,5)]] + +def test_get_symmetric_group_sgs(): + assert get_symmetric_group_sgs(2) == ([0], [Permutation(3)(0,1)]) + assert get_symmetric_group_sgs(2, 1) == ([0], [Permutation(0,1)(2,3)]) + assert get_symmetric_group_sgs(3) == ([0,1], [Permutation(4)(0,1), Permutation(4)(1,2)]) + assert get_symmetric_group_sgs(3, 1) == ([0,1], [Permutation(0,1)(3,4), Permutation(1,2)(3,4)]) + assert get_symmetric_group_sgs(4) == ([0,1,2], [Permutation(5)(0,1), Permutation(5)(1,2), Permutation(5)(2,3)]) + assert get_symmetric_group_sgs(4, 1) == ([0,1,2], [Permutation(0,1)(4,5), Permutation(1,2)(4,5), Permutation(2,3)(4,5)]) + + +def test_canonicalize_no_slot_sym(): + # cases in which there is no slot symmetry after fixing the + # free indices; here and in the following if the symmetry of the + # metric is not specified, it is assumed to be 
symmetric. + # If it is not specified, tensors are commuting. + + # A_d0 * B^d0; g = [1,0, 2,3]; T_c = A^d0*B_d0; can = [0,1,2,3] + base1, gens1 = get_symmetric_group_sgs(1) + dummies = [0, 1] + g = Permutation([1,0,2,3]) + can = canonicalize(g, dummies, 0, (base1,gens1,1,0), (base1,gens1,1,0)) + assert can == [0,1,2,3] + # equivalently + can = canonicalize(g, dummies, 0, (base1, gens1, 2, None)) + assert can == [0,1,2,3] + + # with antisymmetric metric; T_c = -A^d0*B_d0; can = [0,1,3,2] + can = canonicalize(g, dummies, 1, (base1,gens1,1,0), (base1,gens1,1,0)) + assert can == [0,1,3,2] + + # A^a * B^b; ord = [a,b]; g = [0,1,2,3]; can = g + g = Permutation([0,1,2,3]) + dummies = [] + t0 = t1 = (base1, gens1, 1, 0) + can = canonicalize(g, dummies, 0, t0, t1) + assert can == [0,1,2,3] + # B^b * A^a + g = Permutation([1,0,2,3]) + can = canonicalize(g, dummies, 0, t0, t1) + assert can == [1,0,2,3] + + # A symmetric + # A^{b}_{d0}*A^{d0, a} order a,b,d0,-d0; T_c = A^{a d0}*A{b}_{d0} + # g = [1,3,2,0,4,5]; can = [0,2,1,3,4,5] + base2, gens2 = get_symmetric_group_sgs(2) + dummies = [2,3] + g = Permutation([1,3,2,0,4,5]) + can = canonicalize(g, dummies, 0, (base2, gens2, 2, 0)) + assert can == [0, 2, 1, 3, 4, 5] + # with antisymmetric metric + can = canonicalize(g, dummies, 1, (base2, gens2, 2, 0)) + assert can == [0, 2, 1, 3, 4, 5] + # A^{a}_{d0}*A^{d0, b} + g = Permutation([0,3,2,1,4,5]) + can = canonicalize(g, dummies, 1, (base2, gens2, 2, 0)) + assert can == [0, 2, 1, 3, 5, 4] + + # A, B symmetric + # A^b_d0*B^{d0,a}; g=[1,3,2,0,4,5] + # T_c = A^{b,d0}*B_{a,d0}; can = [1,2,0,3,4,5] + dummies = [2,3] + g = Permutation([1,3,2,0,4,5]) + can = canonicalize(g, dummies, 0, (base2,gens2,1,0), (base2,gens2,1,0)) + assert can == [1,2,0,3,4,5] + # same with antisymmetric metric + can = canonicalize(g, dummies, 1, (base2,gens2,1,0), (base2,gens2,1,0)) + assert can == [1,2,0,3,5,4] + + # A^{d1}_{d0}*B^d0*C_d1 ord=[d0,-d0,d1,-d1]; g = [2,1,0,3,4,5] + # T_c = A^{d0 d1}*B_d0*C_d1; can = [0,2,1,3,4,5] + base1, gens1 = get_symmetric_group_sgs(1) + base2, gens2 = get_symmetric_group_sgs(2) + g = Permutation([2,1,0,3,4,5]) + dummies = [0,1,2,3] + t0 = (base2, gens2, 1, 0) + t1 = t2 = (base1, gens1, 1, 0) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [0, 2, 1, 3, 4, 5] + + # A without symmetry + # A^{d1}_{d0}*B^d0*C_d1 ord=[d0,-d0,d1,-d1]; g = [2,1,0,3,4,5] + # T_c = A^{d0 d1}*B_d1*C_d0; can = [0,2,3,1,4,5] + g = Permutation([2,1,0,3,4,5]) + dummies = [0,1,2,3] + t0 = ([], [Permutation(list(range(4)))], 1, 0) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [0,2,3,1,4,5] + # A, B without symmetry + # A^{d1}_{d0}*B_{d1}^{d0}; g = [2,1,3,0,4,5] + # T_c = A^{d0 d1}*B_{d0 d1}; can = [0,2,1,3,4,5] + t0 = t1 = ([], [Permutation(list(range(4)))], 1, 0) + dummies = [0,1,2,3] + g = Permutation([2,1,3,0,4,5]) + can = canonicalize(g, dummies, 0, t0, t1) + assert can == [0, 2, 1, 3, 4, 5] + # A_{d0}^{d1}*B_{d1}^{d0}; g = [1,2,3,0,4,5] + # T_c = A^{d0 d1}*B_{d1 d0}; can = [0,2,3,1,4,5] + g = Permutation([1,2,3,0,4,5]) + can = canonicalize(g, dummies, 0, t0, t1) + assert can == [0,2,3,1,4,5] + + # A, B, C without symmetry + # A^{d1 d0}*B_{a d0}*C_{d1 b} ord=[a,b,d0,-d0,d1,-d1] + # g=[4,2,0,3,5,1,6,7] + # T_c=A^{d0 d1}*B_{a d1}*C_{d0 b}; can = [2,4,0,5,3,1,6,7] + t0 = t1 = t2 = ([], [Permutation(list(range(4)))], 1, 0) + dummies = [2,3,4,5] + g = Permutation([4,2,0,3,5,1,6,7]) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [2,4,0,5,3,1,6,7] + + # A symmetric, B and C without 
symmetry + # A^{d1 d0}*B_{a d0}*C_{d1 b} ord=[a,b,d0,-d0,d1,-d1] + # g=[4,2,0,3,5,1,6,7] + # T_c = A^{d0 d1}*B_{a d0}*C_{d1 b}; can = [2,4,0,3,5,1,6,7] + t0 = (base2,gens2,1,0) + t1 = t2 = ([], [Permutation(list(range(4)))], 1, 0) + dummies = [2,3,4,5] + g = Permutation([4,2,0,3,5,1,6,7]) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [2,4,0,3,5,1,6,7] + + # A and C symmetric, B without symmetry + # A^{d1 d0}*B_{a d0}*C_{d1 b} ord=[a,b,d0,-d0,d1,-d1] + # g=[4,2,0,3,5,1,6,7] + # T_c = A^{d0 d1}*B_{a d0}*C_{b d1}; can = [2,4,0,3,1,5,6,7] + t0 = t2 = (base2,gens2,1,0) + t1 = ([], [Permutation(list(range(4)))], 1, 0) + dummies = [2,3,4,5] + g = Permutation([4,2,0,3,5,1,6,7]) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [2,4,0,3,1,5,6,7] + + # A symmetric, B without symmetry, C antisymmetric + # A^{d1 d0}*B_{a d0}*C_{d1 b} ord=[a,b,d0,-d0,d1,-d1] + # g=[4,2,0,3,5,1,6,7] + # T_c = -A^{d0 d1}*B_{a d0}*C_{b d1}; can = [2,4,0,3,1,5,7,6] + t0 = (base2,gens2, 1, 0) + t1 = ([], [Permutation(list(range(4)))], 1, 0) + base2a, gens2a = get_symmetric_group_sgs(2, 1) + t2 = (base2a, gens2a, 1, 0) + dummies = [2,3,4,5] + g = Permutation([4,2,0,3,5,1,6,7]) + can = canonicalize(g, dummies, 0, t0, t1, t2) + assert can == [2,4,0,3,1,5,7,6] + + +def test_canonicalize_no_dummies(): + base1, gens1 = get_symmetric_group_sgs(1) + base2, gens2 = get_symmetric_group_sgs(2) + base2a, gens2a = get_symmetric_group_sgs(2, 1) + + # A commuting + # A^c A^b A^a; ord = [a,b,c]; g = [2,1,0,3,4] + # T_c = A^a A^b A^c; can = list(range(5)) + g = Permutation([2,1,0,3,4]) + can = canonicalize(g, [], 0, (base1, gens1, 3, 0)) + assert can == list(range(5)) + + # A anticommuting + # A^c A^b A^a; ord = [a,b,c]; g = [2,1,0,3,4] + # T_c = -A^a A^b A^c; can = [0,1,2,4,3] + g = Permutation([2,1,0,3,4]) + can = canonicalize(g, [], 0, (base1, gens1, 3, 1)) + assert can == [0,1,2,4,3] + + # A commuting and symmetric + # A^{b,d}*A^{c,a}; ord = [a,b,c,d]; g = [1,3,2,0,4,5] + # T_c = A^{a c}*A^{b d}; can = [0,2,1,3,4,5] + g = Permutation([1,3,2,0,4,5]) + can = canonicalize(g, [], 0, (base2, gens2, 2, 0)) + assert can == [0,2,1,3,4,5] + + # A anticommuting and symmetric + # A^{b,d}*A^{c,a}; ord = [a,b,c,d]; g = [1,3,2,0,4,5] + # T_c = -A^{a c}*A^{b d}; can = [0,2,1,3,5,4] + g = Permutation([1,3,2,0,4,5]) + can = canonicalize(g, [], 0, (base2, gens2, 2, 1)) + assert can == [0,2,1,3,5,4] + # A^{c,a}*A^{b,d} ; g = [2,0,1,3,4,5] + # T_c = A^{a c}*A^{b d}; can = [0,2,1,3,4,5] + g = Permutation([2,0,1,3,4,5]) + can = canonicalize(g, [], 0, (base2, gens2, 2, 1)) + assert can == [0,2,1,3,4,5] + +def test_no_metric_symmetry(): + # no metric symmetry + # A^d1_d0 * A^d0_d1; ord = [d0,-d0,d1,-d1]; g= [2,1,0,3,4,5] + # T_c = A^d0_d1 * A^d1_d0; can = [0,3,2,1,4,5] + g = Permutation([2,1,0,3,4,5]) + can = canonicalize(g, list(range(4)), None, [[], [Permutation(list(range(4)))], 2, 0]) + assert can == [0,3,2,1,4,5] + + # A^d1_d2 * A^d0_d3 * A^d2_d1 * A^d3_d0 + # ord = [d0,-d0,d1,-d1,d2,-d2,d3,-d3] + # 0 1 2 3 4 5 6 7 + # g = [2,5,0,7,4,3,6,1,8,9] + # T_c = A^d0_d1 * A^d1_d0 * A^d2_d3 * A^d3_d2 + # can = [0,3,2,1,4,7,6,5,8,9] + g = Permutation([2,5,0,7,4,3,6,1,8,9]) + #can = canonicalize(g, list(range(8)), 0, [[], [list(range(4))], 4, 0]) + #assert can == [0, 2, 3, 1, 4, 6, 7, 5, 8, 9] + can = canonicalize(g, list(range(8)), None, [[], [Permutation(list(range(4)))], 4, 0]) + assert can == [0, 3, 2, 1, 4, 7, 6, 5, 8, 9] + + # A^d0_d2 * A^d1_d3 * A^d3_d0 * A^d2_d1 + # g = [0,5,2,7,6,1,4,3,8,9] + # T_c = A^d0_d1 * A^d1_d2 * 
A^d2_d3 * A^d3_d0 + # can = [0,3,2,5,4,7,6,1,8,9] + g = Permutation([0,5,2,7,6,1,4,3,8,9]) + can = canonicalize(g, list(range(8)), None, [[], [Permutation(list(range(4)))], 4, 0]) + assert can == [0,3,2,5,4,7,6,1,8,9] + + g = Permutation([12,7,10,3,14,13,4,11,6,1,2,9,0,15,8,5,16,17]) + can = canonicalize(g, list(range(16)), None, [[], [Permutation(list(range(4)))], 8, 0]) + assert can == [0,3,2,5,4,7,6,1,8,11,10,13,12,15,14,9,16,17] + +def test_canonical_free(): + # t = A^{d0 a1}*A_d0^a0 + # ord = [a0,a1,d0,-d0]; g = [2,1,3,0,4,5]; dummies = [[2,3]] + # t_c = A_d0^a0*A^{d0 a1} + # can = [3,0, 2,1, 4,5] + g = Permutation([2,1,3,0,4,5]) + dummies = [[2,3]] + can = canonicalize(g, dummies, [None], ([], [Permutation(3)], 2, 0)) + assert can == [3,0, 2,1, 4,5] + +def test_canonicalize1(): + base1, gens1 = get_symmetric_group_sgs(1) + base1a, gens1a = get_symmetric_group_sgs(1, 1) + base2, gens2 = get_symmetric_group_sgs(2) + base3, gens3 = get_symmetric_group_sgs(3) + base2a, gens2a = get_symmetric_group_sgs(2, 1) + base3a, gens3a = get_symmetric_group_sgs(3, 1) + + # A_d0*A^d0; ord = [d0,-d0]; g = [1,0,2,3] + # T_c = A^d0*A_d0; can = [0,1,2,3] + g = Permutation([1,0,2,3]) + can = canonicalize(g, [0, 1], 0, (base1, gens1, 2, 0)) + assert can == list(range(4)) + + # A commuting + # A_d0*A_d1*A_d2*A^d2*A^d1*A^d0; ord=[d0,-d0,d1,-d1,d2,-d2] + # g = [1,3,5,4,2,0,6,7] + # T_c = A^d0*A_d0*A^d1*A_d1*A^d2*A_d2; can = list(range(8)) + g = Permutation([1,3,5,4,2,0,6,7]) + can = canonicalize(g, list(range(6)), 0, (base1, gens1, 6, 0)) + assert can == list(range(8)) + + # A anticommuting + # A_d0*A_d1*A_d2*A^d2*A^d1*A^d0; ord=[d0,-d0,d1,-d1,d2,-d2] + # g = [1,3,5,4,2,0,6,7] + # T_c 0; can = 0 + g = Permutation([1,3,5,4,2,0,6,7]) + can = canonicalize(g, list(range(6)), 0, (base1, gens1, 6, 1)) + assert can == 0 + can1 = canonicalize_naive(g, list(range(6)), 0, (base1, gens1, 6, 1)) + assert can1 == 0 + + # A commuting symmetric + # A^{d0 b}*A^a_d1*A^d1_d0; ord=[a,b,d0,-d0,d1,-d1] + # g = [2,1,0,5,4,3,6,7] + # T_c = A^{a d0}*A^{b d1}*A_{d0 d1}; can = [0,2,1,4,3,5,6,7] + g = Permutation([2,1,0,5,4,3,6,7]) + can = canonicalize(g, list(range(2,6)), 0, (base2, gens2, 3, 0)) + assert can == [0,2,1,4,3,5,6,7] + + # A, B commuting symmetric + # A^{d0 b}*A^d1_d0*B^a_d1; ord=[a,b,d0,-d0,d1,-d1] + # g = [2,1,4,3,0,5,6,7] + # T_c = A^{b d0}*A_d0^d1*B^a_d1; can = [1,2,3,4,0,5,6,7] + g = Permutation([2,1,4,3,0,5,6,7]) + can = canonicalize(g, list(range(2,6)), 0, (base2,gens2,2,0), (base2,gens2,1,0)) + assert can == [1,2,3,4,0,5,6,7] + + # A commuting symmetric + # A^{d1 d0 b}*A^{a}_{d1 d0}; ord=[a,b, d0,-d0,d1,-d1] + # g = [4,2,1,0,5,3,6,7] + # T_c = A^{a d0 d1}*A^{b}_{d0 d1}; can = [0,2,4,1,3,5,6,7] + g = Permutation([4,2,1,0,5,3,6,7]) + can = canonicalize(g, list(range(2,6)), 0, (base3, gens3, 2, 0)) + assert can == [0,2,4,1,3,5,6,7] + + + # A^{d3 d0 d2}*A^a0_{d1 d2}*A^d1_d3^a1*A^{a2 a3}_d0 + # ord = [a0,a1,a2,a3,d0,-d0,d1,-d1,d2,-d2,d3,-d3] + # 0 1 2 3 4 5 6 7 8 9 10 11 + # g = [10,4,8, 0,7,9, 6,11,1, 2,3,5, 12,13] + # T_c = A^{a0 d0 d1}*A^a1_d0^d2*A^{a2 a3 d3}*A_{d1 d2 d3} + # can = [0,4,6, 1,5,8, 2,3,10, 7,9,11, 12,13] + g = Permutation([10,4,8, 0,7,9, 6,11,1, 2,3,5, 12,13]) + can = canonicalize(g, list(range(4,12)), 0, (base3, gens3, 4, 0)) + assert can == [0,4,6, 1,5,8, 2,3,10, 7,9,11, 12,13] + + # A commuting symmetric, B antisymmetric + # A^{d0 d1 d2} * A_{d2 d3 d1} * B_d0^d3 + # ord = [d0,-d0,d1,-d1,d2,-d2,d3,-d3] + # g = [0,2,4,5,7,3,1,6,8,9] + # in this esxample and in the next three, + # renaming dummy 
indices and using symmetry of A, + # T = A^{d0 d1 d2} * A_{d0 d1 d3} * B_d2^d3 + # can = 0 + g = Permutation([0,2,4,5,7,3,1,6,8,9]) + can = canonicalize(g, list(range(8)), 0, (base3, gens3,2,0), (base2a,gens2a,1,0)) + assert can == 0 + # A anticommuting symmetric, B anticommuting + # A^{d0 d1 d2} * A_{d2 d3 d1} * B_d0^d3 + # T_c = A^{d0 d1 d2} * A_{d0 d1}^d3 * B_{d2 d3} + # can = [0,2,4, 1,3,6, 5,7, 8,9] + can = canonicalize(g, list(range(8)), 0, (base3, gens3,2,1), (base2a,gens2a,1,0)) + assert can == [0,2,4, 1,3,6, 5,7, 8,9] + # A anticommuting symmetric, B antisymmetric commuting, antisymmetric metric + # A^{d0 d1 d2} * A_{d2 d3 d1} * B_d0^d3 + # T_c = -A^{d0 d1 d2} * A_{d0 d1}^d3 * B_{d2 d3} + # can = [0,2,4, 1,3,6, 5,7, 9,8] + can = canonicalize(g, list(range(8)), 1, (base3, gens3,2,1), (base2a,gens2a,1,0)) + assert can == [0,2,4, 1,3,6, 5,7, 9,8] + + # A anticommuting symmetric, B anticommuting anticommuting, + # no metric symmetry + # A^{d0 d1 d2} * A_{d2 d3 d1} * B_d0^d3 + # T_c = A^{d0 d1 d2} * A_{d0 d1 d3} * B_d2^d3 + # can = [0,2,4, 1,3,7, 5,6, 8,9] + can = canonicalize(g, list(range(8)), None, (base3, gens3,2,1), (base2a,gens2a,1,0)) + assert can == [0,2,4,1,3,7,5,6,8,9] + + # Gamma anticommuting + # Gamma_{mu nu} * gamma^rho * Gamma^{nu mu alpha} + # ord = [alpha, rho, mu,-mu,nu,-nu] + # g = [3,5,1,4,2,0,6,7] + # T_c = -Gamma^{mu nu} * gamma^rho * Gamma_{alpha mu nu} + # can = [2,4,1,0,3,5,7,6]] + g = Permutation([3,5,1,4,2,0,6,7]) + t0 = (base2a, gens2a, 1, None) + t1 = (base1, gens1, 1, None) + t2 = (base3a, gens3a, 1, None) + can = canonicalize(g, list(range(2, 6)), 0, t0, t1, t2) + assert can == [2,4,1,0,3,5,7,6] + + # Gamma_{mu nu} * Gamma^{gamma beta} * gamma_rho * Gamma^{nu mu alpha} + # ord = [alpha, beta, gamma, -rho, mu,-mu,nu,-nu] + # 0 1 2 3 4 5 6 7 + # g = [5,7,2,1,3,6,4,0,8,9] + # T_c = Gamma^{mu nu} * Gamma^{beta gamma} * gamma_rho * Gamma^alpha_{mu nu} # can = [4,6,1,2,3,0,5,7,8,9] + t0 = (base2a, gens2a, 2, None) + g = Permutation([5,7,2,1,3,6,4,0,8,9]) + can = canonicalize(g, list(range(4, 8)), 0, t0, t1, t2) + assert can == [4,6,1,2,3,0,5,7,8,9] + + # f^a_{b,c} antisymmetric in b,c; A_mu^a no symmetry + # f^c_{d a} * f_{c e b} * A_mu^d * A_nu^a * A^{nu e} * A^{mu b} + # ord = [mu,-mu,nu,-nu,a,-a,b,-b,c,-c,d,-d, e, -e] + # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 + # g = [8,11,5, 9,13,7, 1,10, 3,4, 2,12, 0,6, 14,15] + # T_c = -f^{a b c} * f_a^{d e} * A^mu_b * A_{mu d} * A^nu_c * A_{nu e} + # can = [4,6,8, 5,10,12, 0,7, 1,11, 2,9, 3,13, 15,14] + g = Permutation([8,11,5, 9,13,7, 1,10, 3,4, 2,12, 0,6, 14,15]) + base_f, gens_f = bsgs_direct_product(base1, gens1, base2a, gens2a) + base_A, gens_A = bsgs_direct_product(base1, gens1, base1, gens1) + t0 = (base_f, gens_f, 2, 0) + t1 = (base_A, gens_A, 4, 0) + can = canonicalize(g, [list(range(4)), list(range(4, 14))], [0, 0], t0, t1) + assert can == [4,6,8, 5,10,12, 0,7, 1,11, 2,9, 3,13, 15,14] + + +def test_riemann_invariants(): + baser, gensr = riemann_bsgs + # R^{d0 d1}_{d1 d0}; ord = [d0,-d0,d1,-d1]; g = [0,2,3,1,4,5] + # T_c = -R^{d0 d1}_{d0 d1}; can = [0,2,1,3,5,4] + g = Permutation([0,2,3,1,4,5]) + can = canonicalize(g, list(range(2, 4)), 0, (baser, gensr, 1, 0)) + assert can == [0,2,1,3,5,4] + # use a non minimal BSGS + can = canonicalize(g, list(range(2, 4)), 0, ([2, 0], [Permutation([1,0,2,3,5,4]), Permutation([2,3,0,1,4,5])], 1, 0)) + assert can == [0,2,1,3,5,4] + + """ + The following tests in test_riemann_invariants and in + test_riemann_invariants1 have been checked using xperm.c from XPerm in + in [1] and with 
an older version contained in [2] + + [1] xperm.c part of xPerm written by J. M. Martin-Garcia + http://www.xact.es/index.html + [2] test_xperm.cc in cadabra by Kasper Peeters, http://cadabra.phi-sci.com/ + """ + # R_d11^d1_d0^d5 * R^{d6 d4 d0}_d5 * R_{d7 d2 d8 d9} * + # R_{d10 d3 d6 d4} * R^{d2 d7 d11}_d1 * R^{d8 d9 d3 d10} + # ord: contravariant d_k ->2*k, covariant d_k -> 2*k+1 + # T_c = R^{d0 d1 d2 d3} * R_{d0 d1}^{d4 d5} * R_{d2 d3}^{d6 d7} * + # R_{d4 d5}^{d8 d9} * R_{d6 d7}^{d10 d11} * R_{d8 d9 d10 d11} + g = Permutation([23,2,1,10,12,8,0,11,15,5,17,19,21,7,13,9,4,14,22,3,16,18,6,20,24,25]) + can = canonicalize(g, list(range(24)), 0, (baser, gensr, 6, 0)) + assert can == [0,2,4,6,1,3,8,10,5,7,12,14,9,11,16,18,13,15,20,22,17,19,21,23,24,25] + + # use a non minimal BSGS + can = canonicalize(g, list(range(24)), 0, ([2, 0], [Permutation([1,0,2,3,5,4]), Permutation([2,3,0,1,4,5])], 6, 0)) + assert can == [0,2,4,6,1,3,8,10,5,7,12,14,9,11,16,18,13,15,20,22,17,19,21,23,24,25] + + g = Permutation([0,2,5,7,4,6,9,11,8,10,13,15,12,14,17,19,16,18,21,23,20,22,25,27,24,26,29,31,28,30,33,35,32,34,37,39,36,38,1,3,40,41]) + can = canonicalize(g, list(range(40)), 0, (baser, gensr, 10, 0)) + assert can == [0,2,4,6,1,3,8,10,5,7,12,14,9,11,16,18,13,15,20,22,17,19,24,26,21,23,28,30,25,27,32,34,29,31,36,38,33,35,37,39,40,41] + + +@XFAIL +def test_riemann_invariants1(): + skip('takes too much time') + baser, gensr = riemann_bsgs + g = Permutation([17, 44, 11, 3, 0, 19, 23, 15, 38, 4, 25, 27, 43, 36, 22, 14, 8, 30, 41, 20, 2, 10, 12, 28, 18, 1, 29, 13, 37, 42, 33, 7, 9, 31, 24, 26, 39, 5, 34, 47, 32, 6, 21, 40, 35, 46, 45, 16, 48, 49]) + can = canonicalize(g, list(range(48)), 0, (baser, gensr, 12, 0)) + assert can == [0, 2, 4, 6, 1, 3, 8, 10, 5, 7, 12, 14, 9, 11, 16, 18, 13, 15, 20, 22, 17, 19, 24, 26, 21, 23, 28, 30, 25, 27, 32, 34, 29, 31, 36, 38, 33, 35, 40, 42, 37, 39, 44, 46, 41, 43, 45, 47, 48, 49] + + g = Permutation([0,2,4,6, 7,8,10,12, 14,16,18,20, 19,22,24,26, 5,21,28,30, 32,34,36,38, 40,42,44,46, 13,48,50,52, 15,49,54,56, 17,33,41,58, 9,23,60,62, 29,35,63,64, 3,45,66,68, 25,37,47,57, 11,31,69,70, 27,39,53,72, 1,59,73,74, 55,61,67,76, 43,65,75,78, 51,71,77,79, 80,81]) + can = canonicalize(g, list(range(80)), 0, (baser, gensr, 20, 0)) + assert can == [0,2,4,6, 1,8,10,12, 3,14,16,18, 5,20,22,24, 7,26,28,30, 9,15,32,34, 11,36,23,38, 13,40,42,44, 17,39,29,46, 19,48,43,50, 21,45,52,54, 25,56,33,58, 27,60,53,62, 31,51,64,66, 35,65,47,68, 37,70,49,72, 41,74,57,76, 55,67,59,78, 61,69,71,75, 63,79,73,77, 80,81] + + +def test_riemann_products(): + baser, gensr = riemann_bsgs + base1, gens1 = get_symmetric_group_sgs(1) + base2, gens2 = get_symmetric_group_sgs(2) + base2a, gens2a = get_symmetric_group_sgs(2, 1) + + # R^{a b d0}_d0 = 0 + g = Permutation([0,1,2,3,4,5]) + can = canonicalize(g, list(range(2,4)), 0, (baser, gensr, 1, 0)) + assert can == 0 + + # R^{d0 b a}_d0 ; ord = [a,b,d0,-d0}; g = [2,1,0,3,4,5] + # T_c = -R^{a d0 b}_d0; can = [0,2,1,3,5,4] + g = Permutation([2,1,0,3,4,5]) + can = canonicalize(g, list(range(2, 4)), 0, (baser, gensr, 1, 0)) + assert can == [0,2,1,3,5,4] + + # R^d1_d2^b_d0 * R^{d0 a}_d1^d2; ord=[a,b,d0,-d0,d1,-d1,d2,-d2] + # g = [4,7,1,3,2,0,5,6,8,9] + # T_c = -R^{a d0 d1 d2}* R^b_{d0 d1 d2} + # can = [0,2,4,6,1,3,5,7,9,8] + g = Permutation([4,7,1,3,2,0,5,6,8,9]) + can = canonicalize(g, list(range(2,8)), 0, (baser, gensr, 2, 0)) + assert can == [0,2,4,6,1,3,5,7,9,8] + can1 = canonicalize_naive(g, list(range(2,8)), 0, (baser, gensr, 2, 0)) + assert can == can1 + + # A symmetric 
commuting + # R^{d6 d5}_d2^d1 * R^{d4 d0 d2 d3} * A_{d6 d0} A_{d3 d1} * A_{d4 d5} + # g = [12,10,5,2, 8,0,4,6, 13,1, 7,3, 9,11,14,15] + # T_c = -R^{d0 d1 d2 d3} * R_d0^{d4 d5 d6} * A_{d1 d4}*A_{d2 d5}*A_{d3 d6} + + g = Permutation([12,10,5,2,8,0,4,6,13,1,7,3,9,11,14,15]) + can = canonicalize(g, list(range(14)), 0, ((baser,gensr,2,0)), (base2,gens2,3,0)) + assert can == [0, 2, 4, 6, 1, 8, 10, 12, 3, 9, 5, 11, 7, 13, 15, 14] + + # R^{d2 a0 a2 d0} * R^d1_d2^{a1 a3} * R^{a4 a5}_{d0 d1} + # ord = [a0,a1,a2,a3,a4,a5,d0,-d0,d1,-d1,d2,-d2] + # 0 1 2 3 4 5 6 7 8 9 10 11 + # can = [0, 6, 2, 8, 1, 3, 7, 10, 4, 5, 9, 11, 12, 13] + # T_c = R^{a0 d0 a2 d1}*R^{a1 a3}_d0^d2*R^{a4 a5}_{d1 d2} + g = Permutation([10,0,2,6,8,11,1,3,4,5,7,9,12,13]) + can = canonicalize(g, list(range(6,12)), 0, (baser, gensr, 3, 0)) + assert can == [0, 6, 2, 8, 1, 3, 7, 10, 4, 5, 9, 11, 12, 13] + #can1 = canonicalize_naive(g, list(range(6,12)), 0, (baser, gensr, 3, 0)) + #assert can == can1 + + # A^n_{i, j} antisymmetric in i,j + # A_m0^d0_a1 * A_m1^a0_d0; ord = [m0,m1,a0,a1,d0,-d0] + # g = [0,4,3,1,2,5,6,7] + # T_c = -A_{m a1}^d0 * A_m1^a0_d0 + # can = [0,3,4,1,2,5,7,6] + base, gens = bsgs_direct_product(base1, gens1, base2a, gens2a) + dummies = list(range(4, 6)) + g = Permutation([0,4,3,1,2,5,6,7]) + can = canonicalize(g, dummies, 0, (base, gens, 2, 0)) + assert can == [0, 3, 4, 1, 2, 5, 7, 6] + + + # A^n_{i, j} symmetric in i,j + # A^m0_a0^d2 * A^n0_d2^d1 * A^n1_d1^d0 * A_{m0 d0}^a1 + # ordering: first the free indices; then first n, then d + # ord=[n0,n1,a0,a1, m0,-m0,d0,-d0,d1,-d1,d2,-d2] + # 0 1 2 3 4 5 6 7 8 9 10 11] + # g = [4,2,10, 0,11,8, 1,9,6, 5,7,3, 12,13] + # if the dummy indices m_i and d_i were separated, + # one gets + # T_c = A^{n0 d0 d1} * A^n1_d0^d2 * A^m0^a0_d1 * A_m0^a1_d2 + # can = [0, 6, 8, 1, 7, 10, 4, 2, 9, 5, 3, 11, 12, 13] + # If they are not, so can is + # T_c = A^{n0 m0 d0} A^n1_m0^d1 A^{d2 a0}_d0 A_d2^a1_d1 + # can = [0, 4, 6, 1, 5, 8, 10, 2, 7, 11, 3, 9, 12, 13] + # case with single type of indices + + base, gens = bsgs_direct_product(base1, gens1, base2, gens2) + dummies = list(range(4, 12)) + g = Permutation([4,2,10, 0,11,8, 1,9,6, 5,7,3, 12,13]) + can = canonicalize(g, dummies, 0, (base, gens, 4, 0)) + assert can == [0, 4, 6, 1, 5, 8, 10, 2, 7, 11, 3, 9, 12, 13] + # case with separated indices + dummies = [list(range(4, 6)), list(range(6,12))] + sym = [0, 0] + can = canonicalize(g, dummies, sym, (base, gens, 4, 0)) + assert can == [0, 6, 8, 1, 7, 10, 4, 2, 9, 5, 3, 11, 12, 13] + # case with separated indices with the second type of index + # with antisymmetric metric: there is a sign change + sym = [0, 1] + can = canonicalize(g, dummies, sym, (base, gens, 4, 0)) + assert can == [0, 6, 8, 1, 7, 10, 4, 2, 9, 5, 3, 11, 13, 12] + +def test_graph_certificate(): + # test tensor invariants constructed from random regular graphs; + # checked graph isomorphism with networkx + import random + def randomize_graph(size, g): + p = list(range(size)) + random.shuffle(p) + g1a = {} + for k, v in g1.items(): + g1a[p[k]] = [p[i] for i in v] + return g1a + + g1 = {0: [2, 3, 7], 1: [4, 5, 7], 2: [0, 4, 6], 3: [0, 6, 7], 4: [1, 2, 5], 5: [1, 4, 6], 6: [2, 3, 5], 7: [0, 1, 3]} + g2 = {0: [2, 3, 7], 1: [2, 4, 5], 2: [0, 1, 5], 3: [0, 6, 7], 4: [1, 5, 6], 5: [1, 2, 4], 6: [3, 4, 7], 7: [0, 3, 6]} + + c1 = graph_certificate(g1) + c2 = graph_certificate(g2) + assert c1 != c2 + g1a = randomize_graph(8, g1) + c1a = graph_certificate(g1a) + assert c1 == c1a + + g1 = {0: [8, 1, 9, 7], 1: [0, 9, 3, 4], 2: [3, 4, 6, 7], 3: 
[1, 2, 5, 6], 4: [8, 1, 2, 5], 5: [9, 3, 4, 7], 6: [8, 2, 3, 7], 7: [0, 2, 5, 6], 8: [0, 9, 4, 6], 9: [8, 0, 5, 1]} + g2 = {0: [1, 2, 5, 6], 1: [0, 9, 5, 7], 2: [0, 4, 6, 7], 3: [8, 9, 6, 7], 4: [8, 2, 6, 7], 5: [0, 9, 8, 1], 6: [0, 2, 3, 4], 7: [1, 2, 3, 4], 8: [9, 3, 4, 5], 9: [8, 1, 3, 5]} + c1 = graph_certificate(g1) + c2 = graph_certificate(g2) + assert c1 != c2 + g1a = randomize_graph(10, g1) + c1a = graph_certificate(g1a) + assert c1 == c1a diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_testutil.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_testutil.py new file mode 100644 index 0000000000000000000000000000000000000000..e13f4d5b9913213bac7798c4df77f6665785c9ca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_testutil.py @@ -0,0 +1,55 @@ +from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup,\ + CyclicGroup +from sympy.combinatorics.testutil import _verify_bsgs, _cmp_perm_lists,\ + _naive_list_centralizer, _verify_centralizer,\ + _verify_normal_closure +from sympy.combinatorics.permutations import Permutation +from sympy.combinatorics.perm_groups import PermutationGroup +from sympy.core.random import shuffle + + +def test_cmp_perm_lists(): + S = SymmetricGroup(4) + els = list(S.generate_dimino()) + other = els[:] + shuffle(other) + assert _cmp_perm_lists(els, other) is True + + +def test_naive_list_centralizer(): + # verified by GAP + S = SymmetricGroup(3) + A = AlternatingGroup(3) + assert _naive_list_centralizer(S, S) == [Permutation([0, 1, 2])] + assert PermutationGroup(_naive_list_centralizer(S, A)).is_subgroup(A) + + +def test_verify_bsgs(): + S = SymmetricGroup(5) + S.schreier_sims() + base = S.base + strong_gens = S.strong_gens + assert _verify_bsgs(S, base, strong_gens) is True + assert _verify_bsgs(S, base[:-1], strong_gens) is False + assert _verify_bsgs(S, base, S.generators) is False + + +def test_verify_centralizer(): + # verified by GAP + S = SymmetricGroup(3) + A = AlternatingGroup(3) + triv = PermutationGroup([Permutation([0, 1, 2])]) + assert _verify_centralizer(S, S, centr=triv) + assert _verify_centralizer(S, A, centr=A) + + +def test_verify_normal_closure(): + # verified by GAP + S = SymmetricGroup(3) + A = AlternatingGroup(3) + assert _verify_normal_closure(S, A, closure=A) + S = SymmetricGroup(5) + A = AlternatingGroup(5) + C = CyclicGroup(5) + assert _verify_normal_closure(S, A, closure=A) + assert _verify_normal_closure(S, C, closure=A) diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_util.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_util.py new file mode 100644 index 0000000000000000000000000000000000000000..bca183e81f354e398aee9ae809fe79b20c7f2468 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/tests/test_util.py @@ -0,0 +1,120 @@ +from sympy.combinatorics.named_groups import SymmetricGroup, DihedralGroup,\ + AlternatingGroup +from sympy.combinatorics.permutations import Permutation +from sympy.combinatorics.util import _check_cycles_alt_sym, _strip,\ + _distribute_gens_by_base, _strong_gens_from_distr,\ + _orbits_transversals_from_bsgs, _handle_precomputed_bsgs, _base_ordering,\ + _remove_gens +from sympy.combinatorics.testutil import _verify_bsgs + + +def test_check_cycles_alt_sym(): + perm1 = Permutation([[0, 1, 2, 3, 4, 5, 6], [7], [8], [9]]) + perm2 = Permutation([[0, 1, 2, 3, 4, 5], [6, 7, 8, 9]]) + perm3 = 
Permutation([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) + assert _check_cycles_alt_sym(perm1) is True + assert _check_cycles_alt_sym(perm2) is False + assert _check_cycles_alt_sym(perm3) is False + + +def test_strip(): + D = DihedralGroup(5) + D.schreier_sims() + member = Permutation([4, 0, 1, 2, 3]) + not_member1 = Permutation([0, 1, 4, 3, 2]) + not_member2 = Permutation([3, 1, 4, 2, 0]) + identity = Permutation([0, 1, 2, 3, 4]) + res1 = _strip(member, D.base, D.basic_orbits, D.basic_transversals) + res2 = _strip(not_member1, D.base, D.basic_orbits, D.basic_transversals) + res3 = _strip(not_member2, D.base, D.basic_orbits, D.basic_transversals) + assert res1[0] == identity + assert res1[1] == len(D.base) + 1 + assert res2[0] == not_member1 + assert res2[1] == len(D.base) + 1 + assert res3[0] != identity + assert res3[1] == 2 + + +def test_distribute_gens_by_base(): + base = [0, 1, 2] + gens = [Permutation([0, 1, 2, 3]), Permutation([0, 1, 3, 2]), + Permutation([0, 2, 3, 1]), Permutation([3, 2, 1, 0])] + assert _distribute_gens_by_base(base, gens) == [gens, + [Permutation([0, 1, 2, 3]), + Permutation([0, 1, 3, 2]), + Permutation([0, 2, 3, 1])], + [Permutation([0, 1, 2, 3]), + Permutation([0, 1, 3, 2])]] + + +def test_strong_gens_from_distr(): + strong_gens_distr = [[Permutation([0, 2, 1]), Permutation([1, 2, 0]), + Permutation([1, 0, 2])], [Permutation([0, 2, 1])]] + assert _strong_gens_from_distr(strong_gens_distr) == \ + [Permutation([0, 2, 1]), + Permutation([1, 2, 0]), + Permutation([1, 0, 2])] + + +def test_orbits_transversals_from_bsgs(): + S = SymmetricGroup(4) + S.schreier_sims() + base = S.base + strong_gens = S.strong_gens + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + result = _orbits_transversals_from_bsgs(base, strong_gens_distr) + orbits = result[0] + transversals = result[1] + base_len = len(base) + for i in range(base_len): + for el in orbits[i]: + assert transversals[i][el](base[i]) == el + for j in range(i): + assert transversals[i][el](base[j]) == base[j] + order = 1 + for i in range(base_len): + order *= len(orbits[i]) + assert S.order() == order + + +def test_handle_precomputed_bsgs(): + A = AlternatingGroup(5) + A.schreier_sims() + base = A.base + strong_gens = A.strong_gens + result = _handle_precomputed_bsgs(base, strong_gens) + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + assert strong_gens_distr == result[2] + transversals = result[0] + orbits = result[1] + base_len = len(base) + for i in range(base_len): + for el in orbits[i]: + assert transversals[i][el](base[i]) == el + for j in range(i): + assert transversals[i][el](base[j]) == base[j] + order = 1 + for i in range(base_len): + order *= len(orbits[i]) + assert A.order() == order + + +def test_base_ordering(): + base = [2, 4, 5] + degree = 7 + assert _base_ordering(base, degree) == [3, 4, 0, 5, 1, 2, 6] + + +def test_remove_gens(): + S = SymmetricGroup(10) + base, strong_gens = S.schreier_sims_incremental() + new_gens = _remove_gens(base, strong_gens) + assert _verify_bsgs(S, base, new_gens) is True + A = AlternatingGroup(7) + base, strong_gens = A.schreier_sims_incremental() + new_gens = _remove_gens(base, strong_gens) + assert _verify_bsgs(A, base, new_gens) is True + D = DihedralGroup(2) + base, strong_gens = D.schreier_sims_incremental() + new_gens = _remove_gens(base, strong_gens) + assert _verify_bsgs(D, base, new_gens) is True diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/testutil.py 
b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/testutil.py new file mode 100644 index 0000000000000000000000000000000000000000..5b036ac29665744710e5552b6fe999bb63cf062d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/testutil.py @@ -0,0 +1,358 @@ +from sympy.combinatorics import Permutation +from sympy.combinatorics.util import _distribute_gens_by_base + +rmul = Permutation.rmul + + +def _cmp_perm_lists(first, second): + """ + Compare two lists of permutations as sets. + + Explanation + =========== + + This is used for testing purposes. Since the array form of a + permutation is currently a list, Permutation is not hashable + and cannot be put into a set. + + Examples + ======== + + >>> from sympy.combinatorics.permutations import Permutation + >>> from sympy.combinatorics.testutil import _cmp_perm_lists + >>> a = Permutation([0, 2, 3, 4, 1]) + >>> b = Permutation([1, 2, 0, 4, 3]) + >>> c = Permutation([3, 4, 0, 1, 2]) + >>> ls1 = [a, b, c] + >>> ls2 = [b, c, a] + >>> _cmp_perm_lists(ls1, ls2) + True + + """ + return {tuple(a) for a in first} == \ + {tuple(a) for a in second} + + +def _naive_list_centralizer(self, other, af=False): + from sympy.combinatorics.perm_groups import PermutationGroup + """ + Return a list of elements for the centralizer of a subgroup/set/element. + + Explanation + =========== + + This is a brute force implementation that goes over all elements of the + group and checks for membership in the centralizer. It is used to + test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. + + Examples + ======== + + >>> from sympy.combinatorics.testutil import _naive_list_centralizer + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> D = DihedralGroup(4) + >>> _naive_list_centralizer(D, D) + [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])] + + See Also + ======== + + sympy.combinatorics.perm_groups.centralizer + + """ + from sympy.combinatorics.permutations import _af_commutes_with + if hasattr(other, 'generators'): + elements = list(self.generate_dimino(af=True)) + gens = [x._array_form for x in other.generators] + commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in gens) + centralizer_list = [] + if not af: + for element in elements: + if commutes_with_gens(element): + centralizer_list.append(Permutation._af_new(element)) + else: + for element in elements: + if commutes_with_gens(element): + centralizer_list.append(element) + return centralizer_list + elif hasattr(other, 'getitem'): + return _naive_list_centralizer(self, PermutationGroup(other), af) + elif hasattr(other, 'array_form'): + return _naive_list_centralizer(self, PermutationGroup([other]), af) + + +def _verify_bsgs(group, base, gens): + """ + Verify the correctness of a base and strong generating set. + + Explanation + =========== + + This is a naive implementation using the definition of a base and a strong + generating set relative to it. There are other procedures for + verifying a base and strong generating set, but this one will + serve for more robust testing. 
+ + Examples + ======== + + >>> from sympy.combinatorics.named_groups import AlternatingGroup + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> A = AlternatingGroup(4) + >>> A.schreier_sims() + >>> _verify_bsgs(A, A.base, A.strong_gens) + True + + See Also + ======== + + sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims + + """ + from sympy.combinatorics.perm_groups import PermutationGroup + strong_gens_distr = _distribute_gens_by_base(base, gens) + current_stabilizer = group + for i in range(len(base)): + candidate = PermutationGroup(strong_gens_distr[i]) + if current_stabilizer.order() != candidate.order(): + return False + current_stabilizer = current_stabilizer.stabilizer(base[i]) + if current_stabilizer.order() != 1: + return False + return True + + +def _verify_centralizer(group, arg, centr=None): + """ + Verify the centralizer of a group/set/element inside another group. + + This is used for testing ``.centralizer()`` from + ``sympy.combinatorics.perm_groups`` + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... AlternatingGroup) + >>> from sympy.combinatorics.perm_groups import PermutationGroup + >>> from sympy.combinatorics.permutations import Permutation + >>> from sympy.combinatorics.testutil import _verify_centralizer + >>> S = SymmetricGroup(5) + >>> A = AlternatingGroup(5) + >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) + >>> _verify_centralizer(S, A, centr) + True + + See Also + ======== + + _naive_list_centralizer, + sympy.combinatorics.perm_groups.PermutationGroup.centralizer, + _cmp_perm_lists + + """ + if centr is None: + centr = group.centralizer(arg) + centr_list = list(centr.generate_dimino(af=True)) + centr_list_naive = _naive_list_centralizer(group, arg, af=True) + return _cmp_perm_lists(centr_list, centr_list_naive) + + +def _verify_normal_closure(group, arg, closure=None): + from sympy.combinatorics.perm_groups import PermutationGroup + """ + Verify the normal closure of a subgroup/subset/element in a group. + + This is used to test + sympy.combinatorics.perm_groups.PermutationGroup.normal_closure + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import (SymmetricGroup, + ... AlternatingGroup) + >>> from sympy.combinatorics.testutil import _verify_normal_closure + >>> S = SymmetricGroup(3) + >>> A = AlternatingGroup(3) + >>> _verify_normal_closure(S, A, closure=A) + True + + See Also + ======== + + sympy.combinatorics.perm_groups.PermutationGroup.normal_closure + + """ + if closure is None: + closure = group.normal_closure(arg) + conjugates = set() + if hasattr(arg, 'generators'): + subgr_gens = arg.generators + elif hasattr(arg, '__getitem__'): + subgr_gens = arg + elif hasattr(arg, 'array_form'): + subgr_gens = [arg] + for el in group.generate_dimino(): + for gen in subgr_gens: + conjugates.add(gen ^ el) + naive_closure = PermutationGroup(list(conjugates)) + return closure.is_subgroup(naive_closure) + + +def canonicalize_naive(g, dummies, sym, *v): + """ + Canonicalize tensor formed by tensors of the different types. + + Explanation + =========== + + sym_i symmetry under exchange of two component tensors of type `i` + None no symmetry + 0 commuting + 1 anticommuting + + Parameters + ========== + + g : Permutation representing the tensor. + dummies : List of dummy indices. + msym : Symmetry of the metric. + v : A list of (base_i, gens_i, n_i, sym_i) for tensors of type `i`. 
+ base_i, gens_i BSGS for tensors of this type + n_i number of tensors of type `i` + + Returns + ======= + + Returns 0 if the tensor is zero, else returns the array form of + the permutation representing the canonical form of the tensor. + + Examples + ======== + + >>> from sympy.combinatorics.testutil import canonicalize_naive + >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs + >>> from sympy.combinatorics import Permutation + >>> g = Permutation([1, 3, 2, 0, 4, 5]) + >>> base2, gens2 = get_symmetric_group_sgs(2) + >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0)) + [0, 2, 1, 3, 4, 5] + """ + from sympy.combinatorics.perm_groups import PermutationGroup + from sympy.combinatorics.tensor_can import gens_products, dummy_sgs + from sympy.combinatorics.permutations import _af_rmul + v1 = [] + for i in range(len(v)): + base_i, gens_i, n_i, sym_i = v[i] + v1.append((base_i, gens_i, [[]]*n_i, sym_i)) + size, sbase, sgens = gens_products(*v1) + dgens = dummy_sgs(dummies, sym, size-2) + if isinstance(sym, int): + num_types = 1 + dummies = [dummies] + sym = [sym] + else: + num_types = len(sym) + dgens = [] + for i in range(num_types): + dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) + S = PermutationGroup(sgens) + D = PermutationGroup([Permutation(x) for x in dgens]) + dlist = list(D.generate(af=True)) + g = g.array_form + st = set() + for s in S.generate(af=True): + h = _af_rmul(g, s) + for d in dlist: + q = tuple(_af_rmul(d, h)) + st.add(q) + a = list(st) + a.sort() + prev = (0,)*size + for h in a: + if h[:-2] == prev[:-2]: + if h[-1] != prev[-1]: + return 0 + prev = h + return list(a[0]) + + +def graph_certificate(gr): + """ + Return a certificate for the graph + + Parameters + ========== + + gr : adjacency list + + Explanation + =========== + + The graph is assumed to be unoriented and without + external lines. + + Associate to each vertex of the graph a symmetric tensor with + number of indices equal to the degree of the vertex; indices + are contracted when they correspond to the same line of the graph. + The canonical form of the tensor gives a certificate for the graph. + + This is not an efficient algorithm to get the certificate of a graph. 
+ + Examples + ======== + + >>> from sympy.combinatorics.testutil import graph_certificate + >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]} + >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]} + >>> c1 = graph_certificate(gr1) + >>> c2 = graph_certificate(gr2) + >>> c1 + [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19, 20, 21] + >>> c1 == c2 + True + """ + from sympy.combinatorics.permutations import _af_invert + from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize + items = list(gr.items()) + items.sort(key=lambda x: len(x[1]), reverse=True) + pvert = [x[0] for x in items] + pvert = _af_invert(pvert) + + # the indices of the tensor are twice the number of lines of the graph + num_indices = 0 + for v, neigh in items: + num_indices += len(neigh) + # associate to each vertex its indices; for each line + # between two vertices assign the + # even index to the vertex which comes first in items, + # the odd index to the other vertex + vertices = [[] for i in items] + i = 0 + for v, neigh in items: + for v2 in neigh: + if pvert[v] < pvert[v2]: + vertices[pvert[v]].append(i) + vertices[pvert[v2]].append(i+1) + i += 2 + g = [] + for v in vertices: + g.extend(v) + assert len(g) == num_indices + g += [num_indices, num_indices + 1] + size = num_indices + 2 + assert sorted(g) == list(range(size)) + g = Permutation(g) + vlen = [0]*(len(vertices[0])+1) + for neigh in vertices: + vlen[len(neigh)] += 1 + v = [] + for i in range(len(vlen)): + n = vlen[i] + if n: + base, gens = get_symmetric_group_sgs(i) + v.append((base, gens, n, 0)) + v.reverse() + dummies = list(range(num_indices)) + can = canonicalize(g, dummies, 0, *v) + return can diff --git a/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/util.py b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/util.py new file mode 100644 index 0000000000000000000000000000000000000000..94e736f56e4f10184da8df0ebe65f58c78079048 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sympy/combinatorics/util.py @@ -0,0 +1,536 @@ +from sympy.combinatorics.permutations import Permutation, _af_invert, _af_rmul +from sympy.ntheory import isprime + +rmul = Permutation.rmul +_af_new = Permutation._af_new + +############################################ +# +# Utilities for computational group theory +# +############################################ + + +def _base_ordering(base, degree): + r""" + Order `\{0, 1, \dots, n-1\}` so that base points come first and in order. + + Parameters + ========== + + base : the base + degree : the degree of the associated permutation group + + Returns + ======= + + A list ``base_ordering`` such that ``base_ordering[point]`` is the + number of ``point`` in the ordering. 
+ + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> from sympy.combinatorics.util import _base_ordering + >>> S = SymmetricGroup(4) + >>> S.schreier_sims() + >>> _base_ordering(S.base, S.degree) + [0, 1, 2, 3] + + Notes + ===== + + This is used in backtrack searches, when we define a relation `\ll` on + the underlying set for a permutation group of degree `n`, + `\{0, 1, \dots, n-1\}`, so that if `(b_1, b_2, \dots, b_k)` is a base we + have `b_i \ll b_j` whenever `i>> from sympy.combinatorics.util import _check_cycles_alt_sym + >>> from sympy.combinatorics import Permutation + >>> a = Permutation([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [11, 12]]) + >>> _check_cycles_alt_sym(a) + False + >>> b = Permutation([[0, 1, 2, 3, 4, 5, 6], [7, 8, 9, 10]]) + >>> _check_cycles_alt_sym(b) + True + + See Also + ======== + + sympy.combinatorics.perm_groups.PermutationGroup.is_alt_sym + + """ + n = perm.size + af = perm.array_form + current_len = 0 + total_len = 0 + used = set() + for i in range(n//2): + if i not in used and i < n//2 - total_len: + current_len = 1 + used.add(i) + j = i + while af[j] != i: + current_len += 1 + j = af[j] + used.add(j) + total_len += current_len + if current_len > n//2 and current_len < n - 2 and isprime(current_len): + return True + return False + + +def _distribute_gens_by_base(base, gens): + r""" + Distribute the group elements ``gens`` by membership in basic stabilizers. + + Explanation + =========== + + Notice that for a base `(b_1, b_2, \dots, b_k)`, the basic stabilizers + are defined as `G^{(i)} = G_{b_1, \dots, b_{i-1}}` for + `i \in\{1, 2, \dots, k\}`. + + Parameters + ========== + + base : a sequence of points in `\{0, 1, \dots, n-1\}` + gens : a list of elements of a permutation group of degree `n`. + + Returns + ======= + + List of length `k`, where `k` is + the length of ``base``. The `i`-th entry contains those elements in + ``gens`` which fix the first `i` elements of ``base`` (so that the + `0`-th entry is equal to ``gens`` itself). If no element fixes the first + `i` elements of ``base``, the `i`-th element is set to a list containing + the identity element. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> from sympy.combinatorics.util import _distribute_gens_by_base + >>> D = DihedralGroup(3) + >>> D.schreier_sims() + >>> D.strong_gens + [(0 1 2), (0 2), (1 2)] + >>> D.base + [0, 1] + >>> _distribute_gens_by_base(D.base, D.strong_gens) + [[(0 1 2), (0 2), (1 2)], + [(1 2)]] + + See Also + ======== + + _strong_gens_from_distr, _orbits_transversals_from_bsgs, + _handle_precomputed_bsgs + + """ + base_len = len(base) + degree = gens[0].size + stabs = [[] for _ in range(base_len)] + max_stab_index = 0 + for gen in gens: + j = 0 + while j < base_len - 1 and gen._array_form[base[j]] == base[j]: + j += 1 + if j > max_stab_index: + max_stab_index = j + for k in range(j + 1): + stabs[k].append(gen) + for i in range(max_stab_index + 1, base_len): + stabs[i].append(_af_new(list(range(degree)))) + return stabs + + +def _handle_precomputed_bsgs(base, strong_gens, transversals=None, + basic_orbits=None, strong_gens_distr=None): + """ + Calculate BSGS-related structures from those present. + + Explanation + =========== + + The base and strong generating set must be provided; if any of the + transversals, basic orbits or distributed strong generators are not + provided, they will be calculated from the base and strong generating set. 
+ + Parameters + ========== + + ``base`` - the base + ``strong_gens`` - the strong generators + ``transversals`` - basic transversals + ``basic_orbits`` - basic orbits + ``strong_gens_distr`` - strong generators distributed by membership in basic + stabilizers + + Returns + ======= + + ``(transversals, basic_orbits, strong_gens_distr)`` where ``transversals`` + are the basic transversals, ``basic_orbits`` are the basic orbits, and + ``strong_gens_distr`` are the strong generators distributed by membership + in basic stabilizers. + + Examples + ======== + + >>> from sympy.combinatorics.named_groups import DihedralGroup + >>> from sympy.combinatorics.util import _handle_precomputed_bsgs + >>> D = DihedralGroup(3) + >>> D.schreier_sims() + >>> _handle_precomputed_bsgs(D.base, D.strong_gens, + ... basic_orbits=D.basic_orbits) + ([{0: (2), 1: (0 1 2), 2: (0 2)}, {1: (2), 2: (1 2)}], [[0, 1, 2], [1, 2]], [[(0 1 2), (0 2), (1 2)], [(1 2)]]) + + See Also + ======== + + _orbits_transversals_from_bsgs, _distribute_gens_by_base + + """ + if strong_gens_distr is None: + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + if transversals is None: + if basic_orbits is None: + basic_orbits, transversals = \ + _orbits_transversals_from_bsgs(base, strong_gens_distr) + else: + transversals = \ + _orbits_transversals_from_bsgs(base, strong_gens_distr, + transversals_only=True) + else: + if basic_orbits is None: + base_len = len(base) + basic_orbits = [None]*base_len + for i in range(base_len): + basic_orbits[i] = list(transversals[i].keys()) + return transversals, basic_orbits, strong_gens_distr + + +def _orbits_transversals_from_bsgs(base, strong_gens_distr, + transversals_only=False, slp=False): + """ + Compute basic orbits and transversals from a base and strong generating set. + + Explanation + =========== + + The generators are provided as distributed across the basic stabilizers. + If the optional argument ``transversals_only`` is set to True, only the + transversals are returned. + + Parameters + ========== + + ``base`` - The base. + ``strong_gens_distr`` - Strong generators distributed by membership in basic + stabilizers. + ``transversals_only`` - bool + A flag switching between returning only the + transversals and both orbits and transversals. + ``slp`` - + If ``True``, return a list of dictionaries containing the + generator presentations of the elements of the transversals, + i.e. the list of indices of generators from ``strong_gens_distr[i]`` + such that their product is the relevant transversal element. 
+ + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> from sympy.combinatorics.util import _distribute_gens_by_base + >>> S = SymmetricGroup(3) + >>> S.schreier_sims() + >>> strong_gens_distr = _distribute_gens_by_base(S.base, S.strong_gens) + >>> (S.base, strong_gens_distr) + ([0, 1], [[(0 1 2), (2)(0 1), (1 2)], [(1 2)]]) + + See Also + ======== + + _distribute_gens_by_base, _handle_precomputed_bsgs + + """ + from sympy.combinatorics.perm_groups import _orbit_transversal + base_len = len(base) + degree = strong_gens_distr[0][0].size + transversals = [None]*base_len + slps = [None]*base_len + if transversals_only is False: + basic_orbits = [None]*base_len + for i in range(base_len): + transversals[i], slps[i] = _orbit_transversal(degree, strong_gens_distr[i], + base[i], pairs=True, slp=True) + transversals[i] = dict(transversals[i]) + if transversals_only is False: + basic_orbits[i] = list(transversals[i].keys()) + if transversals_only: + return transversals + else: + if not slp: + return basic_orbits, transversals + return basic_orbits, transversals, slps + + +def _remove_gens(base, strong_gens, basic_orbits=None, strong_gens_distr=None): + """ + Remove redundant generators from a strong generating set. + + Parameters + ========== + + ``base`` - a base + ``strong_gens`` - a strong generating set relative to ``base`` + ``basic_orbits`` - basic orbits + ``strong_gens_distr`` - strong generators distributed by membership in basic + stabilizers + + Returns + ======= + + A strong generating set with respect to ``base`` which is a subset of + ``strong_gens``. + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> from sympy.combinatorics.util import _remove_gens + >>> from sympy.combinatorics.testutil import _verify_bsgs + >>> S = SymmetricGroup(15) + >>> base, strong_gens = S.schreier_sims_incremental() + >>> new_gens = _remove_gens(base, strong_gens) + >>> len(new_gens) + 14 + >>> _verify_bsgs(S, base, new_gens) + True + + Notes + ===== + + This procedure is outlined in [1],p.95. + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E. + "Handbook of computational group theory" + + """ + from sympy.combinatorics.perm_groups import _orbit + base_len = len(base) + degree = strong_gens[0].size + if strong_gens_distr is None: + strong_gens_distr = _distribute_gens_by_base(base, strong_gens) + if basic_orbits is None: + basic_orbits = [] + for i in range(base_len): + basic_orbit = _orbit(degree, strong_gens_distr[i], base[i]) + basic_orbits.append(basic_orbit) + strong_gens_distr.append([]) + res = strong_gens[:] + for i in range(base_len - 1, -1, -1): + gens_copy = strong_gens_distr[i][:] + for gen in strong_gens_distr[i]: + if gen not in strong_gens_distr[i + 1]: + temp_gens = gens_copy[:] + temp_gens.remove(gen) + if temp_gens == []: + continue + temp_orbit = _orbit(degree, temp_gens, base[i]) + if temp_orbit == basic_orbits[i]: + gens_copy.remove(gen) + res.remove(gen) + return res + + +def _strip(g, base, orbits, transversals): + """ + Attempt to decompose a permutation using a (possibly partial) BSGS + structure. + + Explanation + =========== + + This is done by treating the sequence ``base`` as an actual base, and + the orbits ``orbits`` and transversals ``transversals`` as basic orbits and + transversals relative to it. + + This process is called "sifting". A sift is unsuccessful when a certain + orbit element is not found or when after the sift the decomposition + does not end with the identity element. 
+ + The argument ``transversals`` is a list of dictionaries that provides + transversal elements for the orbits ``orbits``. + + Parameters + ========== + + ``g`` - permutation to be decomposed + ``base`` - sequence of points + ``orbits`` - a list in which the ``i``-th entry is an orbit of ``base[i]`` + under some subgroup of the pointwise stabilizer of ` + `base[0], base[1], ..., base[i - 1]``. The groups themselves are implicit + in this function since the only information we need is encoded in the orbits + and transversals + ``transversals`` - a list of orbit transversals associated with the orbits + ``orbits``. + + Examples + ======== + + >>> from sympy.combinatorics import Permutation, SymmetricGroup + >>> from sympy.combinatorics.util import _strip + >>> S = SymmetricGroup(5) + >>> S.schreier_sims() + >>> g = Permutation([0, 2, 3, 1, 4]) + >>> _strip(g, S.base, S.basic_orbits, S.basic_transversals) + ((4), 5) + + Notes + ===== + + The algorithm is described in [1],pp.89-90. The reason for returning + both the current state of the element being decomposed and the level + at which the sifting ends is that they provide important information for + the randomized version of the Schreier-Sims algorithm. + + References + ========== + + .. [1] Holt, D., Eick, B., O'Brien, E."Handbook of computational group theory" + + See Also + ======== + + sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims + sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims_random + + """ + h = g._array_form + base_len = len(base) + for i in range(base_len): + beta = h[base[i]] + if beta == base[i]: + continue + if beta not in orbits[i]: + return _af_new(h), i + 1 + u = transversals[i][beta]._array_form + h = _af_rmul(_af_invert(u), h) + return _af_new(h), base_len + 1 + + +def _strip_af(h, base, orbits, transversals, j, slp=[], slps={}): + """ + optimized _strip, with h, transversals and result in array form + if the stripped elements is the identity, it returns False, base_len + 1 + + j h[base[i]] == base[i] for i <= j + + """ + base_len = len(base) + for i in range(j+1, base_len): + beta = h[base[i]] + if beta == base[i]: + continue + if beta not in orbits[i]: + if not slp: + return h, i + 1 + return h, i + 1, slp + u = transversals[i][beta] + if h == u: + if not slp: + return False, base_len + 1 + return False, base_len + 1, slp + h = _af_rmul(_af_invert(u), h) + if slp: + u_slp = slps[i][beta][:] + u_slp.reverse() + u_slp = [(i, (g,)) for g in u_slp] + slp = u_slp + slp + if not slp: + return h, base_len + 1 + return h, base_len + 1, slp + + +def _strong_gens_from_distr(strong_gens_distr): + """ + Retrieve strong generating set from generators of basic stabilizers. + + This is just the union of the generators of the first and second basic + stabilizers. + + Parameters + ========== + + ``strong_gens_distr`` - strong generators distributed by membership in basic + stabilizers + + Examples + ======== + + >>> from sympy.combinatorics import SymmetricGroup + >>> from sympy.combinatorics.util import (_strong_gens_from_distr, + ... 
_distribute_gens_by_base) + >>> S = SymmetricGroup(3) + >>> S.schreier_sims() + >>> S.strong_gens + [(0 1 2), (2)(0 1), (1 2)] + >>> strong_gens_distr = _distribute_gens_by_base(S.base, S.strong_gens) + >>> _strong_gens_from_distr(strong_gens_distr) + [(0 1 2), (2)(0 1), (1 2)] + + See Also + ======== + + _distribute_gens_by_base + + """ + if len(strong_gens_distr) == 1: + return strong_gens_distr[0][:] + else: + result = strong_gens_distr[0] + for gen in strong_gens_distr[1]: + if gen not in result: + result.append(gen) + return result