Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- ckpts/universal/global_step40/zero/10.post_attention_layernorm.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/10.post_attention_layernorm.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step40/zero/24.attention.dense.weight/fp32.pt +3 -0
- ckpts/universal/global_step40/zero/8.input_layernorm.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/8.input_layernorm.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step40/zero/8.input_layernorm.weight/fp32.pt +3 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_constants.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_entropy.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/morestats.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/__pycache__/stats.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/__init__.py +53 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/beta_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/binom_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/hypergeom_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/invgauss_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/nbinom_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/ncf_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/nct_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/ncx2_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_boost/skewnorm_ufunc.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/__init__.py +1224 -0
- venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/levyst.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_rcont/__init__.py +4 -0
- venv/lib/python3.10/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/__init__.py +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/common_tests.py +351 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/__pycache__/_mvt.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/__pycache__/fisher_exact_results_from_r.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/_mvt.py +171 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/fisher_exact_results_from_r.py +607 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/AtmWtAg.dat +108 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SiRstv.dat +85 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs01.dat +249 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs02.dat +1869 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs03.dat +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs04.dat +249 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs05.dat +1869 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs06.dat +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs07.dat +249 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs08.dat +1869 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs09.dat +0 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_linregress/Norris.dat +97 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/data/studentized_range_mpmath_ref.json +1499 -0
- venv/lib/python3.10/site-packages/scipy/stats/tests/test_axis_nan_policy.py +1188 -0
ckpts/universal/global_step40/zero/10.post_attention_layernorm.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2499dee742d6c4650f7702856b9e26785b15c99ae5059300b818076b64439310
|
3 |
+
size 9372
|
ckpts/universal/global_step40/zero/10.post_attention_layernorm.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e6e85e9f611cc866b81ea8aa8bcb7296f7d76c074e5f67845124bbe9d5c2ab6f
|
3 |
+
size 9387
|
ckpts/universal/global_step40/zero/24.attention.dense.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5e7e6fa8ccb9cc938eef23aaabb6fa7fa27a87928118f22a4289a5cdb895c8e1
|
3 |
+
size 16778317
|
ckpts/universal/global_step40/zero/8.input_layernorm.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:17cea6275bb3db06ea0a6bea04b88be865cf24394828603fe67243c1af874849
|
3 |
+
size 9372
|
ckpts/universal/global_step40/zero/8.input_layernorm.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:0d73f7b716570ed0cfc63d123110f9d5873e53868510d5511d8b75b2bf3cc7e0
|
3 |
+
size 9387
|
ckpts/universal/global_step40/zero/8.input_layernorm.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:277b57f89c06ff0b973db615ac858f9a0783bf79da84adb04b4d997e05764998
|
3 |
+
size 9293
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_constants.cpython-310.pyc
ADDED
Binary file (531 Bytes). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_entropy.cpython-310.pyc
ADDED
Binary file (15 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-310.pyc
ADDED
Binary file (15.4 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-310.pyc
ADDED
Binary file (15.8 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc
ADDED
Binary file (373 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/morestats.cpython-310.pyc
ADDED
Binary file (1.29 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/__pycache__/stats.cpython-310.pyc
ADDED
Binary file (1.88 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/__init__.py
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from scipy.stats._boost.beta_ufunc import (
|
2 |
+
_beta_pdf, _beta_cdf, _beta_sf, _beta_ppf,
|
3 |
+
_beta_isf, _beta_mean, _beta_variance,
|
4 |
+
_beta_skewness, _beta_kurtosis_excess,
|
5 |
+
)
|
6 |
+
|
7 |
+
from scipy.stats._boost.binom_ufunc import (
|
8 |
+
_binom_pdf, _binom_cdf, _binom_sf, _binom_ppf,
|
9 |
+
_binom_isf, _binom_mean, _binom_variance,
|
10 |
+
_binom_skewness, _binom_kurtosis_excess,
|
11 |
+
)
|
12 |
+
|
13 |
+
from scipy.stats._boost.nbinom_ufunc import (
|
14 |
+
_nbinom_pdf, _nbinom_cdf, _nbinom_sf, _nbinom_ppf,
|
15 |
+
_nbinom_isf, _nbinom_mean, _nbinom_variance,
|
16 |
+
_nbinom_skewness, _nbinom_kurtosis_excess,
|
17 |
+
)
|
18 |
+
|
19 |
+
from scipy.stats._boost.hypergeom_ufunc import (
|
20 |
+
_hypergeom_pdf, _hypergeom_cdf, _hypergeom_sf, _hypergeom_ppf,
|
21 |
+
_hypergeom_isf, _hypergeom_mean, _hypergeom_variance,
|
22 |
+
_hypergeom_skewness, _hypergeom_kurtosis_excess,
|
23 |
+
)
|
24 |
+
|
25 |
+
from scipy.stats._boost.ncf_ufunc import (
|
26 |
+
_ncf_pdf, _ncf_cdf, _ncf_sf, _ncf_ppf,
|
27 |
+
_ncf_isf, _ncf_mean, _ncf_variance,
|
28 |
+
_ncf_skewness, _ncf_kurtosis_excess,
|
29 |
+
)
|
30 |
+
|
31 |
+
from scipy.stats._boost.ncx2_ufunc import (
|
32 |
+
_ncx2_pdf, _ncx2_cdf, _ncx2_sf, _ncx2_ppf,
|
33 |
+
_ncx2_isf, _ncx2_mean, _ncx2_variance,
|
34 |
+
_ncx2_skewness, _ncx2_kurtosis_excess,
|
35 |
+
)
|
36 |
+
|
37 |
+
from scipy.stats._boost.nct_ufunc import (
|
38 |
+
_nct_pdf, _nct_cdf, _nct_sf, _nct_ppf,
|
39 |
+
_nct_isf, _nct_mean, _nct_variance,
|
40 |
+
_nct_skewness, _nct_kurtosis_excess,
|
41 |
+
)
|
42 |
+
|
43 |
+
from scipy.stats._boost.skewnorm_ufunc import (
|
44 |
+
_skewnorm_pdf, _skewnorm_cdf, _skewnorm_sf, _skewnorm_ppf,
|
45 |
+
_skewnorm_isf, _skewnorm_mean, _skewnorm_variance,
|
46 |
+
_skewnorm_skewness, _skewnorm_kurtosis_excess,
|
47 |
+
)
|
48 |
+
|
49 |
+
from scipy.stats._boost.invgauss_ufunc import (
|
50 |
+
_invgauss_pdf, _invgauss_cdf, _invgauss_sf, _invgauss_ppf,
|
51 |
+
_invgauss_isf, _invgauss_mean, _invgauss_variance,
|
52 |
+
_invgauss_skewness, _invgauss_kurtosis_excess,
|
53 |
+
)
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (2.53 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/beta_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (205 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/binom_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (176 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/hypergeom_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (121 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/invgauss_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (171 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/nbinom_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (180 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/ncf_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (174 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/nct_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (224 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/ncx2_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (175 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_boost/skewnorm_ufunc.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (109 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/__init__.py
ADDED
@@ -0,0 +1,1224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
|
3 |
+
import warnings
|
4 |
+
from functools import partial
|
5 |
+
|
6 |
+
import numpy as np
|
7 |
+
|
8 |
+
from scipy import optimize
|
9 |
+
from scipy import integrate
|
10 |
+
from scipy.integrate._quadrature import _builtincoeffs
|
11 |
+
from scipy import interpolate
|
12 |
+
from scipy.interpolate import RectBivariateSpline
|
13 |
+
import scipy.special as sc
|
14 |
+
from scipy._lib._util import _lazywhere
|
15 |
+
from .._distn_infrastructure import rv_continuous, _ShapeInfo
|
16 |
+
from .._continuous_distns import uniform, expon, _norm_pdf, _norm_cdf
|
17 |
+
from .levyst import Nolan
|
18 |
+
from scipy._lib.doccer import inherit_docstring_from
|
19 |
+
|
20 |
+
|
21 |
+
__all__ = ["levy_stable", "levy_stable_gen", "pdf_from_cf_with_fft"]
|
22 |
+
|
23 |
+
# Stable distributions are known for various parameterisations
|
24 |
+
# some being advantageous for numerical considerations and others
|
25 |
+
# useful due to their location/scale awareness.
|
26 |
+
#
|
27 |
+
# Here we follow [NO] convention (see the references in the docstring
|
28 |
+
# for levy_stable_gen below).
|
29 |
+
#
|
30 |
+
# S0 / Z0 / x0 (aka Zoleterav's M)
|
31 |
+
# S1 / Z1 / x1
|
32 |
+
#
|
33 |
+
# Where S* denotes parameterisation, Z* denotes standardized
|
34 |
+
# version where gamma = 1, delta = 0 and x* denotes variable.
|
35 |
+
#
|
36 |
+
# Scipy's original Stable was a random variate generator. It
|
37 |
+
# uses S1 and unfortunately is not a location/scale aware.
|
38 |
+
|
39 |
+
|
40 |
+
# default numerical integration tolerance
|
41 |
+
# used for epsrel in piecewise and both epsrel and epsabs in dni
|
42 |
+
# (epsabs needed in dni since weighted quad requires epsabs > 0)
|
43 |
+
_QUAD_EPS = 1.2e-14
|
44 |
+
|
45 |
+
|
46 |
+
def _Phi_Z0(alpha, t):
|
47 |
+
return (
|
48 |
+
-np.tan(np.pi * alpha / 2) * (np.abs(t) ** (1 - alpha) - 1)
|
49 |
+
if alpha != 1
|
50 |
+
else -2.0 * np.log(np.abs(t)) / np.pi
|
51 |
+
)
|
52 |
+
|
53 |
+
|
54 |
+
def _Phi_Z1(alpha, t):
|
55 |
+
return (
|
56 |
+
np.tan(np.pi * alpha / 2)
|
57 |
+
if alpha != 1
|
58 |
+
else -2.0 * np.log(np.abs(t)) / np.pi
|
59 |
+
)
|
60 |
+
|
61 |
+
|
62 |
+
def _cf(Phi, t, alpha, beta):
|
63 |
+
"""Characteristic function."""
|
64 |
+
return np.exp(
|
65 |
+
-(np.abs(t) ** alpha) * (1 - 1j * beta * np.sign(t) * Phi(alpha, t))
|
66 |
+
)
|
67 |
+
|
68 |
+
|
69 |
+
_cf_Z0 = partial(_cf, _Phi_Z0)
|
70 |
+
_cf_Z1 = partial(_cf, _Phi_Z1)
|
71 |
+
|
72 |
+
|
73 |
+
def _pdf_single_value_cf_integrate(Phi, x, alpha, beta, **kwds):
|
74 |
+
"""To improve DNI accuracy convert characteristic function in to real
|
75 |
+
valued integral using Euler's formula, then exploit cosine symmetry to
|
76 |
+
change limits to [0, inf). Finally use cosine addition formula to split
|
77 |
+
into two parts that can be handled by weighted quad pack.
|
78 |
+
"""
|
79 |
+
quad_eps = kwds.get("quad_eps", _QUAD_EPS)
|
80 |
+
|
81 |
+
def integrand1(t):
|
82 |
+
if t == 0:
|
83 |
+
return 0
|
84 |
+
return np.exp(-(t ** alpha)) * (
|
85 |
+
np.cos(beta * (t ** alpha) * Phi(alpha, t))
|
86 |
+
)
|
87 |
+
|
88 |
+
def integrand2(t):
|
89 |
+
if t == 0:
|
90 |
+
return 0
|
91 |
+
return np.exp(-(t ** alpha)) * (
|
92 |
+
np.sin(beta * (t ** alpha) * Phi(alpha, t))
|
93 |
+
)
|
94 |
+
|
95 |
+
with np.errstate(invalid="ignore"):
|
96 |
+
int1, *ret1 = integrate.quad(
|
97 |
+
integrand1,
|
98 |
+
0,
|
99 |
+
np.inf,
|
100 |
+
weight="cos",
|
101 |
+
wvar=x,
|
102 |
+
limit=1000,
|
103 |
+
epsabs=quad_eps,
|
104 |
+
epsrel=quad_eps,
|
105 |
+
full_output=1,
|
106 |
+
)
|
107 |
+
|
108 |
+
int2, *ret2 = integrate.quad(
|
109 |
+
integrand2,
|
110 |
+
0,
|
111 |
+
np.inf,
|
112 |
+
weight="sin",
|
113 |
+
wvar=x,
|
114 |
+
limit=1000,
|
115 |
+
epsabs=quad_eps,
|
116 |
+
epsrel=quad_eps,
|
117 |
+
full_output=1,
|
118 |
+
)
|
119 |
+
|
120 |
+
return (int1 + int2) / np.pi
|
121 |
+
|
122 |
+
|
123 |
+
_pdf_single_value_cf_integrate_Z0 = partial(
|
124 |
+
_pdf_single_value_cf_integrate, _Phi_Z0
|
125 |
+
)
|
126 |
+
_pdf_single_value_cf_integrate_Z1 = partial(
|
127 |
+
_pdf_single_value_cf_integrate, _Phi_Z1
|
128 |
+
)
|
129 |
+
|
130 |
+
|
131 |
+
def _nolan_round_x_near_zeta(x0, alpha, zeta, x_tol_near_zeta):
|
132 |
+
"""Round x close to zeta for Nolan's method in [NO]."""
|
133 |
+
# "8. When |x0-beta*tan(pi*alpha/2)| is small, the
|
134 |
+
# computations of the density and cumulative have numerical problems.
|
135 |
+
# The program works around this by setting
|
136 |
+
# z = beta*tan(pi*alpha/2) when
|
137 |
+
# |z-beta*tan(pi*alpha/2)| < tol(5)*alpha**(1/alpha).
|
138 |
+
# (The bound on the right is ad hoc, to get reasonable behavior
|
139 |
+
# when alpha is small)."
|
140 |
+
# where tol(5) = 0.5e-2 by default.
|
141 |
+
#
|
142 |
+
# We seem to have partially addressed this through re-expression of
|
143 |
+
# g(theta) here, but it still needs to be used in some extreme cases.
|
144 |
+
# Perhaps tol(5) = 0.5e-2 could be reduced for our implementation.
|
145 |
+
if np.abs(x0 - zeta) < x_tol_near_zeta * alpha ** (1 / alpha):
|
146 |
+
x0 = zeta
|
147 |
+
return x0
|
148 |
+
|
149 |
+
|
150 |
+
def _nolan_round_difficult_input(
|
151 |
+
x0, alpha, beta, zeta, x_tol_near_zeta, alpha_tol_near_one
|
152 |
+
):
|
153 |
+
"""Round difficult input values for Nolan's method in [NO]."""
|
154 |
+
|
155 |
+
# following Nolan's STABLE,
|
156 |
+
# "1. When 0 < |alpha-1| < 0.005, the program has numerical problems
|
157 |
+
# evaluating the pdf and cdf. The current version of the program sets
|
158 |
+
# alpha=1 in these cases. This approximation is not bad in the S0
|
159 |
+
# parameterization."
|
160 |
+
if np.abs(alpha - 1) < alpha_tol_near_one:
|
161 |
+
alpha = 1.0
|
162 |
+
|
163 |
+
# "2. When alpha=1 and |beta| < 0.005, the program has numerical
|
164 |
+
# problems. The current version sets beta=0."
|
165 |
+
# We seem to have addressed this through re-expression of g(theta) here
|
166 |
+
|
167 |
+
x0 = _nolan_round_x_near_zeta(x0, alpha, zeta, x_tol_near_zeta)
|
168 |
+
return x0, alpha, beta
|
169 |
+
|
170 |
+
|
171 |
+
def _pdf_single_value_piecewise_Z1(x, alpha, beta, **kwds):
|
172 |
+
# convert from Nolan's S_1 (aka S) to S_0 (aka Zolaterev M)
|
173 |
+
# parameterization
|
174 |
+
|
175 |
+
zeta = -beta * np.tan(np.pi * alpha / 2.0)
|
176 |
+
x0 = x + zeta if alpha != 1 else x
|
177 |
+
|
178 |
+
return _pdf_single_value_piecewise_Z0(x0, alpha, beta, **kwds)
|
179 |
+
|
180 |
+
|
181 |
+
def _pdf_single_value_piecewise_Z0(x0, alpha, beta, **kwds):
|
182 |
+
|
183 |
+
quad_eps = kwds.get("quad_eps", _QUAD_EPS)
|
184 |
+
x_tol_near_zeta = kwds.get("piecewise_x_tol_near_zeta", 0.005)
|
185 |
+
alpha_tol_near_one = kwds.get("piecewise_alpha_tol_near_one", 0.005)
|
186 |
+
|
187 |
+
zeta = -beta * np.tan(np.pi * alpha / 2.0)
|
188 |
+
x0, alpha, beta = _nolan_round_difficult_input(
|
189 |
+
x0, alpha, beta, zeta, x_tol_near_zeta, alpha_tol_near_one
|
190 |
+
)
|
191 |
+
|
192 |
+
# some other known distribution pdfs / analytical cases
|
193 |
+
# TODO: add more where possible with test coverage,
|
194 |
+
# eg https://en.wikipedia.org/wiki/Stable_distribution#Other_analytic_cases
|
195 |
+
if alpha == 2.0:
|
196 |
+
# normal
|
197 |
+
return _norm_pdf(x0 / np.sqrt(2)) / np.sqrt(2)
|
198 |
+
elif alpha == 0.5 and beta == 1.0:
|
199 |
+
# levy
|
200 |
+
# since S(1/2, 1, gamma, delta; <x>) ==
|
201 |
+
# S(1/2, 1, gamma, gamma + delta; <x0>).
|
202 |
+
_x = x0 + 1
|
203 |
+
if _x <= 0:
|
204 |
+
return 0
|
205 |
+
|
206 |
+
return 1 / np.sqrt(2 * np.pi * _x) / _x * np.exp(-1 / (2 * _x))
|
207 |
+
elif alpha == 0.5 and beta == 0.0 and x0 != 0:
|
208 |
+
# analytical solution [HO]
|
209 |
+
S, C = sc.fresnel([1 / np.sqrt(2 * np.pi * np.abs(x0))])
|
210 |
+
arg = 1 / (4 * np.abs(x0))
|
211 |
+
return (
|
212 |
+
np.sin(arg) * (0.5 - S[0]) + np.cos(arg) * (0.5 - C[0])
|
213 |
+
) / np.sqrt(2 * np.pi * np.abs(x0) ** 3)
|
214 |
+
elif alpha == 1.0 and beta == 0.0:
|
215 |
+
# cauchy
|
216 |
+
return 1 / (1 + x0 ** 2) / np.pi
|
217 |
+
|
218 |
+
return _pdf_single_value_piecewise_post_rounding_Z0(
|
219 |
+
x0, alpha, beta, quad_eps, x_tol_near_zeta
|
220 |
+
)
|
221 |
+
|
222 |
+
|
223 |
+
def _pdf_single_value_piecewise_post_rounding_Z0(x0, alpha, beta, quad_eps,
|
224 |
+
x_tol_near_zeta):
|
225 |
+
"""Calculate pdf using Nolan's methods as detailed in [NO]."""
|
226 |
+
|
227 |
+
_nolan = Nolan(alpha, beta, x0)
|
228 |
+
zeta = _nolan.zeta
|
229 |
+
xi = _nolan.xi
|
230 |
+
c2 = _nolan.c2
|
231 |
+
g = _nolan.g
|
232 |
+
|
233 |
+
# round x0 to zeta again if needed. zeta was recomputed and may have
|
234 |
+
# changed due to floating point differences.
|
235 |
+
# See https://github.com/scipy/scipy/pull/18133
|
236 |
+
x0 = _nolan_round_x_near_zeta(x0, alpha, zeta, x_tol_near_zeta)
|
237 |
+
# handle Nolan's initial case logic
|
238 |
+
if x0 == zeta:
|
239 |
+
return (
|
240 |
+
sc.gamma(1 + 1 / alpha)
|
241 |
+
* np.cos(xi)
|
242 |
+
/ np.pi
|
243 |
+
/ ((1 + zeta ** 2) ** (1 / alpha / 2))
|
244 |
+
)
|
245 |
+
elif x0 < zeta:
|
246 |
+
return _pdf_single_value_piecewise_post_rounding_Z0(
|
247 |
+
-x0, alpha, -beta, quad_eps, x_tol_near_zeta
|
248 |
+
)
|
249 |
+
|
250 |
+
# following Nolan, we may now assume
|
251 |
+
# x0 > zeta when alpha != 1
|
252 |
+
# beta != 0 when alpha == 1
|
253 |
+
|
254 |
+
# spare calculating integral on null set
|
255 |
+
# use isclose as macos has fp differences
|
256 |
+
if np.isclose(-xi, np.pi / 2, rtol=1e-014, atol=1e-014):
|
257 |
+
return 0.0
|
258 |
+
|
259 |
+
def integrand(theta):
|
260 |
+
# limit any numerical issues leading to g_1 < 0 near theta limits
|
261 |
+
g_1 = g(theta)
|
262 |
+
if not np.isfinite(g_1) or g_1 < 0:
|
263 |
+
g_1 = 0
|
264 |
+
return g_1 * np.exp(-g_1)
|
265 |
+
|
266 |
+
with np.errstate(all="ignore"):
|
267 |
+
peak = optimize.bisect(
|
268 |
+
lambda t: g(t) - 1, -xi, np.pi / 2, xtol=quad_eps
|
269 |
+
)
|
270 |
+
|
271 |
+
# this integrand can be very peaked, so we need to force
|
272 |
+
# QUADPACK to evaluate the function inside its support
|
273 |
+
#
|
274 |
+
|
275 |
+
# lastly, we add additional samples at
|
276 |
+
# ~exp(-100), ~exp(-10), ~exp(-5), ~exp(-1)
|
277 |
+
# to improve QUADPACK's detection of rapidly descending tail behavior
|
278 |
+
# (this choice is fairly ad hoc)
|
279 |
+
tail_points = [
|
280 |
+
optimize.bisect(lambda t: g(t) - exp_height, -xi, np.pi / 2)
|
281 |
+
for exp_height in [100, 10, 5]
|
282 |
+
# exp_height = 1 is handled by peak
|
283 |
+
]
|
284 |
+
intg_points = [0, peak] + tail_points
|
285 |
+
intg, *ret = integrate.quad(
|
286 |
+
integrand,
|
287 |
+
-xi,
|
288 |
+
np.pi / 2,
|
289 |
+
points=intg_points,
|
290 |
+
limit=100,
|
291 |
+
epsrel=quad_eps,
|
292 |
+
epsabs=0,
|
293 |
+
full_output=1,
|
294 |
+
)
|
295 |
+
|
296 |
+
return c2 * intg
|
297 |
+
|
298 |
+
|
299 |
+
def _cdf_single_value_piecewise_Z1(x, alpha, beta, **kwds):
|
300 |
+
# convert from Nolan's S_1 (aka S) to S_0 (aka Zolaterev M)
|
301 |
+
# parameterization
|
302 |
+
|
303 |
+
zeta = -beta * np.tan(np.pi * alpha / 2.0)
|
304 |
+
x0 = x + zeta if alpha != 1 else x
|
305 |
+
|
306 |
+
return _cdf_single_value_piecewise_Z0(x0, alpha, beta, **kwds)
|
307 |
+
|
308 |
+
|
309 |
+
def _cdf_single_value_piecewise_Z0(x0, alpha, beta, **kwds):
|
310 |
+
|
311 |
+
quad_eps = kwds.get("quad_eps", _QUAD_EPS)
|
312 |
+
x_tol_near_zeta = kwds.get("piecewise_x_tol_near_zeta", 0.005)
|
313 |
+
alpha_tol_near_one = kwds.get("piecewise_alpha_tol_near_one", 0.005)
|
314 |
+
|
315 |
+
zeta = -beta * np.tan(np.pi * alpha / 2.0)
|
316 |
+
x0, alpha, beta = _nolan_round_difficult_input(
|
317 |
+
x0, alpha, beta, zeta, x_tol_near_zeta, alpha_tol_near_one
|
318 |
+
)
|
319 |
+
|
320 |
+
# some other known distribution cdfs / analytical cases
|
321 |
+
# TODO: add more where possible with test coverage,
|
322 |
+
# eg https://en.wikipedia.org/wiki/Stable_distribution#Other_analytic_cases
|
323 |
+
if alpha == 2.0:
|
324 |
+
# normal
|
325 |
+
return _norm_cdf(x0 / np.sqrt(2))
|
326 |
+
elif alpha == 0.5 and beta == 1.0:
|
327 |
+
# levy
|
328 |
+
# since S(1/2, 1, gamma, delta; <x>) ==
|
329 |
+
# S(1/2, 1, gamma, gamma + delta; <x0>).
|
330 |
+
_x = x0 + 1
|
331 |
+
if _x <= 0:
|
332 |
+
return 0
|
333 |
+
|
334 |
+
return sc.erfc(np.sqrt(0.5 / _x))
|
335 |
+
elif alpha == 1.0 and beta == 0.0:
|
336 |
+
# cauchy
|
337 |
+
return 0.5 + np.arctan(x0) / np.pi
|
338 |
+
|
339 |
+
return _cdf_single_value_piecewise_post_rounding_Z0(
|
340 |
+
x0, alpha, beta, quad_eps, x_tol_near_zeta
|
341 |
+
)
|
342 |
+
|
343 |
+
|
344 |
+
def _cdf_single_value_piecewise_post_rounding_Z0(x0, alpha, beta, quad_eps,
|
345 |
+
x_tol_near_zeta):
|
346 |
+
"""Calculate cdf using Nolan's methods as detailed in [NO]."""
|
347 |
+
_nolan = Nolan(alpha, beta, x0)
|
348 |
+
zeta = _nolan.zeta
|
349 |
+
xi = _nolan.xi
|
350 |
+
c1 = _nolan.c1
|
351 |
+
# c2 = _nolan.c2
|
352 |
+
c3 = _nolan.c3
|
353 |
+
g = _nolan.g
|
354 |
+
# round x0 to zeta again if needed. zeta was recomputed and may have
|
355 |
+
# changed due to floating point differences.
|
356 |
+
# See https://github.com/scipy/scipy/pull/18133
|
357 |
+
x0 = _nolan_round_x_near_zeta(x0, alpha, zeta, x_tol_near_zeta)
|
358 |
+
# handle Nolan's initial case logic
|
359 |
+
if (alpha == 1 and beta < 0) or x0 < zeta:
|
360 |
+
# NOTE: Nolan's paper has a typo here!
|
361 |
+
# He states F(x) = 1 - F(x, alpha, -beta), but this is clearly
|
362 |
+
# incorrect since F(-infty) would be 1.0 in this case
|
363 |
+
# Indeed, the alpha != 1, x0 < zeta case is correct here.
|
364 |
+
return 1 - _cdf_single_value_piecewise_post_rounding_Z0(
|
365 |
+
-x0, alpha, -beta, quad_eps, x_tol_near_zeta
|
366 |
+
)
|
367 |
+
elif x0 == zeta:
|
368 |
+
return 0.5 - xi / np.pi
|
369 |
+
|
370 |
+
# following Nolan, we may now assume
|
371 |
+
# x0 > zeta when alpha != 1
|
372 |
+
# beta > 0 when alpha == 1
|
373 |
+
|
374 |
+
# spare calculating integral on null set
|
375 |
+
# use isclose as macos has fp differences
|
376 |
+
if np.isclose(-xi, np.pi / 2, rtol=1e-014, atol=1e-014):
|
377 |
+
return c1
|
378 |
+
|
379 |
+
def integrand(theta):
|
380 |
+
g_1 = g(theta)
|
381 |
+
return np.exp(-g_1)
|
382 |
+
|
383 |
+
with np.errstate(all="ignore"):
|
384 |
+
# shrink supports where required
|
385 |
+
left_support = -xi
|
386 |
+
right_support = np.pi / 2
|
387 |
+
if alpha > 1:
|
388 |
+
# integrand(t) monotonic 0 to 1
|
389 |
+
if integrand(-xi) != 0.0:
|
390 |
+
res = optimize.minimize(
|
391 |
+
integrand,
|
392 |
+
(-xi,),
|
393 |
+
method="L-BFGS-B",
|
394 |
+
bounds=[(-xi, np.pi / 2)],
|
395 |
+
)
|
396 |
+
left_support = res.x[0]
|
397 |
+
else:
|
398 |
+
# integrand(t) monotonic 1 to 0
|
399 |
+
if integrand(np.pi / 2) != 0.0:
|
400 |
+
res = optimize.minimize(
|
401 |
+
integrand,
|
402 |
+
(np.pi / 2,),
|
403 |
+
method="L-BFGS-B",
|
404 |
+
bounds=[(-xi, np.pi / 2)],
|
405 |
+
)
|
406 |
+
right_support = res.x[0]
|
407 |
+
|
408 |
+
intg, *ret = integrate.quad(
|
409 |
+
integrand,
|
410 |
+
left_support,
|
411 |
+
right_support,
|
412 |
+
points=[left_support, right_support],
|
413 |
+
limit=100,
|
414 |
+
epsrel=quad_eps,
|
415 |
+
epsabs=0,
|
416 |
+
full_output=1,
|
417 |
+
)
|
418 |
+
|
419 |
+
return c1 + c3 * intg
|
420 |
+
|
421 |
+
|
422 |
+
def _rvs_Z1(alpha, beta, size=None, random_state=None):
    """Simulate random variables using Nolan's methods as detailed in [NO].

    Samples are drawn in the Z1 (a.k.a. S1-related) parameterization by
    transforming a uniform angle ``TH`` on (-pi/2, pi/2) and a standard
    exponential ``W`` (the Chambers-Mallows-Stuck style construction).
    ``alpha`` and ``beta`` are broadcast to ``size``, so parameter arrays
    are supported elementwise.
    """

    def alpha1func(alpha, beta, TH, aTH, bTH, cosTH, tanTH, W):
        # Transformation used for the elements where alpha == 1.
        return (
            2
            / np.pi
            * (
                (np.pi / 2 + bTH) * tanTH
                - beta * np.log((np.pi / 2 * W * cosTH) / (np.pi / 2 + bTH))
            )
        )

    def beta0func(alpha, beta, TH, aTH, bTH, cosTH, tanTH, W):
        # Transformation for alpha != 1 with beta == 0 (symmetric case).
        return (
            W
            / (cosTH / np.tan(aTH) + np.sin(TH))
            * ((np.cos(aTH) + np.sin(aTH) * tanTH) / W) ** (1.0 / alpha)
        )

    def otherwise(alpha, beta, TH, aTH, bTH, cosTH, tanTH, W):
        # alpha is not 1 and beta is not 0
        val0 = beta * np.tan(np.pi * alpha / 2)
        th0 = np.arctan(val0) / alpha
        val3 = W / (cosTH / np.tan(alpha * (th0 + TH)) + np.sin(TH))
        res3 = val3 * (
            (
                np.cos(aTH)
                + np.sin(aTH) * tanTH
                - val0 * (np.sin(aTH) - np.cos(aTH) * tanTH)
            )
            / W
        ) ** (1.0 / alpha)
        return res3

    def alphanot1func(alpha, beta, TH, aTH, bTH, cosTH, tanTH, W):
        # For alpha != 1, select elementwise between the symmetric and the
        # general (skewed) transformation.
        res = _lazywhere(
            beta == 0,
            (alpha, beta, TH, aTH, bTH, cosTH, tanTH, W),
            beta0func,
            f2=otherwise,
        )
        return res

    alpha = np.broadcast_to(alpha, size)
    beta = np.broadcast_to(beta, size)
    # TH ~ Uniform(-pi/2, pi/2), W ~ Exponential(1): the two sources of
    # randomness in the construction.
    TH = uniform.rvs(
        loc=-np.pi / 2.0, scale=np.pi, size=size, random_state=random_state
    )
    W = expon.rvs(size=size, random_state=random_state)
    # Precompute quantities shared by all branch functions.
    aTH = alpha * TH
    bTH = beta * TH
    cosTH = np.cos(TH)
    tanTH = np.tan(TH)
    # Dispatch elementwise on alpha == 1 vs alpha != 1.
    res = _lazywhere(
        alpha == 1,
        (alpha, beta, TH, aTH, bTH, cosTH, tanTH, W),
        alpha1func,
        f2=alphanot1func,
    )
    return res
|
484 |
+
|
485 |
+
|
486 |
+
def _fitstart_S0(data):
|
487 |
+
alpha, beta, delta1, gamma = _fitstart_S1(data)
|
488 |
+
|
489 |
+
# Formulas for mapping parameters in S1 parameterization to
|
490 |
+
# those in S0 parameterization can be found in [NO]. Note that
|
491 |
+
# only delta changes.
|
492 |
+
if alpha != 1:
|
493 |
+
delta0 = delta1 + beta * gamma * np.tan(np.pi * alpha / 2.0)
|
494 |
+
else:
|
495 |
+
delta0 = delta1 + 2 * beta * gamma * np.log(gamma) / np.pi
|
496 |
+
|
497 |
+
return alpha, beta, delta0, gamma
|
498 |
+
|
499 |
+
|
500 |
+
def _fitstart_S1(data):
    """Quantile-based initial estimate (S1 parameterization) of the four
    stable-distribution parameters ``(alpha, beta, delta, c)``.
    """
    # We follow McCulloch 1986 method - Simple Consistent Estimators
    # of Stable Distribution Parameters.  The psi_*/phi_* functions below
    # are bilinear interpolations of the paper's printed lookup tables.

    # fmt: off
    # Table III and IV
    nu_alpha_range = [2.439, 2.5, 2.6, 2.7, 2.8, 3, 3.2, 3.5, 4,
                      5, 6, 8, 10, 15, 25]
    nu_beta_range = [0, 0.1, 0.2, 0.3, 0.5, 0.7, 1]

    # table III - alpha = psi_1(nu_alpha, nu_beta)
    alpha_table = np.array([
        [2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000],
        [1.916, 1.924, 1.924, 1.924, 1.924, 1.924, 1.924],
        [1.808, 1.813, 1.829, 1.829, 1.829, 1.829, 1.829],
        [1.729, 1.730, 1.737, 1.745, 1.745, 1.745, 1.745],
        [1.664, 1.663, 1.663, 1.668, 1.676, 1.676, 1.676],
        [1.563, 1.560, 1.553, 1.548, 1.547, 1.547, 1.547],
        [1.484, 1.480, 1.471, 1.460, 1.448, 1.438, 1.438],
        [1.391, 1.386, 1.378, 1.364, 1.337, 1.318, 1.318],
        [1.279, 1.273, 1.266, 1.250, 1.210, 1.184, 1.150],
        [1.128, 1.121, 1.114, 1.101, 1.067, 1.027, 0.973],
        [1.029, 1.021, 1.014, 1.004, 0.974, 0.935, 0.874],
        [0.896, 0.892, 0.884, 0.883, 0.855, 0.823, 0.769],
        [0.818, 0.812, 0.806, 0.801, 0.780, 0.756, 0.691],
        [0.698, 0.695, 0.692, 0.689, 0.676, 0.656, 0.597],
        [0.593, 0.590, 0.588, 0.586, 0.579, 0.563, 0.513]]).T
    # transpose because interpolation with `RectBivariateSpline` is with
    # `nu_beta` as `x` and `nu_alpha` as `y`

    # table IV - beta = psi_2(nu_alpha, nu_beta)
    beta_table = np.array([
        [0, 2.160, 1.000, 1.000, 1.000, 1.000, 1.000],
        [0, 1.592, 3.390, 1.000, 1.000, 1.000, 1.000],
        [0, 0.759, 1.800, 1.000, 1.000, 1.000, 1.000],
        [0, 0.482, 1.048, 1.694, 1.000, 1.000, 1.000],
        [0, 0.360, 0.760, 1.232, 2.229, 1.000, 1.000],
        [0, 0.253, 0.518, 0.823, 1.575, 1.000, 1.000],
        [0, 0.203, 0.410, 0.632, 1.244, 1.906, 1.000],
        [0, 0.165, 0.332, 0.499, 0.943, 1.560, 1.000],
        [0, 0.136, 0.271, 0.404, 0.689, 1.230, 2.195],
        [0, 0.109, 0.216, 0.323, 0.539, 0.827, 1.917],
        [0, 0.096, 0.190, 0.284, 0.472, 0.693, 1.759],
        [0, 0.082, 0.163, 0.243, 0.412, 0.601, 1.596],
        [0, 0.074, 0.147, 0.220, 0.377, 0.546, 1.482],
        [0, 0.064, 0.128, 0.191, 0.330, 0.478, 1.362],
        [0, 0.056, 0.112, 0.167, 0.285, 0.428, 1.274]]).T

    # Table V and VII
    # These are ordered with decreasing `alpha_range`; so we will need to
    # reverse them as required by RectBivariateSpline.
    alpha_range = [2, 1.9, 1.8, 1.7, 1.6, 1.5, 1.4, 1.3, 1.2, 1.1,
                   1, 0.9, 0.8, 0.7, 0.6, 0.5][::-1]
    beta_range = [0, 0.25, 0.5, 0.75, 1]

    # Table V - nu_c = psi_3(alpha, beta)
    nu_c_table = np.array([
        [1.908, 1.908, 1.908, 1.908, 1.908],
        [1.914, 1.915, 1.916, 1.918, 1.921],
        [1.921, 1.922, 1.927, 1.936, 1.947],
        [1.927, 1.930, 1.943, 1.961, 1.987],
        [1.933, 1.940, 1.962, 1.997, 2.043],
        [1.939, 1.952, 1.988, 2.045, 2.116],
        [1.946, 1.967, 2.022, 2.106, 2.211],
        [1.955, 1.984, 2.067, 2.188, 2.333],
        [1.965, 2.007, 2.125, 2.294, 2.491],
        [1.980, 2.040, 2.205, 2.435, 2.696],
        [2.000, 2.085, 2.311, 2.624, 2.973],
        [2.040, 2.149, 2.461, 2.886, 3.356],
        [2.098, 2.244, 2.676, 3.265, 3.912],
        [2.189, 2.392, 3.004, 3.844, 4.775],
        [2.337, 2.634, 3.542, 4.808, 6.247],
        [2.588, 3.073, 4.534, 6.636, 9.144]])[::-1].T
    # transpose because interpolation with `RectBivariateSpline` is with
    # `beta` as `x` and `alpha` as `y`

    # Table VII - nu_zeta = psi_5(alpha, beta)
    nu_zeta_table = np.array([
        [0, 0.000, 0.000, 0.000, 0.000],
        [0, -0.017, -0.032, -0.049, -0.064],
        [0, -0.030, -0.061, -0.092, -0.123],
        [0, -0.043, -0.088, -0.132, -0.179],
        [0, -0.056, -0.111, -0.170, -0.232],
        [0, -0.066, -0.134, -0.206, -0.283],
        [0, -0.075, -0.154, -0.241, -0.335],
        [0, -0.084, -0.173, -0.276, -0.390],
        [0, -0.090, -0.192, -0.310, -0.447],
        [0, -0.095, -0.208, -0.346, -0.508],
        [0, -0.098, -0.223, -0.380, -0.576],
        [0, -0.099, -0.237, -0.424, -0.652],
        [0, -0.096, -0.250, -0.469, -0.742],
        [0, -0.089, -0.262, -0.520, -0.853],
        [0, -0.078, -0.272, -0.581, -0.997],
        [0, -0.061, -0.279, -0.659, -1.198]])[::-1].T
    # fmt: on

    # kx=ky=1, s=0: exact bilinear interpolation of the table values.
    psi_1 = RectBivariateSpline(nu_beta_range, nu_alpha_range,
                                alpha_table, kx=1, ky=1, s=0)

    def psi_1_1(nu_beta, nu_alpha):
        # Tables are printed for nu_beta >= 0; use symmetry for nu_beta < 0.
        return psi_1(nu_beta, nu_alpha) \
            if nu_beta > 0 else psi_1(-nu_beta, nu_alpha)

    psi_2 = RectBivariateSpline(nu_beta_range, nu_alpha_range,
                                beta_table, kx=1, ky=1, s=0)

    def psi_2_1(nu_beta, nu_alpha):
        # beta is odd in nu_beta (antisymmetric extension).
        return psi_2(nu_beta, nu_alpha) \
            if nu_beta > 0 else -psi_2(-nu_beta, nu_alpha)

    phi_3 = RectBivariateSpline(beta_range, alpha_range, nu_c_table,
                                kx=1, ky=1, s=0)

    def phi_3_1(beta, alpha):
        # nu_c is even in beta.
        return phi_3(beta, alpha) if beta > 0 else phi_3(-beta, alpha)

    phi_5 = RectBivariateSpline(beta_range, alpha_range, nu_zeta_table,
                                kx=1, ky=1, s=0)

    def phi_5_1(beta, alpha):
        # nu_zeta is odd in beta.
        return phi_5(beta, alpha) if beta > 0 else -phi_5(-beta, alpha)

    # quantiles
    p05 = np.percentile(data, 5)
    p50 = np.percentile(data, 50)
    p95 = np.percentile(data, 95)
    p25 = np.percentile(data, 25)
    p75 = np.percentile(data, 75)

    # Sample analogues of the paper's tail-width and asymmetry statistics.
    nu_alpha = (p95 - p05) / (p75 - p25)
    nu_beta = (p95 + p05 - 2 * p50) / (p95 - p05)

    if nu_alpha >= 2.439:
        # Inside the tabulated range: interpolate, then clip to the
        # admissible parameter domain.
        eps = np.finfo(float).eps
        alpha = np.clip(psi_1_1(nu_beta, nu_alpha)[0, 0], eps, 2.)
        beta = np.clip(psi_2_1(nu_beta, nu_alpha)[0, 0], -1.0, 1.0)
    else:
        # Below the tabulated range the distribution is treated as Gaussian.
        alpha = 2.0
        beta = np.sign(nu_beta)
    c = (p75 - p25) / phi_3_1(beta, alpha)[0, 0]
    zeta = p50 + c * phi_5_1(beta, alpha)[0, 0]
    # Convert the zeta location to the S1 delta (no shift when alpha == 1).
    delta = zeta-beta*c*np.tan(np.pi*alpha/2.) if alpha != 1. else zeta

    return (alpha, beta, delta, c)
|
644 |
+
|
645 |
+
|
646 |
+
class levy_stable_gen(rv_continuous):
|
647 |
+
r"""A Levy-stable continuous random variable.
|
648 |
+
|
649 |
+
%(before_notes)s
|
650 |
+
|
651 |
+
See Also
|
652 |
+
--------
|
653 |
+
levy, levy_l, cauchy, norm
|
654 |
+
|
655 |
+
Notes
|
656 |
+
-----
|
657 |
+
The distribution for `levy_stable` has characteristic function:
|
658 |
+
|
659 |
+
.. math::
|
660 |
+
|
661 |
+
\varphi(t, \alpha, \beta, c, \mu) =
|
662 |
+
e^{it\mu -|ct|^{\alpha}(1-i\beta\operatorname{sign}(t)\Phi(\alpha, t))}
|
663 |
+
|
664 |
+
where two different parameterizations are supported. The first :math:`S_1`:
|
665 |
+
|
666 |
+
.. math::
|
667 |
+
|
668 |
+
\Phi = \begin{cases}
|
669 |
+
\tan \left({\frac {\pi \alpha }{2}}\right)&\alpha \neq 1\\
|
670 |
+
-{\frac {2}{\pi }}\log |t|&\alpha =1
|
671 |
+
\end{cases}
|
672 |
+
|
673 |
+
The second :math:`S_0`:
|
674 |
+
|
675 |
+
.. math::
|
676 |
+
|
677 |
+
\Phi = \begin{cases}
|
678 |
+
-\tan \left({\frac {\pi \alpha }{2}}\right)(|ct|^{1-\alpha}-1)
|
679 |
+
&\alpha \neq 1\\
|
680 |
+
-{\frac {2}{\pi }}\log |ct|&\alpha =1
|
681 |
+
\end{cases}
|
682 |
+
|
683 |
+
|
684 |
+
The probability density function for `levy_stable` is:
|
685 |
+
|
686 |
+
.. math::
|
687 |
+
|
688 |
+
f(x) = \frac{1}{2\pi}\int_{-\infty}^\infty \varphi(t)e^{-ixt}\,dt
|
689 |
+
|
690 |
+
where :math:`-\infty < t < \infty`. This integral does not have a known
|
691 |
+
closed form.
|
692 |
+
|
693 |
+
`levy_stable` generalizes several distributions. Where possible, they
|
694 |
+
should be used instead. Specifically, when the shape parameters
|
695 |
+
assume the values in the table below, the corresponding equivalent
|
696 |
+
distribution should be used.
|
697 |
+
|
698 |
+
========= ======== ===========
|
699 |
+
``alpha`` ``beta`` Equivalent
|
700 |
+
========= ======== ===========
|
701 |
+
1/2 -1 `levy_l`
|
702 |
+
1/2 1 `levy`
|
703 |
+
1 0 `cauchy`
|
704 |
+
2 any `norm` (with ``scale=sqrt(2)``)
|
705 |
+
========= ======== ===========
|
706 |
+
|
707 |
+
Evaluation of the pdf uses Nolan's piecewise integration approach with the
|
708 |
+
Zolotarev :math:`M` parameterization by default. There is also the option
|
709 |
+
to use direct numerical integration of the standard parameterization of the
|
710 |
+
characteristic function or to evaluate by taking the FFT of the
|
711 |
+
characteristic function.
|
712 |
+
|
713 |
+
The default method can be changed by setting the class variable
|
714 |
+
``levy_stable.pdf_default_method`` to one of 'piecewise' for Nolan's
|
715 |
+
approach, 'dni' for direct numerical integration, or 'fft-simpson' for the
|
716 |
+
FFT based approach. For the sake of backwards compatibility, the methods
|
717 |
+
'best' and 'zolotarev' are equivalent to 'piecewise' and the method
|
718 |
+
'quadrature' is equivalent to 'dni'.
|
719 |
+
|
720 |
+
The parameterization can be changed by setting the class variable
|
721 |
+
``levy_stable.parameterization`` to either 'S0' or 'S1'.
|
722 |
+
The default is 'S1'.
|
723 |
+
|
724 |
+
To improve performance of piecewise and direct numerical integration one
|
725 |
+
can specify ``levy_stable.quad_eps`` (defaults to 1.2e-14). This is used
|
726 |
+
as both the absolute and relative quadrature tolerance for direct numerical
|
727 |
+
integration and as the relative quadrature tolerance for the piecewise
|
728 |
+
method. One can also specify ``levy_stable.piecewise_x_tol_near_zeta``
|
729 |
+
(defaults to 0.005) for how close x is to zeta before it is considered the
|
730 |
+
same as x [NO]. The exact check is
|
731 |
+
``abs(x0 - zeta) < piecewise_x_tol_near_zeta*alpha**(1/alpha)``. One can
|
732 |
+
also specify ``levy_stable.piecewise_alpha_tol_near_one`` (defaults to
|
733 |
+
0.005) for how close alpha is to 1 before being considered equal to 1.
|
734 |
+
|
735 |
+
To increase accuracy of FFT calculation one can specify
|
736 |
+
``levy_stable.pdf_fft_grid_spacing`` (defaults to 0.001) and
|
737 |
+
``pdf_fft_n_points_two_power`` (defaults to None which means a value is
|
738 |
+
calculated that sufficiently covers the input range).
|
739 |
+
|
740 |
+
Further control over FFT calculation is available by setting
|
741 |
+
``pdf_fft_interpolation_degree`` (defaults to 3) for spline order and
|
742 |
+
``pdf_fft_interpolation_level`` for determining the number of points to use
|
743 |
+
in the Newton-Cotes formula when approximating the characteristic function
|
744 |
+
(considered experimental).
|
745 |
+
|
746 |
+
Evaluation of the cdf uses Nolan's piecewise integration approach with the
|
747 |
+
Zolotarev :math:`S_0` parameterization by default. There is also the option
|
748 |
+
to evaluate through integration of an interpolated spline of the pdf
|
749 |
+
calculated by means of the FFT method. The settings affecting FFT
|
750 |
+
calculation are the same as for pdf calculation. The default cdf method can
|
751 |
+
be changed by setting ``levy_stable.cdf_default_method`` to either
|
752 |
+
'piecewise' or 'fft-simpson'. For cdf calculations the Zolotarev method is
|
753 |
+
superior in accuracy, so FFT is disabled by default.
|
754 |
+
|
755 |
+
Fitting estimate uses quantile estimation method in [MC]. MLE estimation of
|
756 |
+
parameters in fit method uses this quantile estimate initially. Note that
|
757 |
+
MLE doesn't always converge if using FFT for pdf calculations; this will be
|
758 |
+
the case if alpha <= 1 where the FFT approach doesn't give good
|
759 |
+
approximations.
|
760 |
+
|
761 |
+
Any non-missing value for the attribute
|
762 |
+
``levy_stable.pdf_fft_min_points_threshold`` will set
|
763 |
+
``levy_stable.pdf_default_method`` to 'fft-simpson' if a valid
|
764 |
+
default method is not otherwise set.
|
765 |
+
|
766 |
+
|
767 |
+
|
768 |
+
.. warning::
|
769 |
+
|
770 |
+
For pdf calculations FFT calculation is considered experimental.
|
771 |
+
|
772 |
+
For cdf calculations FFT calculation is considered experimental. Use
|
773 |
+
Zolotarev's method instead (default).
|
774 |
+
|
775 |
+
The probability density above is defined in the "standardized" form. To
|
776 |
+
shift and/or scale the distribution use the ``loc`` and ``scale``
|
777 |
+
parameters.
|
778 |
+
Generally ``%(name)s.pdf(x, %(shapes)s, loc, scale)`` is identically
|
779 |
+
equivalent to ``%(name)s.pdf(y, %(shapes)s) / scale`` with
|
780 |
+
``y = (x - loc) / scale``, except in the ``S1`` parameterization if
|
781 |
+
``alpha == 1``. In that case ``%(name)s.pdf(x, %(shapes)s, loc, scale)``
|
782 |
+
is identically equivalent to ``%(name)s.pdf(y, %(shapes)s) / scale`` with
|
783 |
+
``y = (x - loc - 2 * beta * scale * np.log(scale) / np.pi) / scale``.
|
784 |
+
See [NO2]_ Definition 1.8 for more information.
|
785 |
+
Note that shifting the location of a distribution
|
786 |
+
does not make it a "noncentral" distribution.
|
787 |
+
|
788 |
+
References
|
789 |
+
----------
|
790 |
+
.. [MC] McCulloch, J., 1986. Simple consistent estimators of stable
|
791 |
+
distribution parameters. Communications in Statistics - Simulation and
|
792 |
+
Computation 15, 1109-1136.
|
793 |
+
.. [WZ] Wang, Li and Zhang, Ji-Hong, 2008. Simpson's rule based FFT method
|
794 |
+
to compute densities of stable distribution.
|
795 |
+
.. [NO] Nolan, J., 1997. Numerical Calculation of Stable Densities and
|
796 |
+
distributions Functions.
|
797 |
+
.. [NO2] Nolan, J., 2018. Stable Distributions: Models for Heavy Tailed
|
798 |
+
Data.
|
799 |
+
.. [HO] Hopcraft, K. I., Jakeman, E., Tanner, R. M. J., 1999. Lévy random
|
800 |
+
walks with fluctuating step number and multiscale behavior.
|
801 |
+
|
802 |
+
%(example)s
|
803 |
+
|
804 |
+
"""
|
805 |
+
# Configurable options as class variables
|
806 |
+
# (accessible from self by attribute lookup).
|
807 |
+
parameterization = "S1"
|
808 |
+
pdf_default_method = "piecewise"
|
809 |
+
cdf_default_method = "piecewise"
|
810 |
+
quad_eps = _QUAD_EPS
|
811 |
+
piecewise_x_tol_near_zeta = 0.005
|
812 |
+
piecewise_alpha_tol_near_one = 0.005
|
813 |
+
pdf_fft_min_points_threshold = None
|
814 |
+
pdf_fft_grid_spacing = 0.001
|
815 |
+
pdf_fft_n_points_two_power = None
|
816 |
+
pdf_fft_interpolation_level = 3
|
817 |
+
pdf_fft_interpolation_degree = 3
|
818 |
+
|
819 |
+
def _argcheck(self, alpha, beta):
|
820 |
+
return (alpha > 0) & (alpha <= 2) & (beta <= 1) & (beta >= -1)
|
821 |
+
|
822 |
+
def _shape_info(self):
|
823 |
+
ialpha = _ShapeInfo("alpha", False, (0, 2), (False, True))
|
824 |
+
ibeta = _ShapeInfo("beta", False, (-1, 1), (True, True))
|
825 |
+
return [ialpha, ibeta]
|
826 |
+
|
827 |
+
def _parameterization(self):
|
828 |
+
allowed = ("S0", "S1")
|
829 |
+
pz = self.parameterization
|
830 |
+
if pz not in allowed:
|
831 |
+
raise RuntimeError(
|
832 |
+
f"Parameterization '{pz}' in supported list: {allowed}"
|
833 |
+
)
|
834 |
+
return pz
|
835 |
+
|
836 |
+
    @inherit_docstring_from(rv_continuous)
    def rvs(self, *args, **kwds):
        # Base class draws via _rvs (Nolan's Z1 sampler); start from that.
        X1 = super().rvs(*args, **kwds)

        # _parse_args_rvs does not accept these framework kwargs, so strip
        # them before re-parsing the call arguments.
        kwds.pop("discrete", None)
        kwds.pop("random_state", None)
        (alpha, beta), delta, gamma, size = self._parse_args_rvs(*args, **kwds)

        # shift location for this parameterisation (S1)
        X1 = np.where(
            alpha == 1.0, X1 + 2 * beta * gamma * np.log(gamma) / np.pi, X1
        )

        if self._parameterization() == "S0":
            # Undo the S1 location shift to land in S0; the correction term
            # takes a different form at alpha == 1.
            return np.where(
                alpha == 1.0,
                X1 - (beta * 2 * gamma * np.log(gamma) / np.pi),
                X1 - gamma * beta * np.tan(np.pi * alpha / 2.0),
            )
        elif self._parameterization() == "S1":
            return X1
|
857 |
+
|
858 |
+
def _rvs(self, alpha, beta, size=None, random_state=None):
|
859 |
+
return _rvs_Z1(alpha, beta, size, random_state)
|
860 |
+
|
861 |
+
    @inherit_docstring_from(rv_continuous)
    def pdf(self, x, *args, **kwds):
        # override base class version to correct
        # location for S1 parameterization
        if self._parameterization() == "S0":
            return super().pdf(x, *args, **kwds)
        elif self._parameterization() == "S1":
            (alpha, beta), delta, gamma = self._parse_args(*args, **kwds)
            if np.all(np.reshape(alpha, (1, -1))[0, :] != 1):
                # No alpha == 1 entries, so no location correction is needed.
                return super().pdf(x, *args, **kwds)
            else:
                # correct location for this parameterisation
                x = np.reshape(x, (1, -1))[0, :]
                x, alpha, beta = np.broadcast_arrays(x, alpha, beta)

                # Flatten (x, alpha, beta) into rows so each (alpha, beta)
                # group can be evaluated with its own corrected location.
                data_in = np.dstack((x, alpha, beta))[0]
                data_out = np.empty(shape=(len(data_in), 1))
                # group data in unique arrays of alpha, beta pairs
                uniq_param_pairs = np.unique(data_in[:, 1:], axis=0)
                for pair in uniq_param_pairs:
                    _alpha, _beta = pair
                    # The S1 location shift only applies at alpha == 1.
                    _delta = (
                        delta + 2 * _beta * gamma * np.log(gamma) / np.pi
                        if _alpha == 1.0
                        else delta
                    )
                    data_mask = np.all(data_in[:, 1:] == pair, axis=-1)
                    _x = data_in[data_mask, 0]
                    data_out[data_mask] = (
                        super()
                        .pdf(_x, _alpha, _beta, loc=_delta, scale=gamma)
                        .reshape(len(_x), 1)
                    )
                output = data_out.T[0]
                # Preserve scalar-in / scalar-out behavior.
                if output.shape == (1,):
                    return output[0]
                return output
|
898 |
+
|
899 |
+
    def _pdf(self, x, alpha, beta):
        """Evaluate the standardized pdf, dispatching on the configured
        parameterization and on ``pdf_default_method`` (piecewise / dni /
        fft-simpson)."""
        # Select the Z0 or Z1 flavor of the evaluation helpers.
        if self._parameterization() == "S0":
            _pdf_single_value_piecewise = _pdf_single_value_piecewise_Z0
            _pdf_single_value_cf_integrate = _pdf_single_value_cf_integrate_Z0
            _cf = _cf_Z0
        elif self._parameterization() == "S1":
            _pdf_single_value_piecewise = _pdf_single_value_piecewise_Z1
            _pdf_single_value_cf_integrate = _pdf_single_value_cf_integrate_Z1
            _cf = _cf_Z1

        x = np.asarray(x).reshape(1, -1)[0, :]

        x, alpha, beta = np.broadcast_arrays(x, alpha, beta)

        # Rows of (x, alpha, beta); results collected per row.
        data_in = np.dstack((x, alpha, beta))[0]
        data_out = np.empty(shape=(len(data_in), 1))

        # NOTE(review): if pdf_default_method is an unrecognized string and
        # pdf_fft_min_points_threshold is None, pdf_single_value_method is
        # never bound and the loop below raises NameError — confirm intended.
        pdf_default_method_name = self.pdf_default_method
        if pdf_default_method_name in ("piecewise", "best", "zolotarev"):
            pdf_single_value_method = _pdf_single_value_piecewise
        elif pdf_default_method_name in ("dni", "quadrature"):
            pdf_single_value_method = _pdf_single_value_cf_integrate
        elif (
            pdf_default_method_name == "fft-simpson"
            or self.pdf_fft_min_points_threshold is not None
        ):
            # None signals "use the FFT path" in the loop below.
            pdf_single_value_method = None

        pdf_single_value_kwds = {
            "quad_eps": self.quad_eps,
            "piecewise_x_tol_near_zeta": self.piecewise_x_tol_near_zeta,
            "piecewise_alpha_tol_near_one": self.piecewise_alpha_tol_near_one,
        }

        fft_grid_spacing = self.pdf_fft_grid_spacing
        fft_n_points_two_power = self.pdf_fft_n_points_two_power
        fft_interpolation_level = self.pdf_fft_interpolation_level
        fft_interpolation_degree = self.pdf_fft_interpolation_degree

        # group data in unique arrays of alpha, beta pairs
        uniq_param_pairs = np.unique(data_in[:, 1:], axis=0)
        for pair in uniq_param_pairs:
            data_mask = np.all(data_in[:, 1:] == pair, axis=-1)
            data_subset = data_in[data_mask]
            if pdf_single_value_method is not None:
                # Pointwise evaluation (piecewise or direct integration).
                data_out[data_mask] = np.array(
                    [
                        pdf_single_value_method(
                            _x, _alpha, _beta, **pdf_single_value_kwds
                        )
                        for _x, _alpha, _beta in data_subset
                    ]
                ).reshape(len(data_subset), 1)
            else:
                warnings.warn(
                    "Density calculations experimental for FFT method."
                    + " Use combination of piecewise and dni methods instead.",
                    RuntimeWarning, stacklevel=3,
                )
                _alpha, _beta = pair
                _x = data_subset[:, (0,)]

                if _alpha < 1.0:
                    raise RuntimeError(
                        "FFT method does not work well for alpha less than 1."
                    )

                # need enough points to "cover" _x for interpolation
                if fft_grid_spacing is None and fft_n_points_two_power is None:
                    raise ValueError(
                        "One of fft_grid_spacing or fft_n_points_two_power "
                        + "needs to be set."
                    )
                max_abs_x = np.max(np.abs(_x))
                # Derive grid spacing h and 2**q point count from whichever
                # of the two settings the user supplied.
                h = (
                    2 ** (3 - fft_n_points_two_power) * max_abs_x
                    if fft_grid_spacing is None
                    else fft_grid_spacing
                )
                q = (
                    np.ceil(np.log(2 * max_abs_x / h) / np.log(2)) + 2
                    if fft_n_points_two_power is None
                    else int(fft_n_points_two_power)
                )

                # for some parameters, the range of x can be quite
                # large, let's choose an arbitrary cut off (8GB) to save on
                # computer memory.
                MAX_Q = 30
                if q > MAX_Q:
                    raise RuntimeError(
                        "fft_n_points_two_power has a maximum "
                        + f"value of {MAX_Q}"
                    )

                density_x, density = pdf_from_cf_with_fft(
                    lambda t: _cf(t, _alpha, _beta),
                    h=h,
                    q=q,
                    level=fft_interpolation_level,
                )
                # Interpolate the FFT grid back onto the requested x values.
                f = interpolate.InterpolatedUnivariateSpline(
                    density_x, np.real(density), k=fft_interpolation_degree
                )  # patch FFT to use cubic
                data_out[data_mask] = f(_x)

        return data_out.T[0]
|
1006 |
+
|
1007 |
+
    @inherit_docstring_from(rv_continuous)
    def cdf(self, x, *args, **kwds):
        # override base class version to correct
        # location for S1 parameterization
        # NOTE: this is near identical to pdf() above
        if self._parameterization() == "S0":
            return super().cdf(x, *args, **kwds)
        elif self._parameterization() == "S1":
            (alpha, beta), delta, gamma = self._parse_args(*args, **kwds)
            if np.all(np.reshape(alpha, (1, -1))[0, :] != 1):
                # No alpha == 1 entries, so no location correction is needed.
                return super().cdf(x, *args, **kwds)
            else:
                # correct location for this parameterisation
                x = np.reshape(x, (1, -1))[0, :]
                x, alpha, beta = np.broadcast_arrays(x, alpha, beta)

                # Flatten into rows and evaluate each (alpha, beta) group
                # with its own corrected location.
                data_in = np.dstack((x, alpha, beta))[0]
                data_out = np.empty(shape=(len(data_in), 1))
                # group data in unique arrays of alpha, beta pairs
                uniq_param_pairs = np.unique(data_in[:, 1:], axis=0)
                for pair in uniq_param_pairs:
                    _alpha, _beta = pair
                    # The S1 location shift only applies at alpha == 1.
                    _delta = (
                        delta + 2 * _beta * gamma * np.log(gamma) / np.pi
                        if _alpha == 1.0
                        else delta
                    )
                    data_mask = np.all(data_in[:, 1:] == pair, axis=-1)
                    _x = data_in[data_mask, 0]
                    data_out[data_mask] = (
                        super()
                        .cdf(_x, _alpha, _beta, loc=_delta, scale=gamma)
                        .reshape(len(_x), 1)
                    )
                output = data_out.T[0]
                # Preserve scalar-in / scalar-out behavior.
                if output.shape == (1,):
                    return output[0]
                return output
|
1045 |
+
|
1046 |
+
    def _cdf(self, x, alpha, beta):
        """Evaluate the standardized cdf, dispatching on the configured
        parameterization and on ``cdf_default_method`` (piecewise /
        fft-simpson)."""
        # Select the Z0 or Z1 flavor of the evaluation helpers.
        if self._parameterization() == "S0":
            _cdf_single_value_piecewise = _cdf_single_value_piecewise_Z0
            _cf = _cf_Z0
        elif self._parameterization() == "S1":
            _cdf_single_value_piecewise = _cdf_single_value_piecewise_Z1
            _cf = _cf_Z1

        x = np.asarray(x).reshape(1, -1)[0, :]

        x, alpha, beta = np.broadcast_arrays(x, alpha, beta)

        # Rows of (x, alpha, beta); results collected per row.
        data_in = np.dstack((x, alpha, beta))[0]
        data_out = np.empty(shape=(len(data_in), 1))

        # NOTE(review): an unrecognized cdf_default_method leaves
        # cdf_single_value_method unbound (NameError below) — confirm intended.
        cdf_default_method_name = self.cdf_default_method
        if cdf_default_method_name == "piecewise":
            cdf_single_value_method = _cdf_single_value_piecewise
        elif cdf_default_method_name == "fft-simpson":
            # None signals "use the FFT path" in the loop below.
            cdf_single_value_method = None

        cdf_single_value_kwds = {
            "quad_eps": self.quad_eps,
            "piecewise_x_tol_near_zeta": self.piecewise_x_tol_near_zeta,
            "piecewise_alpha_tol_near_one": self.piecewise_alpha_tol_near_one,
        }

        # FFT settings are shared with the pdf calculation.
        fft_grid_spacing = self.pdf_fft_grid_spacing
        fft_n_points_two_power = self.pdf_fft_n_points_two_power
        fft_interpolation_level = self.pdf_fft_interpolation_level
        fft_interpolation_degree = self.pdf_fft_interpolation_degree

        # group data in unique arrays of alpha, beta pairs
        uniq_param_pairs = np.unique(data_in[:, 1:], axis=0)
        for pair in uniq_param_pairs:
            data_mask = np.all(data_in[:, 1:] == pair, axis=-1)
            data_subset = data_in[data_mask]
            if cdf_single_value_method is not None:
                # Pointwise piecewise evaluation.
                data_out[data_mask] = np.array(
                    [
                        cdf_single_value_method(
                            _x, _alpha, _beta, **cdf_single_value_kwds
                        )
                        for _x, _alpha, _beta in data_subset
                    ]
                ).reshape(len(data_subset), 1)
            else:
                warnings.warn(
                    "Cumulative density calculations experimental for FFT"
                    + " method. Use piecewise method instead.",
                    RuntimeWarning, stacklevel=3,
                )
                _alpha, _beta = pair
                _x = data_subset[:, (0,)]

                # need enough points to "cover" _x for interpolation
                if fft_grid_spacing is None and fft_n_points_two_power is None:
                    raise ValueError(
                        "One of fft_grid_spacing or fft_n_points_two_power "
                        + "needs to be set."
                    )
                max_abs_x = np.max(np.abs(_x))
                # Derive grid spacing h and 2**q point count from whichever
                # of the two settings the user supplied.
                h = (
                    2 ** (3 - fft_n_points_two_power) * max_abs_x
                    if fft_grid_spacing is None
                    else fft_grid_spacing
                )
                q = (
                    np.ceil(np.log(2 * max_abs_x / h) / np.log(2)) + 2
                    if fft_n_points_two_power is None
                    else int(fft_n_points_two_power)
                )

                density_x, density = pdf_from_cf_with_fft(
                    lambda t: _cf(t, _alpha, _beta),
                    h=h,
                    q=q,
                    level=fft_interpolation_level,
                )
                f = interpolate.InterpolatedUnivariateSpline(
                    density_x, np.real(density), k=fft_interpolation_degree
                )
                # cdf(x) as the integral of the interpolated density from the
                # support's lower bound up to each requested x.
                data_out[data_mask] = np.array(
                    [f.integral(self.a, float(x_1.squeeze())) for x_1 in _x]
                ).reshape(data_out[data_mask].shape)

        return data_out.T[0]
|
1133 |
+
|
1134 |
+
def _fitstart(self, data):
|
1135 |
+
if self._parameterization() == "S0":
|
1136 |
+
_fitstart = _fitstart_S0
|
1137 |
+
elif self._parameterization() == "S1":
|
1138 |
+
_fitstart = _fitstart_S1
|
1139 |
+
return _fitstart(data)
|
1140 |
+
|
1141 |
+
def _stats(self, alpha, beta):
|
1142 |
+
mu = 0 if alpha > 1 else np.nan
|
1143 |
+
mu2 = 2 if alpha == 2 else np.inf
|
1144 |
+
g1 = 0.0 if alpha == 2.0 else np.nan
|
1145 |
+
g2 = 0.0 if alpha == 2.0 else np.nan
|
1146 |
+
return mu, mu2, g1, g2
|
1147 |
+
|
1148 |
+
|
1149 |
+
# cotes numbers - see sequence from http://oeis.org/A100642
# Ragged list of Newton-Cotes weight rows (index = number of points),
# sourced from scipy.integrate's _builtincoeffs table.
Cotes_table = np.array(
    [[], [1]] + [v[2] for v in _builtincoeffs.values()], dtype=object
)
# Zero-pad each row to equal length so Cotes[n, i] fancy indexing works
# inside pdf_from_cf_with_fft.
Cotes = np.array(
    [
        np.pad(r, (0, len(Cotes_table) - 1 - len(r)), mode='constant')
        for r in Cotes_table
    ]
)
|
1159 |
+
|
1160 |
+
|
1161 |
+
def pdf_from_cf_with_fft(cf, h=0.01, q=9, level=3):
    """Calculates pdf from characteristic function.

    Uses fast Fourier transform with Newton-Cotes integration following [WZ].
    Defaults to using Simpson's method (3-point Newton-Cotes integration).

    Parameters
    ----------
    cf : callable
        Single argument function from float -> complex expressing a
        characteristic function for some distribution.
    h : Optional[float]
        Step size for Newton-Cotes integration. Default: 0.01
    q : Optional[int]
        Use 2**q steps when performing Newton-Cotes integration.
        The infinite integral in the inverse Fourier transform will then
        be restricted to the interval [-2**q * h / 2, 2**q * h / 2]. Setting
        the number of steps equal to a power of 2 allows the fft to be
        calculated in O(n*log(n)) time rather than O(n**2).
        Default: 9
    level : Optional[int]
        Calculate integral using n-point Newton-Cotes integration for
        n = level. The 3-point Newton-Cotes formula corresponds to Simpson's
        rule. Default: 3

    Returns
    -------
    x_l : ndarray
        Array of points x at which pdf is estimated. 2**q equally spaced
        points from -pi/h up to but not including pi/h.
    density : ndarray
        Estimated values of pdf corresponding to cf at points in x_l.

    References
    ----------
    .. [WZ] Wang, Li and Zhang, Ji-Hong, 2008. Simpson's rule based FFT method
        to compute densities of stable distribution.
    """
    n = level
    N = 2**q
    steps = np.arange(0, N)
    # Integration window is [-L, L); output grid spans [-pi/h, pi/h).
    L = N * h / 2
    x_l = np.pi * (steps - N / 2) / L
    if level > 1:
        # n-point Newton-Cotes: sum n FFTs of sub-sampled cf evaluations,
        # each weighted by the matching Cotes coefficient and a phase factor
        # ((-1)**steps recenters the transform; see [WZ]).
        indices = np.arange(n).reshape(n, 1)
        s1 = np.sum(
            (-1) ** steps * Cotes[n, indices] * np.fft.fft(
                (-1)**steps * cf(-L + h * steps + h * indices / (n - 1))
            ) * np.exp(
                1j * np.pi * indices / (n - 1)
                - 2 * 1j * np.pi * indices * steps /
                (N * (n - 1))
            ),
            axis=0
        )
    else:
        # Degenerate 1-point rule: a single weighted FFT.
        s1 = (-1) ** steps * Cotes[n, 0] * np.fft.fft(
            (-1) ** steps * cf(-L + h * steps)
        )
    # Normalize by the quadrature weight total and the 2*pi of the inverse
    # Fourier transform.
    density = h * s1 / (2 * np.pi * np.sum(Cotes[n]))
    return (x_l, density)
|
1222 |
+
|
1223 |
+
|
1224 |
+
# Module-level instance of the generator class; this is the public object
# exposed as scipy.stats.levy_stable.
levy_stable = levy_stable_gen(name="levy_stable")
|
venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (32.6 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_levy_stable/levyst.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (66.5 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_rcont/__init__.py
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
from .rcont import rvs_rcont1, rvs_rcont2
|
3 |
+
|
4 |
+
__all__ = ["rvs_rcont1", "rvs_rcont2"]
|
venv/lib/python3.10/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (276 Bytes). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (299 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/__init__.py
ADDED
File without changes
|
venv/lib/python3.10/site-packages/scipy/stats/tests/common_tests.py
ADDED
@@ -0,0 +1,351 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pickle
|
2 |
+
|
3 |
+
import numpy as np
|
4 |
+
import numpy.testing as npt
|
5 |
+
from numpy.testing import assert_allclose, assert_equal
|
6 |
+
from pytest import raises as assert_raises
|
7 |
+
|
8 |
+
import numpy.ma.testutils as ma_npt
|
9 |
+
|
10 |
+
from scipy._lib._util import (
|
11 |
+
getfullargspec_no_self as _getfullargspec, np_long
|
12 |
+
)
|
13 |
+
from scipy import stats
|
14 |
+
|
15 |
+
|
16 |
+
def check_named_results(res, attributes, ma=False):
|
17 |
+
for i, attr in enumerate(attributes):
|
18 |
+
if ma:
|
19 |
+
ma_npt.assert_equal(res[i], getattr(res, attr))
|
20 |
+
else:
|
21 |
+
npt.assert_equal(res[i], getattr(res, attr))
|
22 |
+
|
23 |
+
|
24 |
+
def check_normalization(distfn, args, distname):
|
25 |
+
norm_moment = distfn.moment(0, *args)
|
26 |
+
npt.assert_allclose(norm_moment, 1.0)
|
27 |
+
|
28 |
+
if distname == "rv_histogram_instance":
|
29 |
+
atol, rtol = 1e-5, 0
|
30 |
+
else:
|
31 |
+
atol, rtol = 1e-7, 1e-7
|
32 |
+
|
33 |
+
normalization_expect = distfn.expect(lambda x: 1, args=args)
|
34 |
+
npt.assert_allclose(normalization_expect, 1.0, atol=atol, rtol=rtol,
|
35 |
+
err_msg=distname, verbose=True)
|
36 |
+
|
37 |
+
_a, _b = distfn.support(*args)
|
38 |
+
normalization_cdf = distfn.cdf(_b, *args)
|
39 |
+
npt.assert_allclose(normalization_cdf, 1.0)
|
40 |
+
|
41 |
+
|
42 |
+
def check_moment(distfn, arg, m, v, msg):
|
43 |
+
m1 = distfn.moment(1, *arg)
|
44 |
+
m2 = distfn.moment(2, *arg)
|
45 |
+
if not np.isinf(m):
|
46 |
+
npt.assert_almost_equal(m1, m, decimal=10,
|
47 |
+
err_msg=msg + ' - 1st moment')
|
48 |
+
else: # or np.isnan(m1),
|
49 |
+
npt.assert_(np.isinf(m1),
|
50 |
+
msg + ' - 1st moment -infinite, m1=%s' % str(m1))
|
51 |
+
|
52 |
+
if not np.isinf(v):
|
53 |
+
npt.assert_almost_equal(m2 - m1 * m1, v, decimal=10,
|
54 |
+
err_msg=msg + ' - 2ndt moment')
|
55 |
+
else: # or np.isnan(m2),
|
56 |
+
npt.assert_(np.isinf(m2), msg + f' - 2nd moment -infinite, {m2=}')
|
57 |
+
|
58 |
+
|
59 |
+
def check_mean_expect(distfn, arg, m, msg):
|
60 |
+
if np.isfinite(m):
|
61 |
+
m1 = distfn.expect(lambda x: x, arg)
|
62 |
+
npt.assert_almost_equal(m1, m, decimal=5,
|
63 |
+
err_msg=msg + ' - 1st moment (expect)')
|
64 |
+
|
65 |
+
|
66 |
+
def check_var_expect(distfn, arg, m, v, msg):
|
67 |
+
dist_looser_tolerances = {"rv_histogram_instance" , "ksone"}
|
68 |
+
kwargs = {'rtol': 5e-6} if msg in dist_looser_tolerances else {}
|
69 |
+
if np.isfinite(v):
|
70 |
+
m2 = distfn.expect(lambda x: x*x, arg)
|
71 |
+
npt.assert_allclose(m2, v + m*m, **kwargs)
|
72 |
+
|
73 |
+
|
74 |
+
def check_skew_expect(distfn, arg, m, v, s, msg):
|
75 |
+
if np.isfinite(s):
|
76 |
+
m3e = distfn.expect(lambda x: np.power(x-m, 3), arg)
|
77 |
+
npt.assert_almost_equal(m3e, s * np.power(v, 1.5),
|
78 |
+
decimal=5, err_msg=msg + ' - skew')
|
79 |
+
else:
|
80 |
+
npt.assert_(np.isnan(s))
|
81 |
+
|
82 |
+
|
83 |
+
def check_kurt_expect(distfn, arg, m, v, k, msg):
|
84 |
+
if np.isfinite(k):
|
85 |
+
m4e = distfn.expect(lambda x: np.power(x-m, 4), arg)
|
86 |
+
npt.assert_allclose(m4e, (k + 3.) * np.power(v, 2),
|
87 |
+
atol=1e-5, rtol=1e-5,
|
88 |
+
err_msg=msg + ' - kurtosis')
|
89 |
+
elif not np.isposinf(k):
|
90 |
+
npt.assert_(np.isnan(k))
|
91 |
+
|
92 |
+
|
93 |
+
def check_munp_expect(dist, args, msg):
|
94 |
+
# If _munp is overridden, test a higher moment. (Before gh-18634, some
|
95 |
+
# distributions had issues with moments 5 and higher.)
|
96 |
+
if dist._munp.__func__ != stats.rv_continuous._munp:
|
97 |
+
res = dist.moment(5, *args) # shouldn't raise an error
|
98 |
+
ref = dist.expect(lambda x: x ** 5, args, lb=-np.inf, ub=np.inf)
|
99 |
+
if not np.isfinite(res): # could be valid; automated test can't know
|
100 |
+
return
|
101 |
+
# loose tolerance, mostly to see whether _munp returns *something*
|
102 |
+
assert_allclose(res, ref, atol=1e-10, rtol=1e-4,
|
103 |
+
err_msg=msg + ' - higher moment / _munp')
|
104 |
+
|
105 |
+
|
106 |
+
def check_entropy(distfn, arg, msg):
|
107 |
+
ent = distfn.entropy(*arg)
|
108 |
+
npt.assert_(not np.isnan(ent), msg + 'test Entropy is nan')
|
109 |
+
|
110 |
+
|
111 |
+
def check_private_entropy(distfn, args, superclass):
|
112 |
+
# compare a generic _entropy with the distribution-specific implementation
|
113 |
+
npt.assert_allclose(distfn._entropy(*args),
|
114 |
+
superclass._entropy(distfn, *args))
|
115 |
+
|
116 |
+
|
117 |
+
def check_entropy_vect_scale(distfn, arg):
|
118 |
+
# check 2-d
|
119 |
+
sc = np.asarray([[1, 2], [3, 4]])
|
120 |
+
v_ent = distfn.entropy(*arg, scale=sc)
|
121 |
+
s_ent = [distfn.entropy(*arg, scale=s) for s in sc.ravel()]
|
122 |
+
s_ent = np.asarray(s_ent).reshape(v_ent.shape)
|
123 |
+
assert_allclose(v_ent, s_ent, atol=1e-14)
|
124 |
+
|
125 |
+
# check invalid value, check cast
|
126 |
+
sc = [1, 2, -3]
|
127 |
+
v_ent = distfn.entropy(*arg, scale=sc)
|
128 |
+
s_ent = [distfn.entropy(*arg, scale=s) for s in sc]
|
129 |
+
s_ent = np.asarray(s_ent).reshape(v_ent.shape)
|
130 |
+
assert_allclose(v_ent, s_ent, atol=1e-14)
|
131 |
+
|
132 |
+
|
133 |
+
def check_edge_support(distfn, args):
|
134 |
+
# Make sure that x=self.a and self.b are handled correctly.
|
135 |
+
x = distfn.support(*args)
|
136 |
+
if isinstance(distfn, stats.rv_discrete):
|
137 |
+
x = x[0]-1, x[1]
|
138 |
+
|
139 |
+
npt.assert_equal(distfn.cdf(x, *args), [0.0, 1.0])
|
140 |
+
npt.assert_equal(distfn.sf(x, *args), [1.0, 0.0])
|
141 |
+
|
142 |
+
if distfn.name not in ('skellam', 'dlaplace'):
|
143 |
+
# with a = -inf, log(0) generates warnings
|
144 |
+
npt.assert_equal(distfn.logcdf(x, *args), [-np.inf, 0.0])
|
145 |
+
npt.assert_equal(distfn.logsf(x, *args), [0.0, -np.inf])
|
146 |
+
|
147 |
+
npt.assert_equal(distfn.ppf([0.0, 1.0], *args), x)
|
148 |
+
npt.assert_equal(distfn.isf([0.0, 1.0], *args), x[::-1])
|
149 |
+
|
150 |
+
# out-of-bounds for isf & ppf
|
151 |
+
npt.assert_(np.isnan(distfn.isf([-1, 2], *args)).all())
|
152 |
+
npt.assert_(np.isnan(distfn.ppf([-1, 2], *args)).all())
|
153 |
+
|
154 |
+
|
155 |
+
def check_named_args(distfn, x, shape_args, defaults, meths):
|
156 |
+
## Check calling w/ named arguments.
|
157 |
+
|
158 |
+
# check consistency of shapes, numargs and _parse signature
|
159 |
+
signature = _getfullargspec(distfn._parse_args)
|
160 |
+
npt.assert_(signature.varargs is None)
|
161 |
+
npt.assert_(signature.varkw is None)
|
162 |
+
npt.assert_(not signature.kwonlyargs)
|
163 |
+
npt.assert_(list(signature.defaults) == list(defaults))
|
164 |
+
|
165 |
+
shape_argnames = signature.args[:-len(defaults)] # a, b, loc=0, scale=1
|
166 |
+
if distfn.shapes:
|
167 |
+
shapes_ = distfn.shapes.replace(',', ' ').split()
|
168 |
+
else:
|
169 |
+
shapes_ = ''
|
170 |
+
npt.assert_(len(shapes_) == distfn.numargs)
|
171 |
+
npt.assert_(len(shapes_) == len(shape_argnames))
|
172 |
+
|
173 |
+
# check calling w/ named arguments
|
174 |
+
shape_args = list(shape_args)
|
175 |
+
|
176 |
+
vals = [meth(x, *shape_args) for meth in meths]
|
177 |
+
npt.assert_(np.all(np.isfinite(vals)))
|
178 |
+
|
179 |
+
names, a, k = shape_argnames[:], shape_args[:], {}
|
180 |
+
while names:
|
181 |
+
k.update({names.pop(): a.pop()})
|
182 |
+
v = [meth(x, *a, **k) for meth in meths]
|
183 |
+
npt.assert_array_equal(vals, v)
|
184 |
+
if 'n' not in k.keys():
|
185 |
+
# `n` is first parameter of moment(), so can't be used as named arg
|
186 |
+
npt.assert_equal(distfn.moment(1, *a, **k),
|
187 |
+
distfn.moment(1, *shape_args))
|
188 |
+
|
189 |
+
# unknown arguments should not go through:
|
190 |
+
k.update({'kaboom': 42})
|
191 |
+
assert_raises(TypeError, distfn.cdf, x, **k)
|
192 |
+
|
193 |
+
|
194 |
+
def check_random_state_property(distfn, args):
|
195 |
+
# check the random_state attribute of a distribution *instance*
|
196 |
+
|
197 |
+
# This test fiddles with distfn.random_state. This breaks other tests,
|
198 |
+
# hence need to save it and then restore.
|
199 |
+
rndm = distfn.random_state
|
200 |
+
|
201 |
+
# baseline: this relies on the global state
|
202 |
+
np.random.seed(1234)
|
203 |
+
distfn.random_state = None
|
204 |
+
r0 = distfn.rvs(*args, size=8)
|
205 |
+
|
206 |
+
# use an explicit instance-level random_state
|
207 |
+
distfn.random_state = 1234
|
208 |
+
r1 = distfn.rvs(*args, size=8)
|
209 |
+
npt.assert_equal(r0, r1)
|
210 |
+
|
211 |
+
distfn.random_state = np.random.RandomState(1234)
|
212 |
+
r2 = distfn.rvs(*args, size=8)
|
213 |
+
npt.assert_equal(r0, r2)
|
214 |
+
|
215 |
+
# check that np.random.Generator can be used (numpy >= 1.17)
|
216 |
+
if hasattr(np.random, 'default_rng'):
|
217 |
+
# obtain a np.random.Generator object
|
218 |
+
rng = np.random.default_rng(1234)
|
219 |
+
distfn.rvs(*args, size=1, random_state=rng)
|
220 |
+
|
221 |
+
# can override the instance-level random_state for an individual .rvs call
|
222 |
+
distfn.random_state = 2
|
223 |
+
orig_state = distfn.random_state.get_state()
|
224 |
+
|
225 |
+
r3 = distfn.rvs(*args, size=8, random_state=np.random.RandomState(1234))
|
226 |
+
npt.assert_equal(r0, r3)
|
227 |
+
|
228 |
+
# ... and that does not alter the instance-level random_state!
|
229 |
+
npt.assert_equal(distfn.random_state.get_state(), orig_state)
|
230 |
+
|
231 |
+
# finally, restore the random_state
|
232 |
+
distfn.random_state = rndm
|
233 |
+
|
234 |
+
|
235 |
+
def check_meth_dtype(distfn, arg, meths):
|
236 |
+
q0 = [0.25, 0.5, 0.75]
|
237 |
+
x0 = distfn.ppf(q0, *arg)
|
238 |
+
x_cast = [x0.astype(tp) for tp in (np_long, np.float16, np.float32,
|
239 |
+
np.float64)]
|
240 |
+
|
241 |
+
for x in x_cast:
|
242 |
+
# casting may have clipped the values, exclude those
|
243 |
+
distfn._argcheck(*arg)
|
244 |
+
x = x[(distfn.a < x) & (x < distfn.b)]
|
245 |
+
for meth in meths:
|
246 |
+
val = meth(x, *arg)
|
247 |
+
npt.assert_(val.dtype == np.float64)
|
248 |
+
|
249 |
+
|
250 |
+
def check_ppf_dtype(distfn, arg):
|
251 |
+
q0 = np.asarray([0.25, 0.5, 0.75])
|
252 |
+
q_cast = [q0.astype(tp) for tp in (np.float16, np.float32, np.float64)]
|
253 |
+
for q in q_cast:
|
254 |
+
for meth in [distfn.ppf, distfn.isf]:
|
255 |
+
val = meth(q, *arg)
|
256 |
+
npt.assert_(val.dtype == np.float64)
|
257 |
+
|
258 |
+
|
259 |
+
def check_cmplx_deriv(distfn, arg):
|
260 |
+
# Distributions allow complex arguments.
|
261 |
+
def deriv(f, x, *arg):
|
262 |
+
x = np.asarray(x)
|
263 |
+
h = 1e-10
|
264 |
+
return (f(x + h*1j, *arg)/h).imag
|
265 |
+
|
266 |
+
x0 = distfn.ppf([0.25, 0.51, 0.75], *arg)
|
267 |
+
x_cast = [x0.astype(tp) for tp in (np_long, np.float16, np.float32,
|
268 |
+
np.float64)]
|
269 |
+
|
270 |
+
for x in x_cast:
|
271 |
+
# casting may have clipped the values, exclude those
|
272 |
+
distfn._argcheck(*arg)
|
273 |
+
x = x[(distfn.a < x) & (x < distfn.b)]
|
274 |
+
|
275 |
+
pdf, cdf, sf = distfn.pdf(x, *arg), distfn.cdf(x, *arg), distfn.sf(x, *arg)
|
276 |
+
assert_allclose(deriv(distfn.cdf, x, *arg), pdf, rtol=1e-5)
|
277 |
+
assert_allclose(deriv(distfn.logcdf, x, *arg), pdf/cdf, rtol=1e-5)
|
278 |
+
|
279 |
+
assert_allclose(deriv(distfn.sf, x, *arg), -pdf, rtol=1e-5)
|
280 |
+
assert_allclose(deriv(distfn.logsf, x, *arg), -pdf/sf, rtol=1e-5)
|
281 |
+
|
282 |
+
assert_allclose(deriv(distfn.logpdf, x, *arg),
|
283 |
+
deriv(distfn.pdf, x, *arg) / distfn.pdf(x, *arg),
|
284 |
+
rtol=1e-5)
|
285 |
+
|
286 |
+
|
287 |
+
def check_pickling(distfn, args):
|
288 |
+
# check that a distribution instance pickles and unpickles
|
289 |
+
# pay special attention to the random_state property
|
290 |
+
|
291 |
+
# save the random_state (restore later)
|
292 |
+
rndm = distfn.random_state
|
293 |
+
|
294 |
+
# check unfrozen
|
295 |
+
distfn.random_state = 1234
|
296 |
+
distfn.rvs(*args, size=8)
|
297 |
+
s = pickle.dumps(distfn)
|
298 |
+
r0 = distfn.rvs(*args, size=8)
|
299 |
+
|
300 |
+
unpickled = pickle.loads(s)
|
301 |
+
r1 = unpickled.rvs(*args, size=8)
|
302 |
+
npt.assert_equal(r0, r1)
|
303 |
+
|
304 |
+
# also smoke test some methods
|
305 |
+
medians = [distfn.ppf(0.5, *args), unpickled.ppf(0.5, *args)]
|
306 |
+
npt.assert_equal(medians[0], medians[1])
|
307 |
+
npt.assert_equal(distfn.cdf(medians[0], *args),
|
308 |
+
unpickled.cdf(medians[1], *args))
|
309 |
+
|
310 |
+
# check frozen pickling/unpickling with rvs
|
311 |
+
frozen_dist = distfn(*args)
|
312 |
+
pkl = pickle.dumps(frozen_dist)
|
313 |
+
unpickled = pickle.loads(pkl)
|
314 |
+
|
315 |
+
r0 = frozen_dist.rvs(size=8)
|
316 |
+
r1 = unpickled.rvs(size=8)
|
317 |
+
npt.assert_equal(r0, r1)
|
318 |
+
|
319 |
+
# check pickling/unpickling of .fit method
|
320 |
+
if hasattr(distfn, "fit"):
|
321 |
+
fit_function = distfn.fit
|
322 |
+
pickled_fit_function = pickle.dumps(fit_function)
|
323 |
+
unpickled_fit_function = pickle.loads(pickled_fit_function)
|
324 |
+
assert fit_function.__name__ == unpickled_fit_function.__name__ == "fit"
|
325 |
+
|
326 |
+
# restore the random_state
|
327 |
+
distfn.random_state = rndm
|
328 |
+
|
329 |
+
|
330 |
+
def check_freezing(distfn, args):
|
331 |
+
# regression test for gh-11089: freezing a distribution fails
|
332 |
+
# if loc and/or scale are specified
|
333 |
+
if isinstance(distfn, stats.rv_continuous):
|
334 |
+
locscale = {'loc': 1, 'scale': 2}
|
335 |
+
else:
|
336 |
+
locscale = {'loc': 1}
|
337 |
+
|
338 |
+
rv = distfn(*args, **locscale)
|
339 |
+
assert rv.a == distfn(*args).a
|
340 |
+
assert rv.b == distfn(*args).b
|
341 |
+
|
342 |
+
|
343 |
+
def check_rvs_broadcast(distfunc, distname, allargs, shape, shape_only, otype):
|
344 |
+
np.random.seed(123)
|
345 |
+
sample = distfunc.rvs(*allargs)
|
346 |
+
assert_equal(sample.shape, shape, "%s: rvs failed to broadcast" % distname)
|
347 |
+
if not shape_only:
|
348 |
+
rvs = np.vectorize(lambda *allargs: distfunc.rvs(*allargs), otypes=otype)
|
349 |
+
np.random.seed(123)
|
350 |
+
expected = rvs(*allargs)
|
351 |
+
assert_allclose(sample, expected, rtol=1e-13)
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/__pycache__/_mvt.cpython-310.pyc
ADDED
Binary file (4.04 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/__pycache__/fisher_exact_results_from_r.cpython-310.pyc
ADDED
Binary file (7.96 kB). View file
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/_mvt.py
ADDED
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import math
|
2 |
+
import numpy as np
|
3 |
+
from scipy import special
|
4 |
+
from scipy.stats._qmc import primes_from_2_to
|
5 |
+
|
6 |
+
|
7 |
+
def _primes(n):
|
8 |
+
# Defined to facilitate comparison between translation and source
|
9 |
+
# In Matlab, primes(10.5) -> first four primes, primes(11.5) -> first five
|
10 |
+
return primes_from_2_to(math.ceil(n))
|
11 |
+
|
12 |
+
|
13 |
+
def _gaminv(a, b):
|
14 |
+
# Defined to facilitate comparison between translation and source
|
15 |
+
# Matlab's `gaminv` is like `special.gammaincinv` but args are reversed
|
16 |
+
return special.gammaincinv(b, a)
|
17 |
+
|
18 |
+
|
19 |
+
def _qsimvtv(m, nu, sigma, a, b, rng):
|
20 |
+
"""Estimates the multivariate t CDF using randomized QMC
|
21 |
+
|
22 |
+
Parameters
|
23 |
+
----------
|
24 |
+
m : int
|
25 |
+
The number of points
|
26 |
+
nu : float
|
27 |
+
Degrees of freedom
|
28 |
+
sigma : ndarray
|
29 |
+
A 2D positive semidefinite covariance matrix
|
30 |
+
a : ndarray
|
31 |
+
Lower integration limits
|
32 |
+
b : ndarray
|
33 |
+
Upper integration limits.
|
34 |
+
rng : Generator
|
35 |
+
Pseudorandom number generator
|
36 |
+
|
37 |
+
Returns
|
38 |
+
-------
|
39 |
+
p : float
|
40 |
+
The estimated CDF.
|
41 |
+
e : float
|
42 |
+
An absolute error estimate.
|
43 |
+
|
44 |
+
"""
|
45 |
+
# _qsimvtv is a Python translation of the Matlab function qsimvtv,
|
46 |
+
# semicolons and all.
|
47 |
+
#
|
48 |
+
# This function uses an algorithm given in the paper
|
49 |
+
# "Comparison of Methods for the Numerical Computation of
|
50 |
+
# Multivariate t Probabilities", in
|
51 |
+
# J. of Computational and Graphical Stat., 11(2002), pp. 950-971, by
|
52 |
+
# Alan Genz and Frank Bretz
|
53 |
+
#
|
54 |
+
# The primary references for the numerical integration are
|
55 |
+
# "On a Number-Theoretical Integration Method"
|
56 |
+
# H. Niederreiter, Aequationes Mathematicae, 8(1972), pp. 304-11.
|
57 |
+
# and
|
58 |
+
# "Randomization of Number Theoretic Methods for Multiple Integration"
|
59 |
+
# R. Cranley & T.N.L. Patterson, SIAM J Numer Anal, 13(1976), pp. 904-14.
|
60 |
+
#
|
61 |
+
# Alan Genz is the author of this function and following Matlab functions.
|
62 |
+
# Alan Genz, WSU Math, PO Box 643113, Pullman, WA 99164-3113
|
63 |
+
# Email : [email protected]
|
64 |
+
#
|
65 |
+
# Copyright (C) 2013, Alan Genz, All rights reserved.
|
66 |
+
#
|
67 |
+
# Redistribution and use in source and binary forms, with or without
|
68 |
+
# modification, are permitted provided the following conditions are met:
|
69 |
+
# 1. Redistributions of source code must retain the above copyright
|
70 |
+
# notice, this list of conditions and the following disclaimer.
|
71 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
72 |
+
# notice, this list of conditions and the following disclaimer in
|
73 |
+
# the documentation and/or other materials provided with the
|
74 |
+
# distribution.
|
75 |
+
# 3. The contributor name(s) may not be used to endorse or promote
|
76 |
+
# products derived from this software without specific prior
|
77 |
+
# written permission.
|
78 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
79 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
80 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
81 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
82 |
+
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
83 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
84 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
85 |
+
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
86 |
+
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
87 |
+
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF USE
|
88 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
89 |
+
|
90 |
+
# Initialization
|
91 |
+
sn = max(1, math.sqrt(nu)); ch, az, bz = _chlrps(sigma, a/sn, b/sn)
|
92 |
+
n = len(sigma); N = 10; P = math.ceil(m/N); on = np.ones(P); p = 0; e = 0
|
93 |
+
ps = np.sqrt(_primes(5*n*math.log(n+4)/4)); q = ps[:, np.newaxis] # Richtmyer gens.
|
94 |
+
|
95 |
+
# Randomization loop for ns samples
|
96 |
+
c = None; dc = None
|
97 |
+
for S in range(N):
|
98 |
+
vp = on.copy(); s = np.zeros((n, P))
|
99 |
+
for i in range(n):
|
100 |
+
x = np.abs(2*np.mod(q[i]*np.arange(1, P+1) + rng.random(), 1)-1) # periodizing transform
|
101 |
+
if i == 0:
|
102 |
+
r = on
|
103 |
+
if nu > 0:
|
104 |
+
r = np.sqrt(2*_gaminv(x, nu/2))
|
105 |
+
else:
|
106 |
+
y = _Phinv(c + x*dc)
|
107 |
+
s[i:] += ch[i:, i-1:i] * y
|
108 |
+
si = s[i, :]; c = on.copy(); ai = az[i]*r - si; d = on.copy(); bi = bz[i]*r - si
|
109 |
+
c[ai <= -9] = 0; tl = abs(ai) < 9; c[tl] = _Phi(ai[tl])
|
110 |
+
d[bi <= -9] = 0; tl = abs(bi) < 9; d[tl] = _Phi(bi[tl])
|
111 |
+
dc = d - c; vp = vp * dc
|
112 |
+
d = (np.mean(vp) - p)/(S + 1); p = p + d; e = (S - 1)*e/(S + 1) + d**2
|
113 |
+
e = math.sqrt(e) # error estimate is 3 times std error with N samples.
|
114 |
+
return p, e
|
115 |
+
|
116 |
+
|
117 |
+
# Standard statistical normal distribution functions
|
118 |
+
def _Phi(z):
|
119 |
+
return special.ndtr(z)
|
120 |
+
|
121 |
+
|
122 |
+
def _Phinv(p):
|
123 |
+
return special.ndtri(p)
|
124 |
+
|
125 |
+
|
126 |
+
def _chlrps(R, a, b):
|
127 |
+
"""
|
128 |
+
Computes permuted and scaled lower Cholesky factor c for R which may be
|
129 |
+
singular, also permuting and scaling integration limit vectors a and b.
|
130 |
+
"""
|
131 |
+
ep = 1e-10 # singularity tolerance
|
132 |
+
eps = np.finfo(R.dtype).eps
|
133 |
+
|
134 |
+
n = len(R); c = R.copy(); ap = a.copy(); bp = b.copy(); d = np.sqrt(np.maximum(np.diag(c), 0))
|
135 |
+
for i in range(n):
|
136 |
+
if d[i] > 0:
|
137 |
+
c[:, i] /= d[i]; c[i, :] /= d[i]
|
138 |
+
ap[i] /= d[i]; bp[i] /= d[i]
|
139 |
+
y = np.zeros((n, 1)); sqtp = math.sqrt(2*math.pi)
|
140 |
+
|
141 |
+
for k in range(n):
|
142 |
+
im = k; ckk = 0; dem = 1; s = 0
|
143 |
+
for i in range(k, n):
|
144 |
+
if c[i, i] > eps:
|
145 |
+
cii = math.sqrt(max(c[i, i], 0))
|
146 |
+
if i > 0: s = c[i, :k] @ y[:k]
|
147 |
+
ai = (ap[i]-s)/cii; bi = (bp[i]-s)/cii; de = _Phi(bi)-_Phi(ai)
|
148 |
+
if de <= dem:
|
149 |
+
ckk = cii; dem = de; am = ai; bm = bi; im = i
|
150 |
+
if im > k:
|
151 |
+
ap[[im, k]] = ap[[k, im]]; bp[[im, k]] = bp[[k, im]]; c[im, im] = c[k, k]
|
152 |
+
t = c[im, :k].copy(); c[im, :k] = c[k, :k]; c[k, :k] = t
|
153 |
+
t = c[im+1:, im].copy(); c[im+1:, im] = c[im+1:, k]; c[im+1:, k] = t
|
154 |
+
t = c[k+1:im, k].copy(); c[k+1:im, k] = c[im, k+1:im].T; c[im, k+1:im] = t.T
|
155 |
+
if ckk > ep*(k+1):
|
156 |
+
c[k, k] = ckk; c[k, k+1:] = 0
|
157 |
+
for i in range(k+1, n):
|
158 |
+
c[i, k] = c[i, k]/ckk; c[i, k+1:i+1] = c[i, k+1:i+1] - c[i, k]*c[k+1:i+1, k].T
|
159 |
+
if abs(dem) > ep:
|
160 |
+
y[k] = (np.exp(-am**2/2) - np.exp(-bm**2/2)) / (sqtp*dem)
|
161 |
+
else:
|
162 |
+
y[k] = (am + bm) / 2
|
163 |
+
if am < -10:
|
164 |
+
y[k] = bm
|
165 |
+
elif bm > 10:
|
166 |
+
y[k] = am
|
167 |
+
c[k, :k+1] /= ckk; ap[k] /= ckk; bp[k] /= ckk
|
168 |
+
else:
|
169 |
+
c[k:, k] = 0; y[k] = (ap[k] + bp[k])/2
|
170 |
+
pass
|
171 |
+
return c, ap, bp
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/fisher_exact_results_from_r.py
ADDED
@@ -0,0 +1,607 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# DO NOT EDIT THIS FILE!
|
2 |
+
# This file was generated by the R script
|
3 |
+
# generate_fisher_exact_results_from_r.R
|
4 |
+
# The script was run with R version 3.6.2 (2019-12-12) at 2020-11-09 06:16:09
|
5 |
+
|
6 |
+
|
7 |
+
from collections import namedtuple
|
8 |
+
import numpy as np
|
9 |
+
|
10 |
+
|
11 |
+
Inf = np.inf
|
12 |
+
|
13 |
+
Parameters = namedtuple('Parameters',
|
14 |
+
['table', 'confidence_level', 'alternative'])
|
15 |
+
RResults = namedtuple('RResults',
|
16 |
+
['pvalue', 'conditional_odds_ratio',
|
17 |
+
'conditional_odds_ratio_ci'])
|
18 |
+
data = [
|
19 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
20 |
+
confidence_level=0.95,
|
21 |
+
alternative='two.sided'),
|
22 |
+
RResults(pvalue=0.1300759363430016,
|
23 |
+
conditional_odds_ratio=0.25055839934223,
|
24 |
+
conditional_odds_ratio_ci=(0.04035202926536294,
|
25 |
+
2.662846672960251))),
|
26 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
27 |
+
confidence_level=0.95,
|
28 |
+
alternative='two.sided'),
|
29 |
+
RResults(pvalue=0.02301413756522116,
|
30 |
+
conditional_odds_ratio=0.0858623513573622,
|
31 |
+
conditional_odds_ratio_ci=(0.004668988338943325,
|
32 |
+
0.895792956493601))),
|
33 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
34 |
+
confidence_level=0.95,
|
35 |
+
alternative='two.sided'),
|
36 |
+
RResults(pvalue=0.1973244147157191,
|
37 |
+
conditional_odds_ratio=4.725646047336587,
|
38 |
+
conditional_odds_ratio_ci=(0.4153910882532168,
|
39 |
+
259.2593661129417))),
|
40 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
41 |
+
confidence_level=0.95,
|
42 |
+
alternative='two.sided'),
|
43 |
+
RResults(pvalue=0.09580440012477633,
|
44 |
+
conditional_odds_ratio=0.3394396617440851,
|
45 |
+
conditional_odds_ratio_ci=(0.08056337526385809,
|
46 |
+
1.22704788545557))),
|
47 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
48 |
+
confidence_level=0.95,
|
49 |
+
alternative='two.sided'),
|
50 |
+
RResults(pvalue=0.2697004098849359,
|
51 |
+
conditional_odds_ratio=0.4937791394540491,
|
52 |
+
conditional_odds_ratio_ci=(0.1176691231650079,
|
53 |
+
1.787463657995973))),
|
54 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
55 |
+
confidence_level=0.95,
|
56 |
+
alternative='two.sided'),
|
57 |
+
RResults(pvalue=0.1973244147157192,
|
58 |
+
conditional_odds_ratio=0.2116112781158479,
|
59 |
+
conditional_odds_ratio_ci=(0.003857141267422399,
|
60 |
+
2.407369893767229))),
|
61 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
62 |
+
confidence_level=0.95,
|
63 |
+
alternative='two.sided'),
|
64 |
+
RResults(pvalue=0.06126482213438735,
|
65 |
+
conditional_odds_ratio=0,
|
66 |
+
conditional_odds_ratio_ci=(0,
|
67 |
+
1.451643573543705))),
|
68 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
69 |
+
confidence_level=0.95,
|
70 |
+
alternative='two.sided'),
|
71 |
+
RResults(pvalue=0.04761904761904762,
|
72 |
+
conditional_odds_ratio=Inf,
|
73 |
+
conditional_odds_ratio_ci=(1.024822256141754,
|
74 |
+
Inf))),
|
75 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
76 |
+
confidence_level=0.95,
|
77 |
+
alternative='two.sided'),
|
78 |
+
RResults(pvalue=1,
|
79 |
+
conditional_odds_ratio=0,
|
80 |
+
conditional_odds_ratio_ci=(0,
|
81 |
+
39.00054996869288))),
|
82 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
83 |
+
confidence_level=0.95,
|
84 |
+
alternative='two.sided'),
|
85 |
+
RResults(pvalue=0.04761904761904761,
|
86 |
+
conditional_odds_ratio=Inf,
|
87 |
+
conditional_odds_ratio_ci=(1.024822256141754,
|
88 |
+
Inf))),
|
89 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
90 |
+
confidence_level=0.95,
|
91 |
+
alternative='two.sided'),
|
92 |
+
RResults(pvalue=1,
|
93 |
+
conditional_odds_ratio=0,
|
94 |
+
conditional_odds_ratio_ci=(0,
|
95 |
+
39.00054996869287))),
|
96 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
97 |
+
confidence_level=0.95,
|
98 |
+
alternative='two.sided'),
|
99 |
+
RResults(pvalue=2.005657880389071e-122,
|
100 |
+
conditional_odds_ratio=977.7866978606228,
|
101 |
+
conditional_odds_ratio_ci=(349.2595113327733,
|
102 |
+
3630.382605689872))),
|
103 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
104 |
+
confidence_level=0.95,
|
105 |
+
alternative='two.sided'),
|
106 |
+
RResults(pvalue=5.728437460831947e-44,
|
107 |
+
conditional_odds_ratio=425.2403028434684,
|
108 |
+
conditional_odds_ratio_ci=(152.4166024390096,
|
109 |
+
1425.700792178893))),
|
110 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
111 |
+
confidence_level=0.95,
|
112 |
+
alternative='two.sided'),
|
113 |
+
RResults(pvalue=0.574111858126088,
|
114 |
+
conditional_odds_ratio=1.068697577856801,
|
115 |
+
conditional_odds_ratio_ci=(0.8520462587912048,
|
116 |
+
1.340148950273938))),
|
117 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
118 |
+
confidence_level=0.99,
|
119 |
+
alternative='two.sided'),
|
120 |
+
RResults(pvalue=0.1300759363430016,
|
121 |
+
conditional_odds_ratio=0.25055839934223,
|
122 |
+
conditional_odds_ratio_ci=(0.02502345007115455,
|
123 |
+
6.304424772117853))),
|
124 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
125 |
+
confidence_level=0.99,
|
126 |
+
alternative='two.sided'),
|
127 |
+
RResults(pvalue=0.02301413756522116,
|
128 |
+
conditional_odds_ratio=0.0858623513573622,
|
129 |
+
conditional_odds_ratio_ci=(0.001923034001462487,
|
130 |
+
1.53670836950172))),
|
131 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
132 |
+
confidence_level=0.99,
|
133 |
+
alternative='two.sided'),
|
134 |
+
RResults(pvalue=0.1973244147157191,
|
135 |
+
conditional_odds_ratio=4.725646047336587,
|
136 |
+
conditional_odds_ratio_ci=(0.2397970951413721,
|
137 |
+
1291.342011095509))),
|
138 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
139 |
+
confidence_level=0.99,
|
140 |
+
alternative='two.sided'),
|
141 |
+
RResults(pvalue=0.09580440012477633,
|
142 |
+
conditional_odds_ratio=0.3394396617440851,
|
143 |
+
conditional_odds_ratio_ci=(0.05127576113762925,
|
144 |
+
1.717176678806983))),
|
145 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
146 |
+
confidence_level=0.99,
|
147 |
+
alternative='two.sided'),
|
148 |
+
RResults(pvalue=0.2697004098849359,
|
149 |
+
conditional_odds_ratio=0.4937791394540491,
|
150 |
+
conditional_odds_ratio_ci=(0.07498546954483619,
|
151 |
+
2.506969905199901))),
|
152 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
153 |
+
confidence_level=0.99,
|
154 |
+
alternative='two.sided'),
|
155 |
+
RResults(pvalue=0.1973244147157192,
|
156 |
+
conditional_odds_ratio=0.2116112781158479,
|
157 |
+
conditional_odds_ratio_ci=(0.0007743881879531337,
|
158 |
+
4.170192301163831))),
|
159 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
160 |
+
confidence_level=0.99,
|
161 |
+
alternative='two.sided'),
|
162 |
+
RResults(pvalue=0.06126482213438735,
|
163 |
+
conditional_odds_ratio=0,
|
164 |
+
conditional_odds_ratio_ci=(0,
|
165 |
+
2.642491011905582))),
|
166 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
167 |
+
confidence_level=0.99,
|
168 |
+
alternative='two.sided'),
|
169 |
+
RResults(pvalue=0.04761904761904762,
|
170 |
+
conditional_odds_ratio=Inf,
|
171 |
+
conditional_odds_ratio_ci=(0.496935393325443,
|
172 |
+
Inf))),
|
173 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
174 |
+
confidence_level=0.99,
|
175 |
+
alternative='two.sided'),
|
176 |
+
RResults(pvalue=1,
|
177 |
+
conditional_odds_ratio=0,
|
178 |
+
conditional_odds_ratio_ci=(0,
|
179 |
+
198.019801980198))),
|
180 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
181 |
+
confidence_level=0.99,
|
182 |
+
alternative='two.sided'),
|
183 |
+
RResults(pvalue=0.04761904761904761,
|
184 |
+
conditional_odds_ratio=Inf,
|
185 |
+
conditional_odds_ratio_ci=(0.496935393325443,
|
186 |
+
Inf))),
|
187 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
188 |
+
confidence_level=0.99,
|
189 |
+
alternative='two.sided'),
|
190 |
+
RResults(pvalue=1,
|
191 |
+
conditional_odds_ratio=0,
|
192 |
+
conditional_odds_ratio_ci=(0,
|
193 |
+
198.019801980198))),
|
194 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
195 |
+
confidence_level=0.99,
|
196 |
+
alternative='two.sided'),
|
197 |
+
RResults(pvalue=2.005657880389071e-122,
|
198 |
+
conditional_odds_ratio=977.7866978606228,
|
199 |
+
conditional_odds_ratio_ci=(270.0334165523604,
|
200 |
+
5461.333333326708))),
|
201 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
202 |
+
confidence_level=0.99,
|
203 |
+
alternative='two.sided'),
|
204 |
+
RResults(pvalue=5.728437460831947e-44,
|
205 |
+
conditional_odds_ratio=425.2403028434684,
|
206 |
+
conditional_odds_ratio_ci=(116.7944750275836,
|
207 |
+
1931.995993191814))),
|
208 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
209 |
+
confidence_level=0.99,
|
210 |
+
alternative='two.sided'),
|
211 |
+
RResults(pvalue=0.574111858126088,
|
212 |
+
conditional_odds_ratio=1.068697577856801,
|
213 |
+
conditional_odds_ratio_ci=(0.7949398282935892,
|
214 |
+
1.436229679394333))),
|
215 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
216 |
+
confidence_level=0.95,
|
217 |
+
alternative='less'),
|
218 |
+
RResults(pvalue=0.1300759363430016,
|
219 |
+
conditional_odds_ratio=0.25055839934223,
|
220 |
+
conditional_odds_ratio_ci=(0,
|
221 |
+
1.797867027270803))),
|
222 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
223 |
+
confidence_level=0.95,
|
224 |
+
alternative='less'),
|
225 |
+
RResults(pvalue=0.0185217259520665,
|
226 |
+
conditional_odds_ratio=0.0858623513573622,
|
227 |
+
conditional_odds_ratio_ci=(0,
|
228 |
+
0.6785254803404526))),
|
229 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
230 |
+
confidence_level=0.95,
|
231 |
+
alternative='less'),
|
232 |
+
RResults(pvalue=0.9782608695652173,
|
233 |
+
conditional_odds_ratio=4.725646047336587,
|
234 |
+
conditional_odds_ratio_ci=(0,
|
235 |
+
127.8497388102893))),
|
236 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
237 |
+
confidence_level=0.95,
|
238 |
+
alternative='less'),
|
239 |
+
RResults(pvalue=0.05625775074399956,
|
240 |
+
conditional_odds_ratio=0.3394396617440851,
|
241 |
+
conditional_odds_ratio_ci=(0,
|
242 |
+
1.032332939718425))),
|
243 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
244 |
+
confidence_level=0.95,
|
245 |
+
alternative='less'),
|
246 |
+
RResults(pvalue=0.1808979350599346,
|
247 |
+
conditional_odds_ratio=0.4937791394540491,
|
248 |
+
conditional_odds_ratio_ci=(0,
|
249 |
+
1.502407513296985))),
|
250 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
251 |
+
confidence_level=0.95,
|
252 |
+
alternative='less'),
|
253 |
+
RResults(pvalue=0.1652173913043479,
|
254 |
+
conditional_odds_ratio=0.2116112781158479,
|
255 |
+
conditional_odds_ratio_ci=(0,
|
256 |
+
1.820421051562392))),
|
257 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
258 |
+
confidence_level=0.95,
|
259 |
+
alternative='less'),
|
260 |
+
RResults(pvalue=0.0565217391304348,
|
261 |
+
conditional_odds_ratio=0,
|
262 |
+
conditional_odds_ratio_ci=(0,
|
263 |
+
1.06224603077045))),
|
264 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
265 |
+
confidence_level=0.95,
|
266 |
+
alternative='less'),
|
267 |
+
RResults(pvalue=1,
|
268 |
+
conditional_odds_ratio=Inf,
|
269 |
+
conditional_odds_ratio_ci=(0,
|
270 |
+
Inf))),
|
271 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
272 |
+
confidence_level=0.95,
|
273 |
+
alternative='less'),
|
274 |
+
RResults(pvalue=0.5,
|
275 |
+
conditional_odds_ratio=0,
|
276 |
+
conditional_odds_ratio_ci=(0,
|
277 |
+
19.00192394479939))),
|
278 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
279 |
+
confidence_level=0.95,
|
280 |
+
alternative='less'),
|
281 |
+
RResults(pvalue=1,
|
282 |
+
conditional_odds_ratio=Inf,
|
283 |
+
conditional_odds_ratio_ci=(0,
|
284 |
+
Inf))),
|
285 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
286 |
+
confidence_level=0.95,
|
287 |
+
alternative='less'),
|
288 |
+
RResults(pvalue=0.4999999999999999,
|
289 |
+
conditional_odds_ratio=0,
|
290 |
+
conditional_odds_ratio_ci=(0,
|
291 |
+
19.00192394479939))),
|
292 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
293 |
+
confidence_level=0.95,
|
294 |
+
alternative='less'),
|
295 |
+
RResults(pvalue=1,
|
296 |
+
conditional_odds_ratio=977.7866978606228,
|
297 |
+
conditional_odds_ratio_ci=(0,
|
298 |
+
3045.460216525746))),
|
299 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
300 |
+
confidence_level=0.95,
|
301 |
+
alternative='less'),
|
302 |
+
RResults(pvalue=1,
|
303 |
+
conditional_odds_ratio=425.2403028434684,
|
304 |
+
conditional_odds_ratio_ci=(0,
|
305 |
+
1186.440170942579))),
|
306 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
307 |
+
confidence_level=0.95,
|
308 |
+
alternative='less'),
|
309 |
+
RResults(pvalue=0.7416227010368963,
|
310 |
+
conditional_odds_ratio=1.068697577856801,
|
311 |
+
conditional_odds_ratio_ci=(0,
|
312 |
+
1.293551891610822))),
|
313 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
314 |
+
confidence_level=0.99,
|
315 |
+
alternative='less'),
|
316 |
+
RResults(pvalue=0.1300759363430016,
|
317 |
+
conditional_odds_ratio=0.25055839934223,
|
318 |
+
conditional_odds_ratio_ci=(0,
|
319 |
+
4.375946050832565))),
|
320 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
321 |
+
confidence_level=0.99,
|
322 |
+
alternative='less'),
|
323 |
+
RResults(pvalue=0.0185217259520665,
|
324 |
+
conditional_odds_ratio=0.0858623513573622,
|
325 |
+
conditional_odds_ratio_ci=(0,
|
326 |
+
1.235282118191202))),
|
327 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
328 |
+
confidence_level=0.99,
|
329 |
+
alternative='less'),
|
330 |
+
RResults(pvalue=0.9782608695652173,
|
331 |
+
conditional_odds_ratio=4.725646047336587,
|
332 |
+
conditional_odds_ratio_ci=(0,
|
333 |
+
657.2063583945989))),
|
334 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
335 |
+
confidence_level=0.99,
|
336 |
+
alternative='less'),
|
337 |
+
RResults(pvalue=0.05625775074399956,
|
338 |
+
conditional_odds_ratio=0.3394396617440851,
|
339 |
+
conditional_odds_ratio_ci=(0,
|
340 |
+
1.498867660683128))),
|
341 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
342 |
+
confidence_level=0.99,
|
343 |
+
alternative='less'),
|
344 |
+
RResults(pvalue=0.1808979350599346,
|
345 |
+
conditional_odds_ratio=0.4937791394540491,
|
346 |
+
conditional_odds_ratio_ci=(0,
|
347 |
+
2.186159386716762))),
|
348 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
349 |
+
confidence_level=0.99,
|
350 |
+
alternative='less'),
|
351 |
+
RResults(pvalue=0.1652173913043479,
|
352 |
+
conditional_odds_ratio=0.2116112781158479,
|
353 |
+
conditional_odds_ratio_ci=(0,
|
354 |
+
3.335351451901569))),
|
355 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
356 |
+
confidence_level=0.99,
|
357 |
+
alternative='less'),
|
358 |
+
RResults(pvalue=0.0565217391304348,
|
359 |
+
conditional_odds_ratio=0,
|
360 |
+
conditional_odds_ratio_ci=(0,
|
361 |
+
2.075407697450433))),
|
362 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
363 |
+
confidence_level=0.99,
|
364 |
+
alternative='less'),
|
365 |
+
RResults(pvalue=1,
|
366 |
+
conditional_odds_ratio=Inf,
|
367 |
+
conditional_odds_ratio_ci=(0,
|
368 |
+
Inf))),
|
369 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
370 |
+
confidence_level=0.99,
|
371 |
+
alternative='less'),
|
372 |
+
RResults(pvalue=0.5,
|
373 |
+
conditional_odds_ratio=0,
|
374 |
+
conditional_odds_ratio_ci=(0,
|
375 |
+
99.00009507969122))),
|
376 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
377 |
+
confidence_level=0.99,
|
378 |
+
alternative='less'),
|
379 |
+
RResults(pvalue=1,
|
380 |
+
conditional_odds_ratio=Inf,
|
381 |
+
conditional_odds_ratio_ci=(0,
|
382 |
+
Inf))),
|
383 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
384 |
+
confidence_level=0.99,
|
385 |
+
alternative='less'),
|
386 |
+
RResults(pvalue=0.4999999999999999,
|
387 |
+
conditional_odds_ratio=0,
|
388 |
+
conditional_odds_ratio_ci=(0,
|
389 |
+
99.00009507969123))),
|
390 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
391 |
+
confidence_level=0.99,
|
392 |
+
alternative='less'),
|
393 |
+
RResults(pvalue=1,
|
394 |
+
conditional_odds_ratio=977.7866978606228,
|
395 |
+
conditional_odds_ratio_ci=(0,
|
396 |
+
4503.078257659934))),
|
397 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
398 |
+
confidence_level=0.99,
|
399 |
+
alternative='less'),
|
400 |
+
RResults(pvalue=1,
|
401 |
+
conditional_odds_ratio=425.2403028434684,
|
402 |
+
conditional_odds_ratio_ci=(0,
|
403 |
+
1811.766127544222))),
|
404 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
405 |
+
confidence_level=0.99,
|
406 |
+
alternative='less'),
|
407 |
+
RResults(pvalue=0.7416227010368963,
|
408 |
+
conditional_odds_ratio=1.068697577856801,
|
409 |
+
conditional_odds_ratio_ci=(0,
|
410 |
+
1.396522811516685))),
|
411 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
412 |
+
confidence_level=0.95,
|
413 |
+
alternative='greater'),
|
414 |
+
RResults(pvalue=0.979790445314723,
|
415 |
+
conditional_odds_ratio=0.25055839934223,
|
416 |
+
conditional_odds_ratio_ci=(0.05119649909830196,
|
417 |
+
Inf))),
|
418 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
419 |
+
confidence_level=0.95,
|
420 |
+
alternative='greater'),
|
421 |
+
RResults(pvalue=0.9990149169715733,
|
422 |
+
conditional_odds_ratio=0.0858623513573622,
|
423 |
+
conditional_odds_ratio_ci=(0.007163749169069961,
|
424 |
+
Inf))),
|
425 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
426 |
+
confidence_level=0.95,
|
427 |
+
alternative='greater'),
|
428 |
+
RResults(pvalue=0.1652173913043478,
|
429 |
+
conditional_odds_ratio=4.725646047336587,
|
430 |
+
conditional_odds_ratio_ci=(0.5493234651081089,
|
431 |
+
Inf))),
|
432 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
433 |
+
confidence_level=0.95,
|
434 |
+
alternative='greater'),
|
435 |
+
RResults(pvalue=0.9849086665340765,
|
436 |
+
conditional_odds_ratio=0.3394396617440851,
|
437 |
+
conditional_odds_ratio_ci=(0.1003538933958604,
|
438 |
+
Inf))),
|
439 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
440 |
+
confidence_level=0.95,
|
441 |
+
alternative='greater'),
|
442 |
+
RResults(pvalue=0.9330176609214881,
|
443 |
+
conditional_odds_ratio=0.4937791394540491,
|
444 |
+
conditional_odds_ratio_ci=(0.146507416280863,
|
445 |
+
Inf))),
|
446 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
447 |
+
confidence_level=0.95,
|
448 |
+
alternative='greater'),
|
449 |
+
RResults(pvalue=0.9782608695652174,
|
450 |
+
conditional_odds_ratio=0.2116112781158479,
|
451 |
+
conditional_odds_ratio_ci=(0.007821681994077808,
|
452 |
+
Inf))),
|
453 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
454 |
+
confidence_level=0.95,
|
455 |
+
alternative='greater'),
|
456 |
+
RResults(pvalue=1,
|
457 |
+
conditional_odds_ratio=0,
|
458 |
+
conditional_odds_ratio_ci=(0,
|
459 |
+
Inf))),
|
460 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
461 |
+
confidence_level=0.95,
|
462 |
+
alternative='greater'),
|
463 |
+
RResults(pvalue=0.02380952380952382,
|
464 |
+
conditional_odds_ratio=Inf,
|
465 |
+
conditional_odds_ratio_ci=(1.487678929918272,
|
466 |
+
Inf))),
|
467 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
468 |
+
confidence_level=0.95,
|
469 |
+
alternative='greater'),
|
470 |
+
RResults(pvalue=1,
|
471 |
+
conditional_odds_ratio=0,
|
472 |
+
conditional_odds_ratio_ci=(0,
|
473 |
+
Inf))),
|
474 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
475 |
+
confidence_level=0.95,
|
476 |
+
alternative='greater'),
|
477 |
+
RResults(pvalue=0.0238095238095238,
|
478 |
+
conditional_odds_ratio=Inf,
|
479 |
+
conditional_odds_ratio_ci=(1.487678929918272,
|
480 |
+
Inf))),
|
481 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
482 |
+
confidence_level=0.95,
|
483 |
+
alternative='greater'),
|
484 |
+
RResults(pvalue=1,
|
485 |
+
conditional_odds_ratio=0,
|
486 |
+
conditional_odds_ratio_ci=(0,
|
487 |
+
Inf))),
|
488 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
489 |
+
confidence_level=0.95,
|
490 |
+
alternative='greater'),
|
491 |
+
RResults(pvalue=2.005657880388915e-122,
|
492 |
+
conditional_odds_ratio=977.7866978606228,
|
493 |
+
conditional_odds_ratio_ci=(397.784359748113,
|
494 |
+
Inf))),
|
495 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
496 |
+
confidence_level=0.95,
|
497 |
+
alternative='greater'),
|
498 |
+
RResults(pvalue=5.728437460831983e-44,
|
499 |
+
conditional_odds_ratio=425.2403028434684,
|
500 |
+
conditional_odds_ratio_ci=(174.7148056880929,
|
501 |
+
Inf))),
|
502 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
503 |
+
confidence_level=0.95,
|
504 |
+
alternative='greater'),
|
505 |
+
RResults(pvalue=0.2959825901308897,
|
506 |
+
conditional_odds_ratio=1.068697577856801,
|
507 |
+
conditional_odds_ratio_ci=(0.8828406663967776,
|
508 |
+
Inf))),
|
509 |
+
(Parameters(table=[[100, 2], [1000, 5]],
|
510 |
+
confidence_level=0.99,
|
511 |
+
alternative='greater'),
|
512 |
+
RResults(pvalue=0.979790445314723,
|
513 |
+
conditional_odds_ratio=0.25055839934223,
|
514 |
+
conditional_odds_ratio_ci=(0.03045407081240429,
|
515 |
+
Inf))),
|
516 |
+
(Parameters(table=[[2, 7], [8, 2]],
|
517 |
+
confidence_level=0.99,
|
518 |
+
alternative='greater'),
|
519 |
+
RResults(pvalue=0.9990149169715733,
|
520 |
+
conditional_odds_ratio=0.0858623513573622,
|
521 |
+
conditional_odds_ratio_ci=(0.002768053063547901,
|
522 |
+
Inf))),
|
523 |
+
(Parameters(table=[[5, 1], [10, 10]],
|
524 |
+
confidence_level=0.99,
|
525 |
+
alternative='greater'),
|
526 |
+
RResults(pvalue=0.1652173913043478,
|
527 |
+
conditional_odds_ratio=4.725646047336587,
|
528 |
+
conditional_odds_ratio_ci=(0.2998184792279909,
|
529 |
+
Inf))),
|
530 |
+
(Parameters(table=[[5, 15], [20, 20]],
|
531 |
+
confidence_level=0.99,
|
532 |
+
alternative='greater'),
|
533 |
+
RResults(pvalue=0.9849086665340765,
|
534 |
+
conditional_odds_ratio=0.3394396617440851,
|
535 |
+
conditional_odds_ratio_ci=(0.06180414342643172,
|
536 |
+
Inf))),
|
537 |
+
(Parameters(table=[[5, 16], [16, 25]],
|
538 |
+
confidence_level=0.99,
|
539 |
+
alternative='greater'),
|
540 |
+
RResults(pvalue=0.9330176609214881,
|
541 |
+
conditional_odds_ratio=0.4937791394540491,
|
542 |
+
conditional_odds_ratio_ci=(0.09037094010066403,
|
543 |
+
Inf))),
|
544 |
+
(Parameters(table=[[10, 5], [10, 1]],
|
545 |
+
confidence_level=0.99,
|
546 |
+
alternative='greater'),
|
547 |
+
RResults(pvalue=0.9782608695652174,
|
548 |
+
conditional_odds_ratio=0.2116112781158479,
|
549 |
+
conditional_odds_ratio_ci=(0.001521592095430679,
|
550 |
+
Inf))),
|
551 |
+
(Parameters(table=[[10, 5], [10, 0]],
|
552 |
+
confidence_level=0.99,
|
553 |
+
alternative='greater'),
|
554 |
+
RResults(pvalue=1,
|
555 |
+
conditional_odds_ratio=0,
|
556 |
+
conditional_odds_ratio_ci=(0,
|
557 |
+
Inf))),
|
558 |
+
(Parameters(table=[[5, 0], [1, 4]],
|
559 |
+
confidence_level=0.99,
|
560 |
+
alternative='greater'),
|
561 |
+
RResults(pvalue=0.02380952380952382,
|
562 |
+
conditional_odds_ratio=Inf,
|
563 |
+
conditional_odds_ratio_ci=(0.6661157890359722,
|
564 |
+
Inf))),
|
565 |
+
(Parameters(table=[[0, 5], [1, 4]],
|
566 |
+
confidence_level=0.99,
|
567 |
+
alternative='greater'),
|
568 |
+
RResults(pvalue=1,
|
569 |
+
conditional_odds_ratio=0,
|
570 |
+
conditional_odds_ratio_ci=(0,
|
571 |
+
Inf))),
|
572 |
+
(Parameters(table=[[5, 1], [0, 4]],
|
573 |
+
confidence_level=0.99,
|
574 |
+
alternative='greater'),
|
575 |
+
RResults(pvalue=0.0238095238095238,
|
576 |
+
conditional_odds_ratio=Inf,
|
577 |
+
conditional_odds_ratio_ci=(0.6661157890359725,
|
578 |
+
Inf))),
|
579 |
+
(Parameters(table=[[0, 1], [3, 2]],
|
580 |
+
confidence_level=0.99,
|
581 |
+
alternative='greater'),
|
582 |
+
RResults(pvalue=1,
|
583 |
+
conditional_odds_ratio=0,
|
584 |
+
conditional_odds_ratio_ci=(0,
|
585 |
+
Inf))),
|
586 |
+
(Parameters(table=[[200, 7], [8, 300]],
|
587 |
+
confidence_level=0.99,
|
588 |
+
alternative='greater'),
|
589 |
+
RResults(pvalue=2.005657880388915e-122,
|
590 |
+
conditional_odds_ratio=977.7866978606228,
|
591 |
+
conditional_odds_ratio_ci=(297.9619252357688,
|
592 |
+
Inf))),
|
593 |
+
(Parameters(table=[[28, 21], [6, 1957]],
|
594 |
+
confidence_level=0.99,
|
595 |
+
alternative='greater'),
|
596 |
+
RResults(pvalue=5.728437460831983e-44,
|
597 |
+
conditional_odds_ratio=425.2403028434684,
|
598 |
+
conditional_odds_ratio_ci=(130.3213490295859,
|
599 |
+
Inf))),
|
600 |
+
(Parameters(table=[[190, 800], [200, 900]],
|
601 |
+
confidence_level=0.99,
|
602 |
+
alternative='greater'),
|
603 |
+
RResults(pvalue=0.2959825901308897,
|
604 |
+
conditional_odds_ratio=1.068697577856801,
|
605 |
+
conditional_odds_ratio_ci=(0.8176272148267533,
|
606 |
+
Inf))),
|
607 |
+
]
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/AtmWtAg.dat
ADDED
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: AtmWtAg (AtmWtAg.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 108)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Powell, L.J., Murphy, T.J. and Gramlich, J.W. (1982).
|
14 |
+
"The Absolute Isotopic Abundance & Atomic Weight
|
15 |
+
of a Reference Sample of Silver".
|
16 |
+
NBS Journal of Research, 87, pp. 9-19.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
2 Treatments
|
21 |
+
24 Replicates/Cell
|
22 |
+
48 Observations
|
23 |
+
7 Constant Leading Digits
|
24 |
+
Average Level of Difficulty
|
25 |
+
Observed Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 3 Parameters (mu, tau_1, tau_2)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
|
42 |
+
Between Instrument 1 3.63834187500000E-09 3.63834187500000E-09 1.59467335677930E+01
|
43 |
+
Within Instrument 46 1.04951729166667E-08 2.28155932971014E-10
|
44 |
+
|
45 |
+
Certified R-Squared 2.57426544538321E-01
|
46 |
+
|
47 |
+
Certified Residual
|
48 |
+
Standard Deviation 1.51048314446410E-05
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Instrument AgWt
|
61 |
+
1 107.8681568
|
62 |
+
1 107.8681465
|
63 |
+
1 107.8681572
|
64 |
+
1 107.8681785
|
65 |
+
1 107.8681446
|
66 |
+
1 107.8681903
|
67 |
+
1 107.8681526
|
68 |
+
1 107.8681494
|
69 |
+
1 107.8681616
|
70 |
+
1 107.8681587
|
71 |
+
1 107.8681519
|
72 |
+
1 107.8681486
|
73 |
+
1 107.8681419
|
74 |
+
1 107.8681569
|
75 |
+
1 107.8681508
|
76 |
+
1 107.8681672
|
77 |
+
1 107.8681385
|
78 |
+
1 107.8681518
|
79 |
+
1 107.8681662
|
80 |
+
1 107.8681424
|
81 |
+
1 107.8681360
|
82 |
+
1 107.8681333
|
83 |
+
1 107.8681610
|
84 |
+
1 107.8681477
|
85 |
+
2 107.8681079
|
86 |
+
2 107.8681344
|
87 |
+
2 107.8681513
|
88 |
+
2 107.8681197
|
89 |
+
2 107.8681604
|
90 |
+
2 107.8681385
|
91 |
+
2 107.8681642
|
92 |
+
2 107.8681365
|
93 |
+
2 107.8681151
|
94 |
+
2 107.8681082
|
95 |
+
2 107.8681517
|
96 |
+
2 107.8681448
|
97 |
+
2 107.8681198
|
98 |
+
2 107.8681482
|
99 |
+
2 107.8681334
|
100 |
+
2 107.8681609
|
101 |
+
2 107.8681101
|
102 |
+
2 107.8681512
|
103 |
+
2 107.8681469
|
104 |
+
2 107.8681360
|
105 |
+
2 107.8681254
|
106 |
+
2 107.8681261
|
107 |
+
2 107.8681450
|
108 |
+
2 107.8681368
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SiRstv.dat
ADDED
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SiRstv (SiRstv.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 85)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Ehrstein, James and Croarkin, M. Carroll.
|
14 |
+
Unpublished NIST dataset.
|
15 |
+
|
16 |
+
|
17 |
+
Data: 1 Factor
|
18 |
+
5 Treatments
|
19 |
+
5 Replicates/Cell
|
20 |
+
25 Observations
|
21 |
+
3 Constant Leading Digits
|
22 |
+
Lower Level of Difficulty
|
23 |
+
Observed Data
|
24 |
+
|
25 |
+
|
26 |
+
Model: 6 Parameters (mu,tau_1, ... , tau_5)
|
27 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
28 |
+
|
29 |
+
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Instrument 4 5.11462616000000E-02 1.27865654000000E-02 1.18046237440255E+00
|
42 |
+
Within Instrument 20 2.16636560000000E-01 1.08318280000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 1.90999039051129E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.04076068334656E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Instrument Resistance
|
61 |
+
1 196.3052
|
62 |
+
1 196.1240
|
63 |
+
1 196.1890
|
64 |
+
1 196.2569
|
65 |
+
1 196.3403
|
66 |
+
2 196.3042
|
67 |
+
2 196.3825
|
68 |
+
2 196.1669
|
69 |
+
2 196.3257
|
70 |
+
2 196.0422
|
71 |
+
3 196.1303
|
72 |
+
3 196.2005
|
73 |
+
3 196.2889
|
74 |
+
3 196.0343
|
75 |
+
3 196.1811
|
76 |
+
4 196.2795
|
77 |
+
4 196.1748
|
78 |
+
4 196.1494
|
79 |
+
4 196.1485
|
80 |
+
4 195.9885
|
81 |
+
5 196.2119
|
82 |
+
5 196.1051
|
83 |
+
5 196.1850
|
84 |
+
5 196.0052
|
85 |
+
5 196.2090
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs01.dat
ADDED
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs01 (SmLs01.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 249)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
21 Replicates/Cell
|
22 |
+
189 Observations
|
23 |
+
1 Constant Leading Digit
|
24 |
+
Lower Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.68000000000000E+00 2.10000000000000E-01 2.10000000000000E+01
|
42 |
+
Within Treatment 180 1.80000000000000E+00 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.82758620689655E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1.4
|
62 |
+
1 1.3
|
63 |
+
1 1.5
|
64 |
+
1 1.3
|
65 |
+
1 1.5
|
66 |
+
1 1.3
|
67 |
+
1 1.5
|
68 |
+
1 1.3
|
69 |
+
1 1.5
|
70 |
+
1 1.3
|
71 |
+
1 1.5
|
72 |
+
1 1.3
|
73 |
+
1 1.5
|
74 |
+
1 1.3
|
75 |
+
1 1.5
|
76 |
+
1 1.3
|
77 |
+
1 1.5
|
78 |
+
1 1.3
|
79 |
+
1 1.5
|
80 |
+
1 1.3
|
81 |
+
1 1.5
|
82 |
+
2 1.3
|
83 |
+
2 1.2
|
84 |
+
2 1.4
|
85 |
+
2 1.2
|
86 |
+
2 1.4
|
87 |
+
2 1.2
|
88 |
+
2 1.4
|
89 |
+
2 1.2
|
90 |
+
2 1.4
|
91 |
+
2 1.2
|
92 |
+
2 1.4
|
93 |
+
2 1.2
|
94 |
+
2 1.4
|
95 |
+
2 1.2
|
96 |
+
2 1.4
|
97 |
+
2 1.2
|
98 |
+
2 1.4
|
99 |
+
2 1.2
|
100 |
+
2 1.4
|
101 |
+
2 1.2
|
102 |
+
2 1.4
|
103 |
+
3 1.5
|
104 |
+
3 1.4
|
105 |
+
3 1.6
|
106 |
+
3 1.4
|
107 |
+
3 1.6
|
108 |
+
3 1.4
|
109 |
+
3 1.6
|
110 |
+
3 1.4
|
111 |
+
3 1.6
|
112 |
+
3 1.4
|
113 |
+
3 1.6
|
114 |
+
3 1.4
|
115 |
+
3 1.6
|
116 |
+
3 1.4
|
117 |
+
3 1.6
|
118 |
+
3 1.4
|
119 |
+
3 1.6
|
120 |
+
3 1.4
|
121 |
+
3 1.6
|
122 |
+
3 1.4
|
123 |
+
3 1.6
|
124 |
+
4 1.3
|
125 |
+
4 1.2
|
126 |
+
4 1.4
|
127 |
+
4 1.2
|
128 |
+
4 1.4
|
129 |
+
4 1.2
|
130 |
+
4 1.4
|
131 |
+
4 1.2
|
132 |
+
4 1.4
|
133 |
+
4 1.2
|
134 |
+
4 1.4
|
135 |
+
4 1.2
|
136 |
+
4 1.4
|
137 |
+
4 1.2
|
138 |
+
4 1.4
|
139 |
+
4 1.2
|
140 |
+
4 1.4
|
141 |
+
4 1.2
|
142 |
+
4 1.4
|
143 |
+
4 1.2
|
144 |
+
4 1.4
|
145 |
+
5 1.5
|
146 |
+
5 1.4
|
147 |
+
5 1.6
|
148 |
+
5 1.4
|
149 |
+
5 1.6
|
150 |
+
5 1.4
|
151 |
+
5 1.6
|
152 |
+
5 1.4
|
153 |
+
5 1.6
|
154 |
+
5 1.4
|
155 |
+
5 1.6
|
156 |
+
5 1.4
|
157 |
+
5 1.6
|
158 |
+
5 1.4
|
159 |
+
5 1.6
|
160 |
+
5 1.4
|
161 |
+
5 1.6
|
162 |
+
5 1.4
|
163 |
+
5 1.6
|
164 |
+
5 1.4
|
165 |
+
5 1.6
|
166 |
+
6 1.3
|
167 |
+
6 1.2
|
168 |
+
6 1.4
|
169 |
+
6 1.2
|
170 |
+
6 1.4
|
171 |
+
6 1.2
|
172 |
+
6 1.4
|
173 |
+
6 1.2
|
174 |
+
6 1.4
|
175 |
+
6 1.2
|
176 |
+
6 1.4
|
177 |
+
6 1.2
|
178 |
+
6 1.4
|
179 |
+
6 1.2
|
180 |
+
6 1.4
|
181 |
+
6 1.2
|
182 |
+
6 1.4
|
183 |
+
6 1.2
|
184 |
+
6 1.4
|
185 |
+
6 1.2
|
186 |
+
6 1.4
|
187 |
+
7 1.5
|
188 |
+
7 1.4
|
189 |
+
7 1.6
|
190 |
+
7 1.4
|
191 |
+
7 1.6
|
192 |
+
7 1.4
|
193 |
+
7 1.6
|
194 |
+
7 1.4
|
195 |
+
7 1.6
|
196 |
+
7 1.4
|
197 |
+
7 1.6
|
198 |
+
7 1.4
|
199 |
+
7 1.6
|
200 |
+
7 1.4
|
201 |
+
7 1.6
|
202 |
+
7 1.4
|
203 |
+
7 1.6
|
204 |
+
7 1.4
|
205 |
+
7 1.6
|
206 |
+
7 1.4
|
207 |
+
7 1.6
|
208 |
+
8 1.3
|
209 |
+
8 1.2
|
210 |
+
8 1.4
|
211 |
+
8 1.2
|
212 |
+
8 1.4
|
213 |
+
8 1.2
|
214 |
+
8 1.4
|
215 |
+
8 1.2
|
216 |
+
8 1.4
|
217 |
+
8 1.2
|
218 |
+
8 1.4
|
219 |
+
8 1.2
|
220 |
+
8 1.4
|
221 |
+
8 1.2
|
222 |
+
8 1.4
|
223 |
+
8 1.2
|
224 |
+
8 1.4
|
225 |
+
8 1.2
|
226 |
+
8 1.4
|
227 |
+
8 1.2
|
228 |
+
8 1.4
|
229 |
+
9 1.5
|
230 |
+
9 1.4
|
231 |
+
9 1.6
|
232 |
+
9 1.4
|
233 |
+
9 1.6
|
234 |
+
9 1.4
|
235 |
+
9 1.6
|
236 |
+
9 1.4
|
237 |
+
9 1.6
|
238 |
+
9 1.4
|
239 |
+
9 1.6
|
240 |
+
9 1.4
|
241 |
+
9 1.6
|
242 |
+
9 1.4
|
243 |
+
9 1.6
|
244 |
+
9 1.4
|
245 |
+
9 1.6
|
246 |
+
9 1.4
|
247 |
+
9 1.6
|
248 |
+
9 1.4
|
249 |
+
9 1.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs02.dat
ADDED
@@ -0,0 +1,1869 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs02 (SmLs02.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 1869)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
201 Replicates/Cell
|
22 |
+
1809 Observations
|
23 |
+
1 Constant Leading Digit
|
24 |
+
Lower Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.60800000000000E+01 2.01000000000000E+00 2.01000000000000E+02
|
42 |
+
Within Treatment 1800 1.80000000000000E+01 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.71830985915493E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1.4
|
62 |
+
1 1.3
|
63 |
+
1 1.5
|
64 |
+
1 1.3
|
65 |
+
1 1.5
|
66 |
+
1 1.3
|
67 |
+
1 1.5
|
68 |
+
1 1.3
|
69 |
+
1 1.5
|
70 |
+
1 1.3
|
71 |
+
1 1.5
|
72 |
+
1 1.3
|
73 |
+
1 1.5
|
74 |
+
1 1.3
|
75 |
+
1 1.5
|
76 |
+
1 1.3
|
77 |
+
1 1.5
|
78 |
+
1 1.3
|
79 |
+
1 1.5
|
80 |
+
1 1.3
|
81 |
+
1 1.5
|
82 |
+
1 1.3
|
83 |
+
1 1.5
|
84 |
+
1 1.3
|
85 |
+
1 1.5
|
86 |
+
1 1.3
|
87 |
+
1 1.5
|
88 |
+
1 1.3
|
89 |
+
1 1.5
|
90 |
+
1 1.3
|
91 |
+
1 1.5
|
92 |
+
1 1.3
|
93 |
+
1 1.5
|
94 |
+
1 1.3
|
95 |
+
1 1.5
|
96 |
+
1 1.3
|
97 |
+
1 1.5
|
98 |
+
1 1.3
|
99 |
+
1 1.5
|
100 |
+
1 1.3
|
101 |
+
1 1.5
|
102 |
+
1 1.3
|
103 |
+
1 1.5
|
104 |
+
1 1.3
|
105 |
+
1 1.5
|
106 |
+
1 1.3
|
107 |
+
1 1.5
|
108 |
+
1 1.3
|
109 |
+
1 1.5
|
110 |
+
1 1.3
|
111 |
+
1 1.5
|
112 |
+
1 1.3
|
113 |
+
1 1.5
|
114 |
+
1 1.3
|
115 |
+
1 1.5
|
116 |
+
1 1.3
|
117 |
+
1 1.5
|
118 |
+
1 1.3
|
119 |
+
1 1.5
|
120 |
+
1 1.3
|
121 |
+
1 1.5
|
122 |
+
1 1.3
|
123 |
+
1 1.5
|
124 |
+
1 1.3
|
125 |
+
1 1.5
|
126 |
+
1 1.3
|
127 |
+
1 1.5
|
128 |
+
1 1.3
|
129 |
+
1 1.5
|
130 |
+
1 1.3
|
131 |
+
1 1.5
|
132 |
+
1 1.3
|
133 |
+
1 1.5
|
134 |
+
1 1.3
|
135 |
+
1 1.5
|
136 |
+
1 1.3
|
137 |
+
1 1.5
|
138 |
+
1 1.3
|
139 |
+
1 1.5
|
140 |
+
1 1.3
|
141 |
+
1 1.5
|
142 |
+
1 1.3
|
143 |
+
1 1.5
|
144 |
+
1 1.3
|
145 |
+
1 1.5
|
146 |
+
1 1.3
|
147 |
+
1 1.5
|
148 |
+
1 1.3
|
149 |
+
1 1.5
|
150 |
+
1 1.3
|
151 |
+
1 1.5
|
152 |
+
1 1.3
|
153 |
+
1 1.5
|
154 |
+
1 1.3
|
155 |
+
1 1.5
|
156 |
+
1 1.3
|
157 |
+
1 1.5
|
158 |
+
1 1.3
|
159 |
+
1 1.5
|
160 |
+
1 1.3
|
161 |
+
1 1.5
|
162 |
+
1 1.3
|
163 |
+
1 1.5
|
164 |
+
1 1.3
|
165 |
+
1 1.5
|
166 |
+
1 1.3
|
167 |
+
1 1.5
|
168 |
+
1 1.3
|
169 |
+
1 1.5
|
170 |
+
1 1.3
|
171 |
+
1 1.5
|
172 |
+
1 1.3
|
173 |
+
1 1.5
|
174 |
+
1 1.3
|
175 |
+
1 1.5
|
176 |
+
1 1.3
|
177 |
+
1 1.5
|
178 |
+
1 1.3
|
179 |
+
1 1.5
|
180 |
+
1 1.3
|
181 |
+
1 1.5
|
182 |
+
1 1.3
|
183 |
+
1 1.5
|
184 |
+
1 1.3
|
185 |
+
1 1.5
|
186 |
+
1 1.3
|
187 |
+
1 1.5
|
188 |
+
1 1.3
|
189 |
+
1 1.5
|
190 |
+
1 1.3
|
191 |
+
1 1.5
|
192 |
+
1 1.3
|
193 |
+
1 1.5
|
194 |
+
1 1.3
|
195 |
+
1 1.5
|
196 |
+
1 1.3
|
197 |
+
1 1.5
|
198 |
+
1 1.3
|
199 |
+
1 1.5
|
200 |
+
1 1.3
|
201 |
+
1 1.5
|
202 |
+
1 1.3
|
203 |
+
1 1.5
|
204 |
+
1 1.3
|
205 |
+
1 1.5
|
206 |
+
1 1.3
|
207 |
+
1 1.5
|
208 |
+
1 1.3
|
209 |
+
1 1.5
|
210 |
+
1 1.3
|
211 |
+
1 1.5
|
212 |
+
1 1.3
|
213 |
+
1 1.5
|
214 |
+
1 1.3
|
215 |
+
1 1.5
|
216 |
+
1 1.3
|
217 |
+
1 1.5
|
218 |
+
1 1.3
|
219 |
+
1 1.5
|
220 |
+
1 1.3
|
221 |
+
1 1.5
|
222 |
+
1 1.3
|
223 |
+
1 1.5
|
224 |
+
1 1.3
|
225 |
+
1 1.5
|
226 |
+
1 1.3
|
227 |
+
1 1.5
|
228 |
+
1 1.3
|
229 |
+
1 1.5
|
230 |
+
1 1.3
|
231 |
+
1 1.5
|
232 |
+
1 1.3
|
233 |
+
1 1.5
|
234 |
+
1 1.3
|
235 |
+
1 1.5
|
236 |
+
1 1.3
|
237 |
+
1 1.5
|
238 |
+
1 1.3
|
239 |
+
1 1.5
|
240 |
+
1 1.3
|
241 |
+
1 1.5
|
242 |
+
1 1.3
|
243 |
+
1 1.5
|
244 |
+
1 1.3
|
245 |
+
1 1.5
|
246 |
+
1 1.3
|
247 |
+
1 1.5
|
248 |
+
1 1.3
|
249 |
+
1 1.5
|
250 |
+
1 1.3
|
251 |
+
1 1.5
|
252 |
+
1 1.3
|
253 |
+
1 1.5
|
254 |
+
1 1.3
|
255 |
+
1 1.5
|
256 |
+
1 1.3
|
257 |
+
1 1.5
|
258 |
+
1 1.3
|
259 |
+
1 1.5
|
260 |
+
1 1.3
|
261 |
+
1 1.5
|
262 |
+
2 1.3
|
263 |
+
2 1.2
|
264 |
+
2 1.4
|
265 |
+
2 1.2
|
266 |
+
2 1.4
|
267 |
+
2 1.2
|
268 |
+
2 1.4
|
269 |
+
2 1.2
|
270 |
+
2 1.4
|
271 |
+
2 1.2
|
272 |
+
2 1.4
|
273 |
+
2 1.2
|
274 |
+
2 1.4
|
275 |
+
2 1.2
|
276 |
+
2 1.4
|
277 |
+
2 1.2
|
278 |
+
2 1.4
|
279 |
+
2 1.2
|
280 |
+
2 1.4
|
281 |
+
2 1.2
|
282 |
+
2 1.4
|
283 |
+
2 1.2
|
284 |
+
2 1.4
|
285 |
+
2 1.2
|
286 |
+
2 1.4
|
287 |
+
2 1.2
|
288 |
+
2 1.4
|
289 |
+
2 1.2
|
290 |
+
2 1.4
|
291 |
+
2 1.2
|
292 |
+
2 1.4
|
293 |
+
2 1.2
|
294 |
+
2 1.4
|
295 |
+
2 1.2
|
296 |
+
2 1.4
|
297 |
+
2 1.2
|
298 |
+
2 1.4
|
299 |
+
2 1.2
|
300 |
+
2 1.4
|
301 |
+
2 1.2
|
302 |
+
2 1.4
|
303 |
+
2 1.2
|
304 |
+
2 1.4
|
305 |
+
2 1.2
|
306 |
+
2 1.4
|
307 |
+
2 1.2
|
308 |
+
2 1.4
|
309 |
+
2 1.2
|
310 |
+
2 1.4
|
311 |
+
2 1.2
|
312 |
+
2 1.4
|
313 |
+
2 1.2
|
314 |
+
2 1.4
|
315 |
+
2 1.2
|
316 |
+
2 1.4
|
317 |
+
2 1.2
|
318 |
+
2 1.4
|
319 |
+
2 1.2
|
320 |
+
2 1.4
|
321 |
+
2 1.2
|
322 |
+
2 1.4
|
323 |
+
2 1.2
|
324 |
+
2 1.4
|
325 |
+
2 1.2
|
326 |
+
2 1.4
|
327 |
+
2 1.2
|
328 |
+
2 1.4
|
329 |
+
2 1.2
|
330 |
+
2 1.4
|
331 |
+
2 1.2
|
332 |
+
2 1.4
|
333 |
+
2 1.2
|
334 |
+
2 1.4
|
335 |
+
2 1.2
|
336 |
+
2 1.4
|
337 |
+
2 1.2
|
338 |
+
2 1.4
|
339 |
+
2 1.2
|
340 |
+
2 1.4
|
341 |
+
2 1.2
|
342 |
+
2 1.4
|
343 |
+
2 1.2
|
344 |
+
2 1.4
|
345 |
+
2 1.2
|
346 |
+
2 1.4
|
347 |
+
2 1.2
|
348 |
+
2 1.4
|
349 |
+
2 1.2
|
350 |
+
2 1.4
|
351 |
+
2 1.2
|
352 |
+
2 1.4
|
353 |
+
2 1.2
|
354 |
+
2 1.4
|
355 |
+
2 1.2
|
356 |
+
2 1.4
|
357 |
+
2 1.2
|
358 |
+
2 1.4
|
359 |
+
2 1.2
|
360 |
+
2 1.4
|
361 |
+
2 1.2
|
362 |
+
2 1.4
|
363 |
+
2 1.2
|
364 |
+
2 1.4
|
365 |
+
2 1.2
|
366 |
+
2 1.4
|
367 |
+
2 1.2
|
368 |
+
2 1.4
|
369 |
+
2 1.2
|
370 |
+
2 1.4
|
371 |
+
2 1.2
|
372 |
+
2 1.4
|
373 |
+
2 1.2
|
374 |
+
2 1.4
|
375 |
+
2 1.2
|
376 |
+
2 1.4
|
377 |
+
2 1.2
|
378 |
+
2 1.4
|
379 |
+
2 1.2
|
380 |
+
2 1.4
|
381 |
+
2 1.2
|
382 |
+
2 1.4
|
383 |
+
2 1.2
|
384 |
+
2 1.4
|
385 |
+
2 1.2
|
386 |
+
2 1.4
|
387 |
+
2 1.2
|
388 |
+
2 1.4
|
389 |
+
2 1.2
|
390 |
+
2 1.4
|
391 |
+
2 1.2
|
392 |
+
2 1.4
|
393 |
+
2 1.2
|
394 |
+
2 1.4
|
395 |
+
2 1.2
|
396 |
+
2 1.4
|
397 |
+
2 1.2
|
398 |
+
2 1.4
|
399 |
+
2 1.2
|
400 |
+
2 1.4
|
401 |
+
2 1.2
|
402 |
+
2 1.4
|
403 |
+
2 1.2
|
404 |
+
2 1.4
|
405 |
+
2 1.2
|
406 |
+
2 1.4
|
407 |
+
2 1.2
|
408 |
+
2 1.4
|
409 |
+
2 1.2
|
410 |
+
2 1.4
|
411 |
+
2 1.2
|
412 |
+
2 1.4
|
413 |
+
2 1.2
|
414 |
+
2 1.4
|
415 |
+
2 1.2
|
416 |
+
2 1.4
|
417 |
+
2 1.2
|
418 |
+
2 1.4
|
419 |
+
2 1.2
|
420 |
+
2 1.4
|
421 |
+
2 1.2
|
422 |
+
2 1.4
|
423 |
+
2 1.2
|
424 |
+
2 1.4
|
425 |
+
2 1.2
|
426 |
+
2 1.4
|
427 |
+
2 1.2
|
428 |
+
2 1.4
|
429 |
+
2 1.2
|
430 |
+
2 1.4
|
431 |
+
2 1.2
|
432 |
+
2 1.4
|
433 |
+
2 1.2
|
434 |
+
2 1.4
|
435 |
+
2 1.2
|
436 |
+
2 1.4
|
437 |
+
2 1.2
|
438 |
+
2 1.4
|
439 |
+
2 1.2
|
440 |
+
2 1.4
|
441 |
+
2 1.2
|
442 |
+
2 1.4
|
443 |
+
2 1.2
|
444 |
+
2 1.4
|
445 |
+
2 1.2
|
446 |
+
2 1.4
|
447 |
+
2 1.2
|
448 |
+
2 1.4
|
449 |
+
2 1.2
|
450 |
+
2 1.4
|
451 |
+
2 1.2
|
452 |
+
2 1.4
|
453 |
+
2 1.2
|
454 |
+
2 1.4
|
455 |
+
2 1.2
|
456 |
+
2 1.4
|
457 |
+
2 1.2
|
458 |
+
2 1.4
|
459 |
+
2 1.2
|
460 |
+
2 1.4
|
461 |
+
2 1.2
|
462 |
+
2 1.4
|
463 |
+
3 1.5
|
464 |
+
3 1.4
|
465 |
+
3 1.6
|
466 |
+
3 1.4
|
467 |
+
3 1.6
|
468 |
+
3 1.4
|
469 |
+
3 1.6
|
470 |
+
3 1.4
|
471 |
+
3 1.6
|
472 |
+
3 1.4
|
473 |
+
3 1.6
|
474 |
+
3 1.4
|
475 |
+
3 1.6
|
476 |
+
3 1.4
|
477 |
+
3 1.6
|
478 |
+
3 1.4
|
479 |
+
3 1.6
|
480 |
+
3 1.4
|
481 |
+
3 1.6
|
482 |
+
3 1.4
|
483 |
+
3 1.6
|
484 |
+
3 1.4
|
485 |
+
3 1.6
|
486 |
+
3 1.4
|
487 |
+
3 1.6
|
488 |
+
3 1.4
|
489 |
+
3 1.6
|
490 |
+
3 1.4
|
491 |
+
3 1.6
|
492 |
+
3 1.4
|
493 |
+
3 1.6
|
494 |
+
3 1.4
|
495 |
+
3 1.6
|
496 |
+
3 1.4
|
497 |
+
3 1.6
|
498 |
+
3 1.4
|
499 |
+
3 1.6
|
500 |
+
3 1.4
|
501 |
+
3 1.6
|
502 |
+
3 1.4
|
503 |
+
3 1.6
|
504 |
+
3 1.4
|
505 |
+
3 1.6
|
506 |
+
3 1.4
|
507 |
+
3 1.6
|
508 |
+
3 1.4
|
509 |
+
3 1.6
|
510 |
+
3 1.4
|
511 |
+
3 1.6
|
512 |
+
3 1.4
|
513 |
+
3 1.6
|
514 |
+
3 1.4
|
515 |
+
3 1.6
|
516 |
+
3 1.4
|
517 |
+
3 1.6
|
518 |
+
3 1.4
|
519 |
+
3 1.6
|
520 |
+
3 1.4
|
521 |
+
3 1.6
|
522 |
+
3 1.4
|
523 |
+
3 1.6
|
524 |
+
3 1.4
|
525 |
+
3 1.6
|
526 |
+
3 1.4
|
527 |
+
3 1.6
|
528 |
+
3 1.4
|
529 |
+
3 1.6
|
530 |
+
3 1.4
|
531 |
+
3 1.6
|
532 |
+
3 1.4
|
533 |
+
3 1.6
|
534 |
+
3 1.4
|
535 |
+
3 1.6
|
536 |
+
3 1.4
|
537 |
+
3 1.6
|
538 |
+
3 1.4
|
539 |
+
3 1.6
|
540 |
+
3 1.4
|
541 |
+
3 1.6
|
542 |
+
3 1.4
|
543 |
+
3 1.6
|
544 |
+
3 1.4
|
545 |
+
3 1.6
|
546 |
+
3 1.4
|
547 |
+
3 1.6
|
548 |
+
3 1.4
|
549 |
+
3 1.6
|
550 |
+
3 1.4
|
551 |
+
3 1.6
|
552 |
+
3 1.4
|
553 |
+
3 1.6
|
554 |
+
3 1.4
|
555 |
+
3 1.6
|
556 |
+
3 1.4
|
557 |
+
3 1.6
|
558 |
+
3 1.4
|
559 |
+
3 1.6
|
560 |
+
3 1.4
|
561 |
+
3 1.6
|
562 |
+
3 1.4
|
563 |
+
3 1.6
|
564 |
+
3 1.4
|
565 |
+
3 1.6
|
566 |
+
3 1.4
|
567 |
+
3 1.6
|
568 |
+
3 1.4
|
569 |
+
3 1.6
|
570 |
+
3 1.4
|
571 |
+
3 1.6
|
572 |
+
3 1.4
|
573 |
+
3 1.6
|
574 |
+
3 1.4
|
575 |
+
3 1.6
|
576 |
+
3 1.4
|
577 |
+
3 1.6
|
578 |
+
3 1.4
|
579 |
+
3 1.6
|
580 |
+
3 1.4
|
581 |
+
3 1.6
|
582 |
+
3 1.4
|
583 |
+
3 1.6
|
584 |
+
3 1.4
|
585 |
+
3 1.6
|
586 |
+
3 1.4
|
587 |
+
3 1.6
|
588 |
+
3 1.4
|
589 |
+
3 1.6
|
590 |
+
3 1.4
|
591 |
+
3 1.6
|
592 |
+
3 1.4
|
593 |
+
3 1.6
|
594 |
+
3 1.4
|
595 |
+
3 1.6
|
596 |
+
3 1.4
|
597 |
+
3 1.6
|
598 |
+
3 1.4
|
599 |
+
3 1.6
|
600 |
+
3 1.4
|
601 |
+
3 1.6
|
602 |
+
3 1.4
|
603 |
+
3 1.6
|
604 |
+
3 1.4
|
605 |
+
3 1.6
|
606 |
+
3 1.4
|
607 |
+
3 1.6
|
608 |
+
3 1.4
|
609 |
+
3 1.6
|
610 |
+
3 1.4
|
611 |
+
3 1.6
|
612 |
+
3 1.4
|
613 |
+
3 1.6
|
614 |
+
3 1.4
|
615 |
+
3 1.6
|
616 |
+
3 1.4
|
617 |
+
3 1.6
|
618 |
+
3 1.4
|
619 |
+
3 1.6
|
620 |
+
3 1.4
|
621 |
+
3 1.6
|
622 |
+
3 1.4
|
623 |
+
3 1.6
|
624 |
+
3 1.4
|
625 |
+
3 1.6
|
626 |
+
3 1.4
|
627 |
+
3 1.6
|
628 |
+
3 1.4
|
629 |
+
3 1.6
|
630 |
+
3 1.4
|
631 |
+
3 1.6
|
632 |
+
3 1.4
|
633 |
+
3 1.6
|
634 |
+
3 1.4
|
635 |
+
3 1.6
|
636 |
+
3 1.4
|
637 |
+
3 1.6
|
638 |
+
3 1.4
|
639 |
+
3 1.6
|
640 |
+
3 1.4
|
641 |
+
3 1.6
|
642 |
+
3 1.4
|
643 |
+
3 1.6
|
644 |
+
3 1.4
|
645 |
+
3 1.6
|
646 |
+
3 1.4
|
647 |
+
3 1.6
|
648 |
+
3 1.4
|
649 |
+
3 1.6
|
650 |
+
3 1.4
|
651 |
+
3 1.6
|
652 |
+
3 1.4
|
653 |
+
3 1.6
|
654 |
+
3 1.4
|
655 |
+
3 1.6
|
656 |
+
3 1.4
|
657 |
+
3 1.6
|
658 |
+
3 1.4
|
659 |
+
3 1.6
|
660 |
+
3 1.4
|
661 |
+
3 1.6
|
662 |
+
3 1.4
|
663 |
+
3 1.6
|
664 |
+
4 1.3
|
665 |
+
4 1.2
|
666 |
+
4 1.4
|
667 |
+
4 1.2
|
668 |
+
4 1.4
|
669 |
+
4 1.2
|
670 |
+
4 1.4
|
671 |
+
4 1.2
|
672 |
+
4 1.4
|
673 |
+
4 1.2
|
674 |
+
4 1.4
|
675 |
+
4 1.2
|
676 |
+
4 1.4
|
677 |
+
4 1.2
|
678 |
+
4 1.4
|
679 |
+
4 1.2
|
680 |
+
4 1.4
|
681 |
+
4 1.2
|
682 |
+
4 1.4
|
683 |
+
4 1.2
|
684 |
+
4 1.4
|
685 |
+
4 1.2
|
686 |
+
4 1.4
|
687 |
+
4 1.2
|
688 |
+
4 1.4
|
689 |
+
4 1.2
|
690 |
+
4 1.4
|
691 |
+
4 1.2
|
692 |
+
4 1.4
|
693 |
+
4 1.2
|
694 |
+
4 1.4
|
695 |
+
4 1.2
|
696 |
+
4 1.4
|
697 |
+
4 1.2
|
698 |
+
4 1.4
|
699 |
+
4 1.2
|
700 |
+
4 1.4
|
701 |
+
4 1.2
|
702 |
+
4 1.4
|
703 |
+
4 1.2
|
704 |
+
4 1.4
|
705 |
+
4 1.2
|
706 |
+
4 1.4
|
707 |
+
4 1.2
|
708 |
+
4 1.4
|
709 |
+
4 1.2
|
710 |
+
4 1.4
|
711 |
+
4 1.2
|
712 |
+
4 1.4
|
713 |
+
4 1.2
|
714 |
+
4 1.4
|
715 |
+
4 1.2
|
716 |
+
4 1.4
|
717 |
+
4 1.2
|
718 |
+
4 1.4
|
719 |
+
4 1.2
|
720 |
+
4 1.4
|
721 |
+
4 1.2
|
722 |
+
4 1.4
|
723 |
+
4 1.2
|
724 |
+
4 1.4
|
725 |
+
4 1.2
|
726 |
+
4 1.4
|
727 |
+
4 1.2
|
728 |
+
4 1.4
|
729 |
+
4 1.2
|
730 |
+
4 1.4
|
731 |
+
4 1.2
|
732 |
+
4 1.4
|
733 |
+
4 1.2
|
734 |
+
4 1.4
|
735 |
+
4 1.2
|
736 |
+
4 1.4
|
737 |
+
4 1.2
|
738 |
+
4 1.4
|
739 |
+
4 1.2
|
740 |
+
4 1.4
|
741 |
+
4 1.2
|
742 |
+
4 1.4
|
743 |
+
4 1.2
|
744 |
+
4 1.4
|
745 |
+
4 1.2
|
746 |
+
4 1.4
|
747 |
+
4 1.2
|
748 |
+
4 1.4
|
749 |
+
4 1.2
|
750 |
+
4 1.4
|
751 |
+
4 1.2
|
752 |
+
4 1.4
|
753 |
+
4 1.2
|
754 |
+
4 1.4
|
755 |
+
4 1.2
|
756 |
+
4 1.4
|
757 |
+
4 1.2
|
758 |
+
4 1.4
|
759 |
+
4 1.2
|
760 |
+
4 1.4
|
761 |
+
4 1.2
|
762 |
+
4 1.4
|
763 |
+
4 1.2
|
764 |
+
4 1.4
|
765 |
+
4 1.2
|
766 |
+
4 1.4
|
767 |
+
4 1.2
|
768 |
+
4 1.4
|
769 |
+
4 1.2
|
770 |
+
4 1.4
|
771 |
+
4 1.2
|
772 |
+
4 1.4
|
773 |
+
4 1.2
|
774 |
+
4 1.4
|
775 |
+
4 1.2
|
776 |
+
4 1.4
|
777 |
+
4 1.2
|
778 |
+
4 1.4
|
779 |
+
4 1.2
|
780 |
+
4 1.4
|
781 |
+
4 1.2
|
782 |
+
4 1.4
|
783 |
+
4 1.2
|
784 |
+
4 1.4
|
785 |
+
4 1.2
|
786 |
+
4 1.4
|
787 |
+
4 1.2
|
788 |
+
4 1.4
|
789 |
+
4 1.2
|
790 |
+
4 1.4
|
791 |
+
4 1.2
|
792 |
+
4 1.4
|
793 |
+
4 1.2
|
794 |
+
4 1.4
|
795 |
+
4 1.2
|
796 |
+
4 1.4
|
797 |
+
4 1.2
|
798 |
+
4 1.4
|
799 |
+
4 1.2
|
800 |
+
4 1.4
|
801 |
+
4 1.2
|
802 |
+
4 1.4
|
803 |
+
4 1.2
|
804 |
+
4 1.4
|
805 |
+
4 1.2
|
806 |
+
4 1.4
|
807 |
+
4 1.2
|
808 |
+
4 1.4
|
809 |
+
4 1.2
|
810 |
+
4 1.4
|
811 |
+
4 1.2
|
812 |
+
4 1.4
|
813 |
+
4 1.2
|
814 |
+
4 1.4
|
815 |
+
4 1.2
|
816 |
+
4 1.4
|
817 |
+
4 1.2
|
818 |
+
4 1.4
|
819 |
+
4 1.2
|
820 |
+
4 1.4
|
821 |
+
4 1.2
|
822 |
+
4 1.4
|
823 |
+
4 1.2
|
824 |
+
4 1.4
|
825 |
+
4 1.2
|
826 |
+
4 1.4
|
827 |
+
4 1.2
|
828 |
+
4 1.4
|
829 |
+
4 1.2
|
830 |
+
4 1.4
|
831 |
+
4 1.2
|
832 |
+
4 1.4
|
833 |
+
4 1.2
|
834 |
+
4 1.4
|
835 |
+
4 1.2
|
836 |
+
4 1.4
|
837 |
+
4 1.2
|
838 |
+
4 1.4
|
839 |
+
4 1.2
|
840 |
+
4 1.4
|
841 |
+
4 1.2
|
842 |
+
4 1.4
|
843 |
+
4 1.2
|
844 |
+
4 1.4
|
845 |
+
4 1.2
|
846 |
+
4 1.4
|
847 |
+
4 1.2
|
848 |
+
4 1.4
|
849 |
+
4 1.2
|
850 |
+
4 1.4
|
851 |
+
4 1.2
|
852 |
+
4 1.4
|
853 |
+
4 1.2
|
854 |
+
4 1.4
|
855 |
+
4 1.2
|
856 |
+
4 1.4
|
857 |
+
4 1.2
|
858 |
+
4 1.4
|
859 |
+
4 1.2
|
860 |
+
4 1.4
|
861 |
+
4 1.2
|
862 |
+
4 1.4
|
863 |
+
4 1.2
|
864 |
+
4 1.4
|
865 |
+
5 1.5
|
866 |
+
5 1.4
|
867 |
+
5 1.6
|
868 |
+
5 1.4
|
869 |
+
5 1.6
|
870 |
+
5 1.4
|
871 |
+
5 1.6
|
872 |
+
5 1.4
|
873 |
+
5 1.6
|
874 |
+
5 1.4
|
875 |
+
5 1.6
|
876 |
+
5 1.4
|
877 |
+
5 1.6
|
878 |
+
5 1.4
|
879 |
+
5 1.6
|
880 |
+
5 1.4
|
881 |
+
5 1.6
|
882 |
+
5 1.4
|
883 |
+
5 1.6
|
884 |
+
5 1.4
|
885 |
+
5 1.6
|
886 |
+
5 1.4
|
887 |
+
5 1.6
|
888 |
+
5 1.4
|
889 |
+
5 1.6
|
890 |
+
5 1.4
|
891 |
+
5 1.6
|
892 |
+
5 1.4
|
893 |
+
5 1.6
|
894 |
+
5 1.4
|
895 |
+
5 1.6
|
896 |
+
5 1.4
|
897 |
+
5 1.6
|
898 |
+
5 1.4
|
899 |
+
5 1.6
|
900 |
+
5 1.4
|
901 |
+
5 1.6
|
902 |
+
5 1.4
|
903 |
+
5 1.6
|
904 |
+
5 1.4
|
905 |
+
5 1.6
|
906 |
+
5 1.4
|
907 |
+
5 1.6
|
908 |
+
5 1.4
|
909 |
+
5 1.6
|
910 |
+
5 1.4
|
911 |
+
5 1.6
|
912 |
+
5 1.4
|
913 |
+
5 1.6
|
914 |
+
5 1.4
|
915 |
+
5 1.6
|
916 |
+
5 1.4
|
917 |
+
5 1.6
|
918 |
+
5 1.4
|
919 |
+
5 1.6
|
920 |
+
5 1.4
|
921 |
+
5 1.6
|
922 |
+
5 1.4
|
923 |
+
5 1.6
|
924 |
+
5 1.4
|
925 |
+
5 1.6
|
926 |
+
5 1.4
|
927 |
+
5 1.6
|
928 |
+
5 1.4
|
929 |
+
5 1.6
|
930 |
+
5 1.4
|
931 |
+
5 1.6
|
932 |
+
5 1.4
|
933 |
+
5 1.6
|
934 |
+
5 1.4
|
935 |
+
5 1.6
|
936 |
+
5 1.4
|
937 |
+
5 1.6
|
938 |
+
5 1.4
|
939 |
+
5 1.6
|
940 |
+
5 1.4
|
941 |
+
5 1.6
|
942 |
+
5 1.4
|
943 |
+
5 1.6
|
944 |
+
5 1.4
|
945 |
+
5 1.6
|
946 |
+
5 1.4
|
947 |
+
5 1.6
|
948 |
+
5 1.4
|
949 |
+
5 1.6
|
950 |
+
5 1.4
|
951 |
+
5 1.6
|
952 |
+
5 1.4
|
953 |
+
5 1.6
|
954 |
+
5 1.4
|
955 |
+
5 1.6
|
956 |
+
5 1.4
|
957 |
+
5 1.6
|
958 |
+
5 1.4
|
959 |
+
5 1.6
|
960 |
+
5 1.4
|
961 |
+
5 1.6
|
962 |
+
5 1.4
|
963 |
+
5 1.6
|
964 |
+
5 1.4
|
965 |
+
5 1.6
|
966 |
+
5 1.4
|
967 |
+
5 1.6
|
968 |
+
5 1.4
|
969 |
+
5 1.6
|
970 |
+
5 1.4
|
971 |
+
5 1.6
|
972 |
+
5 1.4
|
973 |
+
5 1.6
|
974 |
+
5 1.4
|
975 |
+
5 1.6
|
976 |
+
5 1.4
|
977 |
+
5 1.6
|
978 |
+
5 1.4
|
979 |
+
5 1.6
|
980 |
+
5 1.4
|
981 |
+
5 1.6
|
982 |
+
5 1.4
|
983 |
+
5 1.6
|
984 |
+
5 1.4
|
985 |
+
5 1.6
|
986 |
+
5 1.4
|
987 |
+
5 1.6
|
988 |
+
5 1.4
|
989 |
+
5 1.6
|
990 |
+
5 1.4
|
991 |
+
5 1.6
|
992 |
+
5 1.4
|
993 |
+
5 1.6
|
994 |
+
5 1.4
|
995 |
+
5 1.6
|
996 |
+
5 1.4
|
997 |
+
5 1.6
|
998 |
+
5 1.4
|
999 |
+
5 1.6
|
1000 |
+
5 1.4
|
1001 |
+
5 1.6
|
1002 |
+
5 1.4
|
1003 |
+
5 1.6
|
1004 |
+
5 1.4
|
1005 |
+
5 1.6
|
1006 |
+
5 1.4
|
1007 |
+
5 1.6
|
1008 |
+
5 1.4
|
1009 |
+
5 1.6
|
1010 |
+
5 1.4
|
1011 |
+
5 1.6
|
1012 |
+
5 1.4
|
1013 |
+
5 1.6
|
1014 |
+
5 1.4
|
1015 |
+
5 1.6
|
1016 |
+
5 1.4
|
1017 |
+
5 1.6
|
1018 |
+
5 1.4
|
1019 |
+
5 1.6
|
1020 |
+
5 1.4
|
1021 |
+
5 1.6
|
1022 |
+
5 1.4
|
1023 |
+
5 1.6
|
1024 |
+
5 1.4
|
1025 |
+
5 1.6
|
1026 |
+
5 1.4
|
1027 |
+
5 1.6
|
1028 |
+
5 1.4
|
1029 |
+
5 1.6
|
1030 |
+
5 1.4
|
1031 |
+
5 1.6
|
1032 |
+
5 1.4
|
1033 |
+
5 1.6
|
1034 |
+
5 1.4
|
1035 |
+
5 1.6
|
1036 |
+
5 1.4
|
1037 |
+
5 1.6
|
1038 |
+
5 1.4
|
1039 |
+
5 1.6
|
1040 |
+
5 1.4
|
1041 |
+
5 1.6
|
1042 |
+
5 1.4
|
1043 |
+
5 1.6
|
1044 |
+
5 1.4
|
1045 |
+
5 1.6
|
1046 |
+
5 1.4
|
1047 |
+
5 1.6
|
1048 |
+
5 1.4
|
1049 |
+
5 1.6
|
1050 |
+
5 1.4
|
1051 |
+
5 1.6
|
1052 |
+
5 1.4
|
1053 |
+
5 1.6
|
1054 |
+
5 1.4
|
1055 |
+
5 1.6
|
1056 |
+
5 1.4
|
1057 |
+
5 1.6
|
1058 |
+
5 1.4
|
1059 |
+
5 1.6
|
1060 |
+
5 1.4
|
1061 |
+
5 1.6
|
1062 |
+
5 1.4
|
1063 |
+
5 1.6
|
1064 |
+
5 1.4
|
1065 |
+
5 1.6
|
1066 |
+
6 1.3
|
1067 |
+
6 1.2
|
1068 |
+
6 1.4
|
1069 |
+
6 1.2
|
1070 |
+
6 1.4
|
1071 |
+
6 1.2
|
1072 |
+
6 1.4
|
1073 |
+
6 1.2
|
1074 |
+
6 1.4
|
1075 |
+
6 1.2
|
1076 |
+
6 1.4
|
1077 |
+
6 1.2
|
1078 |
+
6 1.4
|
1079 |
+
6 1.2
|
1080 |
+
6 1.4
|
1081 |
+
6 1.2
|
1082 |
+
6 1.4
|
1083 |
+
6 1.2
|
1084 |
+
6 1.4
|
1085 |
+
6 1.2
|
1086 |
+
6 1.4
|
1087 |
+
6 1.2
|
1088 |
+
6 1.4
|
1089 |
+
6 1.2
|
1090 |
+
6 1.4
|
1091 |
+
6 1.2
|
1092 |
+
6 1.4
|
1093 |
+
6 1.2
|
1094 |
+
6 1.4
|
1095 |
+
6 1.2
|
1096 |
+
6 1.4
|
1097 |
+
6 1.2
|
1098 |
+
6 1.4
|
1099 |
+
6 1.2
|
1100 |
+
6 1.4
|
1101 |
+
6 1.2
|
1102 |
+
6 1.4
|
1103 |
+
6 1.2
|
1104 |
+
6 1.4
|
1105 |
+
6 1.2
|
1106 |
+
6 1.4
|
1107 |
+
6 1.2
|
1108 |
+
6 1.4
|
1109 |
+
6 1.2
|
1110 |
+
6 1.4
|
1111 |
+
6 1.2
|
1112 |
+
6 1.4
|
1113 |
+
6 1.2
|
1114 |
+
6 1.4
|
1115 |
+
6 1.2
|
1116 |
+
6 1.4
|
1117 |
+
6 1.2
|
1118 |
+
6 1.4
|
1119 |
+
6 1.2
|
1120 |
+
6 1.4
|
1121 |
+
6 1.2
|
1122 |
+
6 1.4
|
1123 |
+
6 1.2
|
1124 |
+
6 1.4
|
1125 |
+
6 1.2
|
1126 |
+
6 1.4
|
1127 |
+
6 1.2
|
1128 |
+
6 1.4
|
1129 |
+
6 1.2
|
1130 |
+
6 1.4
|
1131 |
+
6 1.2
|
1132 |
+
6 1.4
|
1133 |
+
6 1.2
|
1134 |
+
6 1.4
|
1135 |
+
6 1.2
|
1136 |
+
6 1.4
|
1137 |
+
6 1.2
|
1138 |
+
6 1.4
|
1139 |
+
6 1.2
|
1140 |
+
6 1.4
|
1141 |
+
6 1.2
|
1142 |
+
6 1.4
|
1143 |
+
6 1.2
|
1144 |
+
6 1.4
|
1145 |
+
6 1.2
|
1146 |
+
6 1.4
|
1147 |
+
6 1.2
|
1148 |
+
6 1.4
|
1149 |
+
6 1.2
|
1150 |
+
6 1.4
|
1151 |
+
6 1.2
|
1152 |
+
6 1.4
|
1153 |
+
6 1.2
|
1154 |
+
6 1.4
|
1155 |
+
6 1.2
|
1156 |
+
6 1.4
|
1157 |
+
6 1.2
|
1158 |
+
6 1.4
|
1159 |
+
6 1.2
|
1160 |
+
6 1.4
|
1161 |
+
6 1.2
|
1162 |
+
6 1.4
|
1163 |
+
6 1.2
|
1164 |
+
6 1.4
|
1165 |
+
6 1.2
|
1166 |
+
6 1.4
|
1167 |
+
6 1.2
|
1168 |
+
6 1.4
|
1169 |
+
6 1.2
|
1170 |
+
6 1.4
|
1171 |
+
6 1.2
|
1172 |
+
6 1.4
|
1173 |
+
6 1.2
|
1174 |
+
6 1.4
|
1175 |
+
6 1.2
|
1176 |
+
6 1.4
|
1177 |
+
6 1.2
|
1178 |
+
6 1.4
|
1179 |
+
6 1.2
|
1180 |
+
6 1.4
|
1181 |
+
6 1.2
|
1182 |
+
6 1.4
|
1183 |
+
6 1.2
|
1184 |
+
6 1.4
|
1185 |
+
6 1.2
|
1186 |
+
6 1.4
|
1187 |
+
6 1.2
|
1188 |
+
6 1.4
|
1189 |
+
6 1.2
|
1190 |
+
6 1.4
|
1191 |
+
6 1.2
|
1192 |
+
6 1.4
|
1193 |
+
6 1.2
|
1194 |
+
6 1.4
|
1195 |
+
6 1.2
|
1196 |
+
6 1.4
|
1197 |
+
6 1.2
|
1198 |
+
6 1.4
|
1199 |
+
6 1.2
|
1200 |
+
6 1.4
|
1201 |
+
6 1.2
|
1202 |
+
6 1.4
|
1203 |
+
6 1.2
|
1204 |
+
6 1.4
|
1205 |
+
6 1.2
|
1206 |
+
6 1.4
|
1207 |
+
6 1.2
|
1208 |
+
6 1.4
|
1209 |
+
6 1.2
|
1210 |
+
6 1.4
|
1211 |
+
6 1.2
|
1212 |
+
6 1.4
|
1213 |
+
6 1.2
|
1214 |
+
6 1.4
|
1215 |
+
6 1.2
|
1216 |
+
6 1.4
|
1217 |
+
6 1.2
|
1218 |
+
6 1.4
|
1219 |
+
6 1.2
|
1220 |
+
6 1.4
|
1221 |
+
6 1.2
|
1222 |
+
6 1.4
|
1223 |
+
6 1.2
|
1224 |
+
6 1.4
|
1225 |
+
6 1.2
|
1226 |
+
6 1.4
|
1227 |
+
6 1.2
|
1228 |
+
6 1.4
|
1229 |
+
6 1.2
|
1230 |
+
6 1.4
|
1231 |
+
6 1.2
|
1232 |
+
6 1.4
|
1233 |
+
6 1.2
|
1234 |
+
6 1.4
|
1235 |
+
6 1.2
|
1236 |
+
6 1.4
|
1237 |
+
6 1.2
|
1238 |
+
6 1.4
|
1239 |
+
6 1.2
|
1240 |
+
6 1.4
|
1241 |
+
6 1.2
|
1242 |
+
6 1.4
|
1243 |
+
6 1.2
|
1244 |
+
6 1.4
|
1245 |
+
6 1.2
|
1246 |
+
6 1.4
|
1247 |
+
6 1.2
|
1248 |
+
6 1.4
|
1249 |
+
6 1.2
|
1250 |
+
6 1.4
|
1251 |
+
6 1.2
|
1252 |
+
6 1.4
|
1253 |
+
6 1.2
|
1254 |
+
6 1.4
|
1255 |
+
6 1.2
|
1256 |
+
6 1.4
|
1257 |
+
6 1.2
|
1258 |
+
6 1.4
|
1259 |
+
6 1.2
|
1260 |
+
6 1.4
|
1261 |
+
6 1.2
|
1262 |
+
6 1.4
|
1263 |
+
6 1.2
|
1264 |
+
6 1.4
|
1265 |
+
6 1.2
|
1266 |
+
6 1.4
|
1267 |
+
7 1.5
|
1268 |
+
7 1.4
|
1269 |
+
7 1.6
|
1270 |
+
7 1.4
|
1271 |
+
7 1.6
|
1272 |
+
7 1.4
|
1273 |
+
7 1.6
|
1274 |
+
7 1.4
|
1275 |
+
7 1.6
|
1276 |
+
7 1.4
|
1277 |
+
7 1.6
|
1278 |
+
7 1.4
|
1279 |
+
7 1.6
|
1280 |
+
7 1.4
|
1281 |
+
7 1.6
|
1282 |
+
7 1.4
|
1283 |
+
7 1.6
|
1284 |
+
7 1.4
|
1285 |
+
7 1.6
|
1286 |
+
7 1.4
|
1287 |
+
7 1.6
|
1288 |
+
7 1.4
|
1289 |
+
7 1.6
|
1290 |
+
7 1.4
|
1291 |
+
7 1.6
|
1292 |
+
7 1.4
|
1293 |
+
7 1.6
|
1294 |
+
7 1.4
|
1295 |
+
7 1.6
|
1296 |
+
7 1.4
|
1297 |
+
7 1.6
|
1298 |
+
7 1.4
|
1299 |
+
7 1.6
|
1300 |
+
7 1.4
|
1301 |
+
7 1.6
|
1302 |
+
7 1.4
|
1303 |
+
7 1.6
|
1304 |
+
7 1.4
|
1305 |
+
7 1.6
|
1306 |
+
7 1.4
|
1307 |
+
7 1.6
|
1308 |
+
7 1.4
|
1309 |
+
7 1.6
|
1310 |
+
7 1.4
|
1311 |
+
7 1.6
|
1312 |
+
7 1.4
|
1313 |
+
7 1.6
|
1314 |
+
7 1.4
|
1315 |
+
7 1.6
|
1316 |
+
7 1.4
|
1317 |
+
7 1.6
|
1318 |
+
7 1.4
|
1319 |
+
7 1.6
|
1320 |
+
7 1.4
|
1321 |
+
7 1.6
|
1322 |
+
7 1.4
|
1323 |
+
7 1.6
|
1324 |
+
7 1.4
|
1325 |
+
7 1.6
|
1326 |
+
7 1.4
|
1327 |
+
7 1.6
|
1328 |
+
7 1.4
|
1329 |
+
7 1.6
|
1330 |
+
7 1.4
|
1331 |
+
7 1.6
|
1332 |
+
7 1.4
|
1333 |
+
7 1.6
|
1334 |
+
7 1.4
|
1335 |
+
7 1.6
|
1336 |
+
7 1.4
|
1337 |
+
7 1.6
|
1338 |
+
7 1.4
|
1339 |
+
7 1.6
|
1340 |
+
7 1.4
|
1341 |
+
7 1.6
|
1342 |
+
7 1.4
|
1343 |
+
7 1.6
|
1344 |
+
7 1.4
|
1345 |
+
7 1.6
|
1346 |
+
7 1.4
|
1347 |
+
7 1.6
|
1348 |
+
7 1.4
|
1349 |
+
7 1.6
|
1350 |
+
7 1.4
|
1351 |
+
7 1.6
|
1352 |
+
7 1.4
|
1353 |
+
7 1.6
|
1354 |
+
7 1.4
|
1355 |
+
7 1.6
|
1356 |
+
7 1.4
|
1357 |
+
7 1.6
|
1358 |
+
7 1.4
|
1359 |
+
7 1.6
|
1360 |
+
7 1.4
|
1361 |
+
7 1.6
|
1362 |
+
7 1.4
|
1363 |
+
7 1.6
|
1364 |
+
7 1.4
|
1365 |
+
7 1.6
|
1366 |
+
7 1.4
|
1367 |
+
7 1.6
|
1368 |
+
7 1.4
|
1369 |
+
7 1.6
|
1370 |
+
7 1.4
|
1371 |
+
7 1.6
|
1372 |
+
7 1.4
|
1373 |
+
7 1.6
|
1374 |
+
7 1.4
|
1375 |
+
7 1.6
|
1376 |
+
7 1.4
|
1377 |
+
7 1.6
|
1378 |
+
7 1.4
|
1379 |
+
7 1.6
|
1380 |
+
7 1.4
|
1381 |
+
7 1.6
|
1382 |
+
7 1.4
|
1383 |
+
7 1.6
|
1384 |
+
7 1.4
|
1385 |
+
7 1.6
|
1386 |
+
7 1.4
|
1387 |
+
7 1.6
|
1388 |
+
7 1.4
|
1389 |
+
7 1.6
|
1390 |
+
7 1.4
|
1391 |
+
7 1.6
|
1392 |
+
7 1.4
|
1393 |
+
7 1.6
|
1394 |
+
7 1.4
|
1395 |
+
7 1.6
|
1396 |
+
7 1.4
|
1397 |
+
7 1.6
|
1398 |
+
7 1.4
|
1399 |
+
7 1.6
|
1400 |
+
7 1.4
|
1401 |
+
7 1.6
|
1402 |
+
7 1.4
|
1403 |
+
7 1.6
|
1404 |
+
7 1.4
|
1405 |
+
7 1.6
|
1406 |
+
7 1.4
|
1407 |
+
7 1.6
|
1408 |
+
7 1.4
|
1409 |
+
7 1.6
|
1410 |
+
7 1.4
|
1411 |
+
7 1.6
|
1412 |
+
7 1.4
|
1413 |
+
7 1.6
|
1414 |
+
7 1.4
|
1415 |
+
7 1.6
|
1416 |
+
7 1.4
|
1417 |
+
7 1.6
|
1418 |
+
7 1.4
|
1419 |
+
7 1.6
|
1420 |
+
7 1.4
|
1421 |
+
7 1.6
|
1422 |
+
7 1.4
|
1423 |
+
7 1.6
|
1424 |
+
7 1.4
|
1425 |
+
7 1.6
|
1426 |
+
7 1.4
|
1427 |
+
7 1.6
|
1428 |
+
7 1.4
|
1429 |
+
7 1.6
|
1430 |
+
7 1.4
|
1431 |
+
7 1.6
|
1432 |
+
7 1.4
|
1433 |
+
7 1.6
|
1434 |
+
7 1.4
|
1435 |
+
7 1.6
|
1436 |
+
7 1.4
|
1437 |
+
7 1.6
|
1438 |
+
7 1.4
|
1439 |
+
7 1.6
|
1440 |
+
7 1.4
|
1441 |
+
7 1.6
|
1442 |
+
7 1.4
|
1443 |
+
7 1.6
|
1444 |
+
7 1.4
|
1445 |
+
7 1.6
|
1446 |
+
7 1.4
|
1447 |
+
7 1.6
|
1448 |
+
7 1.4
|
1449 |
+
7 1.6
|
1450 |
+
7 1.4
|
1451 |
+
7 1.6
|
1452 |
+
7 1.4
|
1453 |
+
7 1.6
|
1454 |
+
7 1.4
|
1455 |
+
7 1.6
|
1456 |
+
7 1.4
|
1457 |
+
7 1.6
|
1458 |
+
7 1.4
|
1459 |
+
7 1.6
|
1460 |
+
7 1.4
|
1461 |
+
7 1.6
|
1462 |
+
7 1.4
|
1463 |
+
7 1.6
|
1464 |
+
7 1.4
|
1465 |
+
7 1.6
|
1466 |
+
7 1.4
|
1467 |
+
7 1.6
|
1468 |
+
8 1.3
|
1469 |
+
8 1.2
|
1470 |
+
8 1.4
|
1471 |
+
8 1.2
|
1472 |
+
8 1.4
|
1473 |
+
8 1.2
|
1474 |
+
8 1.4
|
1475 |
+
8 1.2
|
1476 |
+
8 1.4
|
1477 |
+
8 1.2
|
1478 |
+
8 1.4
|
1479 |
+
8 1.2
|
1480 |
+
8 1.4
|
1481 |
+
8 1.2
|
1482 |
+
8 1.4
|
1483 |
+
8 1.2
|
1484 |
+
8 1.4
|
1485 |
+
8 1.2
|
1486 |
+
8 1.4
|
1487 |
+
8 1.2
|
1488 |
+
8 1.4
|
1489 |
+
8 1.2
|
1490 |
+
8 1.4
|
1491 |
+
8 1.2
|
1492 |
+
8 1.4
|
1493 |
+
8 1.2
|
1494 |
+
8 1.4
|
1495 |
+
8 1.2
|
1496 |
+
8 1.4
|
1497 |
+
8 1.2
|
1498 |
+
8 1.4
|
1499 |
+
8 1.2
|
1500 |
+
8 1.4
|
1501 |
+
8 1.2
|
1502 |
+
8 1.4
|
1503 |
+
8 1.2
|
1504 |
+
8 1.4
|
1505 |
+
8 1.2
|
1506 |
+
8 1.4
|
1507 |
+
8 1.2
|
1508 |
+
8 1.4
|
1509 |
+
8 1.2
|
1510 |
+
8 1.4
|
1511 |
+
8 1.2
|
1512 |
+
8 1.4
|
1513 |
+
8 1.2
|
1514 |
+
8 1.4
|
1515 |
+
8 1.2
|
1516 |
+
8 1.4
|
1517 |
+
8 1.2
|
1518 |
+
8 1.4
|
1519 |
+
8 1.2
|
1520 |
+
8 1.4
|
1521 |
+
8 1.2
|
1522 |
+
8 1.4
|
1523 |
+
8 1.2
|
1524 |
+
8 1.4
|
1525 |
+
8 1.2
|
1526 |
+
8 1.4
|
1527 |
+
8 1.2
|
1528 |
+
8 1.4
|
1529 |
+
8 1.2
|
1530 |
+
8 1.4
|
1531 |
+
8 1.2
|
1532 |
+
8 1.4
|
1533 |
+
8 1.2
|
1534 |
+
8 1.4
|
1535 |
+
8 1.2
|
1536 |
+
8 1.4
|
1537 |
+
8 1.2
|
1538 |
+
8 1.4
|
1539 |
+
8 1.2
|
1540 |
+
8 1.4
|
1541 |
+
8 1.2
|
1542 |
+
8 1.4
|
1543 |
+
8 1.2
|
1544 |
+
8 1.4
|
1545 |
+
8 1.2
|
1546 |
+
8 1.4
|
1547 |
+
8 1.2
|
1548 |
+
8 1.4
|
1549 |
+
8 1.2
|
1550 |
+
8 1.4
|
1551 |
+
8 1.2
|
1552 |
+
8 1.4
|
1553 |
+
8 1.2
|
1554 |
+
8 1.4
|
1555 |
+
8 1.2
|
1556 |
+
8 1.4
|
1557 |
+
8 1.2
|
1558 |
+
8 1.4
|
1559 |
+
8 1.2
|
1560 |
+
8 1.4
|
1561 |
+
8 1.2
|
1562 |
+
8 1.4
|
1563 |
+
8 1.2
|
1564 |
+
8 1.4
|
1565 |
+
8 1.2
|
1566 |
+
8 1.4
|
1567 |
+
8 1.2
|
1568 |
+
8 1.4
|
1569 |
+
8 1.2
|
1570 |
+
8 1.4
|
1571 |
+
8 1.2
|
1572 |
+
8 1.4
|
1573 |
+
8 1.2
|
1574 |
+
8 1.4
|
1575 |
+
8 1.2
|
1576 |
+
8 1.4
|
1577 |
+
8 1.2
|
1578 |
+
8 1.4
|
1579 |
+
8 1.2
|
1580 |
+
8 1.4
|
1581 |
+
8 1.2
|
1582 |
+
8 1.4
|
1583 |
+
8 1.2
|
1584 |
+
8 1.4
|
1585 |
+
8 1.2
|
1586 |
+
8 1.4
|
1587 |
+
8 1.2
|
1588 |
+
8 1.4
|
1589 |
+
8 1.2
|
1590 |
+
8 1.4
|
1591 |
+
8 1.2
|
1592 |
+
8 1.4
|
1593 |
+
8 1.2
|
1594 |
+
8 1.4
|
1595 |
+
8 1.2
|
1596 |
+
8 1.4
|
1597 |
+
8 1.2
|
1598 |
+
8 1.4
|
1599 |
+
8 1.2
|
1600 |
+
8 1.4
|
1601 |
+
8 1.2
|
1602 |
+
8 1.4
|
1603 |
+
8 1.2
|
1604 |
+
8 1.4
|
1605 |
+
8 1.2
|
1606 |
+
8 1.4
|
1607 |
+
8 1.2
|
1608 |
+
8 1.4
|
1609 |
+
8 1.2
|
1610 |
+
8 1.4
|
1611 |
+
8 1.2
|
1612 |
+
8 1.4
|
1613 |
+
8 1.2
|
1614 |
+
8 1.4
|
1615 |
+
8 1.2
|
1616 |
+
8 1.4
|
1617 |
+
8 1.2
|
1618 |
+
8 1.4
|
1619 |
+
8 1.2
|
1620 |
+
8 1.4
|
1621 |
+
8 1.2
|
1622 |
+
8 1.4
|
1623 |
+
8 1.2
|
1624 |
+
8 1.4
|
1625 |
+
8 1.2
|
1626 |
+
8 1.4
|
1627 |
+
8 1.2
|
1628 |
+
8 1.4
|
1629 |
+
8 1.2
|
1630 |
+
8 1.4
|
1631 |
+
8 1.2
|
1632 |
+
8 1.4
|
1633 |
+
8 1.2
|
1634 |
+
8 1.4
|
1635 |
+
8 1.2
|
1636 |
+
8 1.4
|
1637 |
+
8 1.2
|
1638 |
+
8 1.4
|
1639 |
+
8 1.2
|
1640 |
+
8 1.4
|
1641 |
+
8 1.2
|
1642 |
+
8 1.4
|
1643 |
+
8 1.2
|
1644 |
+
8 1.4
|
1645 |
+
8 1.2
|
1646 |
+
8 1.4
|
1647 |
+
8 1.2
|
1648 |
+
8 1.4
|
1649 |
+
8 1.2
|
1650 |
+
8 1.4
|
1651 |
+
8 1.2
|
1652 |
+
8 1.4
|
1653 |
+
8 1.2
|
1654 |
+
8 1.4
|
1655 |
+
8 1.2
|
1656 |
+
8 1.4
|
1657 |
+
8 1.2
|
1658 |
+
8 1.4
|
1659 |
+
8 1.2
|
1660 |
+
8 1.4
|
1661 |
+
8 1.2
|
1662 |
+
8 1.4
|
1663 |
+
8 1.2
|
1664 |
+
8 1.4
|
1665 |
+
8 1.2
|
1666 |
+
8 1.4
|
1667 |
+
8 1.2
|
1668 |
+
8 1.4
|
1669 |
+
9 1.5
|
1670 |
+
9 1.4
|
1671 |
+
9 1.6
|
1672 |
+
9 1.4
|
1673 |
+
9 1.6
|
1674 |
+
9 1.4
|
1675 |
+
9 1.6
|
1676 |
+
9 1.4
|
1677 |
+
9 1.6
|
1678 |
+
9 1.4
|
1679 |
+
9 1.6
|
1680 |
+
9 1.4
|
1681 |
+
9 1.6
|
1682 |
+
9 1.4
|
1683 |
+
9 1.6
|
1684 |
+
9 1.4
|
1685 |
+
9 1.6
|
1686 |
+
9 1.4
|
1687 |
+
9 1.6
|
1688 |
+
9 1.4
|
1689 |
+
9 1.6
|
1690 |
+
9 1.4
|
1691 |
+
9 1.6
|
1692 |
+
9 1.4
|
1693 |
+
9 1.6
|
1694 |
+
9 1.4
|
1695 |
+
9 1.6
|
1696 |
+
9 1.4
|
1697 |
+
9 1.6
|
1698 |
+
9 1.4
|
1699 |
+
9 1.6
|
1700 |
+
9 1.4
|
1701 |
+
9 1.6
|
1702 |
+
9 1.4
|
1703 |
+
9 1.6
|
1704 |
+
9 1.4
|
1705 |
+
9 1.6
|
1706 |
+
9 1.4
|
1707 |
+
9 1.6
|
1708 |
+
9 1.4
|
1709 |
+
9 1.6
|
1710 |
+
9 1.4
|
1711 |
+
9 1.6
|
1712 |
+
9 1.4
|
1713 |
+
9 1.6
|
1714 |
+
9 1.4
|
1715 |
+
9 1.6
|
1716 |
+
9 1.4
|
1717 |
+
9 1.6
|
1718 |
+
9 1.4
|
1719 |
+
9 1.6
|
1720 |
+
9 1.4
|
1721 |
+
9 1.6
|
1722 |
+
9 1.4
|
1723 |
+
9 1.6
|
1724 |
+
9 1.4
|
1725 |
+
9 1.6
|
1726 |
+
9 1.4
|
1727 |
+
9 1.6
|
1728 |
+
9 1.4
|
1729 |
+
9 1.6
|
1730 |
+
9 1.4
|
1731 |
+
9 1.6
|
1732 |
+
9 1.4
|
1733 |
+
9 1.6
|
1734 |
+
9 1.4
|
1735 |
+
9 1.6
|
1736 |
+
9 1.4
|
1737 |
+
9 1.6
|
1738 |
+
9 1.4
|
1739 |
+
9 1.6
|
1740 |
+
9 1.4
|
1741 |
+
9 1.6
|
1742 |
+
9 1.4
|
1743 |
+
9 1.6
|
1744 |
+
9 1.4
|
1745 |
+
9 1.6
|
1746 |
+
9 1.4
|
1747 |
+
9 1.6
|
1748 |
+
9 1.4
|
1749 |
+
9 1.6
|
1750 |
+
9 1.4
|
1751 |
+
9 1.6
|
1752 |
+
9 1.4
|
1753 |
+
9 1.6
|
1754 |
+
9 1.4
|
1755 |
+
9 1.6
|
1756 |
+
9 1.4
|
1757 |
+
9 1.6
|
1758 |
+
9 1.4
|
1759 |
+
9 1.6
|
1760 |
+
9 1.4
|
1761 |
+
9 1.6
|
1762 |
+
9 1.4
|
1763 |
+
9 1.6
|
1764 |
+
9 1.4
|
1765 |
+
9 1.6
|
1766 |
+
9 1.4
|
1767 |
+
9 1.6
|
1768 |
+
9 1.4
|
1769 |
+
9 1.6
|
1770 |
+
9 1.4
|
1771 |
+
9 1.6
|
1772 |
+
9 1.4
|
1773 |
+
9 1.6
|
1774 |
+
9 1.4
|
1775 |
+
9 1.6
|
1776 |
+
9 1.4
|
1777 |
+
9 1.6
|
1778 |
+
9 1.4
|
1779 |
+
9 1.6
|
1780 |
+
9 1.4
|
1781 |
+
9 1.6
|
1782 |
+
9 1.4
|
1783 |
+
9 1.6
|
1784 |
+
9 1.4
|
1785 |
+
9 1.6
|
1786 |
+
9 1.4
|
1787 |
+
9 1.6
|
1788 |
+
9 1.4
|
1789 |
+
9 1.6
|
1790 |
+
9 1.4
|
1791 |
+
9 1.6
|
1792 |
+
9 1.4
|
1793 |
+
9 1.6
|
1794 |
+
9 1.4
|
1795 |
+
9 1.6
|
1796 |
+
9 1.4
|
1797 |
+
9 1.6
|
1798 |
+
9 1.4
|
1799 |
+
9 1.6
|
1800 |
+
9 1.4
|
1801 |
+
9 1.6
|
1802 |
+
9 1.4
|
1803 |
+
9 1.6
|
1804 |
+
9 1.4
|
1805 |
+
9 1.6
|
1806 |
+
9 1.4
|
1807 |
+
9 1.6
|
1808 |
+
9 1.4
|
1809 |
+
9 1.6
|
1810 |
+
9 1.4
|
1811 |
+
9 1.6
|
1812 |
+
9 1.4
|
1813 |
+
9 1.6
|
1814 |
+
9 1.4
|
1815 |
+
9 1.6
|
1816 |
+
9 1.4
|
1817 |
+
9 1.6
|
1818 |
+
9 1.4
|
1819 |
+
9 1.6
|
1820 |
+
9 1.4
|
1821 |
+
9 1.6
|
1822 |
+
9 1.4
|
1823 |
+
9 1.6
|
1824 |
+
9 1.4
|
1825 |
+
9 1.6
|
1826 |
+
9 1.4
|
1827 |
+
9 1.6
|
1828 |
+
9 1.4
|
1829 |
+
9 1.6
|
1830 |
+
9 1.4
|
1831 |
+
9 1.6
|
1832 |
+
9 1.4
|
1833 |
+
9 1.6
|
1834 |
+
9 1.4
|
1835 |
+
9 1.6
|
1836 |
+
9 1.4
|
1837 |
+
9 1.6
|
1838 |
+
9 1.4
|
1839 |
+
9 1.6
|
1840 |
+
9 1.4
|
1841 |
+
9 1.6
|
1842 |
+
9 1.4
|
1843 |
+
9 1.6
|
1844 |
+
9 1.4
|
1845 |
+
9 1.6
|
1846 |
+
9 1.4
|
1847 |
+
9 1.6
|
1848 |
+
9 1.4
|
1849 |
+
9 1.6
|
1850 |
+
9 1.4
|
1851 |
+
9 1.6
|
1852 |
+
9 1.4
|
1853 |
+
9 1.6
|
1854 |
+
9 1.4
|
1855 |
+
9 1.6
|
1856 |
+
9 1.4
|
1857 |
+
9 1.6
|
1858 |
+
9 1.4
|
1859 |
+
9 1.6
|
1860 |
+
9 1.4
|
1861 |
+
9 1.6
|
1862 |
+
9 1.4
|
1863 |
+
9 1.6
|
1864 |
+
9 1.4
|
1865 |
+
9 1.6
|
1866 |
+
9 1.4
|
1867 |
+
9 1.6
|
1868 |
+
9 1.4
|
1869 |
+
9 1.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs03.dat
ADDED
The diff for this file is too large to render.
See raw diff
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs04.dat
ADDED
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs04 (SmLs04.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 249)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
21 Replicates/Cell
|
22 |
+
189 Observations
|
23 |
+
7 Constant Leading Digits
|
24 |
+
Average Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.68000000000000E+00 2.10000000000000E-01 2.10000000000000E+01
|
42 |
+
Within Treatment 180 1.80000000000000E+00 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.82758620689655E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1000000.4
|
62 |
+
1 1000000.3
|
63 |
+
1 1000000.5
|
64 |
+
1 1000000.3
|
65 |
+
1 1000000.5
|
66 |
+
1 1000000.3
|
67 |
+
1 1000000.5
|
68 |
+
1 1000000.3
|
69 |
+
1 1000000.5
|
70 |
+
1 1000000.3
|
71 |
+
1 1000000.5
|
72 |
+
1 1000000.3
|
73 |
+
1 1000000.5
|
74 |
+
1 1000000.3
|
75 |
+
1 1000000.5
|
76 |
+
1 1000000.3
|
77 |
+
1 1000000.5
|
78 |
+
1 1000000.3
|
79 |
+
1 1000000.5
|
80 |
+
1 1000000.3
|
81 |
+
1 1000000.5
|
82 |
+
2 1000000.3
|
83 |
+
2 1000000.2
|
84 |
+
2 1000000.4
|
85 |
+
2 1000000.2
|
86 |
+
2 1000000.4
|
87 |
+
2 1000000.2
|
88 |
+
2 1000000.4
|
89 |
+
2 1000000.2
|
90 |
+
2 1000000.4
|
91 |
+
2 1000000.2
|
92 |
+
2 1000000.4
|
93 |
+
2 1000000.2
|
94 |
+
2 1000000.4
|
95 |
+
2 1000000.2
|
96 |
+
2 1000000.4
|
97 |
+
2 1000000.2
|
98 |
+
2 1000000.4
|
99 |
+
2 1000000.2
|
100 |
+
2 1000000.4
|
101 |
+
2 1000000.2
|
102 |
+
2 1000000.4
|
103 |
+
3 1000000.5
|
104 |
+
3 1000000.4
|
105 |
+
3 1000000.6
|
106 |
+
3 1000000.4
|
107 |
+
3 1000000.6
|
108 |
+
3 1000000.4
|
109 |
+
3 1000000.6
|
110 |
+
3 1000000.4
|
111 |
+
3 1000000.6
|
112 |
+
3 1000000.4
|
113 |
+
3 1000000.6
|
114 |
+
3 1000000.4
|
115 |
+
3 1000000.6
|
116 |
+
3 1000000.4
|
117 |
+
3 1000000.6
|
118 |
+
3 1000000.4
|
119 |
+
3 1000000.6
|
120 |
+
3 1000000.4
|
121 |
+
3 1000000.6
|
122 |
+
3 1000000.4
|
123 |
+
3 1000000.6
|
124 |
+
4 1000000.3
|
125 |
+
4 1000000.2
|
126 |
+
4 1000000.4
|
127 |
+
4 1000000.2
|
128 |
+
4 1000000.4
|
129 |
+
4 1000000.2
|
130 |
+
4 1000000.4
|
131 |
+
4 1000000.2
|
132 |
+
4 1000000.4
|
133 |
+
4 1000000.2
|
134 |
+
4 1000000.4
|
135 |
+
4 1000000.2
|
136 |
+
4 1000000.4
|
137 |
+
4 1000000.2
|
138 |
+
4 1000000.4
|
139 |
+
4 1000000.2
|
140 |
+
4 1000000.4
|
141 |
+
4 1000000.2
|
142 |
+
4 1000000.4
|
143 |
+
4 1000000.2
|
144 |
+
4 1000000.4
|
145 |
+
5 1000000.5
|
146 |
+
5 1000000.4
|
147 |
+
5 1000000.6
|
148 |
+
5 1000000.4
|
149 |
+
5 1000000.6
|
150 |
+
5 1000000.4
|
151 |
+
5 1000000.6
|
152 |
+
5 1000000.4
|
153 |
+
5 1000000.6
|
154 |
+
5 1000000.4
|
155 |
+
5 1000000.6
|
156 |
+
5 1000000.4
|
157 |
+
5 1000000.6
|
158 |
+
5 1000000.4
|
159 |
+
5 1000000.6
|
160 |
+
5 1000000.4
|
161 |
+
5 1000000.6
|
162 |
+
5 1000000.4
|
163 |
+
5 1000000.6
|
164 |
+
5 1000000.4
|
165 |
+
5 1000000.6
|
166 |
+
6 1000000.3
|
167 |
+
6 1000000.2
|
168 |
+
6 1000000.4
|
169 |
+
6 1000000.2
|
170 |
+
6 1000000.4
|
171 |
+
6 1000000.2
|
172 |
+
6 1000000.4
|
173 |
+
6 1000000.2
|
174 |
+
6 1000000.4
|
175 |
+
6 1000000.2
|
176 |
+
6 1000000.4
|
177 |
+
6 1000000.2
|
178 |
+
6 1000000.4
|
179 |
+
6 1000000.2
|
180 |
+
6 1000000.4
|
181 |
+
6 1000000.2
|
182 |
+
6 1000000.4
|
183 |
+
6 1000000.2
|
184 |
+
6 1000000.4
|
185 |
+
6 1000000.2
|
186 |
+
6 1000000.4
|
187 |
+
7 1000000.5
|
188 |
+
7 1000000.4
|
189 |
+
7 1000000.6
|
190 |
+
7 1000000.4
|
191 |
+
7 1000000.6
|
192 |
+
7 1000000.4
|
193 |
+
7 1000000.6
|
194 |
+
7 1000000.4
|
195 |
+
7 1000000.6
|
196 |
+
7 1000000.4
|
197 |
+
7 1000000.6
|
198 |
+
7 1000000.4
|
199 |
+
7 1000000.6
|
200 |
+
7 1000000.4
|
201 |
+
7 1000000.6
|
202 |
+
7 1000000.4
|
203 |
+
7 1000000.6
|
204 |
+
7 1000000.4
|
205 |
+
7 1000000.6
|
206 |
+
7 1000000.4
|
207 |
+
7 1000000.6
|
208 |
+
8 1000000.3
|
209 |
+
8 1000000.2
|
210 |
+
8 1000000.4
|
211 |
+
8 1000000.2
|
212 |
+
8 1000000.4
|
213 |
+
8 1000000.2
|
214 |
+
8 1000000.4
|
215 |
+
8 1000000.2
|
216 |
+
8 1000000.4
|
217 |
+
8 1000000.2
|
218 |
+
8 1000000.4
|
219 |
+
8 1000000.2
|
220 |
+
8 1000000.4
|
221 |
+
8 1000000.2
|
222 |
+
8 1000000.4
|
223 |
+
8 1000000.2
|
224 |
+
8 1000000.4
|
225 |
+
8 1000000.2
|
226 |
+
8 1000000.4
|
227 |
+
8 1000000.2
|
228 |
+
8 1000000.4
|
229 |
+
9 1000000.5
|
230 |
+
9 1000000.4
|
231 |
+
9 1000000.6
|
232 |
+
9 1000000.4
|
233 |
+
9 1000000.6
|
234 |
+
9 1000000.4
|
235 |
+
9 1000000.6
|
236 |
+
9 1000000.4
|
237 |
+
9 1000000.6
|
238 |
+
9 1000000.4
|
239 |
+
9 1000000.6
|
240 |
+
9 1000000.4
|
241 |
+
9 1000000.6
|
242 |
+
9 1000000.4
|
243 |
+
9 1000000.6
|
244 |
+
9 1000000.4
|
245 |
+
9 1000000.6
|
246 |
+
9 1000000.4
|
247 |
+
9 1000000.6
|
248 |
+
9 1000000.4
|
249 |
+
9 1000000.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs05.dat
ADDED
@@ -0,0 +1,1869 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs05 (SmLs05.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 1869)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
201 Replicates/Cell
|
22 |
+
1809 Observations
|
23 |
+
7 Constant Leading Digits
|
24 |
+
Average Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.60800000000000E+01 2.01000000000000E+00 2.01000000000000E+02
|
42 |
+
Within Treatment 1800 1.80000000000000E+01 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.71830985915493E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1000000.4
|
62 |
+
1 1000000.3
|
63 |
+
1 1000000.5
|
64 |
+
1 1000000.3
|
65 |
+
1 1000000.5
|
66 |
+
1 1000000.3
|
67 |
+
1 1000000.5
|
68 |
+
1 1000000.3
|
69 |
+
1 1000000.5
|
70 |
+
1 1000000.3
|
71 |
+
1 1000000.5
|
72 |
+
1 1000000.3
|
73 |
+
1 1000000.5
|
74 |
+
1 1000000.3
|
75 |
+
1 1000000.5
|
76 |
+
1 1000000.3
|
77 |
+
1 1000000.5
|
78 |
+
1 1000000.3
|
79 |
+
1 1000000.5
|
80 |
+
1 1000000.3
|
81 |
+
1 1000000.5
|
82 |
+
1 1000000.3
|
83 |
+
1 1000000.5
|
84 |
+
1 1000000.3
|
85 |
+
1 1000000.5
|
86 |
+
1 1000000.3
|
87 |
+
1 1000000.5
|
88 |
+
1 1000000.3
|
89 |
+
1 1000000.5
|
90 |
+
1 1000000.3
|
91 |
+
1 1000000.5
|
92 |
+
1 1000000.3
|
93 |
+
1 1000000.5
|
94 |
+
1 1000000.3
|
95 |
+
1 1000000.5
|
96 |
+
1 1000000.3
|
97 |
+
1 1000000.5
|
98 |
+
1 1000000.3
|
99 |
+
1 1000000.5
|
100 |
+
1 1000000.3
|
101 |
+
1 1000000.5
|
102 |
+
1 1000000.3
|
103 |
+
1 1000000.5
|
104 |
+
1 1000000.3
|
105 |
+
1 1000000.5
|
106 |
+
1 1000000.3
|
107 |
+
1 1000000.5
|
108 |
+
1 1000000.3
|
109 |
+
1 1000000.5
|
110 |
+
1 1000000.3
|
111 |
+
1 1000000.5
|
112 |
+
1 1000000.3
|
113 |
+
1 1000000.5
|
114 |
+
1 1000000.3
|
115 |
+
1 1000000.5
|
116 |
+
1 1000000.3
|
117 |
+
1 1000000.5
|
118 |
+
1 1000000.3
|
119 |
+
1 1000000.5
|
120 |
+
1 1000000.3
|
121 |
+
1 1000000.5
|
122 |
+
1 1000000.3
|
123 |
+
1 1000000.5
|
124 |
+
1 1000000.3
|
125 |
+
1 1000000.5
|
126 |
+
1 1000000.3
|
127 |
+
1 1000000.5
|
128 |
+
1 1000000.3
|
129 |
+
1 1000000.5
|
130 |
+
1 1000000.3
|
131 |
+
1 1000000.5
|
132 |
+
1 1000000.3
|
133 |
+
1 1000000.5
|
134 |
+
1 1000000.3
|
135 |
+
1 1000000.5
|
136 |
+
1 1000000.3
|
137 |
+
1 1000000.5
|
138 |
+
1 1000000.3
|
139 |
+
1 1000000.5
|
140 |
+
1 1000000.3
|
141 |
+
1 1000000.5
|
142 |
+
1 1000000.3
|
143 |
+
1 1000000.5
|
144 |
+
1 1000000.3
|
145 |
+
1 1000000.5
|
146 |
+
1 1000000.3
|
147 |
+
1 1000000.5
|
148 |
+
1 1000000.3
|
149 |
+
1 1000000.5
|
150 |
+
1 1000000.3
|
151 |
+
1 1000000.5
|
152 |
+
1 1000000.3
|
153 |
+
1 1000000.5
|
154 |
+
1 1000000.3
|
155 |
+
1 1000000.5
|
156 |
+
1 1000000.3
|
157 |
+
1 1000000.5
|
158 |
+
1 1000000.3
|
159 |
+
1 1000000.5
|
160 |
+
1 1000000.3
|
161 |
+
1 1000000.5
|
162 |
+
1 1000000.3
|
163 |
+
1 1000000.5
|
164 |
+
1 1000000.3
|
165 |
+
1 1000000.5
|
166 |
+
1 1000000.3
|
167 |
+
1 1000000.5
|
168 |
+
1 1000000.3
|
169 |
+
1 1000000.5
|
170 |
+
1 1000000.3
|
171 |
+
1 1000000.5
|
172 |
+
1 1000000.3
|
173 |
+
1 1000000.5
|
174 |
+
1 1000000.3
|
175 |
+
1 1000000.5
|
176 |
+
1 1000000.3
|
177 |
+
1 1000000.5
|
178 |
+
1 1000000.3
|
179 |
+
1 1000000.5
|
180 |
+
1 1000000.3
|
181 |
+
1 1000000.5
|
182 |
+
1 1000000.3
|
183 |
+
1 1000000.5
|
184 |
+
1 1000000.3
|
185 |
+
1 1000000.5
|
186 |
+
1 1000000.3
|
187 |
+
1 1000000.5
|
188 |
+
1 1000000.3
|
189 |
+
1 1000000.5
|
190 |
+
1 1000000.3
|
191 |
+
1 1000000.5
|
192 |
+
1 1000000.3
|
193 |
+
1 1000000.5
|
194 |
+
1 1000000.3
|
195 |
+
1 1000000.5
|
196 |
+
1 1000000.3
|
197 |
+
1 1000000.5
|
198 |
+
1 1000000.3
|
199 |
+
1 1000000.5
|
200 |
+
1 1000000.3
|
201 |
+
1 1000000.5
|
202 |
+
1 1000000.3
|
203 |
+
1 1000000.5
|
204 |
+
1 1000000.3
|
205 |
+
1 1000000.5
|
206 |
+
1 1000000.3
|
207 |
+
1 1000000.5
|
208 |
+
1 1000000.3
|
209 |
+
1 1000000.5
|
210 |
+
1 1000000.3
|
211 |
+
1 1000000.5
|
212 |
+
1 1000000.3
|
213 |
+
1 1000000.5
|
214 |
+
1 1000000.3
|
215 |
+
1 1000000.5
|
216 |
+
1 1000000.3
|
217 |
+
1 1000000.5
|
218 |
+
1 1000000.3
|
219 |
+
1 1000000.5
|
220 |
+
1 1000000.3
|
221 |
+
1 1000000.5
|
222 |
+
1 1000000.3
|
223 |
+
1 1000000.5
|
224 |
+
1 1000000.3
|
225 |
+
1 1000000.5
|
226 |
+
1 1000000.3
|
227 |
+
1 1000000.5
|
228 |
+
1 1000000.3
|
229 |
+
1 1000000.5
|
230 |
+
1 1000000.3
|
231 |
+
1 1000000.5
|
232 |
+
1 1000000.3
|
233 |
+
1 1000000.5
|
234 |
+
1 1000000.3
|
235 |
+
1 1000000.5
|
236 |
+
1 1000000.3
|
237 |
+
1 1000000.5
|
238 |
+
1 1000000.3
|
239 |
+
1 1000000.5
|
240 |
+
1 1000000.3
|
241 |
+
1 1000000.5
|
242 |
+
1 1000000.3
|
243 |
+
1 1000000.5
|
244 |
+
1 1000000.3
|
245 |
+
1 1000000.5
|
246 |
+
1 1000000.3
|
247 |
+
1 1000000.5
|
248 |
+
1 1000000.3
|
249 |
+
1 1000000.5
|
250 |
+
1 1000000.3
|
251 |
+
1 1000000.5
|
252 |
+
1 1000000.3
|
253 |
+
1 1000000.5
|
254 |
+
1 1000000.3
|
255 |
+
1 1000000.5
|
256 |
+
1 1000000.3
|
257 |
+
1 1000000.5
|
258 |
+
1 1000000.3
|
259 |
+
1 1000000.5
|
260 |
+
1 1000000.3
|
261 |
+
1 1000000.5
|
262 |
+
2 1000000.3
|
263 |
+
2 1000000.2
|
264 |
+
2 1000000.4
|
265 |
+
2 1000000.2
|
266 |
+
2 1000000.4
|
267 |
+
2 1000000.2
|
268 |
+
2 1000000.4
|
269 |
+
2 1000000.2
|
270 |
+
2 1000000.4
|
271 |
+
2 1000000.2
|
272 |
+
2 1000000.4
|
273 |
+
2 1000000.2
|
274 |
+
2 1000000.4
|
275 |
+
2 1000000.2
|
276 |
+
2 1000000.4
|
277 |
+
2 1000000.2
|
278 |
+
2 1000000.4
|
279 |
+
2 1000000.2
|
280 |
+
2 1000000.4
|
281 |
+
2 1000000.2
|
282 |
+
2 1000000.4
|
283 |
+
2 1000000.2
|
284 |
+
2 1000000.4
|
285 |
+
2 1000000.2
|
286 |
+
2 1000000.4
|
287 |
+
2 1000000.2
|
288 |
+
2 1000000.4
|
289 |
+
2 1000000.2
|
290 |
+
2 1000000.4
|
291 |
+
2 1000000.2
|
292 |
+
2 1000000.4
|
293 |
+
2 1000000.2
|
294 |
+
2 1000000.4
|
295 |
+
2 1000000.2
|
296 |
+
2 1000000.4
|
297 |
+
2 1000000.2
|
298 |
+
2 1000000.4
|
299 |
+
2 1000000.2
|
300 |
+
2 1000000.4
|
301 |
+
2 1000000.2
|
302 |
+
2 1000000.4
|
303 |
+
2 1000000.2
|
304 |
+
2 1000000.4
|
305 |
+
2 1000000.2
|
306 |
+
2 1000000.4
|
307 |
+
2 1000000.2
|
308 |
+
2 1000000.4
|
309 |
+
2 1000000.2
|
310 |
+
2 1000000.4
|
311 |
+
2 1000000.2
|
312 |
+
2 1000000.4
|
313 |
+
2 1000000.2
|
314 |
+
2 1000000.4
|
315 |
+
2 1000000.2
|
316 |
+
2 1000000.4
|
317 |
+
2 1000000.2
|
318 |
+
2 1000000.4
|
319 |
+
2 1000000.2
|
320 |
+
2 1000000.4
|
321 |
+
2 1000000.2
|
322 |
+
2 1000000.4
|
323 |
+
2 1000000.2
|
324 |
+
2 1000000.4
|
325 |
+
2 1000000.2
|
326 |
+
2 1000000.4
|
327 |
+
2 1000000.2
|
328 |
+
2 1000000.4
|
329 |
+
2 1000000.2
|
330 |
+
2 1000000.4
|
331 |
+
2 1000000.2
|
332 |
+
2 1000000.4
|
333 |
+
2 1000000.2
|
334 |
+
2 1000000.4
|
335 |
+
2 1000000.2
|
336 |
+
2 1000000.4
|
337 |
+
2 1000000.2
|
338 |
+
2 1000000.4
|
339 |
+
2 1000000.2
|
340 |
+
2 1000000.4
|
341 |
+
2 1000000.2
|
342 |
+
2 1000000.4
|
343 |
+
2 1000000.2
|
344 |
+
2 1000000.4
|
345 |
+
2 1000000.2
|
346 |
+
2 1000000.4
|
347 |
+
2 1000000.2
|
348 |
+
2 1000000.4
|
349 |
+
2 1000000.2
|
350 |
+
2 1000000.4
|
351 |
+
2 1000000.2
|
352 |
+
2 1000000.4
|
353 |
+
2 1000000.2
|
354 |
+
2 1000000.4
|
355 |
+
2 1000000.2
|
356 |
+
2 1000000.4
|
357 |
+
2 1000000.2
|
358 |
+
2 1000000.4
|
359 |
+
2 1000000.2
|
360 |
+
2 1000000.4
|
361 |
+
2 1000000.2
|
362 |
+
2 1000000.4
|
363 |
+
2 1000000.2
|
364 |
+
2 1000000.4
|
365 |
+
2 1000000.2
|
366 |
+
2 1000000.4
|
367 |
+
2 1000000.2
|
368 |
+
2 1000000.4
|
369 |
+
2 1000000.2
|
370 |
+
2 1000000.4
|
371 |
+
2 1000000.2
|
372 |
+
2 1000000.4
|
373 |
+
2 1000000.2
|
374 |
+
2 1000000.4
|
375 |
+
2 1000000.2
|
376 |
+
2 1000000.4
|
377 |
+
2 1000000.2
|
378 |
+
2 1000000.4
|
379 |
+
2 1000000.2
|
380 |
+
2 1000000.4
|
381 |
+
2 1000000.2
|
382 |
+
2 1000000.4
|
383 |
+
2 1000000.2
|
384 |
+
2 1000000.4
|
385 |
+
2 1000000.2
|
386 |
+
2 1000000.4
|
387 |
+
2 1000000.2
|
388 |
+
2 1000000.4
|
389 |
+
2 1000000.2
|
390 |
+
2 1000000.4
|
391 |
+
2 1000000.2
|
392 |
+
2 1000000.4
|
393 |
+
2 1000000.2
|
394 |
+
2 1000000.4
|
395 |
+
2 1000000.2
|
396 |
+
2 1000000.4
|
397 |
+
2 1000000.2
|
398 |
+
2 1000000.4
|
399 |
+
2 1000000.2
|
400 |
+
2 1000000.4
|
401 |
+
2 1000000.2
|
402 |
+
2 1000000.4
|
403 |
+
2 1000000.2
|
404 |
+
2 1000000.4
|
405 |
+
2 1000000.2
|
406 |
+
2 1000000.4
|
407 |
+
2 1000000.2
|
408 |
+
2 1000000.4
|
409 |
+
2 1000000.2
|
410 |
+
2 1000000.4
|
411 |
+
2 1000000.2
|
412 |
+
2 1000000.4
|
413 |
+
2 1000000.2
|
414 |
+
2 1000000.4
|
415 |
+
2 1000000.2
|
416 |
+
2 1000000.4
|
417 |
+
2 1000000.2
|
418 |
+
2 1000000.4
|
419 |
+
2 1000000.2
|
420 |
+
2 1000000.4
|
421 |
+
2 1000000.2
|
422 |
+
2 1000000.4
|
423 |
+
2 1000000.2
|
424 |
+
2 1000000.4
|
425 |
+
2 1000000.2
|
426 |
+
2 1000000.4
|
427 |
+
2 1000000.2
|
428 |
+
2 1000000.4
|
429 |
+
2 1000000.2
|
430 |
+
2 1000000.4
|
431 |
+
2 1000000.2
|
432 |
+
2 1000000.4
|
433 |
+
2 1000000.2
|
434 |
+
2 1000000.4
|
435 |
+
2 1000000.2
|
436 |
+
2 1000000.4
|
437 |
+
2 1000000.2
|
438 |
+
2 1000000.4
|
439 |
+
2 1000000.2
|
440 |
+
2 1000000.4
|
441 |
+
2 1000000.2
|
442 |
+
2 1000000.4
|
443 |
+
2 1000000.2
|
444 |
+
2 1000000.4
|
445 |
+
2 1000000.2
|
446 |
+
2 1000000.4
|
447 |
+
2 1000000.2
|
448 |
+
2 1000000.4
|
449 |
+
2 1000000.2
|
450 |
+
2 1000000.4
|
451 |
+
2 1000000.2
|
452 |
+
2 1000000.4
|
453 |
+
2 1000000.2
|
454 |
+
2 1000000.4
|
455 |
+
2 1000000.2
|
456 |
+
2 1000000.4
|
457 |
+
2 1000000.2
|
458 |
+
2 1000000.4
|
459 |
+
2 1000000.2
|
460 |
+
2 1000000.4
|
461 |
+
2 1000000.2
|
462 |
+
2 1000000.4
|
463 |
+
3 1000000.5
|
464 |
+
3 1000000.4
|
465 |
+
3 1000000.6
|
466 |
+
3 1000000.4
|
467 |
+
3 1000000.6
|
468 |
+
3 1000000.4
|
469 |
+
3 1000000.6
|
470 |
+
3 1000000.4
|
471 |
+
3 1000000.6
|
472 |
+
3 1000000.4
|
473 |
+
3 1000000.6
|
474 |
+
3 1000000.4
|
475 |
+
3 1000000.6
|
476 |
+
3 1000000.4
|
477 |
+
3 1000000.6
|
478 |
+
3 1000000.4
|
479 |
+
3 1000000.6
|
480 |
+
3 1000000.4
|
481 |
+
3 1000000.6
|
482 |
+
3 1000000.4
|
483 |
+
3 1000000.6
|
484 |
+
3 1000000.4
|
485 |
+
3 1000000.6
|
486 |
+
3 1000000.4
|
487 |
+
3 1000000.6
|
488 |
+
3 1000000.4
|
489 |
+
3 1000000.6
|
490 |
+
3 1000000.4
|
491 |
+
3 1000000.6
|
492 |
+
3 1000000.4
|
493 |
+
3 1000000.6
|
494 |
+
3 1000000.4
|
495 |
+
3 1000000.6
|
496 |
+
3 1000000.4
|
497 |
+
3 1000000.6
|
498 |
+
3 1000000.4
|
499 |
+
3 1000000.6
|
500 |
+
3 1000000.4
|
501 |
+
3 1000000.6
|
502 |
+
3 1000000.4
|
503 |
+
3 1000000.6
|
504 |
+
3 1000000.4
|
505 |
+
3 1000000.6
|
506 |
+
3 1000000.4
|
507 |
+
3 1000000.6
|
508 |
+
3 1000000.4
|
509 |
+
3 1000000.6
|
510 |
+
3 1000000.4
|
511 |
+
3 1000000.6
|
512 |
+
3 1000000.4
|
513 |
+
3 1000000.6
|
514 |
+
3 1000000.4
|
515 |
+
3 1000000.6
|
516 |
+
3 1000000.4
|
517 |
+
3 1000000.6
|
518 |
+
3 1000000.4
|
519 |
+
3 1000000.6
|
520 |
+
3 1000000.4
|
521 |
+
3 1000000.6
|
522 |
+
3 1000000.4
|
523 |
+
3 1000000.6
|
524 |
+
3 1000000.4
|
525 |
+
3 1000000.6
|
526 |
+
3 1000000.4
|
527 |
+
3 1000000.6
|
528 |
+
3 1000000.4
|
529 |
+
3 1000000.6
|
530 |
+
3 1000000.4
|
531 |
+
3 1000000.6
|
532 |
+
3 1000000.4
|
533 |
+
3 1000000.6
|
534 |
+
3 1000000.4
|
535 |
+
3 1000000.6
|
536 |
+
3 1000000.4
|
537 |
+
3 1000000.6
|
538 |
+
3 1000000.4
|
539 |
+
3 1000000.6
|
540 |
+
3 1000000.4
|
541 |
+
3 1000000.6
|
542 |
+
3 1000000.4
|
543 |
+
3 1000000.6
|
544 |
+
3 1000000.4
|
545 |
+
3 1000000.6
|
546 |
+
3 1000000.4
|
547 |
+
3 1000000.6
|
548 |
+
3 1000000.4
|
549 |
+
3 1000000.6
|
550 |
+
3 1000000.4
|
551 |
+
3 1000000.6
|
552 |
+
3 1000000.4
|
553 |
+
3 1000000.6
|
554 |
+
3 1000000.4
|
555 |
+
3 1000000.6
|
556 |
+
3 1000000.4
|
557 |
+
3 1000000.6
|
558 |
+
3 1000000.4
|
559 |
+
3 1000000.6
|
560 |
+
3 1000000.4
|
561 |
+
3 1000000.6
|
562 |
+
3 1000000.4
|
563 |
+
3 1000000.6
|
564 |
+
3 1000000.4
|
565 |
+
3 1000000.6
|
566 |
+
3 1000000.4
|
567 |
+
3 1000000.6
|
568 |
+
3 1000000.4
|
569 |
+
3 1000000.6
|
570 |
+
3 1000000.4
|
571 |
+
3 1000000.6
|
572 |
+
3 1000000.4
|
573 |
+
3 1000000.6
|
574 |
+
3 1000000.4
|
575 |
+
3 1000000.6
|
576 |
+
3 1000000.4
|
577 |
+
3 1000000.6
|
578 |
+
3 1000000.4
|
579 |
+
3 1000000.6
|
580 |
+
3 1000000.4
|
581 |
+
3 1000000.6
|
582 |
+
3 1000000.4
|
583 |
+
3 1000000.6
|
584 |
+
3 1000000.4
|
585 |
+
3 1000000.6
|
586 |
+
3 1000000.4
|
587 |
+
3 1000000.6
|
588 |
+
3 1000000.4
|
589 |
+
3 1000000.6
|
590 |
+
3 1000000.4
|
591 |
+
3 1000000.6
|
592 |
+
3 1000000.4
|
593 |
+
3 1000000.6
|
594 |
+
3 1000000.4
|
595 |
+
3 1000000.6
|
596 |
+
3 1000000.4
|
597 |
+
3 1000000.6
|
598 |
+
3 1000000.4
|
599 |
+
3 1000000.6
|
600 |
+
3 1000000.4
|
601 |
+
3 1000000.6
|
602 |
+
3 1000000.4
|
603 |
+
3 1000000.6
|
604 |
+
3 1000000.4
|
605 |
+
3 1000000.6
|
606 |
+
3 1000000.4
|
607 |
+
3 1000000.6
|
608 |
+
3 1000000.4
|
609 |
+
3 1000000.6
|
610 |
+
3 1000000.4
|
611 |
+
3 1000000.6
|
612 |
+
3 1000000.4
|
613 |
+
3 1000000.6
|
614 |
+
3 1000000.4
|
615 |
+
3 1000000.6
|
616 |
+
3 1000000.4
|
617 |
+
3 1000000.6
|
618 |
+
3 1000000.4
|
619 |
+
3 1000000.6
|
620 |
+
3 1000000.4
|
621 |
+
3 1000000.6
|
622 |
+
3 1000000.4
|
623 |
+
3 1000000.6
|
624 |
+
3 1000000.4
|
625 |
+
3 1000000.6
|
626 |
+
3 1000000.4
|
627 |
+
3 1000000.6
|
628 |
+
3 1000000.4
|
629 |
+
3 1000000.6
|
630 |
+
3 1000000.4
|
631 |
+
3 1000000.6
|
632 |
+
3 1000000.4
|
633 |
+
3 1000000.6
|
634 |
+
3 1000000.4
|
635 |
+
3 1000000.6
|
636 |
+
3 1000000.4
|
637 |
+
3 1000000.6
|
638 |
+
3 1000000.4
|
639 |
+
3 1000000.6
|
640 |
+
3 1000000.4
|
641 |
+
3 1000000.6
|
642 |
+
3 1000000.4
|
643 |
+
3 1000000.6
|
644 |
+
3 1000000.4
|
645 |
+
3 1000000.6
|
646 |
+
3 1000000.4
|
647 |
+
3 1000000.6
|
648 |
+
3 1000000.4
|
649 |
+
3 1000000.6
|
650 |
+
3 1000000.4
|
651 |
+
3 1000000.6
|
652 |
+
3 1000000.4
|
653 |
+
3 1000000.6
|
654 |
+
3 1000000.4
|
655 |
+
3 1000000.6
|
656 |
+
3 1000000.4
|
657 |
+
3 1000000.6
|
658 |
+
3 1000000.4
|
659 |
+
3 1000000.6
|
660 |
+
3 1000000.4
|
661 |
+
3 1000000.6
|
662 |
+
3 1000000.4
|
663 |
+
3 1000000.6
|
664 |
+
4 1000000.3
|
665 |
+
4 1000000.2
|
666 |
+
4 1000000.4
|
667 |
+
4 1000000.2
|
668 |
+
4 1000000.4
|
669 |
+
4 1000000.2
|
670 |
+
4 1000000.4
|
671 |
+
4 1000000.2
|
672 |
+
4 1000000.4
|
673 |
+
4 1000000.2
|
674 |
+
4 1000000.4
|
675 |
+
4 1000000.2
|
676 |
+
4 1000000.4
|
677 |
+
4 1000000.2
|
678 |
+
4 1000000.4
|
679 |
+
4 1000000.2
|
680 |
+
4 1000000.4
|
681 |
+
4 1000000.2
|
682 |
+
4 1000000.4
|
683 |
+
4 1000000.2
|
684 |
+
4 1000000.4
|
685 |
+
4 1000000.2
|
686 |
+
4 1000000.4
|
687 |
+
4 1000000.2
|
688 |
+
4 1000000.4
|
689 |
+
4 1000000.2
|
690 |
+
4 1000000.4
|
691 |
+
4 1000000.2
|
692 |
+
4 1000000.4
|
693 |
+
4 1000000.2
|
694 |
+
4 1000000.4
|
695 |
+
4 1000000.2
|
696 |
+
4 1000000.4
|
697 |
+
4 1000000.2
|
698 |
+
4 1000000.4
|
699 |
+
4 1000000.2
|
700 |
+
4 1000000.4
|
701 |
+
4 1000000.2
|
702 |
+
4 1000000.4
|
703 |
+
4 1000000.2
|
704 |
+
4 1000000.4
|
705 |
+
4 1000000.2
|
706 |
+
4 1000000.4
|
707 |
+
4 1000000.2
|
708 |
+
4 1000000.4
|
709 |
+
4 1000000.2
|
710 |
+
4 1000000.4
|
711 |
+
4 1000000.2
|
712 |
+
4 1000000.4
|
713 |
+
4 1000000.2
|
714 |
+
4 1000000.4
|
715 |
+
4 1000000.2
|
716 |
+
4 1000000.4
|
717 |
+
4 1000000.2
|
718 |
+
4 1000000.4
|
719 |
+
4 1000000.2
|
720 |
+
4 1000000.4
|
721 |
+
4 1000000.2
|
722 |
+
4 1000000.4
|
723 |
+
4 1000000.2
|
724 |
+
4 1000000.4
|
725 |
+
4 1000000.2
|
726 |
+
4 1000000.4
|
727 |
+
4 1000000.2
|
728 |
+
4 1000000.4
|
729 |
+
4 1000000.2
|
730 |
+
4 1000000.4
|
731 |
+
4 1000000.2
|
732 |
+
4 1000000.4
|
733 |
+
4 1000000.2
|
734 |
+
4 1000000.4
|
735 |
+
4 1000000.2
|
736 |
+
4 1000000.4
|
737 |
+
4 1000000.2
|
738 |
+
4 1000000.4
|
739 |
+
4 1000000.2
|
740 |
+
4 1000000.4
|
741 |
+
4 1000000.2
|
742 |
+
4 1000000.4
|
743 |
+
4 1000000.2
|
744 |
+
4 1000000.4
|
745 |
+
4 1000000.2
|
746 |
+
4 1000000.4
|
747 |
+
4 1000000.2
|
748 |
+
4 1000000.4
|
749 |
+
4 1000000.2
|
750 |
+
4 1000000.4
|
751 |
+
4 1000000.2
|
752 |
+
4 1000000.4
|
753 |
+
4 1000000.2
|
754 |
+
4 1000000.4
|
755 |
+
4 1000000.2
|
756 |
+
4 1000000.4
|
757 |
+
4 1000000.2
|
758 |
+
4 1000000.4
|
759 |
+
4 1000000.2
|
760 |
+
4 1000000.4
|
761 |
+
4 1000000.2
|
762 |
+
4 1000000.4
|
763 |
+
4 1000000.2
|
764 |
+
4 1000000.4
|
765 |
+
4 1000000.2
|
766 |
+
4 1000000.4
|
767 |
+
4 1000000.2
|
768 |
+
4 1000000.4
|
769 |
+
4 1000000.2
|
770 |
+
4 1000000.4
|
771 |
+
4 1000000.2
|
772 |
+
4 1000000.4
|
773 |
+
4 1000000.2
|
774 |
+
4 1000000.4
|
775 |
+
4 1000000.2
|
776 |
+
4 1000000.4
|
777 |
+
4 1000000.2
|
778 |
+
4 1000000.4
|
779 |
+
4 1000000.2
|
780 |
+
4 1000000.4
|
781 |
+
4 1000000.2
|
782 |
+
4 1000000.4
|
783 |
+
4 1000000.2
|
784 |
+
4 1000000.4
|
785 |
+
4 1000000.2
|
786 |
+
4 1000000.4
|
787 |
+
4 1000000.2
|
788 |
+
4 1000000.4
|
789 |
+
4 1000000.2
|
790 |
+
4 1000000.4
|
791 |
+
4 1000000.2
|
792 |
+
4 1000000.4
|
793 |
+
4 1000000.2
|
794 |
+
4 1000000.4
|
795 |
+
4 1000000.2
|
796 |
+
4 1000000.4
|
797 |
+
4 1000000.2
|
798 |
+
4 1000000.4
|
799 |
+
4 1000000.2
|
800 |
+
4 1000000.4
|
801 |
+
4 1000000.2
|
802 |
+
4 1000000.4
|
803 |
+
4 1000000.2
|
804 |
+
4 1000000.4
|
805 |
+
4 1000000.2
|
806 |
+
4 1000000.4
|
807 |
+
4 1000000.2
|
808 |
+
4 1000000.4
|
809 |
+
4 1000000.2
|
810 |
+
4 1000000.4
|
811 |
+
4 1000000.2
|
812 |
+
4 1000000.4
|
813 |
+
4 1000000.2
|
814 |
+
4 1000000.4
|
815 |
+
4 1000000.2
|
816 |
+
4 1000000.4
|
817 |
+
4 1000000.2
|
818 |
+
4 1000000.4
|
819 |
+
4 1000000.2
|
820 |
+
4 1000000.4
|
821 |
+
4 1000000.2
|
822 |
+
4 1000000.4
|
823 |
+
4 1000000.2
|
824 |
+
4 1000000.4
|
825 |
+
4 1000000.2
|
826 |
+
4 1000000.4
|
827 |
+
4 1000000.2
|
828 |
+
4 1000000.4
|
829 |
+
4 1000000.2
|
830 |
+
4 1000000.4
|
831 |
+
4 1000000.2
|
832 |
+
4 1000000.4
|
833 |
+
4 1000000.2
|
834 |
+
4 1000000.4
|
835 |
+
4 1000000.2
|
836 |
+
4 1000000.4
|
837 |
+
4 1000000.2
|
838 |
+
4 1000000.4
|
839 |
+
4 1000000.2
|
840 |
+
4 1000000.4
|
841 |
+
4 1000000.2
|
842 |
+
4 1000000.4
|
843 |
+
4 1000000.2
|
844 |
+
4 1000000.4
|
845 |
+
4 1000000.2
|
846 |
+
4 1000000.4
|
847 |
+
4 1000000.2
|
848 |
+
4 1000000.4
|
849 |
+
4 1000000.2
|
850 |
+
4 1000000.4
|
851 |
+
4 1000000.2
|
852 |
+
4 1000000.4
|
853 |
+
4 1000000.2
|
854 |
+
4 1000000.4
|
855 |
+
4 1000000.2
|
856 |
+
4 1000000.4
|
857 |
+
4 1000000.2
|
858 |
+
4 1000000.4
|
859 |
+
4 1000000.2
|
860 |
+
4 1000000.4
|
861 |
+
4 1000000.2
|
862 |
+
4 1000000.4
|
863 |
+
4 1000000.2
|
864 |
+
4 1000000.4
|
865 |
+
5 1000000.5
|
866 |
+
5 1000000.4
|
867 |
+
5 1000000.6
|
868 |
+
5 1000000.4
|
869 |
+
5 1000000.6
|
870 |
+
5 1000000.4
|
871 |
+
5 1000000.6
|
872 |
+
5 1000000.4
|
873 |
+
5 1000000.6
|
874 |
+
5 1000000.4
|
875 |
+
5 1000000.6
|
876 |
+
5 1000000.4
|
877 |
+
5 1000000.6
|
878 |
+
5 1000000.4
|
879 |
+
5 1000000.6
|
880 |
+
5 1000000.4
|
881 |
+
5 1000000.6
|
882 |
+
5 1000000.4
|
883 |
+
5 1000000.6
|
884 |
+
5 1000000.4
|
885 |
+
5 1000000.6
|
886 |
+
5 1000000.4
|
887 |
+
5 1000000.6
|
888 |
+
5 1000000.4
|
889 |
+
5 1000000.6
|
890 |
+
5 1000000.4
|
891 |
+
5 1000000.6
|
892 |
+
5 1000000.4
|
893 |
+
5 1000000.6
|
894 |
+
5 1000000.4
|
895 |
+
5 1000000.6
|
896 |
+
5 1000000.4
|
897 |
+
5 1000000.6
|
898 |
+
5 1000000.4
|
899 |
+
5 1000000.6
|
900 |
+
5 1000000.4
|
901 |
+
5 1000000.6
|
902 |
+
5 1000000.4
|
903 |
+
5 1000000.6
|
904 |
+
5 1000000.4
|
905 |
+
5 1000000.6
|
906 |
+
5 1000000.4
|
907 |
+
5 1000000.6
|
908 |
+
5 1000000.4
|
909 |
+
5 1000000.6
|
910 |
+
5 1000000.4
|
911 |
+
5 1000000.6
|
912 |
+
5 1000000.4
|
913 |
+
5 1000000.6
|
914 |
+
5 1000000.4
|
915 |
+
5 1000000.6
|
916 |
+
5 1000000.4
|
917 |
+
5 1000000.6
|
918 |
+
5 1000000.4
|
919 |
+
5 1000000.6
|
920 |
+
5 1000000.4
|
921 |
+
5 1000000.6
|
922 |
+
5 1000000.4
|
923 |
+
5 1000000.6
|
924 |
+
5 1000000.4
|
925 |
+
5 1000000.6
|
926 |
+
5 1000000.4
|
927 |
+
5 1000000.6
|
928 |
+
5 1000000.4
|
929 |
+
5 1000000.6
|
930 |
+
5 1000000.4
|
931 |
+
5 1000000.6
|
932 |
+
5 1000000.4
|
933 |
+
5 1000000.6
|
934 |
+
5 1000000.4
|
935 |
+
5 1000000.6
|
936 |
+
5 1000000.4
|
937 |
+
5 1000000.6
|
938 |
+
5 1000000.4
|
939 |
+
5 1000000.6
|
940 |
+
5 1000000.4
|
941 |
+
5 1000000.6
|
942 |
+
5 1000000.4
|
943 |
+
5 1000000.6
|
944 |
+
5 1000000.4
|
945 |
+
5 1000000.6
|
946 |
+
5 1000000.4
|
947 |
+
5 1000000.6
|
948 |
+
5 1000000.4
|
949 |
+
5 1000000.6
|
950 |
+
5 1000000.4
|
951 |
+
5 1000000.6
|
952 |
+
5 1000000.4
|
953 |
+
5 1000000.6
|
954 |
+
5 1000000.4
|
955 |
+
5 1000000.6
|
956 |
+
5 1000000.4
|
957 |
+
5 1000000.6
|
958 |
+
5 1000000.4
|
959 |
+
5 1000000.6
|
960 |
+
5 1000000.4
|
961 |
+
5 1000000.6
|
962 |
+
5 1000000.4
|
963 |
+
5 1000000.6
|
964 |
+
5 1000000.4
|
965 |
+
5 1000000.6
|
966 |
+
5 1000000.4
|
967 |
+
5 1000000.6
|
968 |
+
5 1000000.4
|
969 |
+
5 1000000.6
|
970 |
+
5 1000000.4
|
971 |
+
5 1000000.6
|
972 |
+
5 1000000.4
|
973 |
+
5 1000000.6
|
974 |
+
5 1000000.4
|
975 |
+
5 1000000.6
|
976 |
+
5 1000000.4
|
977 |
+
5 1000000.6
|
978 |
+
5 1000000.4
|
979 |
+
5 1000000.6
|
980 |
+
5 1000000.4
|
981 |
+
5 1000000.6
|
982 |
+
5 1000000.4
|
983 |
+
5 1000000.6
|
984 |
+
5 1000000.4
|
985 |
+
5 1000000.6
|
986 |
+
5 1000000.4
|
987 |
+
5 1000000.6
|
988 |
+
5 1000000.4
|
989 |
+
5 1000000.6
|
990 |
+
5 1000000.4
|
991 |
+
5 1000000.6
|
992 |
+
5 1000000.4
|
993 |
+
5 1000000.6
|
994 |
+
5 1000000.4
|
995 |
+
5 1000000.6
|
996 |
+
5 1000000.4
|
997 |
+
5 1000000.6
|
998 |
+
5 1000000.4
|
999 |
+
5 1000000.6
|
1000 |
+
5 1000000.4
|
1001 |
+
5 1000000.6
|
1002 |
+
5 1000000.4
|
1003 |
+
5 1000000.6
|
1004 |
+
5 1000000.4
|
1005 |
+
5 1000000.6
|
1006 |
+
5 1000000.4
|
1007 |
+
5 1000000.6
|
1008 |
+
5 1000000.4
|
1009 |
+
5 1000000.6
|
1010 |
+
5 1000000.4
|
1011 |
+
5 1000000.6
|
1012 |
+
5 1000000.4
|
1013 |
+
5 1000000.6
|
1014 |
+
5 1000000.4
|
1015 |
+
5 1000000.6
|
1016 |
+
5 1000000.4
|
1017 |
+
5 1000000.6
|
1018 |
+
5 1000000.4
|
1019 |
+
5 1000000.6
|
1020 |
+
5 1000000.4
|
1021 |
+
5 1000000.6
|
1022 |
+
5 1000000.4
|
1023 |
+
5 1000000.6
|
1024 |
+
5 1000000.4
|
1025 |
+
5 1000000.6
|
1026 |
+
5 1000000.4
|
1027 |
+
5 1000000.6
|
1028 |
+
5 1000000.4
|
1029 |
+
5 1000000.6
|
1030 |
+
5 1000000.4
|
1031 |
+
5 1000000.6
|
1032 |
+
5 1000000.4
|
1033 |
+
5 1000000.6
|
1034 |
+
5 1000000.4
|
1035 |
+
5 1000000.6
|
1036 |
+
5 1000000.4
|
1037 |
+
5 1000000.6
|
1038 |
+
5 1000000.4
|
1039 |
+
5 1000000.6
|
1040 |
+
5 1000000.4
|
1041 |
+
5 1000000.6
|
1042 |
+
5 1000000.4
|
1043 |
+
5 1000000.6
|
1044 |
+
5 1000000.4
|
1045 |
+
5 1000000.6
|
1046 |
+
5 1000000.4
|
1047 |
+
5 1000000.6
|
1048 |
+
5 1000000.4
|
1049 |
+
5 1000000.6
|
1050 |
+
5 1000000.4
|
1051 |
+
5 1000000.6
|
1052 |
+
5 1000000.4
|
1053 |
+
5 1000000.6
|
1054 |
+
5 1000000.4
|
1055 |
+
5 1000000.6
|
1056 |
+
5 1000000.4
|
1057 |
+
5 1000000.6
|
1058 |
+
5 1000000.4
|
1059 |
+
5 1000000.6
|
1060 |
+
5 1000000.4
|
1061 |
+
5 1000000.6
|
1062 |
+
5 1000000.4
|
1063 |
+
5 1000000.6
|
1064 |
+
5 1000000.4
|
1065 |
+
5 1000000.6
|
1066 |
+
6 1000000.3
|
1067 |
+
6 1000000.2
|
1068 |
+
6 1000000.4
|
1069 |
+
6 1000000.2
|
1070 |
+
6 1000000.4
|
1071 |
+
6 1000000.2
|
1072 |
+
6 1000000.4
|
1073 |
+
6 1000000.2
|
1074 |
+
6 1000000.4
|
1075 |
+
6 1000000.2
|
1076 |
+
6 1000000.4
|
1077 |
+
6 1000000.2
|
1078 |
+
6 1000000.4
|
1079 |
+
6 1000000.2
|
1080 |
+
6 1000000.4
|
1081 |
+
6 1000000.2
|
1082 |
+
6 1000000.4
|
1083 |
+
6 1000000.2
|
1084 |
+
6 1000000.4
|
1085 |
+
6 1000000.2
|
1086 |
+
6 1000000.4
|
1087 |
+
6 1000000.2
|
1088 |
+
6 1000000.4
|
1089 |
+
6 1000000.2
|
1090 |
+
6 1000000.4
|
1091 |
+
6 1000000.2
|
1092 |
+
6 1000000.4
|
1093 |
+
6 1000000.2
|
1094 |
+
6 1000000.4
|
1095 |
+
6 1000000.2
|
1096 |
+
6 1000000.4
|
1097 |
+
6 1000000.2
|
1098 |
+
6 1000000.4
|
1099 |
+
6 1000000.2
|
1100 |
+
6 1000000.4
|
1101 |
+
6 1000000.2
|
1102 |
+
6 1000000.4
|
1103 |
+
6 1000000.2
|
1104 |
+
6 1000000.4
|
1105 |
+
6 1000000.2
|
1106 |
+
6 1000000.4
|
1107 |
+
6 1000000.2
|
1108 |
+
6 1000000.4
|
1109 |
+
6 1000000.2
|
1110 |
+
6 1000000.4
|
1111 |
+
6 1000000.2
|
1112 |
+
6 1000000.4
|
1113 |
+
6 1000000.2
|
1114 |
+
6 1000000.4
|
1115 |
+
6 1000000.2
|
1116 |
+
6 1000000.4
|
1117 |
+
6 1000000.2
|
1118 |
+
6 1000000.4
|
1119 |
+
6 1000000.2
|
1120 |
+
6 1000000.4
|
1121 |
+
6 1000000.2
|
1122 |
+
6 1000000.4
|
1123 |
+
6 1000000.2
|
1124 |
+
6 1000000.4
|
1125 |
+
6 1000000.2
|
1126 |
+
6 1000000.4
|
1127 |
+
6 1000000.2
|
1128 |
+
6 1000000.4
|
1129 |
+
6 1000000.2
|
1130 |
+
6 1000000.4
|
1131 |
+
6 1000000.2
|
1132 |
+
6 1000000.4
|
1133 |
+
6 1000000.2
|
1134 |
+
6 1000000.4
|
1135 |
+
6 1000000.2
|
1136 |
+
6 1000000.4
|
1137 |
+
6 1000000.2
|
1138 |
+
6 1000000.4
|
1139 |
+
6 1000000.2
|
1140 |
+
6 1000000.4
|
1141 |
+
6 1000000.2
|
1142 |
+
6 1000000.4
|
1143 |
+
6 1000000.2
|
1144 |
+
6 1000000.4
|
1145 |
+
6 1000000.2
|
1146 |
+
6 1000000.4
|
1147 |
+
6 1000000.2
|
1148 |
+
6 1000000.4
|
1149 |
+
6 1000000.2
|
1150 |
+
6 1000000.4
|
1151 |
+
6 1000000.2
|
1152 |
+
6 1000000.4
|
1153 |
+
6 1000000.2
|
1154 |
+
6 1000000.4
|
1155 |
+
6 1000000.2
|
1156 |
+
6 1000000.4
|
1157 |
+
6 1000000.2
|
1158 |
+
6 1000000.4
|
1159 |
+
6 1000000.2
|
1160 |
+
6 1000000.4
|
1161 |
+
6 1000000.2
|
1162 |
+
6 1000000.4
|
1163 |
+
6 1000000.2
|
1164 |
+
6 1000000.4
|
1165 |
+
6 1000000.2
|
1166 |
+
6 1000000.4
|
1167 |
+
6 1000000.2
|
1168 |
+
6 1000000.4
|
1169 |
+
6 1000000.2
|
1170 |
+
6 1000000.4
|
1171 |
+
6 1000000.2
|
1172 |
+
6 1000000.4
|
1173 |
+
6 1000000.2
|
1174 |
+
6 1000000.4
|
1175 |
+
6 1000000.2
|
1176 |
+
6 1000000.4
|
1177 |
+
6 1000000.2
|
1178 |
+
6 1000000.4
|
1179 |
+
6 1000000.2
|
1180 |
+
6 1000000.4
|
1181 |
+
6 1000000.2
|
1182 |
+
6 1000000.4
|
1183 |
+
6 1000000.2
|
1184 |
+
6 1000000.4
|
1185 |
+
6 1000000.2
|
1186 |
+
6 1000000.4
|
1187 |
+
6 1000000.2
|
1188 |
+
6 1000000.4
|
1189 |
+
6 1000000.2
|
1190 |
+
6 1000000.4
|
1191 |
+
6 1000000.2
|
1192 |
+
6 1000000.4
|
1193 |
+
6 1000000.2
|
1194 |
+
6 1000000.4
|
1195 |
+
6 1000000.2
|
1196 |
+
6 1000000.4
|
1197 |
+
6 1000000.2
|
1198 |
+
6 1000000.4
|
1199 |
+
6 1000000.2
|
1200 |
+
6 1000000.4
|
1201 |
+
6 1000000.2
|
1202 |
+
6 1000000.4
|
1203 |
+
6 1000000.2
|
1204 |
+
6 1000000.4
|
1205 |
+
6 1000000.2
|
1206 |
+
6 1000000.4
|
1207 |
+
6 1000000.2
|
1208 |
+
6 1000000.4
|
1209 |
+
6 1000000.2
|
1210 |
+
6 1000000.4
|
1211 |
+
6 1000000.2
|
1212 |
+
6 1000000.4
|
1213 |
+
6 1000000.2
|
1214 |
+
6 1000000.4
|
1215 |
+
6 1000000.2
|
1216 |
+
6 1000000.4
|
1217 |
+
6 1000000.2
|
1218 |
+
6 1000000.4
|
1219 |
+
6 1000000.2
|
1220 |
+
6 1000000.4
|
1221 |
+
6 1000000.2
|
1222 |
+
6 1000000.4
|
1223 |
+
6 1000000.2
|
1224 |
+
6 1000000.4
|
1225 |
+
6 1000000.2
|
1226 |
+
6 1000000.4
|
1227 |
+
6 1000000.2
|
1228 |
+
6 1000000.4
|
1229 |
+
6 1000000.2
|
1230 |
+
6 1000000.4
|
1231 |
+
6 1000000.2
|
1232 |
+
6 1000000.4
|
1233 |
+
6 1000000.2
|
1234 |
+
6 1000000.4
|
1235 |
+
6 1000000.2
|
1236 |
+
6 1000000.4
|
1237 |
+
6 1000000.2
|
1238 |
+
6 1000000.4
|
1239 |
+
6 1000000.2
|
1240 |
+
6 1000000.4
|
1241 |
+
6 1000000.2
|
1242 |
+
6 1000000.4
|
1243 |
+
6 1000000.2
|
1244 |
+
6 1000000.4
|
1245 |
+
6 1000000.2
|
1246 |
+
6 1000000.4
|
1247 |
+
6 1000000.2
|
1248 |
+
6 1000000.4
|
1249 |
+
6 1000000.2
|
1250 |
+
6 1000000.4
|
1251 |
+
6 1000000.2
|
1252 |
+
6 1000000.4
|
1253 |
+
6 1000000.2
|
1254 |
+
6 1000000.4
|
1255 |
+
6 1000000.2
|
1256 |
+
6 1000000.4
|
1257 |
+
6 1000000.2
|
1258 |
+
6 1000000.4
|
1259 |
+
6 1000000.2
|
1260 |
+
6 1000000.4
|
1261 |
+
6 1000000.2
|
1262 |
+
6 1000000.4
|
1263 |
+
6 1000000.2
|
1264 |
+
6 1000000.4
|
1265 |
+
6 1000000.2
|
1266 |
+
6 1000000.4
|
1267 |
+
7 1000000.5
|
1268 |
+
7 1000000.4
|
1269 |
+
7 1000000.6
|
1270 |
+
7 1000000.4
|
1271 |
+
7 1000000.6
|
1272 |
+
7 1000000.4
|
1273 |
+
7 1000000.6
|
1274 |
+
7 1000000.4
|
1275 |
+
7 1000000.6
|
1276 |
+
7 1000000.4
|
1277 |
+
7 1000000.6
|
1278 |
+
7 1000000.4
|
1279 |
+
7 1000000.6
|
1280 |
+
7 1000000.4
|
1281 |
+
7 1000000.6
|
1282 |
+
7 1000000.4
|
1283 |
+
7 1000000.6
|
1284 |
+
7 1000000.4
|
1285 |
+
7 1000000.6
|
1286 |
+
7 1000000.4
|
1287 |
+
7 1000000.6
|
1288 |
+
7 1000000.4
|
1289 |
+
7 1000000.6
|
1290 |
+
7 1000000.4
|
1291 |
+
7 1000000.6
|
1292 |
+
7 1000000.4
|
1293 |
+
7 1000000.6
|
1294 |
+
7 1000000.4
|
1295 |
+
7 1000000.6
|
1296 |
+
7 1000000.4
|
1297 |
+
7 1000000.6
|
1298 |
+
7 1000000.4
|
1299 |
+
7 1000000.6
|
1300 |
+
7 1000000.4
|
1301 |
+
7 1000000.6
|
1302 |
+
7 1000000.4
|
1303 |
+
7 1000000.6
|
1304 |
+
7 1000000.4
|
1305 |
+
7 1000000.6
|
1306 |
+
7 1000000.4
|
1307 |
+
7 1000000.6
|
1308 |
+
7 1000000.4
|
1309 |
+
7 1000000.6
|
1310 |
+
7 1000000.4
|
1311 |
+
7 1000000.6
|
1312 |
+
7 1000000.4
|
1313 |
+
7 1000000.6
|
1314 |
+
7 1000000.4
|
1315 |
+
7 1000000.6
|
1316 |
+
7 1000000.4
|
1317 |
+
7 1000000.6
|
1318 |
+
7 1000000.4
|
1319 |
+
7 1000000.6
|
1320 |
+
7 1000000.4
|
1321 |
+
7 1000000.6
|
1322 |
+
7 1000000.4
|
1323 |
+
7 1000000.6
|
1324 |
+
7 1000000.4
|
1325 |
+
7 1000000.6
|
1326 |
+
7 1000000.4
|
1327 |
+
7 1000000.6
|
1328 |
+
7 1000000.4
|
1329 |
+
7 1000000.6
|
1330 |
+
7 1000000.4
|
1331 |
+
7 1000000.6
|
1332 |
+
7 1000000.4
|
1333 |
+
7 1000000.6
|
1334 |
+
7 1000000.4
|
1335 |
+
7 1000000.6
|
1336 |
+
7 1000000.4
|
1337 |
+
7 1000000.6
|
1338 |
+
7 1000000.4
|
1339 |
+
7 1000000.6
|
1340 |
+
7 1000000.4
|
1341 |
+
7 1000000.6
|
1342 |
+
7 1000000.4
|
1343 |
+
7 1000000.6
|
1344 |
+
7 1000000.4
|
1345 |
+
7 1000000.6
|
1346 |
+
7 1000000.4
|
1347 |
+
7 1000000.6
|
1348 |
+
7 1000000.4
|
1349 |
+
7 1000000.6
|
1350 |
+
7 1000000.4
|
1351 |
+
7 1000000.6
|
1352 |
+
7 1000000.4
|
1353 |
+
7 1000000.6
|
1354 |
+
7 1000000.4
|
1355 |
+
7 1000000.6
|
1356 |
+
7 1000000.4
|
1357 |
+
7 1000000.6
|
1358 |
+
7 1000000.4
|
1359 |
+
7 1000000.6
|
1360 |
+
7 1000000.4
|
1361 |
+
7 1000000.6
|
1362 |
+
7 1000000.4
|
1363 |
+
7 1000000.6
|
1364 |
+
7 1000000.4
|
1365 |
+
7 1000000.6
|
1366 |
+
7 1000000.4
|
1367 |
+
7 1000000.6
|
1368 |
+
7 1000000.4
|
1369 |
+
7 1000000.6
|
1370 |
+
7 1000000.4
|
1371 |
+
7 1000000.6
|
1372 |
+
7 1000000.4
|
1373 |
+
7 1000000.6
|
1374 |
+
7 1000000.4
|
1375 |
+
7 1000000.6
|
1376 |
+
7 1000000.4
|
1377 |
+
7 1000000.6
|
1378 |
+
7 1000000.4
|
1379 |
+
7 1000000.6
|
1380 |
+
7 1000000.4
|
1381 |
+
7 1000000.6
|
1382 |
+
7 1000000.4
|
1383 |
+
7 1000000.6
|
1384 |
+
7 1000000.4
|
1385 |
+
7 1000000.6
|
1386 |
+
7 1000000.4
|
1387 |
+
7 1000000.6
|
1388 |
+
7 1000000.4
|
1389 |
+
7 1000000.6
|
1390 |
+
7 1000000.4
|
1391 |
+
7 1000000.6
|
1392 |
+
7 1000000.4
|
1393 |
+
7 1000000.6
|
1394 |
+
7 1000000.4
|
1395 |
+
7 1000000.6
|
1396 |
+
7 1000000.4
|
1397 |
+
7 1000000.6
|
1398 |
+
7 1000000.4
|
1399 |
+
7 1000000.6
|
1400 |
+
7 1000000.4
|
1401 |
+
7 1000000.6
|
1402 |
+
7 1000000.4
|
1403 |
+
7 1000000.6
|
1404 |
+
7 1000000.4
|
1405 |
+
7 1000000.6
|
1406 |
+
7 1000000.4
|
1407 |
+
7 1000000.6
|
1408 |
+
7 1000000.4
|
1409 |
+
7 1000000.6
|
1410 |
+
7 1000000.4
|
1411 |
+
7 1000000.6
|
1412 |
+
7 1000000.4
|
1413 |
+
7 1000000.6
|
1414 |
+
7 1000000.4
|
1415 |
+
7 1000000.6
|
1416 |
+
7 1000000.4
|
1417 |
+
7 1000000.6
|
1418 |
+
7 1000000.4
|
1419 |
+
7 1000000.6
|
1420 |
+
7 1000000.4
|
1421 |
+
7 1000000.6
|
1422 |
+
7 1000000.4
|
1423 |
+
7 1000000.6
|
1424 |
+
7 1000000.4
|
1425 |
+
7 1000000.6
|
1426 |
+
7 1000000.4
|
1427 |
+
7 1000000.6
|
1428 |
+
7 1000000.4
|
1429 |
+
7 1000000.6
|
1430 |
+
7 1000000.4
|
1431 |
+
7 1000000.6
|
1432 |
+
7 1000000.4
|
1433 |
+
7 1000000.6
|
1434 |
+
7 1000000.4
|
1435 |
+
7 1000000.6
|
1436 |
+
7 1000000.4
|
1437 |
+
7 1000000.6
|
1438 |
+
7 1000000.4
|
1439 |
+
7 1000000.6
|
1440 |
+
7 1000000.4
|
1441 |
+
7 1000000.6
|
1442 |
+
7 1000000.4
|
1443 |
+
7 1000000.6
|
1444 |
+
7 1000000.4
|
1445 |
+
7 1000000.6
|
1446 |
+
7 1000000.4
|
1447 |
+
7 1000000.6
|
1448 |
+
7 1000000.4
|
1449 |
+
7 1000000.6
|
1450 |
+
7 1000000.4
|
1451 |
+
7 1000000.6
|
1452 |
+
7 1000000.4
|
1453 |
+
7 1000000.6
|
1454 |
+
7 1000000.4
|
1455 |
+
7 1000000.6
|
1456 |
+
7 1000000.4
|
1457 |
+
7 1000000.6
|
1458 |
+
7 1000000.4
|
1459 |
+
7 1000000.6
|
1460 |
+
7 1000000.4
|
1461 |
+
7 1000000.6
|
1462 |
+
7 1000000.4
|
1463 |
+
7 1000000.6
|
1464 |
+
7 1000000.4
|
1465 |
+
7 1000000.6
|
1466 |
+
7 1000000.4
|
1467 |
+
7 1000000.6
|
1468 |
+
8 1000000.3
|
1469 |
+
8 1000000.2
|
1470 |
+
8 1000000.4
|
1471 |
+
8 1000000.2
|
1472 |
+
8 1000000.4
|
1473 |
+
8 1000000.2
|
1474 |
+
8 1000000.4
|
1475 |
+
8 1000000.2
|
1476 |
+
8 1000000.4
|
1477 |
+
8 1000000.2
|
1478 |
+
8 1000000.4
|
1479 |
+
8 1000000.2
|
1480 |
+
8 1000000.4
|
1481 |
+
8 1000000.2
|
1482 |
+
8 1000000.4
|
1483 |
+
8 1000000.2
|
1484 |
+
8 1000000.4
|
1485 |
+
8 1000000.2
|
1486 |
+
8 1000000.4
|
1487 |
+
8 1000000.2
|
1488 |
+
8 1000000.4
|
1489 |
+
8 1000000.2
|
1490 |
+
8 1000000.4
|
1491 |
+
8 1000000.2
|
1492 |
+
8 1000000.4
|
1493 |
+
8 1000000.2
|
1494 |
+
8 1000000.4
|
1495 |
+
8 1000000.2
|
1496 |
+
8 1000000.4
|
1497 |
+
8 1000000.2
|
1498 |
+
8 1000000.4
|
1499 |
+
8 1000000.2
|
1500 |
+
8 1000000.4
|
1501 |
+
8 1000000.2
|
1502 |
+
8 1000000.4
|
1503 |
+
8 1000000.2
|
1504 |
+
8 1000000.4
|
1505 |
+
8 1000000.2
|
1506 |
+
8 1000000.4
|
1507 |
+
8 1000000.2
|
1508 |
+
8 1000000.4
|
1509 |
+
8 1000000.2
|
1510 |
+
8 1000000.4
|
1511 |
+
8 1000000.2
|
1512 |
+
8 1000000.4
|
1513 |
+
8 1000000.2
|
1514 |
+
8 1000000.4
|
1515 |
+
8 1000000.2
|
1516 |
+
8 1000000.4
|
1517 |
+
8 1000000.2
|
1518 |
+
8 1000000.4
|
1519 |
+
8 1000000.2
|
1520 |
+
8 1000000.4
|
1521 |
+
8 1000000.2
|
1522 |
+
8 1000000.4
|
1523 |
+
8 1000000.2
|
1524 |
+
8 1000000.4
|
1525 |
+
8 1000000.2
|
1526 |
+
8 1000000.4
|
1527 |
+
8 1000000.2
|
1528 |
+
8 1000000.4
|
1529 |
+
8 1000000.2
|
1530 |
+
8 1000000.4
|
1531 |
+
8 1000000.2
|
1532 |
+
8 1000000.4
|
1533 |
+
8 1000000.2
|
1534 |
+
8 1000000.4
|
1535 |
+
8 1000000.2
|
1536 |
+
8 1000000.4
|
1537 |
+
8 1000000.2
|
1538 |
+
8 1000000.4
|
1539 |
+
8 1000000.2
|
1540 |
+
8 1000000.4
|
1541 |
+
8 1000000.2
|
1542 |
+
8 1000000.4
|
1543 |
+
8 1000000.2
|
1544 |
+
8 1000000.4
|
1545 |
+
8 1000000.2
|
1546 |
+
8 1000000.4
|
1547 |
+
8 1000000.2
|
1548 |
+
8 1000000.4
|
1549 |
+
8 1000000.2
|
1550 |
+
8 1000000.4
|
1551 |
+
8 1000000.2
|
1552 |
+
8 1000000.4
|
1553 |
+
8 1000000.2
|
1554 |
+
8 1000000.4
|
1555 |
+
8 1000000.2
|
1556 |
+
8 1000000.4
|
1557 |
+
8 1000000.2
|
1558 |
+
8 1000000.4
|
1559 |
+
8 1000000.2
|
1560 |
+
8 1000000.4
|
1561 |
+
8 1000000.2
|
1562 |
+
8 1000000.4
|
1563 |
+
8 1000000.2
|
1564 |
+
8 1000000.4
|
1565 |
+
8 1000000.2
|
1566 |
+
8 1000000.4
|
1567 |
+
8 1000000.2
|
1568 |
+
8 1000000.4
|
1569 |
+
8 1000000.2
|
1570 |
+
8 1000000.4
|
1571 |
+
8 1000000.2
|
1572 |
+
8 1000000.4
|
1573 |
+
8 1000000.2
|
1574 |
+
8 1000000.4
|
1575 |
+
8 1000000.2
|
1576 |
+
8 1000000.4
|
1577 |
+
8 1000000.2
|
1578 |
+
8 1000000.4
|
1579 |
+
8 1000000.2
|
1580 |
+
8 1000000.4
|
1581 |
+
8 1000000.2
|
1582 |
+
8 1000000.4
|
1583 |
+
8 1000000.2
|
1584 |
+
8 1000000.4
|
1585 |
+
8 1000000.2
|
1586 |
+
8 1000000.4
|
1587 |
+
8 1000000.2
|
1588 |
+
8 1000000.4
|
1589 |
+
8 1000000.2
|
1590 |
+
8 1000000.4
|
1591 |
+
8 1000000.2
|
1592 |
+
8 1000000.4
|
1593 |
+
8 1000000.2
|
1594 |
+
8 1000000.4
|
1595 |
+
8 1000000.2
|
1596 |
+
8 1000000.4
|
1597 |
+
8 1000000.2
|
1598 |
+
8 1000000.4
|
1599 |
+
8 1000000.2
|
1600 |
+
8 1000000.4
|
1601 |
+
8 1000000.2
|
1602 |
+
8 1000000.4
|
1603 |
+
8 1000000.2
|
1604 |
+
8 1000000.4
|
1605 |
+
8 1000000.2
|
1606 |
+
8 1000000.4
|
1607 |
+
8 1000000.2
|
1608 |
+
8 1000000.4
|
1609 |
+
8 1000000.2
|
1610 |
+
8 1000000.4
|
1611 |
+
8 1000000.2
|
1612 |
+
8 1000000.4
|
1613 |
+
8 1000000.2
|
1614 |
+
8 1000000.4
|
1615 |
+
8 1000000.2
|
1616 |
+
8 1000000.4
|
1617 |
+
8 1000000.2
|
1618 |
+
8 1000000.4
|
1619 |
+
8 1000000.2
|
1620 |
+
8 1000000.4
|
1621 |
+
8 1000000.2
|
1622 |
+
8 1000000.4
|
1623 |
+
8 1000000.2
|
1624 |
+
8 1000000.4
|
1625 |
+
8 1000000.2
|
1626 |
+
8 1000000.4
|
1627 |
+
8 1000000.2
|
1628 |
+
8 1000000.4
|
1629 |
+
8 1000000.2
|
1630 |
+
8 1000000.4
|
1631 |
+
8 1000000.2
|
1632 |
+
8 1000000.4
|
1633 |
+
8 1000000.2
|
1634 |
+
8 1000000.4
|
1635 |
+
8 1000000.2
|
1636 |
+
8 1000000.4
|
1637 |
+
8 1000000.2
|
1638 |
+
8 1000000.4
|
1639 |
+
8 1000000.2
|
1640 |
+
8 1000000.4
|
1641 |
+
8 1000000.2
|
1642 |
+
8 1000000.4
|
1643 |
+
8 1000000.2
|
1644 |
+
8 1000000.4
|
1645 |
+
8 1000000.2
|
1646 |
+
8 1000000.4
|
1647 |
+
8 1000000.2
|
1648 |
+
8 1000000.4
|
1649 |
+
8 1000000.2
|
1650 |
+
8 1000000.4
|
1651 |
+
8 1000000.2
|
1652 |
+
8 1000000.4
|
1653 |
+
8 1000000.2
|
1654 |
+
8 1000000.4
|
1655 |
+
8 1000000.2
|
1656 |
+
8 1000000.4
|
1657 |
+
8 1000000.2
|
1658 |
+
8 1000000.4
|
1659 |
+
8 1000000.2
|
1660 |
+
8 1000000.4
|
1661 |
+
8 1000000.2
|
1662 |
+
8 1000000.4
|
1663 |
+
8 1000000.2
|
1664 |
+
8 1000000.4
|
1665 |
+
8 1000000.2
|
1666 |
+
8 1000000.4
|
1667 |
+
8 1000000.2
|
1668 |
+
8 1000000.4
|
1669 |
+
9 1000000.5
|
1670 |
+
9 1000000.4
|
1671 |
+
9 1000000.6
|
1672 |
+
9 1000000.4
|
1673 |
+
9 1000000.6
|
1674 |
+
9 1000000.4
|
1675 |
+
9 1000000.6
|
1676 |
+
9 1000000.4
|
1677 |
+
9 1000000.6
|
1678 |
+
9 1000000.4
|
1679 |
+
9 1000000.6
|
1680 |
+
9 1000000.4
|
1681 |
+
9 1000000.6
|
1682 |
+
9 1000000.4
|
1683 |
+
9 1000000.6
|
1684 |
+
9 1000000.4
|
1685 |
+
9 1000000.6
|
1686 |
+
9 1000000.4
|
1687 |
+
9 1000000.6
|
1688 |
+
9 1000000.4
|
1689 |
+
9 1000000.6
|
1690 |
+
9 1000000.4
|
1691 |
+
9 1000000.6
|
1692 |
+
9 1000000.4
|
1693 |
+
9 1000000.6
|
1694 |
+
9 1000000.4
|
1695 |
+
9 1000000.6
|
1696 |
+
9 1000000.4
|
1697 |
+
9 1000000.6
|
1698 |
+
9 1000000.4
|
1699 |
+
9 1000000.6
|
1700 |
+
9 1000000.4
|
1701 |
+
9 1000000.6
|
1702 |
+
9 1000000.4
|
1703 |
+
9 1000000.6
|
1704 |
+
9 1000000.4
|
1705 |
+
9 1000000.6
|
1706 |
+
9 1000000.4
|
1707 |
+
9 1000000.6
|
1708 |
+
9 1000000.4
|
1709 |
+
9 1000000.6
|
1710 |
+
9 1000000.4
|
1711 |
+
9 1000000.6
|
1712 |
+
9 1000000.4
|
1713 |
+
9 1000000.6
|
1714 |
+
9 1000000.4
|
1715 |
+
9 1000000.6
|
1716 |
+
9 1000000.4
|
1717 |
+
9 1000000.6
|
1718 |
+
9 1000000.4
|
1719 |
+
9 1000000.6
|
1720 |
+
9 1000000.4
|
1721 |
+
9 1000000.6
|
1722 |
+
9 1000000.4
|
1723 |
+
9 1000000.6
|
1724 |
+
9 1000000.4
|
1725 |
+
9 1000000.6
|
1726 |
+
9 1000000.4
|
1727 |
+
9 1000000.6
|
1728 |
+
9 1000000.4
|
1729 |
+
9 1000000.6
|
1730 |
+
9 1000000.4
|
1731 |
+
9 1000000.6
|
1732 |
+
9 1000000.4
|
1733 |
+
9 1000000.6
|
1734 |
+
9 1000000.4
|
1735 |
+
9 1000000.6
|
1736 |
+
9 1000000.4
|
1737 |
+
9 1000000.6
|
1738 |
+
9 1000000.4
|
1739 |
+
9 1000000.6
|
1740 |
+
9 1000000.4
|
1741 |
+
9 1000000.6
|
1742 |
+
9 1000000.4
|
1743 |
+
9 1000000.6
|
1744 |
+
9 1000000.4
|
1745 |
+
9 1000000.6
|
1746 |
+
9 1000000.4
|
1747 |
+
9 1000000.6
|
1748 |
+
9 1000000.4
|
1749 |
+
9 1000000.6
|
1750 |
+
9 1000000.4
|
1751 |
+
9 1000000.6
|
1752 |
+
9 1000000.4
|
1753 |
+
9 1000000.6
|
1754 |
+
9 1000000.4
|
1755 |
+
9 1000000.6
|
1756 |
+
9 1000000.4
|
1757 |
+
9 1000000.6
|
1758 |
+
9 1000000.4
|
1759 |
+
9 1000000.6
|
1760 |
+
9 1000000.4
|
1761 |
+
9 1000000.6
|
1762 |
+
9 1000000.4
|
1763 |
+
9 1000000.6
|
1764 |
+
9 1000000.4
|
1765 |
+
9 1000000.6
|
1766 |
+
9 1000000.4
|
1767 |
+
9 1000000.6
|
1768 |
+
9 1000000.4
|
1769 |
+
9 1000000.6
|
1770 |
+
9 1000000.4
|
1771 |
+
9 1000000.6
|
1772 |
+
9 1000000.4
|
1773 |
+
9 1000000.6
|
1774 |
+
9 1000000.4
|
1775 |
+
9 1000000.6
|
1776 |
+
9 1000000.4
|
1777 |
+
9 1000000.6
|
1778 |
+
9 1000000.4
|
1779 |
+
9 1000000.6
|
1780 |
+
9 1000000.4
|
1781 |
+
9 1000000.6
|
1782 |
+
9 1000000.4
|
1783 |
+
9 1000000.6
|
1784 |
+
9 1000000.4
|
1785 |
+
9 1000000.6
|
1786 |
+
9 1000000.4
|
1787 |
+
9 1000000.6
|
1788 |
+
9 1000000.4
|
1789 |
+
9 1000000.6
|
1790 |
+
9 1000000.4
|
1791 |
+
9 1000000.6
|
1792 |
+
9 1000000.4
|
1793 |
+
9 1000000.6
|
1794 |
+
9 1000000.4
|
1795 |
+
9 1000000.6
|
1796 |
+
9 1000000.4
|
1797 |
+
9 1000000.6
|
1798 |
+
9 1000000.4
|
1799 |
+
9 1000000.6
|
1800 |
+
9 1000000.4
|
1801 |
+
9 1000000.6
|
1802 |
+
9 1000000.4
|
1803 |
+
9 1000000.6
|
1804 |
+
9 1000000.4
|
1805 |
+
9 1000000.6
|
1806 |
+
9 1000000.4
|
1807 |
+
9 1000000.6
|
1808 |
+
9 1000000.4
|
1809 |
+
9 1000000.6
|
1810 |
+
9 1000000.4
|
1811 |
+
9 1000000.6
|
1812 |
+
9 1000000.4
|
1813 |
+
9 1000000.6
|
1814 |
+
9 1000000.4
|
1815 |
+
9 1000000.6
|
1816 |
+
9 1000000.4
|
1817 |
+
9 1000000.6
|
1818 |
+
9 1000000.4
|
1819 |
+
9 1000000.6
|
1820 |
+
9 1000000.4
|
1821 |
+
9 1000000.6
|
1822 |
+
9 1000000.4
|
1823 |
+
9 1000000.6
|
1824 |
+
9 1000000.4
|
1825 |
+
9 1000000.6
|
1826 |
+
9 1000000.4
|
1827 |
+
9 1000000.6
|
1828 |
+
9 1000000.4
|
1829 |
+
9 1000000.6
|
1830 |
+
9 1000000.4
|
1831 |
+
9 1000000.6
|
1832 |
+
9 1000000.4
|
1833 |
+
9 1000000.6
|
1834 |
+
9 1000000.4
|
1835 |
+
9 1000000.6
|
1836 |
+
9 1000000.4
|
1837 |
+
9 1000000.6
|
1838 |
+
9 1000000.4
|
1839 |
+
9 1000000.6
|
1840 |
+
9 1000000.4
|
1841 |
+
9 1000000.6
|
1842 |
+
9 1000000.4
|
1843 |
+
9 1000000.6
|
1844 |
+
9 1000000.4
|
1845 |
+
9 1000000.6
|
1846 |
+
9 1000000.4
|
1847 |
+
9 1000000.6
|
1848 |
+
9 1000000.4
|
1849 |
+
9 1000000.6
|
1850 |
+
9 1000000.4
|
1851 |
+
9 1000000.6
|
1852 |
+
9 1000000.4
|
1853 |
+
9 1000000.6
|
1854 |
+
9 1000000.4
|
1855 |
+
9 1000000.6
|
1856 |
+
9 1000000.4
|
1857 |
+
9 1000000.6
|
1858 |
+
9 1000000.4
|
1859 |
+
9 1000000.6
|
1860 |
+
9 1000000.4
|
1861 |
+
9 1000000.6
|
1862 |
+
9 1000000.4
|
1863 |
+
9 1000000.6
|
1864 |
+
9 1000000.4
|
1865 |
+
9 1000000.6
|
1866 |
+
9 1000000.4
|
1867 |
+
9 1000000.6
|
1868 |
+
9 1000000.4
|
1869 |
+
9 1000000.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs06.dat
ADDED
The diff for this file is too large to render.
See raw diff
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs07.dat
ADDED
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs07 (SmLs07.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 249)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
21 Replicates/Cell
|
22 |
+
189 Observations
|
23 |
+
13 Constant Leading Digits
|
24 |
+
Higher Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.68000000000000E+00 2.10000000000000E-01 2.10000000000000E+01
|
42 |
+
Within Treatment 180 1.80000000000000E+00 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.82758620689655E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1000000000000.4
|
62 |
+
1 1000000000000.3
|
63 |
+
1 1000000000000.5
|
64 |
+
1 1000000000000.3
|
65 |
+
1 1000000000000.5
|
66 |
+
1 1000000000000.3
|
67 |
+
1 1000000000000.5
|
68 |
+
1 1000000000000.3
|
69 |
+
1 1000000000000.5
|
70 |
+
1 1000000000000.3
|
71 |
+
1 1000000000000.5
|
72 |
+
1 1000000000000.3
|
73 |
+
1 1000000000000.5
|
74 |
+
1 1000000000000.3
|
75 |
+
1 1000000000000.5
|
76 |
+
1 1000000000000.3
|
77 |
+
1 1000000000000.5
|
78 |
+
1 1000000000000.3
|
79 |
+
1 1000000000000.5
|
80 |
+
1 1000000000000.3
|
81 |
+
1 1000000000000.5
|
82 |
+
2 1000000000000.3
|
83 |
+
2 1000000000000.2
|
84 |
+
2 1000000000000.4
|
85 |
+
2 1000000000000.2
|
86 |
+
2 1000000000000.4
|
87 |
+
2 1000000000000.2
|
88 |
+
2 1000000000000.4
|
89 |
+
2 1000000000000.2
|
90 |
+
2 1000000000000.4
|
91 |
+
2 1000000000000.2
|
92 |
+
2 1000000000000.4
|
93 |
+
2 1000000000000.2
|
94 |
+
2 1000000000000.4
|
95 |
+
2 1000000000000.2
|
96 |
+
2 1000000000000.4
|
97 |
+
2 1000000000000.2
|
98 |
+
2 1000000000000.4
|
99 |
+
2 1000000000000.2
|
100 |
+
2 1000000000000.4
|
101 |
+
2 1000000000000.2
|
102 |
+
2 1000000000000.4
|
103 |
+
3 1000000000000.5
|
104 |
+
3 1000000000000.4
|
105 |
+
3 1000000000000.6
|
106 |
+
3 1000000000000.4
|
107 |
+
3 1000000000000.6
|
108 |
+
3 1000000000000.4
|
109 |
+
3 1000000000000.6
|
110 |
+
3 1000000000000.4
|
111 |
+
3 1000000000000.6
|
112 |
+
3 1000000000000.4
|
113 |
+
3 1000000000000.6
|
114 |
+
3 1000000000000.4
|
115 |
+
3 1000000000000.6
|
116 |
+
3 1000000000000.4
|
117 |
+
3 1000000000000.6
|
118 |
+
3 1000000000000.4
|
119 |
+
3 1000000000000.6
|
120 |
+
3 1000000000000.4
|
121 |
+
3 1000000000000.6
|
122 |
+
3 1000000000000.4
|
123 |
+
3 1000000000000.6
|
124 |
+
4 1000000000000.3
|
125 |
+
4 1000000000000.2
|
126 |
+
4 1000000000000.4
|
127 |
+
4 1000000000000.2
|
128 |
+
4 1000000000000.4
|
129 |
+
4 1000000000000.2
|
130 |
+
4 1000000000000.4
|
131 |
+
4 1000000000000.2
|
132 |
+
4 1000000000000.4
|
133 |
+
4 1000000000000.2
|
134 |
+
4 1000000000000.4
|
135 |
+
4 1000000000000.2
|
136 |
+
4 1000000000000.4
|
137 |
+
4 1000000000000.2
|
138 |
+
4 1000000000000.4
|
139 |
+
4 1000000000000.2
|
140 |
+
4 1000000000000.4
|
141 |
+
4 1000000000000.2
|
142 |
+
4 1000000000000.4
|
143 |
+
4 1000000000000.2
|
144 |
+
4 1000000000000.4
|
145 |
+
5 1000000000000.5
|
146 |
+
5 1000000000000.4
|
147 |
+
5 1000000000000.6
|
148 |
+
5 1000000000000.4
|
149 |
+
5 1000000000000.6
|
150 |
+
5 1000000000000.4
|
151 |
+
5 1000000000000.6
|
152 |
+
5 1000000000000.4
|
153 |
+
5 1000000000000.6
|
154 |
+
5 1000000000000.4
|
155 |
+
5 1000000000000.6
|
156 |
+
5 1000000000000.4
|
157 |
+
5 1000000000000.6
|
158 |
+
5 1000000000000.4
|
159 |
+
5 1000000000000.6
|
160 |
+
5 1000000000000.4
|
161 |
+
5 1000000000000.6
|
162 |
+
5 1000000000000.4
|
163 |
+
5 1000000000000.6
|
164 |
+
5 1000000000000.4
|
165 |
+
5 1000000000000.6
|
166 |
+
6 1000000000000.3
|
167 |
+
6 1000000000000.2
|
168 |
+
6 1000000000000.4
|
169 |
+
6 1000000000000.2
|
170 |
+
6 1000000000000.4
|
171 |
+
6 1000000000000.2
|
172 |
+
6 1000000000000.4
|
173 |
+
6 1000000000000.2
|
174 |
+
6 1000000000000.4
|
175 |
+
6 1000000000000.2
|
176 |
+
6 1000000000000.4
|
177 |
+
6 1000000000000.2
|
178 |
+
6 1000000000000.4
|
179 |
+
6 1000000000000.2
|
180 |
+
6 1000000000000.4
|
181 |
+
6 1000000000000.2
|
182 |
+
6 1000000000000.4
|
183 |
+
6 1000000000000.2
|
184 |
+
6 1000000000000.4
|
185 |
+
6 1000000000000.2
|
186 |
+
6 1000000000000.4
|
187 |
+
7 1000000000000.5
|
188 |
+
7 1000000000000.4
|
189 |
+
7 1000000000000.6
|
190 |
+
7 1000000000000.4
|
191 |
+
7 1000000000000.6
|
192 |
+
7 1000000000000.4
|
193 |
+
7 1000000000000.6
|
194 |
+
7 1000000000000.4
|
195 |
+
7 1000000000000.6
|
196 |
+
7 1000000000000.4
|
197 |
+
7 1000000000000.6
|
198 |
+
7 1000000000000.4
|
199 |
+
7 1000000000000.6
|
200 |
+
7 1000000000000.4
|
201 |
+
7 1000000000000.6
|
202 |
+
7 1000000000000.4
|
203 |
+
7 1000000000000.6
|
204 |
+
7 1000000000000.4
|
205 |
+
7 1000000000000.6
|
206 |
+
7 1000000000000.4
|
207 |
+
7 1000000000000.6
|
208 |
+
8 1000000000000.3
|
209 |
+
8 1000000000000.2
|
210 |
+
8 1000000000000.4
|
211 |
+
8 1000000000000.2
|
212 |
+
8 1000000000000.4
|
213 |
+
8 1000000000000.2
|
214 |
+
8 1000000000000.4
|
215 |
+
8 1000000000000.2
|
216 |
+
8 1000000000000.4
|
217 |
+
8 1000000000000.2
|
218 |
+
8 1000000000000.4
|
219 |
+
8 1000000000000.2
|
220 |
+
8 1000000000000.4
|
221 |
+
8 1000000000000.2
|
222 |
+
8 1000000000000.4
|
223 |
+
8 1000000000000.2
|
224 |
+
8 1000000000000.4
|
225 |
+
8 1000000000000.2
|
226 |
+
8 1000000000000.4
|
227 |
+
8 1000000000000.2
|
228 |
+
8 1000000000000.4
|
229 |
+
9 1000000000000.5
|
230 |
+
9 1000000000000.4
|
231 |
+
9 1000000000000.6
|
232 |
+
9 1000000000000.4
|
233 |
+
9 1000000000000.6
|
234 |
+
9 1000000000000.4
|
235 |
+
9 1000000000000.6
|
236 |
+
9 1000000000000.4
|
237 |
+
9 1000000000000.6
|
238 |
+
9 1000000000000.4
|
239 |
+
9 1000000000000.6
|
240 |
+
9 1000000000000.4
|
241 |
+
9 1000000000000.6
|
242 |
+
9 1000000000000.4
|
243 |
+
9 1000000000000.6
|
244 |
+
9 1000000000000.4
|
245 |
+
9 1000000000000.6
|
246 |
+
9 1000000000000.4
|
247 |
+
9 1000000000000.6
|
248 |
+
9 1000000000000.4
|
249 |
+
9 1000000000000.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs08.dat
ADDED
@@ -0,0 +1,1869 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: SmLs08 (SmLs08.dat)
|
3 |
+
|
4 |
+
|
5 |
+
File Format: ASCII
|
6 |
+
Certified Values (lines 41 to 47)
|
7 |
+
Data (lines 61 to 1869)
|
8 |
+
|
9 |
+
|
10 |
+
Procedure: Analysis of Variance
|
11 |
+
|
12 |
+
|
13 |
+
Reference: Simon, Stephen D. and Lesage, James P. (1989).
|
14 |
+
"Assessing the Accuracy of ANOVA Calculations in
|
15 |
+
Statistical Software".
|
16 |
+
Computational Statistics & Data Analysis, 8, pp. 325-332.
|
17 |
+
|
18 |
+
|
19 |
+
Data: 1 Factor
|
20 |
+
9 Treatments
|
21 |
+
201 Replicates/Cell
|
22 |
+
1809 Observations
|
23 |
+
13 Constant Leading Digits
|
24 |
+
Higher Level of Difficulty
|
25 |
+
Generated Data
|
26 |
+
|
27 |
+
|
28 |
+
Model: 10 Parameters (mu,tau_1, ... , tau_9)
|
29 |
+
y_{ij} = mu + tau_i + epsilon_{ij}
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
Certified Values:
|
37 |
+
|
38 |
+
Source of Sums of Mean
|
39 |
+
Variation df Squares Squares F Statistic
|
40 |
+
|
41 |
+
Between Treatment 8 1.60800000000000E+01 2.01000000000000E+00 2.01000000000000E+02
|
42 |
+
Within Treatment 1800 1.80000000000000E+01 1.00000000000000E-02
|
43 |
+
|
44 |
+
Certified R-Squared 4.71830985915493E-01
|
45 |
+
|
46 |
+
Certified Residual
|
47 |
+
Standard Deviation 1.00000000000000E-01
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: Treatment Response
|
61 |
+
1 1000000000000.4
|
62 |
+
1 1000000000000.3
|
63 |
+
1 1000000000000.5
|
64 |
+
1 1000000000000.3
|
65 |
+
1 1000000000000.5
|
66 |
+
1 1000000000000.3
|
67 |
+
1 1000000000000.5
|
68 |
+
1 1000000000000.3
|
69 |
+
1 1000000000000.5
|
70 |
+
1 1000000000000.3
|
71 |
+
1 1000000000000.5
|
72 |
+
1 1000000000000.3
|
73 |
+
1 1000000000000.5
|
74 |
+
1 1000000000000.3
|
75 |
+
1 1000000000000.5
|
76 |
+
1 1000000000000.3
|
77 |
+
1 1000000000000.5
|
78 |
+
1 1000000000000.3
|
79 |
+
1 1000000000000.5
|
80 |
+
1 1000000000000.3
|
81 |
+
1 1000000000000.5
|
82 |
+
1 1000000000000.3
|
83 |
+
1 1000000000000.5
|
84 |
+
1 1000000000000.3
|
85 |
+
1 1000000000000.5
|
86 |
+
1 1000000000000.3
|
87 |
+
1 1000000000000.5
|
88 |
+
1 1000000000000.3
|
89 |
+
1 1000000000000.5
|
90 |
+
1 1000000000000.3
|
91 |
+
1 1000000000000.5
|
92 |
+
1 1000000000000.3
|
93 |
+
1 1000000000000.5
|
94 |
+
1 1000000000000.3
|
95 |
+
1 1000000000000.5
|
96 |
+
1 1000000000000.3
|
97 |
+
1 1000000000000.5
|
98 |
+
1 1000000000000.3
|
99 |
+
1 1000000000000.5
|
100 |
+
1 1000000000000.3
|
101 |
+
1 1000000000000.5
|
102 |
+
1 1000000000000.3
|
103 |
+
1 1000000000000.5
|
104 |
+
1 1000000000000.3
|
105 |
+
1 1000000000000.5
|
106 |
+
1 1000000000000.3
|
107 |
+
1 1000000000000.5
|
108 |
+
1 1000000000000.3
|
109 |
+
1 1000000000000.5
|
110 |
+
1 1000000000000.3
|
111 |
+
1 1000000000000.5
|
112 |
+
1 1000000000000.3
|
113 |
+
1 1000000000000.5
|
114 |
+
1 1000000000000.3
|
115 |
+
1 1000000000000.5
|
116 |
+
1 1000000000000.3
|
117 |
+
1 1000000000000.5
|
118 |
+
1 1000000000000.3
|
119 |
+
1 1000000000000.5
|
120 |
+
1 1000000000000.3
|
121 |
+
1 1000000000000.5
|
122 |
+
1 1000000000000.3
|
123 |
+
1 1000000000000.5
|
124 |
+
1 1000000000000.3
|
125 |
+
1 1000000000000.5
|
126 |
+
1 1000000000000.3
|
127 |
+
1 1000000000000.5
|
128 |
+
1 1000000000000.3
|
129 |
+
1 1000000000000.5
|
130 |
+
1 1000000000000.3
|
131 |
+
1 1000000000000.5
|
132 |
+
1 1000000000000.3
|
133 |
+
1 1000000000000.5
|
134 |
+
1 1000000000000.3
|
135 |
+
1 1000000000000.5
|
136 |
+
1 1000000000000.3
|
137 |
+
1 1000000000000.5
|
138 |
+
1 1000000000000.3
|
139 |
+
1 1000000000000.5
|
140 |
+
1 1000000000000.3
|
141 |
+
1 1000000000000.5
|
142 |
+
1 1000000000000.3
|
143 |
+
1 1000000000000.5
|
144 |
+
1 1000000000000.3
|
145 |
+
1 1000000000000.5
|
146 |
+
1 1000000000000.3
|
147 |
+
1 1000000000000.5
|
148 |
+
1 1000000000000.3
|
149 |
+
1 1000000000000.5
|
150 |
+
1 1000000000000.3
|
151 |
+
1 1000000000000.5
|
152 |
+
1 1000000000000.3
|
153 |
+
1 1000000000000.5
|
154 |
+
1 1000000000000.3
|
155 |
+
1 1000000000000.5
|
156 |
+
1 1000000000000.3
|
157 |
+
1 1000000000000.5
|
158 |
+
1 1000000000000.3
|
159 |
+
1 1000000000000.5
|
160 |
+
1 1000000000000.3
|
161 |
+
1 1000000000000.5
|
162 |
+
1 1000000000000.3
|
163 |
+
1 1000000000000.5
|
164 |
+
1 1000000000000.3
|
165 |
+
1 1000000000000.5
|
166 |
+
1 1000000000000.3
|
167 |
+
1 1000000000000.5
|
168 |
+
1 1000000000000.3
|
169 |
+
1 1000000000000.5
|
170 |
+
1 1000000000000.3
|
171 |
+
1 1000000000000.5
|
172 |
+
1 1000000000000.3
|
173 |
+
1 1000000000000.5
|
174 |
+
1 1000000000000.3
|
175 |
+
1 1000000000000.5
|
176 |
+
1 1000000000000.3
|
177 |
+
1 1000000000000.5
|
178 |
+
1 1000000000000.3
|
179 |
+
1 1000000000000.5
|
180 |
+
1 1000000000000.3
|
181 |
+
1 1000000000000.5
|
182 |
+
1 1000000000000.3
|
183 |
+
1 1000000000000.5
|
184 |
+
1 1000000000000.3
|
185 |
+
1 1000000000000.5
|
186 |
+
1 1000000000000.3
|
187 |
+
1 1000000000000.5
|
188 |
+
1 1000000000000.3
|
189 |
+
1 1000000000000.5
|
190 |
+
1 1000000000000.3
|
191 |
+
1 1000000000000.5
|
192 |
+
1 1000000000000.3
|
193 |
+
1 1000000000000.5
|
194 |
+
1 1000000000000.3
|
195 |
+
1 1000000000000.5
|
196 |
+
1 1000000000000.3
|
197 |
+
1 1000000000000.5
|
198 |
+
1 1000000000000.3
|
199 |
+
1 1000000000000.5
|
200 |
+
1 1000000000000.3
|
201 |
+
1 1000000000000.5
|
202 |
+
1 1000000000000.3
|
203 |
+
1 1000000000000.5
|
204 |
+
1 1000000000000.3
|
205 |
+
1 1000000000000.5
|
206 |
+
1 1000000000000.3
|
207 |
+
1 1000000000000.5
|
208 |
+
1 1000000000000.3
|
209 |
+
1 1000000000000.5
|
210 |
+
1 1000000000000.3
|
211 |
+
1 1000000000000.5
|
212 |
+
1 1000000000000.3
|
213 |
+
1 1000000000000.5
|
214 |
+
1 1000000000000.3
|
215 |
+
1 1000000000000.5
|
216 |
+
1 1000000000000.3
|
217 |
+
1 1000000000000.5
|
218 |
+
1 1000000000000.3
|
219 |
+
1 1000000000000.5
|
220 |
+
1 1000000000000.3
|
221 |
+
1 1000000000000.5
|
222 |
+
1 1000000000000.3
|
223 |
+
1 1000000000000.5
|
224 |
+
1 1000000000000.3
|
225 |
+
1 1000000000000.5
|
226 |
+
1 1000000000000.3
|
227 |
+
1 1000000000000.5
|
228 |
+
1 1000000000000.3
|
229 |
+
1 1000000000000.5
|
230 |
+
1 1000000000000.3
|
231 |
+
1 1000000000000.5
|
232 |
+
1 1000000000000.3
|
233 |
+
1 1000000000000.5
|
234 |
+
1 1000000000000.3
|
235 |
+
1 1000000000000.5
|
236 |
+
1 1000000000000.3
|
237 |
+
1 1000000000000.5
|
238 |
+
1 1000000000000.3
|
239 |
+
1 1000000000000.5
|
240 |
+
1 1000000000000.3
|
241 |
+
1 1000000000000.5
|
242 |
+
1 1000000000000.3
|
243 |
+
1 1000000000000.5
|
244 |
+
1 1000000000000.3
|
245 |
+
1 1000000000000.5
|
246 |
+
1 1000000000000.3
|
247 |
+
1 1000000000000.5
|
248 |
+
1 1000000000000.3
|
249 |
+
1 1000000000000.5
|
250 |
+
1 1000000000000.3
|
251 |
+
1 1000000000000.5
|
252 |
+
1 1000000000000.3
|
253 |
+
1 1000000000000.5
|
254 |
+
1 1000000000000.3
|
255 |
+
1 1000000000000.5
|
256 |
+
1 1000000000000.3
|
257 |
+
1 1000000000000.5
|
258 |
+
1 1000000000000.3
|
259 |
+
1 1000000000000.5
|
260 |
+
1 1000000000000.3
|
261 |
+
1 1000000000000.5
|
262 |
+
2 1000000000000.3
|
263 |
+
2 1000000000000.2
|
264 |
+
2 1000000000000.4
|
265 |
+
2 1000000000000.2
|
266 |
+
2 1000000000000.4
|
267 |
+
2 1000000000000.2
|
268 |
+
2 1000000000000.4
|
269 |
+
2 1000000000000.2
|
270 |
+
2 1000000000000.4
|
271 |
+
2 1000000000000.2
|
272 |
+
2 1000000000000.4
|
273 |
+
2 1000000000000.2
|
274 |
+
2 1000000000000.4
|
275 |
+
2 1000000000000.2
|
276 |
+
2 1000000000000.4
|
277 |
+
2 1000000000000.2
|
278 |
+
2 1000000000000.4
|
279 |
+
2 1000000000000.2
|
280 |
+
2 1000000000000.4
|
281 |
+
2 1000000000000.2
|
282 |
+
2 1000000000000.4
|
283 |
+
2 1000000000000.2
|
284 |
+
2 1000000000000.4
|
285 |
+
2 1000000000000.2
|
286 |
+
2 1000000000000.4
|
287 |
+
2 1000000000000.2
|
288 |
+
2 1000000000000.4
|
289 |
+
2 1000000000000.2
|
290 |
+
2 1000000000000.4
|
291 |
+
2 1000000000000.2
|
292 |
+
2 1000000000000.4
|
293 |
+
2 1000000000000.2
|
294 |
+
2 1000000000000.4
|
295 |
+
2 1000000000000.2
|
296 |
+
2 1000000000000.4
|
297 |
+
2 1000000000000.2
|
298 |
+
2 1000000000000.4
|
299 |
+
2 1000000000000.2
|
300 |
+
2 1000000000000.4
|
301 |
+
2 1000000000000.2
|
302 |
+
2 1000000000000.4
|
303 |
+
2 1000000000000.2
|
304 |
+
2 1000000000000.4
|
305 |
+
2 1000000000000.2
|
306 |
+
2 1000000000000.4
|
307 |
+
2 1000000000000.2
|
308 |
+
2 1000000000000.4
|
309 |
+
2 1000000000000.2
|
310 |
+
2 1000000000000.4
|
311 |
+
2 1000000000000.2
|
312 |
+
2 1000000000000.4
|
313 |
+
2 1000000000000.2
|
314 |
+
2 1000000000000.4
|
315 |
+
2 1000000000000.2
|
316 |
+
2 1000000000000.4
|
317 |
+
2 1000000000000.2
|
318 |
+
2 1000000000000.4
|
319 |
+
2 1000000000000.2
|
320 |
+
2 1000000000000.4
|
321 |
+
2 1000000000000.2
|
322 |
+
2 1000000000000.4
|
323 |
+
2 1000000000000.2
|
324 |
+
2 1000000000000.4
|
325 |
+
2 1000000000000.2
|
326 |
+
2 1000000000000.4
|
327 |
+
2 1000000000000.2
|
328 |
+
2 1000000000000.4
|
329 |
+
2 1000000000000.2
|
330 |
+
2 1000000000000.4
|
331 |
+
2 1000000000000.2
|
332 |
+
2 1000000000000.4
|
333 |
+
2 1000000000000.2
|
334 |
+
2 1000000000000.4
|
335 |
+
2 1000000000000.2
|
336 |
+
2 1000000000000.4
|
337 |
+
2 1000000000000.2
|
338 |
+
2 1000000000000.4
|
339 |
+
2 1000000000000.2
|
340 |
+
2 1000000000000.4
|
341 |
+
2 1000000000000.2
|
342 |
+
2 1000000000000.4
|
343 |
+
2 1000000000000.2
|
344 |
+
2 1000000000000.4
|
345 |
+
2 1000000000000.2
|
346 |
+
2 1000000000000.4
|
347 |
+
2 1000000000000.2
|
348 |
+
2 1000000000000.4
|
349 |
+
2 1000000000000.2
|
350 |
+
2 1000000000000.4
|
351 |
+
2 1000000000000.2
|
352 |
+
2 1000000000000.4
|
353 |
+
2 1000000000000.2
|
354 |
+
2 1000000000000.4
|
355 |
+
2 1000000000000.2
|
356 |
+
2 1000000000000.4
|
357 |
+
2 1000000000000.2
|
358 |
+
2 1000000000000.4
|
359 |
+
2 1000000000000.2
|
360 |
+
2 1000000000000.4
|
361 |
+
2 1000000000000.2
|
362 |
+
2 1000000000000.4
|
363 |
+
2 1000000000000.2
|
364 |
+
2 1000000000000.4
|
365 |
+
2 1000000000000.2
|
366 |
+
2 1000000000000.4
|
367 |
+
2 1000000000000.2
|
368 |
+
2 1000000000000.4
|
369 |
+
2 1000000000000.2
|
370 |
+
2 1000000000000.4
|
371 |
+
2 1000000000000.2
|
372 |
+
2 1000000000000.4
|
373 |
+
2 1000000000000.2
|
374 |
+
2 1000000000000.4
|
375 |
+
2 1000000000000.2
|
376 |
+
2 1000000000000.4
|
377 |
+
2 1000000000000.2
|
378 |
+
2 1000000000000.4
|
379 |
+
2 1000000000000.2
|
380 |
+
2 1000000000000.4
|
381 |
+
2 1000000000000.2
|
382 |
+
2 1000000000000.4
|
383 |
+
2 1000000000000.2
|
384 |
+
2 1000000000000.4
|
385 |
+
2 1000000000000.2
|
386 |
+
2 1000000000000.4
|
387 |
+
2 1000000000000.2
|
388 |
+
2 1000000000000.4
|
389 |
+
2 1000000000000.2
|
390 |
+
2 1000000000000.4
|
391 |
+
2 1000000000000.2
|
392 |
+
2 1000000000000.4
|
393 |
+
2 1000000000000.2
|
394 |
+
2 1000000000000.4
|
395 |
+
2 1000000000000.2
|
396 |
+
2 1000000000000.4
|
397 |
+
2 1000000000000.2
|
398 |
+
2 1000000000000.4
|
399 |
+
2 1000000000000.2
|
400 |
+
2 1000000000000.4
|
401 |
+
2 1000000000000.2
|
402 |
+
2 1000000000000.4
|
403 |
+
2 1000000000000.2
|
404 |
+
2 1000000000000.4
|
405 |
+
2 1000000000000.2
|
406 |
+
2 1000000000000.4
|
407 |
+
2 1000000000000.2
|
408 |
+
2 1000000000000.4
|
409 |
+
2 1000000000000.2
|
410 |
+
2 1000000000000.4
|
411 |
+
2 1000000000000.2
|
412 |
+
2 1000000000000.4
|
413 |
+
2 1000000000000.2
|
414 |
+
2 1000000000000.4
|
415 |
+
2 1000000000000.2
|
416 |
+
2 1000000000000.4
|
417 |
+
2 1000000000000.2
|
418 |
+
2 1000000000000.4
|
419 |
+
2 1000000000000.2
|
420 |
+
2 1000000000000.4
|
421 |
+
2 1000000000000.2
|
422 |
+
2 1000000000000.4
|
423 |
+
2 1000000000000.2
|
424 |
+
2 1000000000000.4
|
425 |
+
2 1000000000000.2
|
426 |
+
2 1000000000000.4
|
427 |
+
2 1000000000000.2
|
428 |
+
2 1000000000000.4
|
429 |
+
2 1000000000000.2
|
430 |
+
2 1000000000000.4
|
431 |
+
2 1000000000000.2
|
432 |
+
2 1000000000000.4
|
433 |
+
2 1000000000000.2
|
434 |
+
2 1000000000000.4
|
435 |
+
2 1000000000000.2
|
436 |
+
2 1000000000000.4
|
437 |
+
2 1000000000000.2
|
438 |
+
2 1000000000000.4
|
439 |
+
2 1000000000000.2
|
440 |
+
2 1000000000000.4
|
441 |
+
2 1000000000000.2
|
442 |
+
2 1000000000000.4
|
443 |
+
2 1000000000000.2
|
444 |
+
2 1000000000000.4
|
445 |
+
2 1000000000000.2
|
446 |
+
2 1000000000000.4
|
447 |
+
2 1000000000000.2
|
448 |
+
2 1000000000000.4
|
449 |
+
2 1000000000000.2
|
450 |
+
2 1000000000000.4
|
451 |
+
2 1000000000000.2
|
452 |
+
2 1000000000000.4
|
453 |
+
2 1000000000000.2
|
454 |
+
2 1000000000000.4
|
455 |
+
2 1000000000000.2
|
456 |
+
2 1000000000000.4
|
457 |
+
2 1000000000000.2
|
458 |
+
2 1000000000000.4
|
459 |
+
2 1000000000000.2
|
460 |
+
2 1000000000000.4
|
461 |
+
2 1000000000000.2
|
462 |
+
2 1000000000000.4
|
463 |
+
3 1000000000000.5
|
464 |
+
3 1000000000000.4
|
465 |
+
3 1000000000000.6
|
466 |
+
3 1000000000000.4
|
467 |
+
3 1000000000000.6
|
468 |
+
3 1000000000000.4
|
469 |
+
3 1000000000000.6
|
470 |
+
3 1000000000000.4
|
471 |
+
3 1000000000000.6
|
472 |
+
3 1000000000000.4
|
473 |
+
3 1000000000000.6
|
474 |
+
3 1000000000000.4
|
475 |
+
3 1000000000000.6
|
476 |
+
3 1000000000000.4
|
477 |
+
3 1000000000000.6
|
478 |
+
3 1000000000000.4
|
479 |
+
3 1000000000000.6
|
480 |
+
3 1000000000000.4
|
481 |
+
3 1000000000000.6
|
482 |
+
3 1000000000000.4
|
483 |
+
3 1000000000000.6
|
484 |
+
3 1000000000000.4
|
485 |
+
3 1000000000000.6
|
486 |
+
3 1000000000000.4
|
487 |
+
3 1000000000000.6
|
488 |
+
3 1000000000000.4
|
489 |
+
3 1000000000000.6
|
490 |
+
3 1000000000000.4
|
491 |
+
3 1000000000000.6
|
492 |
+
3 1000000000000.4
|
493 |
+
3 1000000000000.6
|
494 |
+
3 1000000000000.4
|
495 |
+
3 1000000000000.6
|
496 |
+
3 1000000000000.4
|
497 |
+
3 1000000000000.6
|
498 |
+
3 1000000000000.4
|
499 |
+
3 1000000000000.6
|
500 |
+
3 1000000000000.4
|
501 |
+
3 1000000000000.6
|
502 |
+
3 1000000000000.4
|
503 |
+
3 1000000000000.6
|
504 |
+
3 1000000000000.4
|
505 |
+
3 1000000000000.6
|
506 |
+
3 1000000000000.4
|
507 |
+
3 1000000000000.6
|
508 |
+
3 1000000000000.4
|
509 |
+
3 1000000000000.6
|
510 |
+
3 1000000000000.4
|
511 |
+
3 1000000000000.6
|
512 |
+
3 1000000000000.4
|
513 |
+
3 1000000000000.6
|
514 |
+
3 1000000000000.4
|
515 |
+
3 1000000000000.6
|
516 |
+
3 1000000000000.4
|
517 |
+
3 1000000000000.6
|
518 |
+
3 1000000000000.4
|
519 |
+
3 1000000000000.6
|
520 |
+
3 1000000000000.4
|
521 |
+
3 1000000000000.6
|
522 |
+
3 1000000000000.4
|
523 |
+
3 1000000000000.6
|
524 |
+
3 1000000000000.4
|
525 |
+
3 1000000000000.6
|
526 |
+
3 1000000000000.4
|
527 |
+
3 1000000000000.6
|
528 |
+
3 1000000000000.4
|
529 |
+
3 1000000000000.6
|
530 |
+
3 1000000000000.4
|
531 |
+
3 1000000000000.6
|
532 |
+
3 1000000000000.4
|
533 |
+
3 1000000000000.6
|
534 |
+
3 1000000000000.4
|
535 |
+
3 1000000000000.6
|
536 |
+
3 1000000000000.4
|
537 |
+
3 1000000000000.6
|
538 |
+
3 1000000000000.4
|
539 |
+
3 1000000000000.6
|
540 |
+
3 1000000000000.4
|
541 |
+
3 1000000000000.6
|
542 |
+
3 1000000000000.4
|
543 |
+
3 1000000000000.6
|
544 |
+
3 1000000000000.4
|
545 |
+
3 1000000000000.6
|
546 |
+
3 1000000000000.4
|
547 |
+
3 1000000000000.6
|
548 |
+
3 1000000000000.4
|
549 |
+
3 1000000000000.6
|
550 |
+
3 1000000000000.4
|
551 |
+
3 1000000000000.6
|
552 |
+
3 1000000000000.4
|
553 |
+
3 1000000000000.6
|
554 |
+
3 1000000000000.4
|
555 |
+
3 1000000000000.6
|
556 |
+
3 1000000000000.4
|
557 |
+
3 1000000000000.6
|
558 |
+
3 1000000000000.4
|
559 |
+
3 1000000000000.6
|
560 |
+
3 1000000000000.4
|
561 |
+
3 1000000000000.6
|
562 |
+
3 1000000000000.4
|
563 |
+
3 1000000000000.6
|
564 |
+
3 1000000000000.4
|
565 |
+
3 1000000000000.6
|
566 |
+
3 1000000000000.4
|
567 |
+
3 1000000000000.6
|
568 |
+
3 1000000000000.4
|
569 |
+
3 1000000000000.6
|
570 |
+
3 1000000000000.4
|
571 |
+
3 1000000000000.6
|
572 |
+
3 1000000000000.4
|
573 |
+
3 1000000000000.6
|
574 |
+
3 1000000000000.4
|
575 |
+
3 1000000000000.6
|
576 |
+
3 1000000000000.4
|
577 |
+
3 1000000000000.6
|
578 |
+
3 1000000000000.4
|
579 |
+
3 1000000000000.6
|
580 |
+
3 1000000000000.4
|
581 |
+
3 1000000000000.6
|
582 |
+
3 1000000000000.4
|
583 |
+
3 1000000000000.6
|
584 |
+
3 1000000000000.4
|
585 |
+
3 1000000000000.6
|
586 |
+
3 1000000000000.4
|
587 |
+
3 1000000000000.6
|
588 |
+
3 1000000000000.4
|
589 |
+
3 1000000000000.6
|
590 |
+
3 1000000000000.4
|
591 |
+
3 1000000000000.6
|
592 |
+
3 1000000000000.4
|
593 |
+
3 1000000000000.6
|
594 |
+
3 1000000000000.4
|
595 |
+
3 1000000000000.6
|
596 |
+
3 1000000000000.4
|
597 |
+
3 1000000000000.6
|
598 |
+
3 1000000000000.4
|
599 |
+
3 1000000000000.6
|
600 |
+
3 1000000000000.4
|
601 |
+
3 1000000000000.6
|
602 |
+
3 1000000000000.4
|
603 |
+
3 1000000000000.6
|
604 |
+
3 1000000000000.4
|
605 |
+
3 1000000000000.6
|
606 |
+
3 1000000000000.4
|
607 |
+
3 1000000000000.6
|
608 |
+
3 1000000000000.4
|
609 |
+
3 1000000000000.6
|
610 |
+
3 1000000000000.4
|
611 |
+
3 1000000000000.6
|
612 |
+
3 1000000000000.4
|
613 |
+
3 1000000000000.6
|
614 |
+
3 1000000000000.4
|
615 |
+
3 1000000000000.6
|
616 |
+
3 1000000000000.4
|
617 |
+
3 1000000000000.6
|
618 |
+
3 1000000000000.4
|
619 |
+
3 1000000000000.6
|
620 |
+
3 1000000000000.4
|
621 |
+
3 1000000000000.6
|
622 |
+
3 1000000000000.4
|
623 |
+
3 1000000000000.6
|
624 |
+
3 1000000000000.4
|
625 |
+
3 1000000000000.6
|
626 |
+
3 1000000000000.4
|
627 |
+
3 1000000000000.6
|
628 |
+
3 1000000000000.4
|
629 |
+
3 1000000000000.6
|
630 |
+
3 1000000000000.4
|
631 |
+
3 1000000000000.6
|
632 |
+
3 1000000000000.4
|
633 |
+
3 1000000000000.6
|
634 |
+
3 1000000000000.4
|
635 |
+
3 1000000000000.6
|
636 |
+
3 1000000000000.4
|
637 |
+
3 1000000000000.6
|
638 |
+
3 1000000000000.4
|
639 |
+
3 1000000000000.6
|
640 |
+
3 1000000000000.4
|
641 |
+
3 1000000000000.6
|
642 |
+
3 1000000000000.4
|
643 |
+
3 1000000000000.6
|
644 |
+
3 1000000000000.4
|
645 |
+
3 1000000000000.6
|
646 |
+
3 1000000000000.4
|
647 |
+
3 1000000000000.6
|
648 |
+
3 1000000000000.4
|
649 |
+
3 1000000000000.6
|
650 |
+
3 1000000000000.4
|
651 |
+
3 1000000000000.6
|
652 |
+
3 1000000000000.4
|
653 |
+
3 1000000000000.6
|
654 |
+
3 1000000000000.4
|
655 |
+
3 1000000000000.6
|
656 |
+
3 1000000000000.4
|
657 |
+
3 1000000000000.6
|
658 |
+
3 1000000000000.4
|
659 |
+
3 1000000000000.6
|
660 |
+
3 1000000000000.4
|
661 |
+
3 1000000000000.6
|
662 |
+
3 1000000000000.4
|
663 |
+
3 1000000000000.6
|
664 |
+
4 1000000000000.3
|
665 |
+
4 1000000000000.2
|
666 |
+
4 1000000000000.4
|
667 |
+
4 1000000000000.2
|
668 |
+
4 1000000000000.4
|
669 |
+
4 1000000000000.2
|
670 |
+
4 1000000000000.4
|
671 |
+
4 1000000000000.2
|
672 |
+
4 1000000000000.4
|
673 |
+
4 1000000000000.2
|
674 |
+
4 1000000000000.4
|
675 |
+
4 1000000000000.2
|
676 |
+
4 1000000000000.4
|
677 |
+
4 1000000000000.2
|
678 |
+
4 1000000000000.4
|
679 |
+
4 1000000000000.2
|
680 |
+
4 1000000000000.4
|
681 |
+
4 1000000000000.2
|
682 |
+
4 1000000000000.4
|
683 |
+
4 1000000000000.2
|
684 |
+
4 1000000000000.4
|
685 |
+
4 1000000000000.2
|
686 |
+
4 1000000000000.4
|
687 |
+
4 1000000000000.2
|
688 |
+
4 1000000000000.4
|
689 |
+
4 1000000000000.2
|
690 |
+
4 1000000000000.4
|
691 |
+
4 1000000000000.2
|
692 |
+
4 1000000000000.4
|
693 |
+
4 1000000000000.2
|
694 |
+
4 1000000000000.4
|
695 |
+
4 1000000000000.2
|
696 |
+
4 1000000000000.4
|
697 |
+
4 1000000000000.2
|
698 |
+
4 1000000000000.4
|
699 |
+
4 1000000000000.2
|
700 |
+
4 1000000000000.4
|
701 |
+
4 1000000000000.2
|
702 |
+
4 1000000000000.4
|
703 |
+
4 1000000000000.2
|
704 |
+
4 1000000000000.4
|
705 |
+
4 1000000000000.2
|
706 |
+
4 1000000000000.4
|
707 |
+
4 1000000000000.2
|
708 |
+
4 1000000000000.4
|
709 |
+
4 1000000000000.2
|
710 |
+
4 1000000000000.4
|
711 |
+
4 1000000000000.2
|
712 |
+
4 1000000000000.4
|
713 |
+
4 1000000000000.2
|
714 |
+
4 1000000000000.4
|
715 |
+
4 1000000000000.2
|
716 |
+
4 1000000000000.4
|
717 |
+
4 1000000000000.2
|
718 |
+
4 1000000000000.4
|
719 |
+
4 1000000000000.2
|
720 |
+
4 1000000000000.4
|
721 |
+
4 1000000000000.2
|
722 |
+
4 1000000000000.4
|
723 |
+
4 1000000000000.2
|
724 |
+
4 1000000000000.4
|
725 |
+
4 1000000000000.2
|
726 |
+
4 1000000000000.4
|
727 |
+
4 1000000000000.2
|
728 |
+
4 1000000000000.4
|
729 |
+
4 1000000000000.2
|
730 |
+
4 1000000000000.4
|
731 |
+
4 1000000000000.2
|
732 |
+
4 1000000000000.4
|
733 |
+
4 1000000000000.2
|
734 |
+
4 1000000000000.4
|
735 |
+
4 1000000000000.2
|
736 |
+
4 1000000000000.4
|
737 |
+
4 1000000000000.2
|
738 |
+
4 1000000000000.4
|
739 |
+
4 1000000000000.2
|
740 |
+
4 1000000000000.4
|
741 |
+
4 1000000000000.2
|
742 |
+
4 1000000000000.4
|
743 |
+
4 1000000000000.2
|
744 |
+
4 1000000000000.4
|
745 |
+
4 1000000000000.2
|
746 |
+
4 1000000000000.4
|
747 |
+
4 1000000000000.2
|
748 |
+
4 1000000000000.4
|
749 |
+
4 1000000000000.2
|
750 |
+
4 1000000000000.4
|
751 |
+
4 1000000000000.2
|
752 |
+
4 1000000000000.4
|
753 |
+
4 1000000000000.2
|
754 |
+
4 1000000000000.4
|
755 |
+
4 1000000000000.2
|
756 |
+
4 1000000000000.4
|
757 |
+
4 1000000000000.2
|
758 |
+
4 1000000000000.4
|
759 |
+
4 1000000000000.2
|
760 |
+
4 1000000000000.4
|
761 |
+
4 1000000000000.2
|
762 |
+
4 1000000000000.4
|
763 |
+
4 1000000000000.2
|
764 |
+
4 1000000000000.4
|
765 |
+
4 1000000000000.2
|
766 |
+
4 1000000000000.4
|
767 |
+
4 1000000000000.2
|
768 |
+
4 1000000000000.4
|
769 |
+
4 1000000000000.2
|
770 |
+
4 1000000000000.4
|
771 |
+
4 1000000000000.2
|
772 |
+
4 1000000000000.4
|
773 |
+
4 1000000000000.2
|
774 |
+
4 1000000000000.4
|
775 |
+
4 1000000000000.2
|
776 |
+
4 1000000000000.4
|
777 |
+
4 1000000000000.2
|
778 |
+
4 1000000000000.4
|
779 |
+
4 1000000000000.2
|
780 |
+
4 1000000000000.4
|
781 |
+
4 1000000000000.2
|
782 |
+
4 1000000000000.4
|
783 |
+
4 1000000000000.2
|
784 |
+
4 1000000000000.4
|
785 |
+
4 1000000000000.2
|
786 |
+
4 1000000000000.4
|
787 |
+
4 1000000000000.2
|
788 |
+
4 1000000000000.4
|
789 |
+
4 1000000000000.2
|
790 |
+
4 1000000000000.4
|
791 |
+
4 1000000000000.2
|
792 |
+
4 1000000000000.4
|
793 |
+
4 1000000000000.2
|
794 |
+
4 1000000000000.4
|
795 |
+
4 1000000000000.2
|
796 |
+
4 1000000000000.4
|
797 |
+
4 1000000000000.2
|
798 |
+
4 1000000000000.4
|
799 |
+
4 1000000000000.2
|
800 |
+
4 1000000000000.4
|
801 |
+
4 1000000000000.2
|
802 |
+
4 1000000000000.4
|
803 |
+
4 1000000000000.2
|
804 |
+
4 1000000000000.4
|
805 |
+
4 1000000000000.2
|
806 |
+
4 1000000000000.4
|
807 |
+
4 1000000000000.2
|
808 |
+
4 1000000000000.4
|
809 |
+
4 1000000000000.2
|
810 |
+
4 1000000000000.4
|
811 |
+
4 1000000000000.2
|
812 |
+
4 1000000000000.4
|
813 |
+
4 1000000000000.2
|
814 |
+
4 1000000000000.4
|
815 |
+
4 1000000000000.2
|
816 |
+
4 1000000000000.4
|
817 |
+
4 1000000000000.2
|
818 |
+
4 1000000000000.4
|
819 |
+
4 1000000000000.2
|
820 |
+
4 1000000000000.4
|
821 |
+
4 1000000000000.2
|
822 |
+
4 1000000000000.4
|
823 |
+
4 1000000000000.2
|
824 |
+
4 1000000000000.4
|
825 |
+
4 1000000000000.2
|
826 |
+
4 1000000000000.4
|
827 |
+
4 1000000000000.2
|
828 |
+
4 1000000000000.4
|
829 |
+
4 1000000000000.2
|
830 |
+
4 1000000000000.4
|
831 |
+
4 1000000000000.2
|
832 |
+
4 1000000000000.4
|
833 |
+
4 1000000000000.2
|
834 |
+
4 1000000000000.4
|
835 |
+
4 1000000000000.2
|
836 |
+
4 1000000000000.4
|
837 |
+
4 1000000000000.2
|
838 |
+
4 1000000000000.4
|
839 |
+
4 1000000000000.2
|
840 |
+
4 1000000000000.4
|
841 |
+
4 1000000000000.2
|
842 |
+
4 1000000000000.4
|
843 |
+
4 1000000000000.2
|
844 |
+
4 1000000000000.4
|
845 |
+
4 1000000000000.2
|
846 |
+
4 1000000000000.4
|
847 |
+
4 1000000000000.2
|
848 |
+
4 1000000000000.4
|
849 |
+
4 1000000000000.2
|
850 |
+
4 1000000000000.4
|
851 |
+
4 1000000000000.2
|
852 |
+
4 1000000000000.4
|
853 |
+
4 1000000000000.2
|
854 |
+
4 1000000000000.4
|
855 |
+
4 1000000000000.2
|
856 |
+
4 1000000000000.4
|
857 |
+
4 1000000000000.2
|
858 |
+
4 1000000000000.4
|
859 |
+
4 1000000000000.2
|
860 |
+
4 1000000000000.4
|
861 |
+
4 1000000000000.2
|
862 |
+
4 1000000000000.4
|
863 |
+
4 1000000000000.2
|
864 |
+
4 1000000000000.4
|
865 |
+
5 1000000000000.5
|
866 |
+
5 1000000000000.4
|
867 |
+
5 1000000000000.6
|
868 |
+
5 1000000000000.4
|
869 |
+
5 1000000000000.6
|
870 |
+
5 1000000000000.4
|
871 |
+
5 1000000000000.6
|
872 |
+
5 1000000000000.4
|
873 |
+
5 1000000000000.6
|
874 |
+
5 1000000000000.4
|
875 |
+
5 1000000000000.6
|
876 |
+
5 1000000000000.4
|
877 |
+
5 1000000000000.6
|
878 |
+
5 1000000000000.4
|
879 |
+
5 1000000000000.6
|
880 |
+
5 1000000000000.4
|
881 |
+
5 1000000000000.6
|
882 |
+
5 1000000000000.4
|
883 |
+
5 1000000000000.6
|
884 |
+
5 1000000000000.4
|
885 |
+
5 1000000000000.6
|
886 |
+
5 1000000000000.4
|
887 |
+
5 1000000000000.6
|
888 |
+
5 1000000000000.4
|
889 |
+
5 1000000000000.6
|
890 |
+
5 1000000000000.4
|
891 |
+
5 1000000000000.6
|
892 |
+
5 1000000000000.4
|
893 |
+
5 1000000000000.6
|
894 |
+
5 1000000000000.4
|
895 |
+
5 1000000000000.6
|
896 |
+
5 1000000000000.4
|
897 |
+
5 1000000000000.6
|
898 |
+
5 1000000000000.4
|
899 |
+
5 1000000000000.6
|
900 |
+
5 1000000000000.4
|
901 |
+
5 1000000000000.6
|
902 |
+
5 1000000000000.4
|
903 |
+
5 1000000000000.6
|
904 |
+
5 1000000000000.4
|
905 |
+
5 1000000000000.6
|
906 |
+
5 1000000000000.4
|
907 |
+
5 1000000000000.6
|
908 |
+
5 1000000000000.4
|
909 |
+
5 1000000000000.6
|
910 |
+
5 1000000000000.4
|
911 |
+
5 1000000000000.6
|
912 |
+
5 1000000000000.4
|
913 |
+
5 1000000000000.6
|
914 |
+
5 1000000000000.4
|
915 |
+
5 1000000000000.6
|
916 |
+
5 1000000000000.4
|
917 |
+
5 1000000000000.6
|
918 |
+
5 1000000000000.4
|
919 |
+
5 1000000000000.6
|
920 |
+
5 1000000000000.4
|
921 |
+
5 1000000000000.6
|
922 |
+
5 1000000000000.4
|
923 |
+
5 1000000000000.6
|
924 |
+
5 1000000000000.4
|
925 |
+
5 1000000000000.6
|
926 |
+
5 1000000000000.4
|
927 |
+
5 1000000000000.6
|
928 |
+
5 1000000000000.4
|
929 |
+
5 1000000000000.6
|
930 |
+
5 1000000000000.4
|
931 |
+
5 1000000000000.6
|
932 |
+
5 1000000000000.4
|
933 |
+
5 1000000000000.6
|
934 |
+
5 1000000000000.4
|
935 |
+
5 1000000000000.6
|
936 |
+
5 1000000000000.4
|
937 |
+
5 1000000000000.6
|
938 |
+
5 1000000000000.4
|
939 |
+
5 1000000000000.6
|
940 |
+
5 1000000000000.4
|
941 |
+
5 1000000000000.6
|
942 |
+
5 1000000000000.4
|
943 |
+
5 1000000000000.6
|
944 |
+
5 1000000000000.4
|
945 |
+
5 1000000000000.6
|
946 |
+
5 1000000000000.4
|
947 |
+
5 1000000000000.6
|
948 |
+
5 1000000000000.4
|
949 |
+
5 1000000000000.6
|
950 |
+
5 1000000000000.4
|
951 |
+
5 1000000000000.6
|
952 |
+
5 1000000000000.4
|
953 |
+
5 1000000000000.6
|
954 |
+
5 1000000000000.4
|
955 |
+
5 1000000000000.6
|
956 |
+
5 1000000000000.4
|
957 |
+
5 1000000000000.6
|
958 |
+
5 1000000000000.4
|
959 |
+
5 1000000000000.6
|
960 |
+
5 1000000000000.4
|
961 |
+
5 1000000000000.6
|
962 |
+
5 1000000000000.4
|
963 |
+
5 1000000000000.6
|
964 |
+
5 1000000000000.4
|
965 |
+
5 1000000000000.6
|
966 |
+
5 1000000000000.4
|
967 |
+
5 1000000000000.6
|
968 |
+
5 1000000000000.4
|
969 |
+
5 1000000000000.6
|
970 |
+
5 1000000000000.4
|
971 |
+
5 1000000000000.6
|
972 |
+
5 1000000000000.4
|
973 |
+
5 1000000000000.6
|
974 |
+
5 1000000000000.4
|
975 |
+
5 1000000000000.6
|
976 |
+
5 1000000000000.4
|
977 |
+
5 1000000000000.6
|
978 |
+
5 1000000000000.4
|
979 |
+
5 1000000000000.6
|
980 |
+
5 1000000000000.4
|
981 |
+
5 1000000000000.6
|
982 |
+
5 1000000000000.4
|
983 |
+
5 1000000000000.6
|
984 |
+
5 1000000000000.4
|
985 |
+
5 1000000000000.6
|
986 |
+
5 1000000000000.4
|
987 |
+
5 1000000000000.6
|
988 |
+
5 1000000000000.4
|
989 |
+
5 1000000000000.6
|
990 |
+
5 1000000000000.4
|
991 |
+
5 1000000000000.6
|
992 |
+
5 1000000000000.4
|
993 |
+
5 1000000000000.6
|
994 |
+
5 1000000000000.4
|
995 |
+
5 1000000000000.6
|
996 |
+
5 1000000000000.4
|
997 |
+
5 1000000000000.6
|
998 |
+
5 1000000000000.4
|
999 |
+
5 1000000000000.6
|
1000 |
+
5 1000000000000.4
|
1001 |
+
5 1000000000000.6
|
1002 |
+
5 1000000000000.4
|
1003 |
+
5 1000000000000.6
|
1004 |
+
5 1000000000000.4
|
1005 |
+
5 1000000000000.6
|
1006 |
+
5 1000000000000.4
|
1007 |
+
5 1000000000000.6
|
1008 |
+
5 1000000000000.4
|
1009 |
+
5 1000000000000.6
|
1010 |
+
5 1000000000000.4
|
1011 |
+
5 1000000000000.6
|
1012 |
+
5 1000000000000.4
|
1013 |
+
5 1000000000000.6
|
1014 |
+
5 1000000000000.4
|
1015 |
+
5 1000000000000.6
|
1016 |
+
5 1000000000000.4
|
1017 |
+
5 1000000000000.6
|
1018 |
+
5 1000000000000.4
|
1019 |
+
5 1000000000000.6
|
1020 |
+
5 1000000000000.4
|
1021 |
+
5 1000000000000.6
|
1022 |
+
5 1000000000000.4
|
1023 |
+
5 1000000000000.6
|
1024 |
+
5 1000000000000.4
|
1025 |
+
5 1000000000000.6
|
1026 |
+
5 1000000000000.4
|
1027 |
+
5 1000000000000.6
|
1028 |
+
5 1000000000000.4
|
1029 |
+
5 1000000000000.6
|
1030 |
+
5 1000000000000.4
|
1031 |
+
5 1000000000000.6
|
1032 |
+
5 1000000000000.4
|
1033 |
+
5 1000000000000.6
|
1034 |
+
5 1000000000000.4
|
1035 |
+
5 1000000000000.6
|
1036 |
+
5 1000000000000.4
|
1037 |
+
5 1000000000000.6
|
1038 |
+
5 1000000000000.4
|
1039 |
+
5 1000000000000.6
|
1040 |
+
5 1000000000000.4
|
1041 |
+
5 1000000000000.6
|
1042 |
+
5 1000000000000.4
|
1043 |
+
5 1000000000000.6
|
1044 |
+
5 1000000000000.4
|
1045 |
+
5 1000000000000.6
|
1046 |
+
5 1000000000000.4
|
1047 |
+
5 1000000000000.6
|
1048 |
+
5 1000000000000.4
|
1049 |
+
5 1000000000000.6
|
1050 |
+
5 1000000000000.4
|
1051 |
+
5 1000000000000.6
|
1052 |
+
5 1000000000000.4
|
1053 |
+
5 1000000000000.6
|
1054 |
+
5 1000000000000.4
|
1055 |
+
5 1000000000000.6
|
1056 |
+
5 1000000000000.4
|
1057 |
+
5 1000000000000.6
|
1058 |
+
5 1000000000000.4
|
1059 |
+
5 1000000000000.6
|
1060 |
+
5 1000000000000.4
|
1061 |
+
5 1000000000000.6
|
1062 |
+
5 1000000000000.4
|
1063 |
+
5 1000000000000.6
|
1064 |
+
5 1000000000000.4
|
1065 |
+
5 1000000000000.6
|
1066 |
+
6 1000000000000.3
|
1067 |
+
6 1000000000000.2
|
1068 |
+
6 1000000000000.4
|
1069 |
+
6 1000000000000.2
|
1070 |
+
6 1000000000000.4
|
1071 |
+
6 1000000000000.2
|
1072 |
+
6 1000000000000.4
|
1073 |
+
6 1000000000000.2
|
1074 |
+
6 1000000000000.4
|
1075 |
+
6 1000000000000.2
|
1076 |
+
6 1000000000000.4
|
1077 |
+
6 1000000000000.2
|
1078 |
+
6 1000000000000.4
|
1079 |
+
6 1000000000000.2
|
1080 |
+
6 1000000000000.4
|
1081 |
+
6 1000000000000.2
|
1082 |
+
6 1000000000000.4
|
1083 |
+
6 1000000000000.2
|
1084 |
+
6 1000000000000.4
|
1085 |
+
6 1000000000000.2
|
1086 |
+
6 1000000000000.4
|
1087 |
+
6 1000000000000.2
|
1088 |
+
6 1000000000000.4
|
1089 |
+
6 1000000000000.2
|
1090 |
+
6 1000000000000.4
|
1091 |
+
6 1000000000000.2
|
1092 |
+
6 1000000000000.4
|
1093 |
+
6 1000000000000.2
|
1094 |
+
6 1000000000000.4
|
1095 |
+
6 1000000000000.2
|
1096 |
+
6 1000000000000.4
|
1097 |
+
6 1000000000000.2
|
1098 |
+
6 1000000000000.4
|
1099 |
+
6 1000000000000.2
|
1100 |
+
6 1000000000000.4
|
1101 |
+
6 1000000000000.2
|
1102 |
+
6 1000000000000.4
|
1103 |
+
6 1000000000000.2
|
1104 |
+
6 1000000000000.4
|
1105 |
+
6 1000000000000.2
|
1106 |
+
6 1000000000000.4
|
1107 |
+
6 1000000000000.2
|
1108 |
+
6 1000000000000.4
|
1109 |
+
6 1000000000000.2
|
1110 |
+
6 1000000000000.4
|
1111 |
+
6 1000000000000.2
|
1112 |
+
6 1000000000000.4
|
1113 |
+
6 1000000000000.2
|
1114 |
+
6 1000000000000.4
|
1115 |
+
6 1000000000000.2
|
1116 |
+
6 1000000000000.4
|
1117 |
+
6 1000000000000.2
|
1118 |
+
6 1000000000000.4
|
1119 |
+
6 1000000000000.2
|
1120 |
+
6 1000000000000.4
|
1121 |
+
6 1000000000000.2
|
1122 |
+
6 1000000000000.4
|
1123 |
+
6 1000000000000.2
|
1124 |
+
6 1000000000000.4
|
1125 |
+
6 1000000000000.2
|
1126 |
+
6 1000000000000.4
|
1127 |
+
6 1000000000000.2
|
1128 |
+
6 1000000000000.4
|
1129 |
+
6 1000000000000.2
|
1130 |
+
6 1000000000000.4
|
1131 |
+
6 1000000000000.2
|
1132 |
+
6 1000000000000.4
|
1133 |
+
6 1000000000000.2
|
1134 |
+
6 1000000000000.4
|
1135 |
+
6 1000000000000.2
|
1136 |
+
6 1000000000000.4
|
1137 |
+
6 1000000000000.2
|
1138 |
+
6 1000000000000.4
|
1139 |
+
6 1000000000000.2
|
1140 |
+
6 1000000000000.4
|
1141 |
+
6 1000000000000.2
|
1142 |
+
6 1000000000000.4
|
1143 |
+
6 1000000000000.2
|
1144 |
+
6 1000000000000.4
|
1145 |
+
6 1000000000000.2
|
1146 |
+
6 1000000000000.4
|
1147 |
+
6 1000000000000.2
|
1148 |
+
6 1000000000000.4
|
1149 |
+
6 1000000000000.2
|
1150 |
+
6 1000000000000.4
|
1151 |
+
6 1000000000000.2
|
1152 |
+
6 1000000000000.4
|
1153 |
+
6 1000000000000.2
|
1154 |
+
6 1000000000000.4
|
1155 |
+
6 1000000000000.2
|
1156 |
+
6 1000000000000.4
|
1157 |
+
6 1000000000000.2
|
1158 |
+
6 1000000000000.4
|
1159 |
+
6 1000000000000.2
|
1160 |
+
6 1000000000000.4
|
1161 |
+
6 1000000000000.2
|
1162 |
+
6 1000000000000.4
|
1163 |
+
6 1000000000000.2
|
1164 |
+
6 1000000000000.4
|
1165 |
+
6 1000000000000.2
|
1166 |
+
6 1000000000000.4
|
1167 |
+
6 1000000000000.2
|
1168 |
+
6 1000000000000.4
|
1169 |
+
6 1000000000000.2
|
1170 |
+
6 1000000000000.4
|
1171 |
+
6 1000000000000.2
|
1172 |
+
6 1000000000000.4
|
1173 |
+
6 1000000000000.2
|
1174 |
+
6 1000000000000.4
|
1175 |
+
6 1000000000000.2
|
1176 |
+
6 1000000000000.4
|
1177 |
+
6 1000000000000.2
|
1178 |
+
6 1000000000000.4
|
1179 |
+
6 1000000000000.2
|
1180 |
+
6 1000000000000.4
|
1181 |
+
6 1000000000000.2
|
1182 |
+
6 1000000000000.4
|
1183 |
+
6 1000000000000.2
|
1184 |
+
6 1000000000000.4
|
1185 |
+
6 1000000000000.2
|
1186 |
+
6 1000000000000.4
|
1187 |
+
6 1000000000000.2
|
1188 |
+
6 1000000000000.4
|
1189 |
+
6 1000000000000.2
|
1190 |
+
6 1000000000000.4
|
1191 |
+
6 1000000000000.2
|
1192 |
+
6 1000000000000.4
|
1193 |
+
6 1000000000000.2
|
1194 |
+
6 1000000000000.4
|
1195 |
+
6 1000000000000.2
|
1196 |
+
6 1000000000000.4
|
1197 |
+
6 1000000000000.2
|
1198 |
+
6 1000000000000.4
|
1199 |
+
6 1000000000000.2
|
1200 |
+
6 1000000000000.4
|
1201 |
+
6 1000000000000.2
|
1202 |
+
6 1000000000000.4
|
1203 |
+
6 1000000000000.2
|
1204 |
+
6 1000000000000.4
|
1205 |
+
6 1000000000000.2
|
1206 |
+
6 1000000000000.4
|
1207 |
+
6 1000000000000.2
|
1208 |
+
6 1000000000000.4
|
1209 |
+
6 1000000000000.2
|
1210 |
+
6 1000000000000.4
|
1211 |
+
6 1000000000000.2
|
1212 |
+
6 1000000000000.4
|
1213 |
+
6 1000000000000.2
|
1214 |
+
6 1000000000000.4
|
1215 |
+
6 1000000000000.2
|
1216 |
+
6 1000000000000.4
|
1217 |
+
6 1000000000000.2
|
1218 |
+
6 1000000000000.4
|
1219 |
+
6 1000000000000.2
|
1220 |
+
6 1000000000000.4
|
1221 |
+
6 1000000000000.2
|
1222 |
+
6 1000000000000.4
|
1223 |
+
6 1000000000000.2
|
1224 |
+
6 1000000000000.4
|
1225 |
+
6 1000000000000.2
|
1226 |
+
6 1000000000000.4
|
1227 |
+
6 1000000000000.2
|
1228 |
+
6 1000000000000.4
|
1229 |
+
6 1000000000000.2
|
1230 |
+
6 1000000000000.4
|
1231 |
+
6 1000000000000.2
|
1232 |
+
6 1000000000000.4
|
1233 |
+
6 1000000000000.2
|
1234 |
+
6 1000000000000.4
|
1235 |
+
6 1000000000000.2
|
1236 |
+
6 1000000000000.4
|
1237 |
+
6 1000000000000.2
|
1238 |
+
6 1000000000000.4
|
1239 |
+
6 1000000000000.2
|
1240 |
+
6 1000000000000.4
|
1241 |
+
6 1000000000000.2
|
1242 |
+
6 1000000000000.4
|
1243 |
+
6 1000000000000.2
|
1244 |
+
6 1000000000000.4
|
1245 |
+
6 1000000000000.2
|
1246 |
+
6 1000000000000.4
|
1247 |
+
6 1000000000000.2
|
1248 |
+
6 1000000000000.4
|
1249 |
+
6 1000000000000.2
|
1250 |
+
6 1000000000000.4
|
1251 |
+
6 1000000000000.2
|
1252 |
+
6 1000000000000.4
|
1253 |
+
6 1000000000000.2
|
1254 |
+
6 1000000000000.4
|
1255 |
+
6 1000000000000.2
|
1256 |
+
6 1000000000000.4
|
1257 |
+
6 1000000000000.2
|
1258 |
+
6 1000000000000.4
|
1259 |
+
6 1000000000000.2
|
1260 |
+
6 1000000000000.4
|
1261 |
+
6 1000000000000.2
|
1262 |
+
6 1000000000000.4
|
1263 |
+
6 1000000000000.2
|
1264 |
+
6 1000000000000.4
|
1265 |
+
6 1000000000000.2
|
1266 |
+
6 1000000000000.4
|
1267 |
+
7 1000000000000.5
|
1268 |
+
7 1000000000000.4
|
1269 |
+
7 1000000000000.6
|
1270 |
+
7 1000000000000.4
|
1271 |
+
7 1000000000000.6
|
1272 |
+
7 1000000000000.4
|
1273 |
+
7 1000000000000.6
|
1274 |
+
7 1000000000000.4
|
1275 |
+
7 1000000000000.6
|
1276 |
+
7 1000000000000.4
|
1277 |
+
7 1000000000000.6
|
1278 |
+
7 1000000000000.4
|
1279 |
+
7 1000000000000.6
|
1280 |
+
7 1000000000000.4
|
1281 |
+
7 1000000000000.6
|
1282 |
+
7 1000000000000.4
|
1283 |
+
7 1000000000000.6
|
1284 |
+
7 1000000000000.4
|
1285 |
+
7 1000000000000.6
|
1286 |
+
7 1000000000000.4
|
1287 |
+
7 1000000000000.6
|
1288 |
+
7 1000000000000.4
|
1289 |
+
7 1000000000000.6
|
1290 |
+
7 1000000000000.4
|
1291 |
+
7 1000000000000.6
|
1292 |
+
7 1000000000000.4
|
1293 |
+
7 1000000000000.6
|
1294 |
+
7 1000000000000.4
|
1295 |
+
7 1000000000000.6
|
1296 |
+
7 1000000000000.4
|
1297 |
+
7 1000000000000.6
|
1298 |
+
7 1000000000000.4
|
1299 |
+
7 1000000000000.6
|
1300 |
+
7 1000000000000.4
|
1301 |
+
7 1000000000000.6
|
1302 |
+
7 1000000000000.4
|
1303 |
+
7 1000000000000.6
|
1304 |
+
7 1000000000000.4
|
1305 |
+
7 1000000000000.6
|
1306 |
+
7 1000000000000.4
|
1307 |
+
7 1000000000000.6
|
1308 |
+
7 1000000000000.4
|
1309 |
+
7 1000000000000.6
|
1310 |
+
7 1000000000000.4
|
1311 |
+
7 1000000000000.6
|
1312 |
+
7 1000000000000.4
|
1313 |
+
7 1000000000000.6
|
1314 |
+
7 1000000000000.4
|
1315 |
+
7 1000000000000.6
|
1316 |
+
7 1000000000000.4
|
1317 |
+
7 1000000000000.6
|
1318 |
+
7 1000000000000.4
|
1319 |
+
7 1000000000000.6
|
1320 |
+
7 1000000000000.4
|
1321 |
+
7 1000000000000.6
|
1322 |
+
7 1000000000000.4
|
1323 |
+
7 1000000000000.6
|
1324 |
+
7 1000000000000.4
|
1325 |
+
7 1000000000000.6
|
1326 |
+
7 1000000000000.4
|
1327 |
+
7 1000000000000.6
|
1328 |
+
7 1000000000000.4
|
1329 |
+
7 1000000000000.6
|
1330 |
+
7 1000000000000.4
|
1331 |
+
7 1000000000000.6
|
1332 |
+
7 1000000000000.4
|
1333 |
+
7 1000000000000.6
|
1334 |
+
7 1000000000000.4
|
1335 |
+
7 1000000000000.6
|
1336 |
+
7 1000000000000.4
|
1337 |
+
7 1000000000000.6
|
1338 |
+
7 1000000000000.4
|
1339 |
+
7 1000000000000.6
|
1340 |
+
7 1000000000000.4
|
1341 |
+
7 1000000000000.6
|
1342 |
+
7 1000000000000.4
|
1343 |
+
7 1000000000000.6
|
1344 |
+
7 1000000000000.4
|
1345 |
+
7 1000000000000.6
|
1346 |
+
7 1000000000000.4
|
1347 |
+
7 1000000000000.6
|
1348 |
+
7 1000000000000.4
|
1349 |
+
7 1000000000000.6
|
1350 |
+
7 1000000000000.4
|
1351 |
+
7 1000000000000.6
|
1352 |
+
7 1000000000000.4
|
1353 |
+
7 1000000000000.6
|
1354 |
+
7 1000000000000.4
|
1355 |
+
7 1000000000000.6
|
1356 |
+
7 1000000000000.4
|
1357 |
+
7 1000000000000.6
|
1358 |
+
7 1000000000000.4
|
1359 |
+
7 1000000000000.6
|
1360 |
+
7 1000000000000.4
|
1361 |
+
7 1000000000000.6
|
1362 |
+
7 1000000000000.4
|
1363 |
+
7 1000000000000.6
|
1364 |
+
7 1000000000000.4
|
1365 |
+
7 1000000000000.6
|
1366 |
+
7 1000000000000.4
|
1367 |
+
7 1000000000000.6
|
1368 |
+
7 1000000000000.4
|
1369 |
+
7 1000000000000.6
|
1370 |
+
7 1000000000000.4
|
1371 |
+
7 1000000000000.6
|
1372 |
+
7 1000000000000.4
|
1373 |
+
7 1000000000000.6
|
1374 |
+
7 1000000000000.4
|
1375 |
+
7 1000000000000.6
|
1376 |
+
7 1000000000000.4
|
1377 |
+
7 1000000000000.6
|
1378 |
+
7 1000000000000.4
|
1379 |
+
7 1000000000000.6
|
1380 |
+
7 1000000000000.4
|
1381 |
+
7 1000000000000.6
|
1382 |
+
7 1000000000000.4
|
1383 |
+
7 1000000000000.6
|
1384 |
+
7 1000000000000.4
|
1385 |
+
7 1000000000000.6
|
1386 |
+
7 1000000000000.4
|
1387 |
+
7 1000000000000.6
|
1388 |
+
7 1000000000000.4
|
1389 |
+
7 1000000000000.6
|
1390 |
+
7 1000000000000.4
|
1391 |
+
7 1000000000000.6
|
1392 |
+
7 1000000000000.4
|
1393 |
+
7 1000000000000.6
|
1394 |
+
7 1000000000000.4
|
1395 |
+
7 1000000000000.6
|
1396 |
+
7 1000000000000.4
|
1397 |
+
7 1000000000000.6
|
1398 |
+
7 1000000000000.4
|
1399 |
+
7 1000000000000.6
|
1400 |
+
7 1000000000000.4
|
1401 |
+
7 1000000000000.6
|
1402 |
+
7 1000000000000.4
|
1403 |
+
7 1000000000000.6
|
1404 |
+
7 1000000000000.4
|
1405 |
+
7 1000000000000.6
|
1406 |
+
7 1000000000000.4
|
1407 |
+
7 1000000000000.6
|
1408 |
+
7 1000000000000.4
|
1409 |
+
7 1000000000000.6
|
1410 |
+
7 1000000000000.4
|
1411 |
+
7 1000000000000.6
|
1412 |
+
7 1000000000000.4
|
1413 |
+
7 1000000000000.6
|
1414 |
+
7 1000000000000.4
|
1415 |
+
7 1000000000000.6
|
1416 |
+
7 1000000000000.4
|
1417 |
+
7 1000000000000.6
|
1418 |
+
7 1000000000000.4
|
1419 |
+
7 1000000000000.6
|
1420 |
+
7 1000000000000.4
|
1421 |
+
7 1000000000000.6
|
1422 |
+
7 1000000000000.4
|
1423 |
+
7 1000000000000.6
|
1424 |
+
7 1000000000000.4
|
1425 |
+
7 1000000000000.6
|
1426 |
+
7 1000000000000.4
|
1427 |
+
7 1000000000000.6
|
1428 |
+
7 1000000000000.4
|
1429 |
+
7 1000000000000.6
|
1430 |
+
7 1000000000000.4
|
1431 |
+
7 1000000000000.6
|
1432 |
+
7 1000000000000.4
|
1433 |
+
7 1000000000000.6
|
1434 |
+
7 1000000000000.4
|
1435 |
+
7 1000000000000.6
|
1436 |
+
7 1000000000000.4
|
1437 |
+
7 1000000000000.6
|
1438 |
+
7 1000000000000.4
|
1439 |
+
7 1000000000000.6
|
1440 |
+
7 1000000000000.4
|
1441 |
+
7 1000000000000.6
|
1442 |
+
7 1000000000000.4
|
1443 |
+
7 1000000000000.6
|
1444 |
+
7 1000000000000.4
|
1445 |
+
7 1000000000000.6
|
1446 |
+
7 1000000000000.4
|
1447 |
+
7 1000000000000.6
|
1448 |
+
7 1000000000000.4
|
1449 |
+
7 1000000000000.6
|
1450 |
+
7 1000000000000.4
|
1451 |
+
7 1000000000000.6
|
1452 |
+
7 1000000000000.4
|
1453 |
+
7 1000000000000.6
|
1454 |
+
7 1000000000000.4
|
1455 |
+
7 1000000000000.6
|
1456 |
+
7 1000000000000.4
|
1457 |
+
7 1000000000000.6
|
1458 |
+
7 1000000000000.4
|
1459 |
+
7 1000000000000.6
|
1460 |
+
7 1000000000000.4
|
1461 |
+
7 1000000000000.6
|
1462 |
+
7 1000000000000.4
|
1463 |
+
7 1000000000000.6
|
1464 |
+
7 1000000000000.4
|
1465 |
+
7 1000000000000.6
|
1466 |
+
7 1000000000000.4
|
1467 |
+
7 1000000000000.6
|
1468 |
+
8 1000000000000.3
|
1469 |
+
8 1000000000000.2
|
1470 |
+
8 1000000000000.4
|
1471 |
+
8 1000000000000.2
|
1472 |
+
8 1000000000000.4
|
1473 |
+
8 1000000000000.2
|
1474 |
+
8 1000000000000.4
|
1475 |
+
8 1000000000000.2
|
1476 |
+
8 1000000000000.4
|
1477 |
+
8 1000000000000.2
|
1478 |
+
8 1000000000000.4
|
1479 |
+
8 1000000000000.2
|
1480 |
+
8 1000000000000.4
|
1481 |
+
8 1000000000000.2
|
1482 |
+
8 1000000000000.4
|
1483 |
+
8 1000000000000.2
|
1484 |
+
8 1000000000000.4
|
1485 |
+
8 1000000000000.2
|
1486 |
+
8 1000000000000.4
|
1487 |
+
8 1000000000000.2
|
1488 |
+
8 1000000000000.4
|
1489 |
+
8 1000000000000.2
|
1490 |
+
8 1000000000000.4
|
1491 |
+
8 1000000000000.2
|
1492 |
+
8 1000000000000.4
|
1493 |
+
8 1000000000000.2
|
1494 |
+
8 1000000000000.4
|
1495 |
+
8 1000000000000.2
|
1496 |
+
8 1000000000000.4
|
1497 |
+
8 1000000000000.2
|
1498 |
+
8 1000000000000.4
|
1499 |
+
8 1000000000000.2
|
1500 |
+
8 1000000000000.4
|
1501 |
+
8 1000000000000.2
|
1502 |
+
8 1000000000000.4
|
1503 |
+
8 1000000000000.2
|
1504 |
+
8 1000000000000.4
|
1505 |
+
8 1000000000000.2
|
1506 |
+
8 1000000000000.4
|
1507 |
+
8 1000000000000.2
|
1508 |
+
8 1000000000000.4
|
1509 |
+
8 1000000000000.2
|
1510 |
+
8 1000000000000.4
|
1511 |
+
8 1000000000000.2
|
1512 |
+
8 1000000000000.4
|
1513 |
+
8 1000000000000.2
|
1514 |
+
8 1000000000000.4
|
1515 |
+
8 1000000000000.2
|
1516 |
+
8 1000000000000.4
|
1517 |
+
8 1000000000000.2
|
1518 |
+
8 1000000000000.4
|
1519 |
+
8 1000000000000.2
|
1520 |
+
8 1000000000000.4
|
1521 |
+
8 1000000000000.2
|
1522 |
+
8 1000000000000.4
|
1523 |
+
8 1000000000000.2
|
1524 |
+
8 1000000000000.4
|
1525 |
+
8 1000000000000.2
|
1526 |
+
8 1000000000000.4
|
1527 |
+
8 1000000000000.2
|
1528 |
+
8 1000000000000.4
|
1529 |
+
8 1000000000000.2
|
1530 |
+
8 1000000000000.4
|
1531 |
+
8 1000000000000.2
|
1532 |
+
8 1000000000000.4
|
1533 |
+
8 1000000000000.2
|
1534 |
+
8 1000000000000.4
|
1535 |
+
8 1000000000000.2
|
1536 |
+
8 1000000000000.4
|
1537 |
+
8 1000000000000.2
|
1538 |
+
8 1000000000000.4
|
1539 |
+
8 1000000000000.2
|
1540 |
+
8 1000000000000.4
|
1541 |
+
8 1000000000000.2
|
1542 |
+
8 1000000000000.4
|
1543 |
+
8 1000000000000.2
|
1544 |
+
8 1000000000000.4
|
1545 |
+
8 1000000000000.2
|
1546 |
+
8 1000000000000.4
|
1547 |
+
8 1000000000000.2
|
1548 |
+
8 1000000000000.4
|
1549 |
+
8 1000000000000.2
|
1550 |
+
8 1000000000000.4
|
1551 |
+
8 1000000000000.2
|
1552 |
+
8 1000000000000.4
|
1553 |
+
8 1000000000000.2
|
1554 |
+
8 1000000000000.4
|
1555 |
+
8 1000000000000.2
|
1556 |
+
8 1000000000000.4
|
1557 |
+
8 1000000000000.2
|
1558 |
+
8 1000000000000.4
|
1559 |
+
8 1000000000000.2
|
1560 |
+
8 1000000000000.4
|
1561 |
+
8 1000000000000.2
|
1562 |
+
8 1000000000000.4
|
1563 |
+
8 1000000000000.2
|
1564 |
+
8 1000000000000.4
|
1565 |
+
8 1000000000000.2
|
1566 |
+
8 1000000000000.4
|
1567 |
+
8 1000000000000.2
|
1568 |
+
8 1000000000000.4
|
1569 |
+
8 1000000000000.2
|
1570 |
+
8 1000000000000.4
|
1571 |
+
8 1000000000000.2
|
1572 |
+
8 1000000000000.4
|
1573 |
+
8 1000000000000.2
|
1574 |
+
8 1000000000000.4
|
1575 |
+
8 1000000000000.2
|
1576 |
+
8 1000000000000.4
|
1577 |
+
8 1000000000000.2
|
1578 |
+
8 1000000000000.4
|
1579 |
+
8 1000000000000.2
|
1580 |
+
8 1000000000000.4
|
1581 |
+
8 1000000000000.2
|
1582 |
+
8 1000000000000.4
|
1583 |
+
8 1000000000000.2
|
1584 |
+
8 1000000000000.4
|
1585 |
+
8 1000000000000.2
|
1586 |
+
8 1000000000000.4
|
1587 |
+
8 1000000000000.2
|
1588 |
+
8 1000000000000.4
|
1589 |
+
8 1000000000000.2
|
1590 |
+
8 1000000000000.4
|
1591 |
+
8 1000000000000.2
|
1592 |
+
8 1000000000000.4
|
1593 |
+
8 1000000000000.2
|
1594 |
+
8 1000000000000.4
|
1595 |
+
8 1000000000000.2
|
1596 |
+
8 1000000000000.4
|
1597 |
+
8 1000000000000.2
|
1598 |
+
8 1000000000000.4
|
1599 |
+
8 1000000000000.2
|
1600 |
+
8 1000000000000.4
|
1601 |
+
8 1000000000000.2
|
1602 |
+
8 1000000000000.4
|
1603 |
+
8 1000000000000.2
|
1604 |
+
8 1000000000000.4
|
1605 |
+
8 1000000000000.2
|
1606 |
+
8 1000000000000.4
|
1607 |
+
8 1000000000000.2
|
1608 |
+
8 1000000000000.4
|
1609 |
+
8 1000000000000.2
|
1610 |
+
8 1000000000000.4
|
1611 |
+
8 1000000000000.2
|
1612 |
+
8 1000000000000.4
|
1613 |
+
8 1000000000000.2
|
1614 |
+
8 1000000000000.4
|
1615 |
+
8 1000000000000.2
|
1616 |
+
8 1000000000000.4
|
1617 |
+
8 1000000000000.2
|
1618 |
+
8 1000000000000.4
|
1619 |
+
8 1000000000000.2
|
1620 |
+
8 1000000000000.4
|
1621 |
+
8 1000000000000.2
|
1622 |
+
8 1000000000000.4
|
1623 |
+
8 1000000000000.2
|
1624 |
+
8 1000000000000.4
|
1625 |
+
8 1000000000000.2
|
1626 |
+
8 1000000000000.4
|
1627 |
+
8 1000000000000.2
|
1628 |
+
8 1000000000000.4
|
1629 |
+
8 1000000000000.2
|
1630 |
+
8 1000000000000.4
|
1631 |
+
8 1000000000000.2
|
1632 |
+
8 1000000000000.4
|
1633 |
+
8 1000000000000.2
|
1634 |
+
8 1000000000000.4
|
1635 |
+
8 1000000000000.2
|
1636 |
+
8 1000000000000.4
|
1637 |
+
8 1000000000000.2
|
1638 |
+
8 1000000000000.4
|
1639 |
+
8 1000000000000.2
|
1640 |
+
8 1000000000000.4
|
1641 |
+
8 1000000000000.2
|
1642 |
+
8 1000000000000.4
|
1643 |
+
8 1000000000000.2
|
1644 |
+
8 1000000000000.4
|
1645 |
+
8 1000000000000.2
|
1646 |
+
8 1000000000000.4
|
1647 |
+
8 1000000000000.2
|
1648 |
+
8 1000000000000.4
|
1649 |
+
8 1000000000000.2
|
1650 |
+
8 1000000000000.4
|
1651 |
+
8 1000000000000.2
|
1652 |
+
8 1000000000000.4
|
1653 |
+
8 1000000000000.2
|
1654 |
+
8 1000000000000.4
|
1655 |
+
8 1000000000000.2
|
1656 |
+
8 1000000000000.4
|
1657 |
+
8 1000000000000.2
|
1658 |
+
8 1000000000000.4
|
1659 |
+
8 1000000000000.2
|
1660 |
+
8 1000000000000.4
|
1661 |
+
8 1000000000000.2
|
1662 |
+
8 1000000000000.4
|
1663 |
+
8 1000000000000.2
|
1664 |
+
8 1000000000000.4
|
1665 |
+
8 1000000000000.2
|
1666 |
+
8 1000000000000.4
|
1667 |
+
8 1000000000000.2
|
1668 |
+
8 1000000000000.4
|
1669 |
+
9 1000000000000.5
|
1670 |
+
9 1000000000000.4
|
1671 |
+
9 1000000000000.6
|
1672 |
+
9 1000000000000.4
|
1673 |
+
9 1000000000000.6
|
1674 |
+
9 1000000000000.4
|
1675 |
+
9 1000000000000.6
|
1676 |
+
9 1000000000000.4
|
1677 |
+
9 1000000000000.6
|
1678 |
+
9 1000000000000.4
|
1679 |
+
9 1000000000000.6
|
1680 |
+
9 1000000000000.4
|
1681 |
+
9 1000000000000.6
|
1682 |
+
9 1000000000000.4
|
1683 |
+
9 1000000000000.6
|
1684 |
+
9 1000000000000.4
|
1685 |
+
9 1000000000000.6
|
1686 |
+
9 1000000000000.4
|
1687 |
+
9 1000000000000.6
|
1688 |
+
9 1000000000000.4
|
1689 |
+
9 1000000000000.6
|
1690 |
+
9 1000000000000.4
|
1691 |
+
9 1000000000000.6
|
1692 |
+
9 1000000000000.4
|
1693 |
+
9 1000000000000.6
|
1694 |
+
9 1000000000000.4
|
1695 |
+
9 1000000000000.6
|
1696 |
+
9 1000000000000.4
|
1697 |
+
9 1000000000000.6
|
1698 |
+
9 1000000000000.4
|
1699 |
+
9 1000000000000.6
|
1700 |
+
9 1000000000000.4
|
1701 |
+
9 1000000000000.6
|
1702 |
+
9 1000000000000.4
|
1703 |
+
9 1000000000000.6
|
1704 |
+
9 1000000000000.4
|
1705 |
+
9 1000000000000.6
|
1706 |
+
9 1000000000000.4
|
1707 |
+
9 1000000000000.6
|
1708 |
+
9 1000000000000.4
|
1709 |
+
9 1000000000000.6
|
1710 |
+
9 1000000000000.4
|
1711 |
+
9 1000000000000.6
|
1712 |
+
9 1000000000000.4
|
1713 |
+
9 1000000000000.6
|
1714 |
+
9 1000000000000.4
|
1715 |
+
9 1000000000000.6
|
1716 |
+
9 1000000000000.4
|
1717 |
+
9 1000000000000.6
|
1718 |
+
9 1000000000000.4
|
1719 |
+
9 1000000000000.6
|
1720 |
+
9 1000000000000.4
|
1721 |
+
9 1000000000000.6
|
1722 |
+
9 1000000000000.4
|
1723 |
+
9 1000000000000.6
|
1724 |
+
9 1000000000000.4
|
1725 |
+
9 1000000000000.6
|
1726 |
+
9 1000000000000.4
|
1727 |
+
9 1000000000000.6
|
1728 |
+
9 1000000000000.4
|
1729 |
+
9 1000000000000.6
|
1730 |
+
9 1000000000000.4
|
1731 |
+
9 1000000000000.6
|
1732 |
+
9 1000000000000.4
|
1733 |
+
9 1000000000000.6
|
1734 |
+
9 1000000000000.4
|
1735 |
+
9 1000000000000.6
|
1736 |
+
9 1000000000000.4
|
1737 |
+
9 1000000000000.6
|
1738 |
+
9 1000000000000.4
|
1739 |
+
9 1000000000000.6
|
1740 |
+
9 1000000000000.4
|
1741 |
+
9 1000000000000.6
|
1742 |
+
9 1000000000000.4
|
1743 |
+
9 1000000000000.6
|
1744 |
+
9 1000000000000.4
|
1745 |
+
9 1000000000000.6
|
1746 |
+
9 1000000000000.4
|
1747 |
+
9 1000000000000.6
|
1748 |
+
9 1000000000000.4
|
1749 |
+
9 1000000000000.6
|
1750 |
+
9 1000000000000.4
|
1751 |
+
9 1000000000000.6
|
1752 |
+
9 1000000000000.4
|
1753 |
+
9 1000000000000.6
|
1754 |
+
9 1000000000000.4
|
1755 |
+
9 1000000000000.6
|
1756 |
+
9 1000000000000.4
|
1757 |
+
9 1000000000000.6
|
1758 |
+
9 1000000000000.4
|
1759 |
+
9 1000000000000.6
|
1760 |
+
9 1000000000000.4
|
1761 |
+
9 1000000000000.6
|
1762 |
+
9 1000000000000.4
|
1763 |
+
9 1000000000000.6
|
1764 |
+
9 1000000000000.4
|
1765 |
+
9 1000000000000.6
|
1766 |
+
9 1000000000000.4
|
1767 |
+
9 1000000000000.6
|
1768 |
+
9 1000000000000.4
|
1769 |
+
9 1000000000000.6
|
1770 |
+
9 1000000000000.4
|
1771 |
+
9 1000000000000.6
|
1772 |
+
9 1000000000000.4
|
1773 |
+
9 1000000000000.6
|
1774 |
+
9 1000000000000.4
|
1775 |
+
9 1000000000000.6
|
1776 |
+
9 1000000000000.4
|
1777 |
+
9 1000000000000.6
|
1778 |
+
9 1000000000000.4
|
1779 |
+
9 1000000000000.6
|
1780 |
+
9 1000000000000.4
|
1781 |
+
9 1000000000000.6
|
1782 |
+
9 1000000000000.4
|
1783 |
+
9 1000000000000.6
|
1784 |
+
9 1000000000000.4
|
1785 |
+
9 1000000000000.6
|
1786 |
+
9 1000000000000.4
|
1787 |
+
9 1000000000000.6
|
1788 |
+
9 1000000000000.4
|
1789 |
+
9 1000000000000.6
|
1790 |
+
9 1000000000000.4
|
1791 |
+
9 1000000000000.6
|
1792 |
+
9 1000000000000.4
|
1793 |
+
9 1000000000000.6
|
1794 |
+
9 1000000000000.4
|
1795 |
+
9 1000000000000.6
|
1796 |
+
9 1000000000000.4
|
1797 |
+
9 1000000000000.6
|
1798 |
+
9 1000000000000.4
|
1799 |
+
9 1000000000000.6
|
1800 |
+
9 1000000000000.4
|
1801 |
+
9 1000000000000.6
|
1802 |
+
9 1000000000000.4
|
1803 |
+
9 1000000000000.6
|
1804 |
+
9 1000000000000.4
|
1805 |
+
9 1000000000000.6
|
1806 |
+
9 1000000000000.4
|
1807 |
+
9 1000000000000.6
|
1808 |
+
9 1000000000000.4
|
1809 |
+
9 1000000000000.6
|
1810 |
+
9 1000000000000.4
|
1811 |
+
9 1000000000000.6
|
1812 |
+
9 1000000000000.4
|
1813 |
+
9 1000000000000.6
|
1814 |
+
9 1000000000000.4
|
1815 |
+
9 1000000000000.6
|
1816 |
+
9 1000000000000.4
|
1817 |
+
9 1000000000000.6
|
1818 |
+
9 1000000000000.4
|
1819 |
+
9 1000000000000.6
|
1820 |
+
9 1000000000000.4
|
1821 |
+
9 1000000000000.6
|
1822 |
+
9 1000000000000.4
|
1823 |
+
9 1000000000000.6
|
1824 |
+
9 1000000000000.4
|
1825 |
+
9 1000000000000.6
|
1826 |
+
9 1000000000000.4
|
1827 |
+
9 1000000000000.6
|
1828 |
+
9 1000000000000.4
|
1829 |
+
9 1000000000000.6
|
1830 |
+
9 1000000000000.4
|
1831 |
+
9 1000000000000.6
|
1832 |
+
9 1000000000000.4
|
1833 |
+
9 1000000000000.6
|
1834 |
+
9 1000000000000.4
|
1835 |
+
9 1000000000000.6
|
1836 |
+
9 1000000000000.4
|
1837 |
+
9 1000000000000.6
|
1838 |
+
9 1000000000000.4
|
1839 |
+
9 1000000000000.6
|
1840 |
+
9 1000000000000.4
|
1841 |
+
9 1000000000000.6
|
1842 |
+
9 1000000000000.4
|
1843 |
+
9 1000000000000.6
|
1844 |
+
9 1000000000000.4
|
1845 |
+
9 1000000000000.6
|
1846 |
+
9 1000000000000.4
|
1847 |
+
9 1000000000000.6
|
1848 |
+
9 1000000000000.4
|
1849 |
+
9 1000000000000.6
|
1850 |
+
9 1000000000000.4
|
1851 |
+
9 1000000000000.6
|
1852 |
+
9 1000000000000.4
|
1853 |
+
9 1000000000000.6
|
1854 |
+
9 1000000000000.4
|
1855 |
+
9 1000000000000.6
|
1856 |
+
9 1000000000000.4
|
1857 |
+
9 1000000000000.6
|
1858 |
+
9 1000000000000.4
|
1859 |
+
9 1000000000000.6
|
1860 |
+
9 1000000000000.4
|
1861 |
+
9 1000000000000.6
|
1862 |
+
9 1000000000000.4
|
1863 |
+
9 1000000000000.6
|
1864 |
+
9 1000000000000.4
|
1865 |
+
9 1000000000000.6
|
1866 |
+
9 1000000000000.4
|
1867 |
+
9 1000000000000.6
|
1868 |
+
9 1000000000000.4
|
1869 |
+
9 1000000000000.6
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_anova/SmLs09.dat
ADDED
The diff for this file is too large to render.
See raw diff
|
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/nist_linregress/Norris.dat
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
NIST/ITL StRD
|
2 |
+
Dataset Name: Norris (Norris.dat)
|
3 |
+
|
4 |
+
File Format: ASCII
|
5 |
+
Certified Values (lines 31 to 46)
|
6 |
+
Data (lines 61 to 96)
|
7 |
+
|
8 |
+
Procedure: Linear Least Squares Regression
|
9 |
+
|
10 |
+
Reference: Norris, J., NIST.
|
11 |
+
Calibration of Ozone Monitors.
|
12 |
+
|
13 |
+
Data: 1 Response Variable (y)
|
14 |
+
1 Predictor Variable (x)
|
15 |
+
36 Observations
|
16 |
+
Lower Level of Difficulty
|
17 |
+
Observed Data
|
18 |
+
|
19 |
+
Model: Linear Class
|
20 |
+
2 Parameters (B0,B1)
|
21 |
+
|
22 |
+
y = B0 + B1*x + e
|
23 |
+
|
24 |
+
|
25 |
+
|
26 |
+
Certified Regression Statistics
|
27 |
+
|
28 |
+
Standard Deviation
|
29 |
+
Parameter Estimate of Estimate
|
30 |
+
|
31 |
+
B0 -0.262323073774029 0.232818234301152
|
32 |
+
B1 1.00211681802045 0.429796848199937E-03
|
33 |
+
|
34 |
+
Residual
|
35 |
+
Standard Deviation 0.884796396144373
|
36 |
+
|
37 |
+
R-Squared 0.999993745883712
|
38 |
+
|
39 |
+
|
40 |
+
Certified Analysis of Variance Table
|
41 |
+
|
42 |
+
Source of Degrees of Sums of Mean
|
43 |
+
Variation Freedom Squares Squares F Statistic
|
44 |
+
|
45 |
+
Regression 1 4255954.13232369 4255954.13232369 5436385.54079785
|
46 |
+
Residual 34 26.6173985294224 0.782864662630069
|
47 |
+
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
Data: y x
|
61 |
+
0.1 0.2
|
62 |
+
338.8 337.4
|
63 |
+
118.1 118.2
|
64 |
+
888.0 884.6
|
65 |
+
9.2 10.1
|
66 |
+
228.1 226.5
|
67 |
+
668.5 666.3
|
68 |
+
998.5 996.3
|
69 |
+
449.1 448.6
|
70 |
+
778.9 777.0
|
71 |
+
559.2 558.2
|
72 |
+
0.3 0.4
|
73 |
+
0.1 0.6
|
74 |
+
778.1 775.5
|
75 |
+
668.8 666.9
|
76 |
+
339.3 338.0
|
77 |
+
448.9 447.5
|
78 |
+
10.8 11.6
|
79 |
+
557.7 556.0
|
80 |
+
228.3 228.1
|
81 |
+
998.0 995.8
|
82 |
+
888.8 887.6
|
83 |
+
119.6 120.2
|
84 |
+
0.3 0.3
|
85 |
+
0.6 0.3
|
86 |
+
557.6 556.8
|
87 |
+
339.3 339.1
|
88 |
+
888.0 887.2
|
89 |
+
998.5 999.0
|
90 |
+
778.9 779.0
|
91 |
+
10.2 11.1
|
92 |
+
117.6 118.3
|
93 |
+
228.9 229.2
|
94 |
+
668.4 669.1
|
95 |
+
449.2 448.9
|
96 |
+
0.2 0.5
|
97 |
+
|
venv/lib/python3.10/site-packages/scipy/stats/tests/data/studentized_range_mpmath_ref.json
ADDED
@@ -0,0 +1,1499 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"COMMENT": "!!!!!! THIS FILE WAS AUTOGENERATED BY RUNNING `python studentized_range_mpmath_ref.py` !!!!!!",
|
3 |
+
"moment_data": [
|
4 |
+
{
|
5 |
+
"src_case": {
|
6 |
+
"m": 0,
|
7 |
+
"k": 3,
|
8 |
+
"v": 10,
|
9 |
+
"expected_atol": 1e-09,
|
10 |
+
"expected_rtol": 1e-09
|
11 |
+
},
|
12 |
+
"mp_result": 1.0
|
13 |
+
},
|
14 |
+
{
|
15 |
+
"src_case": {
|
16 |
+
"m": 1,
|
17 |
+
"k": 3,
|
18 |
+
"v": 10,
|
19 |
+
"expected_atol": 1e-09,
|
20 |
+
"expected_rtol": 1e-09
|
21 |
+
},
|
22 |
+
"mp_result": 1.8342745127927962
|
23 |
+
},
|
24 |
+
{
|
25 |
+
"src_case": {
|
26 |
+
"m": 2,
|
27 |
+
"k": 3,
|
28 |
+
"v": 10,
|
29 |
+
"expected_atol": 1e-09,
|
30 |
+
"expected_rtol": 1e-09
|
31 |
+
},
|
32 |
+
"mp_result": 4.567483357831711
|
33 |
+
},
|
34 |
+
{
|
35 |
+
"src_case": {
|
36 |
+
"m": 3,
|
37 |
+
"k": 3,
|
38 |
+
"v": 10,
|
39 |
+
"expected_atol": 1e-09,
|
40 |
+
"expected_rtol": 1e-09
|
41 |
+
},
|
42 |
+
"mp_result": 14.412156886227011
|
43 |
+
},
|
44 |
+
{
|
45 |
+
"src_case": {
|
46 |
+
"m": 4,
|
47 |
+
"k": 3,
|
48 |
+
"v": 10,
|
49 |
+
"expected_atol": 1e-09,
|
50 |
+
"expected_rtol": 1e-09
|
51 |
+
},
|
52 |
+
"mp_result": 56.012250366720444
|
53 |
+
}
|
54 |
+
],
|
55 |
+
"cdf_data": [
|
56 |
+
{
|
57 |
+
"src_case": {
|
58 |
+
"q": 0.1,
|
59 |
+
"k": 3,
|
60 |
+
"v": 3,
|
61 |
+
"expected_atol": 1e-11,
|
62 |
+
"expected_rtol": 1e-11
|
63 |
+
},
|
64 |
+
"mp_result": 0.0027502772229359594
|
65 |
+
},
|
66 |
+
{
|
67 |
+
"src_case": {
|
68 |
+
"q": 0.1,
|
69 |
+
"k": 10,
|
70 |
+
"v": 10,
|
71 |
+
"expected_atol": 1e-11,
|
72 |
+
"expected_rtol": 1e-11
|
73 |
+
},
|
74 |
+
"mp_result": 2.8544145010066327e-12
|
75 |
+
},
|
76 |
+
{
|
77 |
+
"src_case": {
|
78 |
+
"q": 0.1,
|
79 |
+
"k": 3,
|
80 |
+
"v": 10,
|
81 |
+
"expected_atol": 1e-11,
|
82 |
+
"expected_rtol": 1e-11
|
83 |
+
},
|
84 |
+
"mp_result": 0.0027520560662338336
|
85 |
+
},
|
86 |
+
{
|
87 |
+
"src_case": {
|
88 |
+
"q": 0.1,
|
89 |
+
"k": 10,
|
90 |
+
"v": 100,
|
91 |
+
"expected_atol": 1e-11,
|
92 |
+
"expected_rtol": 1e-11
|
93 |
+
},
|
94 |
+
"mp_result": 9.39089126131273e-13
|
95 |
+
},
|
96 |
+
{
|
97 |
+
"src_case": {
|
98 |
+
"q": 0.1,
|
99 |
+
"k": 3,
|
100 |
+
"v": 20,
|
101 |
+
"expected_atol": 1e-11,
|
102 |
+
"expected_rtol": 1e-11
|
103 |
+
},
|
104 |
+
"mp_result": 0.002752437649536182
|
105 |
+
},
|
106 |
+
{
|
107 |
+
"src_case": {
|
108 |
+
"q": 0.1,
|
109 |
+
"k": 10,
|
110 |
+
"v": 50,
|
111 |
+
"expected_atol": 1e-11,
|
112 |
+
"expected_rtol": 1e-11
|
113 |
+
},
|
114 |
+
"mp_result": 1.0862189999210748e-12
|
115 |
+
},
|
116 |
+
{
|
117 |
+
"src_case": {
|
118 |
+
"q": 0.1,
|
119 |
+
"k": 3,
|
120 |
+
"v": 120,
|
121 |
+
"expected_atol": 1e-11,
|
122 |
+
"expected_rtol": 1e-11
|
123 |
+
},
|
124 |
+
"mp_result": 0.002752755744313648
|
125 |
+
},
|
126 |
+
{
|
127 |
+
"src_case": {
|
128 |
+
"q": 0.1,
|
129 |
+
"k": 3,
|
130 |
+
"v": 100,
|
131 |
+
"expected_atol": 1e-11,
|
132 |
+
"expected_rtol": 1e-11
|
133 |
+
},
|
134 |
+
"mp_result": 0.0027527430186246545
|
135 |
+
},
|
136 |
+
{
|
137 |
+
"src_case": {
|
138 |
+
"q": 0.1,
|
139 |
+
"k": 3,
|
140 |
+
"v": 50,
|
141 |
+
"expected_atol": 1e-11,
|
142 |
+
"expected_rtol": 1e-11
|
143 |
+
},
|
144 |
+
"mp_result": 0.002752666667812431
|
145 |
+
},
|
146 |
+
{
|
147 |
+
"src_case": {
|
148 |
+
"q": 0.1,
|
149 |
+
"k": 20,
|
150 |
+
"v": 10,
|
151 |
+
"expected_atol": 1e-11,
|
152 |
+
"expected_rtol": 1e-11
|
153 |
+
},
|
154 |
+
"mp_result": 2.505275157135514e-24
|
155 |
+
},
|
156 |
+
{
|
157 |
+
"src_case": {
|
158 |
+
"q": 0.1,
|
159 |
+
"k": 20,
|
160 |
+
"v": 20,
|
161 |
+
"expected_atol": 1e-11,
|
162 |
+
"expected_rtol": 1e-11
|
163 |
+
},
|
164 |
+
"mp_result": 3.8546698113384126e-25
|
165 |
+
},
|
166 |
+
{
|
167 |
+
"src_case": {
|
168 |
+
"q": 0.1,
|
169 |
+
"k": 10,
|
170 |
+
"v": 3,
|
171 |
+
"expected_atol": 1e-11,
|
172 |
+
"expected_rtol": 1e-11
|
173 |
+
},
|
174 |
+
"mp_result": 1.7362668562706085e-11
|
175 |
+
},
|
176 |
+
{
|
177 |
+
"src_case": {
|
178 |
+
"q": 0.1,
|
179 |
+
"k": 20,
|
180 |
+
"v": 50,
|
181 |
+
"expected_atol": 1e-11,
|
182 |
+
"expected_rtol": 1e-11
|
183 |
+
},
|
184 |
+
"mp_result": 5.571947730052616e-26
|
185 |
+
},
|
186 |
+
{
|
187 |
+
"src_case": {
|
188 |
+
"q": 0.1,
|
189 |
+
"k": 20,
|
190 |
+
"v": 100,
|
191 |
+
"expected_atol": 1e-11,
|
192 |
+
"expected_rtol": 1e-11
|
193 |
+
},
|
194 |
+
"mp_result": 2.032619249089036e-27
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"src_case": {
|
198 |
+
"q": 0.1,
|
199 |
+
"k": 20,
|
200 |
+
"v": 3,
|
201 |
+
"expected_atol": 1e-11,
|
202 |
+
"expected_rtol": 1e-11
|
203 |
+
},
|
204 |
+
"mp_result": 9.539763646681808e-22
|
205 |
+
},
|
206 |
+
{
|
207 |
+
"src_case": {
|
208 |
+
"q": 0.1,
|
209 |
+
"k": 10,
|
210 |
+
"v": 20,
|
211 |
+
"expected_atol": 1e-11,
|
212 |
+
"expected_rtol": 1e-11
|
213 |
+
},
|
214 |
+
"mp_result": 1.618313512511099e-12
|
215 |
+
},
|
216 |
+
{
|
217 |
+
"src_case": {
|
218 |
+
"q": 0.1,
|
219 |
+
"k": 20,
|
220 |
+
"v": 120,
|
221 |
+
"expected_atol": 1e-11,
|
222 |
+
"expected_rtol": 1e-11
|
223 |
+
},
|
224 |
+
"mp_result": 4.919231733354114e-28
|
225 |
+
},
|
226 |
+
{
|
227 |
+
"src_case": {
|
228 |
+
"q": 0.1,
|
229 |
+
"k": 10,
|
230 |
+
"v": 120,
|
231 |
+
"expected_atol": 1e-11,
|
232 |
+
"expected_rtol": 1e-11
|
233 |
+
},
|
234 |
+
"mp_result": 9.159348906295542e-13
|
235 |
+
},
|
236 |
+
{
|
237 |
+
"src_case": {
|
238 |
+
"q": 1,
|
239 |
+
"k": 3,
|
240 |
+
"v": 3,
|
241 |
+
"expected_atol": 1e-11,
|
242 |
+
"expected_rtol": 1e-11
|
243 |
+
},
|
244 |
+
"mp_result": 0.22331624289542043
|
245 |
+
},
|
246 |
+
{
|
247 |
+
"src_case": {
|
248 |
+
"q": 1,
|
249 |
+
"k": 3,
|
250 |
+
"v": 50,
|
251 |
+
"expected_atol": 1e-11,
|
252 |
+
"expected_rtol": 1e-11
|
253 |
+
},
|
254 |
+
"mp_result": 0.2395624637676257
|
255 |
+
},
|
256 |
+
{
|
257 |
+
"src_case": {
|
258 |
+
"q": 1,
|
259 |
+
"k": 3,
|
260 |
+
"v": 10,
|
261 |
+
"expected_atol": 1e-11,
|
262 |
+
"expected_rtol": 1e-11
|
263 |
+
},
|
264 |
+
"mp_result": 0.23510918942128056
|
265 |
+
},
|
266 |
+
{
|
267 |
+
"src_case": {
|
268 |
+
"q": 1,
|
269 |
+
"k": 3,
|
270 |
+
"v": 20,
|
271 |
+
"expected_atol": 1e-11,
|
272 |
+
"expected_rtol": 1e-11
|
273 |
+
},
|
274 |
+
"mp_result": 0.23786536230099864
|
275 |
+
},
|
276 |
+
{
|
277 |
+
"src_case": {
|
278 |
+
"q": 1,
|
279 |
+
"k": 10,
|
280 |
+
"v": 50,
|
281 |
+
"expected_atol": 1e-11,
|
282 |
+
"expected_rtol": 1e-11
|
283 |
+
},
|
284 |
+
"mp_result": 0.000651656693149116
|
285 |
+
},
|
286 |
+
{
|
287 |
+
"src_case": {
|
288 |
+
"q": 1,
|
289 |
+
"k": 3,
|
290 |
+
"v": 100,
|
291 |
+
"expected_atol": 1e-11,
|
292 |
+
"expected_rtol": 1e-11
|
293 |
+
},
|
294 |
+
"mp_result": 0.2401356460422021
|
295 |
+
},
|
296 |
+
{
|
297 |
+
"src_case": {
|
298 |
+
"q": 1,
|
299 |
+
"k": 10,
|
300 |
+
"v": 3,
|
301 |
+
"expected_atol": 1e-11,
|
302 |
+
"expected_rtol": 1e-11
|
303 |
+
},
|
304 |
+
"mp_result": 0.003971273224673166
|
305 |
+
},
|
306 |
+
{
|
307 |
+
"src_case": {
|
308 |
+
"q": 1,
|
309 |
+
"k": 10,
|
310 |
+
"v": 20,
|
311 |
+
"expected_atol": 1e-11,
|
312 |
+
"expected_rtol": 1e-11
|
313 |
+
},
|
314 |
+
"mp_result": 0.0008732969319364606
|
315 |
+
},
|
316 |
+
{
|
317 |
+
"src_case": {
|
318 |
+
"q": 1,
|
319 |
+
"k": 3,
|
320 |
+
"v": 120,
|
321 |
+
"expected_atol": 1e-11,
|
322 |
+
"expected_rtol": 1e-11
|
323 |
+
},
|
324 |
+
"mp_result": 0.24023154593376422
|
325 |
+
},
|
326 |
+
{
|
327 |
+
"src_case": {
|
328 |
+
"q": 1,
|
329 |
+
"k": 10,
|
330 |
+
"v": 10,
|
331 |
+
"expected_atol": 1e-11,
|
332 |
+
"expected_rtol": 1e-11
|
333 |
+
},
|
334 |
+
"mp_result": 0.001300816146573152
|
335 |
+
},
|
336 |
+
{
|
337 |
+
"src_case": {
|
338 |
+
"q": 1,
|
339 |
+
"k": 20,
|
340 |
+
"v": 50,
|
341 |
+
"expected_atol": 1e-11,
|
342 |
+
"expected_rtol": 1e-11
|
343 |
+
},
|
344 |
+
"mp_result": 1.5682573722040226e-07
|
345 |
+
},
|
346 |
+
{
|
347 |
+
"src_case": {
|
348 |
+
"q": 1,
|
349 |
+
"k": 10,
|
350 |
+
"v": 100,
|
351 |
+
"expected_atol": 1e-11,
|
352 |
+
"expected_rtol": 1e-11
|
353 |
+
},
|
354 |
+
"mp_result": 0.0005841098057517027
|
355 |
+
},
|
356 |
+
{
|
357 |
+
"src_case": {
|
358 |
+
"q": 1,
|
359 |
+
"k": 20,
|
360 |
+
"v": 3,
|
361 |
+
"expected_atol": 1e-11,
|
362 |
+
"expected_rtol": 1e-11
|
363 |
+
},
|
364 |
+
"mp_result": 9.2267674885784e-05
|
365 |
+
},
|
366 |
+
{
|
367 |
+
"src_case": {
|
368 |
+
"q": 1,
|
369 |
+
"k": 10,
|
370 |
+
"v": 120,
|
371 |
+
"expected_atol": 1e-11,
|
372 |
+
"expected_rtol": 1e-11
|
373 |
+
},
|
374 |
+
"mp_result": 0.0005731712496327297
|
375 |
+
},
|
376 |
+
{
|
377 |
+
"src_case": {
|
378 |
+
"q": 1,
|
379 |
+
"k": 20,
|
380 |
+
"v": 10,
|
381 |
+
"expected_atol": 1e-11,
|
382 |
+
"expected_rtol": 1e-11
|
383 |
+
},
|
384 |
+
"mp_result": 2.746798012658064e-06
|
385 |
+
},
|
386 |
+
{
|
387 |
+
"src_case": {
|
388 |
+
"q": 1,
|
389 |
+
"k": 20,
|
390 |
+
"v": 20,
|
391 |
+
"expected_atol": 1e-11,
|
392 |
+
"expected_rtol": 1e-11
|
393 |
+
},
|
394 |
+
"mp_result": 5.807700350854172e-07
|
395 |
+
},
|
396 |
+
{
|
397 |
+
"src_case": {
|
398 |
+
"q": 1,
|
399 |
+
"k": 20,
|
400 |
+
"v": 100,
|
401 |
+
"expected_atol": 1e-11,
|
402 |
+
"expected_rtol": 1e-11
|
403 |
+
},
|
404 |
+
"mp_result": 9.147637957472628e-08
|
405 |
+
},
|
406 |
+
{
|
407 |
+
"src_case": {
|
408 |
+
"q": 1,
|
409 |
+
"k": 20,
|
410 |
+
"v": 120,
|
411 |
+
"expected_atol": 1e-11,
|
412 |
+
"expected_rtol": 1e-11
|
413 |
+
},
|
414 |
+
"mp_result": 8.306675539750552e-08
|
415 |
+
},
|
416 |
+
{
|
417 |
+
"src_case": {
|
418 |
+
"q": 4,
|
419 |
+
"k": 3,
|
420 |
+
"v": 3,
|
421 |
+
"expected_atol": 1e-11,
|
422 |
+
"expected_rtol": 1e-11
|
423 |
+
},
|
424 |
+
"mp_result": 0.8711786295203324
|
425 |
+
},
|
426 |
+
{
|
427 |
+
"src_case": {
|
428 |
+
"q": 4,
|
429 |
+
"k": 3,
|
430 |
+
"v": 50,
|
431 |
+
"expected_atol": 1e-11,
|
432 |
+
"expected_rtol": 1e-11
|
433 |
+
},
|
434 |
+
"mp_result": 0.9818862781476212
|
435 |
+
},
|
436 |
+
{
|
437 |
+
"src_case": {
|
438 |
+
"q": 4,
|
439 |
+
"k": 3,
|
440 |
+
"v": 10,
|
441 |
+
"expected_atol": 1e-11,
|
442 |
+
"expected_rtol": 1e-11
|
443 |
+
},
|
444 |
+
"mp_result": 0.9566506502400175
|
445 |
+
},
|
446 |
+
{
|
447 |
+
"src_case": {
|
448 |
+
"q": 4,
|
449 |
+
"k": 3,
|
450 |
+
"v": 120,
|
451 |
+
"expected_atol": 1e-11,
|
452 |
+
"expected_rtol": 1e-11
|
453 |
+
},
|
454 |
+
"mp_result": 0.9849546621386962
|
455 |
+
},
|
456 |
+
{
|
457 |
+
"src_case": {
|
458 |
+
"q": 4,
|
459 |
+
"k": 3,
|
460 |
+
"v": 20,
|
461 |
+
"expected_atol": 1e-11,
|
462 |
+
"expected_rtol": 1e-11
|
463 |
+
},
|
464 |
+
"mp_result": 0.9731488893573804
|
465 |
+
},
|
466 |
+
{
|
467 |
+
"src_case": {
|
468 |
+
"q": 4,
|
469 |
+
"k": 10,
|
470 |
+
"v": 50,
|
471 |
+
"expected_atol": 1e-11,
|
472 |
+
"expected_rtol": 1e-11
|
473 |
+
},
|
474 |
+
"mp_result": 0.8450530667988544
|
475 |
+
},
|
476 |
+
{
|
477 |
+
"src_case": {
|
478 |
+
"q": 4,
|
479 |
+
"k": 10,
|
480 |
+
"v": 3,
|
481 |
+
"expected_atol": 1e-11,
|
482 |
+
"expected_rtol": 1e-11
|
483 |
+
},
|
484 |
+
"mp_result": 0.6164875232404174
|
485 |
+
},
|
486 |
+
{
|
487 |
+
"src_case": {
|
488 |
+
"q": 4,
|
489 |
+
"k": 3,
|
490 |
+
"v": 100,
|
491 |
+
"expected_atol": 1e-11,
|
492 |
+
"expected_rtol": 1e-11
|
493 |
+
},
|
494 |
+
"mp_result": 0.9845292772767739
|
495 |
+
},
|
496 |
+
{
|
497 |
+
"src_case": {
|
498 |
+
"q": 4,
|
499 |
+
"k": 10,
|
500 |
+
"v": 20,
|
501 |
+
"expected_atol": 1e-11,
|
502 |
+
"expected_rtol": 1e-11
|
503 |
+
},
|
504 |
+
"mp_result": 0.8079691517949077
|
505 |
+
},
|
506 |
+
{
|
507 |
+
"src_case": {
|
508 |
+
"q": 4,
|
509 |
+
"k": 10,
|
510 |
+
"v": 10,
|
511 |
+
"expected_atol": 1e-11,
|
512 |
+
"expected_rtol": 1e-11
|
513 |
+
},
|
514 |
+
"mp_result": 0.7573606942645745
|
515 |
+
},
|
516 |
+
{
|
517 |
+
"src_case": {
|
518 |
+
"q": 4,
|
519 |
+
"k": 10,
|
520 |
+
"v": 100,
|
521 |
+
"expected_atol": 1e-11,
|
522 |
+
"expected_rtol": 1e-11
|
523 |
+
},
|
524 |
+
"mp_result": 0.8587525248147736
|
525 |
+
},
|
526 |
+
{
|
527 |
+
"src_case": {
|
528 |
+
"q": 4,
|
529 |
+
"k": 10,
|
530 |
+
"v": 120,
|
531 |
+
"expected_atol": 1e-11,
|
532 |
+
"expected_rtol": 1e-11
|
533 |
+
},
|
534 |
+
"mp_result": 0.8611036193280976
|
535 |
+
},
|
536 |
+
{
|
537 |
+
"src_case": {
|
538 |
+
"q": 4,
|
539 |
+
"k": 20,
|
540 |
+
"v": 3,
|
541 |
+
"expected_atol": 1e-11,
|
542 |
+
"expected_rtol": 1e-11
|
543 |
+
},
|
544 |
+
"mp_result": 0.46523135355387657
|
545 |
+
},
|
546 |
+
{
|
547 |
+
"src_case": {
|
548 |
+
"q": 4,
|
549 |
+
"k": 20,
|
550 |
+
"v": 50,
|
551 |
+
"expected_atol": 1e-11,
|
552 |
+
"expected_rtol": 1e-11
|
553 |
+
},
|
554 |
+
"mp_result": 0.6318042819232383
|
555 |
+
},
|
556 |
+
{
|
557 |
+
"src_case": {
|
558 |
+
"q": 4,
|
559 |
+
"k": 20,
|
560 |
+
"v": 10,
|
561 |
+
"expected_atol": 1e-11,
|
562 |
+
"expected_rtol": 1e-11
|
563 |
+
},
|
564 |
+
"mp_result": 0.5574947140294286
|
565 |
+
},
|
566 |
+
{
|
567 |
+
"src_case": {
|
568 |
+
"q": 4,
|
569 |
+
"k": 20,
|
570 |
+
"v": 20,
|
571 |
+
"expected_atol": 1e-11,
|
572 |
+
"expected_rtol": 1e-11
|
573 |
+
},
|
574 |
+
"mp_result": 0.5970517763141937
|
575 |
+
},
|
576 |
+
{
|
577 |
+
"src_case": {
|
578 |
+
"q": 4,
|
579 |
+
"k": 20,
|
580 |
+
"v": 120,
|
581 |
+
"expected_atol": 1e-11,
|
582 |
+
"expected_rtol": 1e-11
|
583 |
+
},
|
584 |
+
"mp_result": 0.6493671527818267
|
585 |
+
},
|
586 |
+
{
|
587 |
+
"src_case": {
|
588 |
+
"q": 4,
|
589 |
+
"k": 20,
|
590 |
+
"v": 100,
|
591 |
+
"expected_atol": 1e-11,
|
592 |
+
"expected_rtol": 1e-11
|
593 |
+
},
|
594 |
+
"mp_result": 0.6466699776044968
|
595 |
+
},
|
596 |
+
{
|
597 |
+
"src_case": {
|
598 |
+
"q": 10,
|
599 |
+
"k": 3,
|
600 |
+
"v": 3,
|
601 |
+
"expected_atol": 1e-11,
|
602 |
+
"expected_rtol": 1e-11
|
603 |
+
},
|
604 |
+
"mp_result": 0.9881335633712994
|
605 |
+
},
|
606 |
+
{
|
607 |
+
"src_case": {
|
608 |
+
"q": 10,
|
609 |
+
"k": 3,
|
610 |
+
"v": 50,
|
611 |
+
"expected_atol": 1e-11,
|
612 |
+
"expected_rtol": 1e-11
|
613 |
+
},
|
614 |
+
"mp_result": 0.9999999861266821
|
615 |
+
},
|
616 |
+
{
|
617 |
+
"src_case": {
|
618 |
+
"q": 10,
|
619 |
+
"k": 3,
|
620 |
+
"v": 10,
|
621 |
+
"expected_atol": 1e-11,
|
622 |
+
"expected_rtol": 1e-11
|
623 |
+
},
|
624 |
+
"mp_result": 0.999908236635449
|
625 |
+
},
|
626 |
+
{
|
627 |
+
"src_case": {
|
628 |
+
"q": 10,
|
629 |
+
"k": 3,
|
630 |
+
"v": 20,
|
631 |
+
"expected_atol": 1e-11,
|
632 |
+
"expected_rtol": 1e-11
|
633 |
+
},
|
634 |
+
"mp_result": 0.9999978467928313
|
635 |
+
},
|
636 |
+
{
|
637 |
+
"src_case": {
|
638 |
+
"q": 10,
|
639 |
+
"k": 3,
|
640 |
+
"v": 120,
|
641 |
+
"expected_atol": 1e-11,
|
642 |
+
"expected_rtol": 1e-11
|
643 |
+
},
|
644 |
+
"mp_result": 0.9999999996690216
|
645 |
+
},
|
646 |
+
{
|
647 |
+
"src_case": {
|
648 |
+
"q": 10,
|
649 |
+
"k": 3,
|
650 |
+
"v": 100,
|
651 |
+
"expected_atol": 1e-11,
|
652 |
+
"expected_rtol": 1e-11
|
653 |
+
},
|
654 |
+
"mp_result": 0.9999999993640496
|
655 |
+
},
|
656 |
+
{
|
657 |
+
"src_case": {
|
658 |
+
"q": 10,
|
659 |
+
"k": 10,
|
660 |
+
"v": 3,
|
661 |
+
"expected_atol": 1e-11,
|
662 |
+
"expected_rtol": 1e-11
|
663 |
+
},
|
664 |
+
"mp_result": 0.9570401457077894
|
665 |
+
},
|
666 |
+
{
|
667 |
+
"src_case": {
|
668 |
+
"q": 10,
|
669 |
+
"k": 10,
|
670 |
+
"v": 50,
|
671 |
+
"expected_atol": 1e-11,
|
672 |
+
"expected_rtol": 1e-11
|
673 |
+
},
|
674 |
+
"mp_result": 0.9999997977351971
|
675 |
+
},
|
676 |
+
{
|
677 |
+
"src_case": {
|
678 |
+
"q": 10,
|
679 |
+
"k": 10,
|
680 |
+
"v": 10,
|
681 |
+
"expected_atol": 1e-11,
|
682 |
+
"expected_rtol": 1e-11
|
683 |
+
},
|
684 |
+
"mp_result": 0.9991738325963548
|
685 |
+
},
|
686 |
+
{
|
687 |
+
"src_case": {
|
688 |
+
"q": 10,
|
689 |
+
"k": 10,
|
690 |
+
"v": 20,
|
691 |
+
"expected_atol": 1e-11,
|
692 |
+
"expected_rtol": 1e-11
|
693 |
+
},
|
694 |
+
"mp_result": 0.9999730883609333
|
695 |
+
},
|
696 |
+
{
|
697 |
+
"src_case": {
|
698 |
+
"q": 10,
|
699 |
+
"k": 10,
|
700 |
+
"v": 100,
|
701 |
+
"expected_atol": 1e-11,
|
702 |
+
"expected_rtol": 1e-11
|
703 |
+
},
|
704 |
+
"mp_result": 0.9999999905199205
|
705 |
+
},
|
706 |
+
{
|
707 |
+
"src_case": {
|
708 |
+
"q": 10,
|
709 |
+
"k": 10,
|
710 |
+
"v": 120,
|
711 |
+
"expected_atol": 1e-11,
|
712 |
+
"expected_rtol": 1e-11
|
713 |
+
},
|
714 |
+
"mp_result": 0.9999999950566264
|
715 |
+
},
|
716 |
+
{
|
717 |
+
"src_case": {
|
718 |
+
"q": 10,
|
719 |
+
"k": 20,
|
720 |
+
"v": 3,
|
721 |
+
"expected_atol": 1e-11,
|
722 |
+
"expected_rtol": 1e-11
|
723 |
+
},
|
724 |
+
"mp_result": 0.9312318042339768
|
725 |
+
},
|
726 |
+
{
|
727 |
+
"src_case": {
|
728 |
+
"q": 10,
|
729 |
+
"k": 20,
|
730 |
+
"v": 50,
|
731 |
+
"expected_atol": 1e-11,
|
732 |
+
"expected_rtol": 1e-11
|
733 |
+
},
|
734 |
+
"mp_result": 0.9999991743904675
|
735 |
+
},
|
736 |
+
{
|
737 |
+
"src_case": {
|
738 |
+
"q": 10,
|
739 |
+
"k": 20,
|
740 |
+
"v": 10,
|
741 |
+
"expected_atol": 1e-11,
|
742 |
+
"expected_rtol": 1e-11
|
743 |
+
},
|
744 |
+
"mp_result": 0.9977643922032399
|
745 |
+
},
|
746 |
+
{
|
747 |
+
"src_case": {
|
748 |
+
"q": 10,
|
749 |
+
"k": 20,
|
750 |
+
"v": 20,
|
751 |
+
"expected_atol": 1e-11,
|
752 |
+
"expected_rtol": 1e-11
|
753 |
+
},
|
754 |
+
"mp_result": 0.9999054426012515
|
755 |
+
},
|
756 |
+
{
|
757 |
+
"src_case": {
|
758 |
+
"q": 10,
|
759 |
+
"k": 20,
|
760 |
+
"v": 100,
|
761 |
+
"expected_atol": 1e-11,
|
762 |
+
"expected_rtol": 1e-11
|
763 |
+
},
|
764 |
+
"mp_result": 0.9999999602948055
|
765 |
+
},
|
766 |
+
{
|
767 |
+
"src_case": {
|
768 |
+
"q": 10,
|
769 |
+
"k": 20,
|
770 |
+
"v": 120,
|
771 |
+
"expected_atol": 1e-11,
|
772 |
+
"expected_rtol": 1e-11
|
773 |
+
},
|
774 |
+
"mp_result": 0.9999999792458618
|
775 |
+
}
|
776 |
+
],
|
777 |
+
"pdf_data": [
|
778 |
+
{
|
779 |
+
"src_case": {
|
780 |
+
"q": 0.1,
|
781 |
+
"k": 3,
|
782 |
+
"v": 3,
|
783 |
+
"expected_atol": 1e-11,
|
784 |
+
"expected_rtol": 1e-11
|
785 |
+
},
|
786 |
+
"mp_result": 0.05487847613526332
|
787 |
+
},
|
788 |
+
{
|
789 |
+
"src_case": {
|
790 |
+
"q": 0.1,
|
791 |
+
"k": 10,
|
792 |
+
"v": 10,
|
793 |
+
"expected_atol": 1e-11,
|
794 |
+
"expected_rtol": 1e-11
|
795 |
+
},
|
796 |
+
"mp_result": 2.564099684606509e-10
|
797 |
+
},
|
798 |
+
{
|
799 |
+
"src_case": {
|
800 |
+
"q": 0.1,
|
801 |
+
"k": 3,
|
802 |
+
"v": 10,
|
803 |
+
"expected_atol": 1e-11,
|
804 |
+
"expected_rtol": 1e-11
|
805 |
+
},
|
806 |
+
"mp_result": 0.05494947290360002
|
807 |
+
},
|
808 |
+
{
|
809 |
+
"src_case": {
|
810 |
+
"q": 0.1,
|
811 |
+
"k": 10,
|
812 |
+
"v": 100,
|
813 |
+
"expected_atol": 1e-11,
|
814 |
+
"expected_rtol": 1e-11
|
815 |
+
},
|
816 |
+
"mp_result": 8.442593793786411e-11
|
817 |
+
},
|
818 |
+
{
|
819 |
+
"src_case": {
|
820 |
+
"q": 0.1,
|
821 |
+
"k": 3,
|
822 |
+
"v": 20,
|
823 |
+
"expected_atol": 1e-11,
|
824 |
+
"expected_rtol": 1e-11
|
825 |
+
},
|
826 |
+
"mp_result": 0.054964710604860405
|
827 |
+
},
|
828 |
+
{
|
829 |
+
"src_case": {
|
830 |
+
"q": 0.1,
|
831 |
+
"k": 10,
|
832 |
+
"v": 50,
|
833 |
+
"expected_atol": 1e-11,
|
834 |
+
"expected_rtol": 1e-11
|
835 |
+
},
|
836 |
+
"mp_result": 9.764441961563576e-11
|
837 |
+
},
|
838 |
+
{
|
839 |
+
"src_case": {
|
840 |
+
"q": 0.1,
|
841 |
+
"k": 3,
|
842 |
+
"v": 100,
|
843 |
+
"expected_atol": 1e-11,
|
844 |
+
"expected_rtol": 1e-11
|
845 |
+
},
|
846 |
+
"mp_result": 0.05497690690332341
|
847 |
+
},
|
848 |
+
{
|
849 |
+
"src_case": {
|
850 |
+
"q": 0.1,
|
851 |
+
"k": 3,
|
852 |
+
"v": 50,
|
853 |
+
"expected_atol": 1e-11,
|
854 |
+
"expected_rtol": 1e-11
|
855 |
+
},
|
856 |
+
"mp_result": 0.05497385731702228
|
857 |
+
},
|
858 |
+
{
|
859 |
+
"src_case": {
|
860 |
+
"q": 0.1,
|
861 |
+
"k": 20,
|
862 |
+
"v": 10,
|
863 |
+
"expected_atol": 1e-11,
|
864 |
+
"expected_rtol": 1e-11
|
865 |
+
},
|
866 |
+
"mp_result": 4.758021225803992e-22
|
867 |
+
},
|
868 |
+
{
|
869 |
+
"src_case": {
|
870 |
+
"q": 0.1,
|
871 |
+
"k": 3,
|
872 |
+
"v": 120,
|
873 |
+
"expected_atol": 1e-11,
|
874 |
+
"expected_rtol": 1e-11
|
875 |
+
},
|
876 |
+
"mp_result": 0.054977415200879516
|
877 |
+
},
|
878 |
+
{
|
879 |
+
"src_case": {
|
880 |
+
"q": 0.1,
|
881 |
+
"k": 20,
|
882 |
+
"v": 3,
|
883 |
+
"expected_atol": 1e-11,
|
884 |
+
"expected_rtol": 1e-11
|
885 |
+
},
|
886 |
+
"mp_result": 1.8004731453548083e-19
|
887 |
+
},
|
888 |
+
{
|
889 |
+
"src_case": {
|
890 |
+
"q": 0.1,
|
891 |
+
"k": 10,
|
892 |
+
"v": 3,
|
893 |
+
"expected_atol": 1e-11,
|
894 |
+
"expected_rtol": 1e-11
|
895 |
+
},
|
896 |
+
"mp_result": 1.5564176176604816e-09
|
897 |
+
},
|
898 |
+
{
|
899 |
+
"src_case": {
|
900 |
+
"q": 0.1,
|
901 |
+
"k": 20,
|
902 |
+
"v": 50,
|
903 |
+
"expected_atol": 1e-11,
|
904 |
+
"expected_rtol": 1e-11
|
905 |
+
},
|
906 |
+
"mp_result": 9.342768070688728e-24
|
907 |
+
},
|
908 |
+
{
|
909 |
+
"src_case": {
|
910 |
+
"q": 0.1,
|
911 |
+
"k": 10,
|
912 |
+
"v": 20,
|
913 |
+
"expected_atol": 1e-11,
|
914 |
+
"expected_rtol": 1e-11
|
915 |
+
},
|
916 |
+
"mp_result": 1.454372265306114e-10
|
917 |
+
},
|
918 |
+
{
|
919 |
+
"src_case": {
|
920 |
+
"q": 0.1,
|
921 |
+
"k": 20,
|
922 |
+
"v": 100,
|
923 |
+
"expected_atol": 1e-11,
|
924 |
+
"expected_rtol": 1e-11
|
925 |
+
},
|
926 |
+
"mp_result": 3.9138464398429654e-25
|
927 |
+
},
|
928 |
+
{
|
929 |
+
"src_case": {
|
930 |
+
"q": 0.1,
|
931 |
+
"k": 20,
|
932 |
+
"v": 20,
|
933 |
+
"expected_atol": 1e-11,
|
934 |
+
"expected_rtol": 1e-11
|
935 |
+
},
|
936 |
+
"mp_result": 5.266341131767418e-23
|
937 |
+
},
|
938 |
+
{
|
939 |
+
"src_case": {
|
940 |
+
"q": 0.1,
|
941 |
+
"k": 10,
|
942 |
+
"v": 120,
|
943 |
+
"expected_atol": 1e-11,
|
944 |
+
"expected_rtol": 1e-11
|
945 |
+
},
|
946 |
+
"mp_result": 8.234556126446594e-11
|
947 |
+
},
|
948 |
+
{
|
949 |
+
"src_case": {
|
950 |
+
"q": 0.1,
|
951 |
+
"k": 20,
|
952 |
+
"v": 120,
|
953 |
+
"expected_atol": 1e-11,
|
954 |
+
"expected_rtol": 1e-11
|
955 |
+
},
|
956 |
+
"mp_result": 9.32929780487562e-26
|
957 |
+
},
|
958 |
+
{
|
959 |
+
"src_case": {
|
960 |
+
"q": 1,
|
961 |
+
"k": 3,
|
962 |
+
"v": 3,
|
963 |
+
"expected_atol": 1e-11,
|
964 |
+
"expected_rtol": 1e-11
|
965 |
+
},
|
966 |
+
"mp_result": 0.36083736990527154
|
967 |
+
},
|
968 |
+
{
|
969 |
+
"src_case": {
|
970 |
+
"q": 1,
|
971 |
+
"k": 3,
|
972 |
+
"v": 50,
|
973 |
+
"expected_atol": 1e-11,
|
974 |
+
"expected_rtol": 1e-11
|
975 |
+
},
|
976 |
+
"mp_result": 0.4137959132282269
|
977 |
+
},
|
978 |
+
{
|
979 |
+
"src_case": {
|
980 |
+
"q": 1,
|
981 |
+
"k": 3,
|
982 |
+
"v": 20,
|
983 |
+
"expected_atol": 1e-11,
|
984 |
+
"expected_rtol": 1e-11
|
985 |
+
},
|
986 |
+
"mp_result": 0.4080239698771056
|
987 |
+
},
|
988 |
+
{
|
989 |
+
"src_case": {
|
990 |
+
"q": 1,
|
991 |
+
"k": 3,
|
992 |
+
"v": 10,
|
993 |
+
"expected_atol": 1e-11,
|
994 |
+
"expected_rtol": 1e-11
|
995 |
+
},
|
996 |
+
"mp_result": 0.398772020275752
|
997 |
+
},
|
998 |
+
{
|
999 |
+
"src_case": {
|
1000 |
+
"q": 1,
|
1001 |
+
"k": 3,
|
1002 |
+
"v": 120,
|
1003 |
+
"expected_atol": 1e-11,
|
1004 |
+
"expected_rtol": 1e-11
|
1005 |
+
},
|
1006 |
+
"mp_result": 0.4160873922094346
|
1007 |
+
},
|
1008 |
+
{
|
1009 |
+
"src_case": {
|
1010 |
+
"q": 1,
|
1011 |
+
"k": 3,
|
1012 |
+
"v": 100,
|
1013 |
+
"expected_atol": 1e-11,
|
1014 |
+
"expected_rtol": 1e-11
|
1015 |
+
},
|
1016 |
+
"mp_result": 0.4157583991350054
|
1017 |
+
},
|
1018 |
+
{
|
1019 |
+
"src_case": {
|
1020 |
+
"q": 1,
|
1021 |
+
"k": 10,
|
1022 |
+
"v": 50,
|
1023 |
+
"expected_atol": 1e-11,
|
1024 |
+
"expected_rtol": 1e-11
|
1025 |
+
},
|
1026 |
+
"mp_result": 0.005210720148451848
|
1027 |
+
},
|
1028 |
+
{
|
1029 |
+
"src_case": {
|
1030 |
+
"q": 1,
|
1031 |
+
"k": 10,
|
1032 |
+
"v": 3,
|
1033 |
+
"expected_atol": 1e-11,
|
1034 |
+
"expected_rtol": 1e-11
|
1035 |
+
},
|
1036 |
+
"mp_result": 0.02575314059867804
|
1037 |
+
},
|
1038 |
+
{
|
1039 |
+
"src_case": {
|
1040 |
+
"q": 1,
|
1041 |
+
"k": 10,
|
1042 |
+
"v": 10,
|
1043 |
+
"expected_atol": 1e-11,
|
1044 |
+
"expected_rtol": 1e-11
|
1045 |
+
},
|
1046 |
+
"mp_result": 0.009782573637596617
|
1047 |
+
},
|
1048 |
+
{
|
1049 |
+
"src_case": {
|
1050 |
+
"q": 1,
|
1051 |
+
"k": 10,
|
1052 |
+
"v": 20,
|
1053 |
+
"expected_atol": 1e-11,
|
1054 |
+
"expected_rtol": 1e-11
|
1055 |
+
},
|
1056 |
+
"mp_result": 0.006818708302379005
|
1057 |
+
},
|
1058 |
+
{
|
1059 |
+
"src_case": {
|
1060 |
+
"q": 1,
|
1061 |
+
"k": 10,
|
1062 |
+
"v": 100,
|
1063 |
+
"expected_atol": 1e-11,
|
1064 |
+
"expected_rtol": 1e-11
|
1065 |
+
},
|
1066 |
+
"mp_result": 0.0047089182958790715
|
1067 |
+
},
|
1068 |
+
{
|
1069 |
+
"src_case": {
|
1070 |
+
"q": 1,
|
1071 |
+
"k": 10,
|
1072 |
+
"v": 120,
|
1073 |
+
"expected_atol": 1e-11,
|
1074 |
+
"expected_rtol": 1e-11
|
1075 |
+
},
|
1076 |
+
"mp_result": 0.004627085294166373
|
1077 |
+
},
|
1078 |
+
{
|
1079 |
+
"src_case": {
|
1080 |
+
"q": 1,
|
1081 |
+
"k": 20,
|
1082 |
+
"v": 3,
|
1083 |
+
"expected_atol": 1e-11,
|
1084 |
+
"expected_rtol": 1e-11
|
1085 |
+
},
|
1086 |
+
"mp_result": 0.0010886280311369462
|
1087 |
+
},
|
1088 |
+
{
|
1089 |
+
"src_case": {
|
1090 |
+
"q": 1,
|
1091 |
+
"k": 20,
|
1092 |
+
"v": 50,
|
1093 |
+
"expected_atol": 1e-11,
|
1094 |
+
"expected_rtol": 1e-11
|
1095 |
+
},
|
1096 |
+
"mp_result": 2.630674470916427e-06
|
1097 |
+
},
|
1098 |
+
{
|
1099 |
+
"src_case": {
|
1100 |
+
"q": 1,
|
1101 |
+
"k": 20,
|
1102 |
+
"v": 10,
|
1103 |
+
"expected_atol": 1e-11,
|
1104 |
+
"expected_rtol": 1e-11
|
1105 |
+
},
|
1106 |
+
"mp_result": 4.121713278199428e-05
|
1107 |
+
},
|
1108 |
+
{
|
1109 |
+
"src_case": {
|
1110 |
+
"q": 1,
|
1111 |
+
"k": 20,
|
1112 |
+
"v": 20,
|
1113 |
+
"expected_atol": 1e-11,
|
1114 |
+
"expected_rtol": 1e-11
|
1115 |
+
},
|
1116 |
+
"mp_result": 9.319506007252685e-06
|
1117 |
+
},
|
1118 |
+
{
|
1119 |
+
"src_case": {
|
1120 |
+
"q": 1,
|
1121 |
+
"k": 20,
|
1122 |
+
"v": 100,
|
1123 |
+
"expected_atol": 1e-11,
|
1124 |
+
"expected_rtol": 1e-11
|
1125 |
+
},
|
1126 |
+
"mp_result": 1.5585754418789747e-06
|
1127 |
+
},
|
1128 |
+
{
|
1129 |
+
"src_case": {
|
1130 |
+
"q": 1,
|
1131 |
+
"k": 20,
|
1132 |
+
"v": 120,
|
1133 |
+
"expected_atol": 1e-11,
|
1134 |
+
"expected_rtol": 1e-11
|
1135 |
+
},
|
1136 |
+
"mp_result": 1.4190335899441991e-06
|
1137 |
+
},
|
1138 |
+
{
|
1139 |
+
"src_case": {
|
1140 |
+
"q": 4,
|
1141 |
+
"k": 3,
|
1142 |
+
"v": 3,
|
1143 |
+
"expected_atol": 1e-11,
|
1144 |
+
"expected_rtol": 1e-11
|
1145 |
+
},
|
1146 |
+
"mp_result": 0.07185383302009114
|
1147 |
+
},
|
1148 |
+
{
|
1149 |
+
"src_case": {
|
1150 |
+
"q": 4,
|
1151 |
+
"k": 3,
|
1152 |
+
"v": 10,
|
1153 |
+
"expected_atol": 1e-11,
|
1154 |
+
"expected_rtol": 1e-11
|
1155 |
+
},
|
1156 |
+
"mp_result": 0.050268901219386576
|
1157 |
+
},
|
1158 |
+
{
|
1159 |
+
"src_case": {
|
1160 |
+
"q": 4,
|
1161 |
+
"k": 3,
|
1162 |
+
"v": 50,
|
1163 |
+
"expected_atol": 1e-11,
|
1164 |
+
"expected_rtol": 1e-11
|
1165 |
+
},
|
1166 |
+
"mp_result": 0.03321056847176124
|
1167 |
+
},
|
1168 |
+
{
|
1169 |
+
"src_case": {
|
1170 |
+
"q": 4,
|
1171 |
+
"k": 3,
|
1172 |
+
"v": 20,
|
1173 |
+
"expected_atol": 1e-11,
|
1174 |
+
"expected_rtol": 1e-11
|
1175 |
+
},
|
1176 |
+
"mp_result": 0.04044172384981084
|
1177 |
+
},
|
1178 |
+
{
|
1179 |
+
"src_case": {
|
1180 |
+
"q": 4,
|
1181 |
+
"k": 3,
|
1182 |
+
"v": 100,
|
1183 |
+
"expected_atol": 1e-11,
|
1184 |
+
"expected_rtol": 1e-11
|
1185 |
+
},
|
1186 |
+
"mp_result": 0.030571365659999617
|
1187 |
+
},
|
1188 |
+
{
|
1189 |
+
"src_case": {
|
1190 |
+
"q": 4,
|
1191 |
+
"k": 3,
|
1192 |
+
"v": 120,
|
1193 |
+
"expected_atol": 1e-11,
|
1194 |
+
"expected_rtol": 1e-11
|
1195 |
+
},
|
1196 |
+
"mp_result": 0.030120779149073032
|
1197 |
+
},
|
1198 |
+
{
|
1199 |
+
"src_case": {
|
1200 |
+
"q": 4,
|
1201 |
+
"k": 10,
|
1202 |
+
"v": 3,
|
1203 |
+
"expected_atol": 1e-11,
|
1204 |
+
"expected_rtol": 1e-11
|
1205 |
+
},
|
1206 |
+
"mp_result": 0.17501664247670937
|
1207 |
+
},
|
1208 |
+
{
|
1209 |
+
"src_case": {
|
1210 |
+
"q": 4,
|
1211 |
+
"k": 10,
|
1212 |
+
"v": 10,
|
1213 |
+
"expected_atol": 1e-11,
|
1214 |
+
"expected_rtol": 1e-11
|
1215 |
+
},
|
1216 |
+
"mp_result": 0.22374394725370736
|
1217 |
+
},
|
1218 |
+
{
|
1219 |
+
"src_case": {
|
1220 |
+
"q": 4,
|
1221 |
+
"k": 10,
|
1222 |
+
"v": 50,
|
1223 |
+
"expected_atol": 1e-11,
|
1224 |
+
"expected_rtol": 1e-11
|
1225 |
+
},
|
1226 |
+
"mp_result": 0.23246597521020534
|
1227 |
+
},
|
1228 |
+
{
|
1229 |
+
"src_case": {
|
1230 |
+
"q": 4,
|
1231 |
+
"k": 10,
|
1232 |
+
"v": 20,
|
1233 |
+
"expected_atol": 1e-11,
|
1234 |
+
"expected_rtol": 1e-11
|
1235 |
+
},
|
1236 |
+
"mp_result": 0.23239043677504484
|
1237 |
+
},
|
1238 |
+
{
|
1239 |
+
"src_case": {
|
1240 |
+
"q": 4,
|
1241 |
+
"k": 10,
|
1242 |
+
"v": 100,
|
1243 |
+
"expected_atol": 1e-11,
|
1244 |
+
"expected_rtol": 1e-11
|
1245 |
+
},
|
1246 |
+
"mp_result": 0.23057775622748988
|
1247 |
+
},
|
1248 |
+
{
|
1249 |
+
"src_case": {
|
1250 |
+
"q": 4,
|
1251 |
+
"k": 10,
|
1252 |
+
"v": 120,
|
1253 |
+
"expected_atol": 1e-11,
|
1254 |
+
"expected_rtol": 1e-11
|
1255 |
+
},
|
1256 |
+
"mp_result": 0.23012666145240815
|
1257 |
+
},
|
1258 |
+
{
|
1259 |
+
"src_case": {
|
1260 |
+
"q": 4,
|
1261 |
+
"k": 20,
|
1262 |
+
"v": 3,
|
1263 |
+
"expected_atol": 1e-11,
|
1264 |
+
"expected_rtol": 1e-11
|
1265 |
+
},
|
1266 |
+
"mp_result": 0.2073676639537027
|
1267 |
+
},
|
1268 |
+
{
|
1269 |
+
"src_case": {
|
1270 |
+
"q": 4,
|
1271 |
+
"k": 20,
|
1272 |
+
"v": 10,
|
1273 |
+
"expected_atol": 1e-11,
|
1274 |
+
"expected_rtol": 1e-11
|
1275 |
+
},
|
1276 |
+
"mp_result": 0.3245990542431859
|
1277 |
+
},
|
1278 |
+
{
|
1279 |
+
"src_case": {
|
1280 |
+
"q": 10,
|
1281 |
+
"k": 3,
|
1282 |
+
"v": 3,
|
1283 |
+
"expected_atol": 1e-11,
|
1284 |
+
"expected_rtol": 1e-11
|
1285 |
+
},
|
1286 |
+
"mp_result": 0.0033733228559870584
|
1287 |
+
},
|
1288 |
+
{
|
1289 |
+
"src_case": {
|
1290 |
+
"q": 10,
|
1291 |
+
"k": 3,
|
1292 |
+
"v": 10,
|
1293 |
+
"expected_atol": 1e-11,
|
1294 |
+
"expected_rtol": 1e-11
|
1295 |
+
},
|
1296 |
+
"mp_result": 7.728665739003835e-05
|
1297 |
+
},
|
1298 |
+
{
|
1299 |
+
"src_case": {
|
1300 |
+
"q": 4,
|
1301 |
+
"k": 20,
|
1302 |
+
"v": 20,
|
1303 |
+
"expected_atol": 1e-11,
|
1304 |
+
"expected_rtol": 1e-11
|
1305 |
+
},
|
1306 |
+
"mp_result": 0.38244500549096866
|
1307 |
+
},
|
1308 |
+
{
|
1309 |
+
"src_case": {
|
1310 |
+
"q": 4,
|
1311 |
+
"k": 20,
|
1312 |
+
"v": 100,
|
1313 |
+
"expected_atol": 1e-11,
|
1314 |
+
"expected_rtol": 1e-11
|
1315 |
+
},
|
1316 |
+
"mp_result": 0.45434978340834464
|
1317 |
+
},
|
1318 |
+
{
|
1319 |
+
"src_case": {
|
1320 |
+
"q": 4,
|
1321 |
+
"k": 20,
|
1322 |
+
"v": 50,
|
1323 |
+
"expected_atol": 1e-11,
|
1324 |
+
"expected_rtol": 1e-11
|
1325 |
+
},
|
1326 |
+
"mp_result": 0.43334135870667473
|
1327 |
+
},
|
1328 |
+
{
|
1329 |
+
"src_case": {
|
1330 |
+
"q": 10,
|
1331 |
+
"k": 3,
|
1332 |
+
"v": 100,
|
1333 |
+
"expected_atol": 1e-11,
|
1334 |
+
"expected_rtol": 1e-11
|
1335 |
+
},
|
1336 |
+
"mp_result": 2.159522630228393e-09
|
1337 |
+
},
|
1338 |
+
{
|
1339 |
+
"src_case": {
|
1340 |
+
"q": 4,
|
1341 |
+
"k": 20,
|
1342 |
+
"v": 120,
|
1343 |
+
"expected_atol": 1e-11,
|
1344 |
+
"expected_rtol": 1e-11
|
1345 |
+
},
|
1346 |
+
"mp_result": 0.45807877248528855
|
1347 |
+
},
|
1348 |
+
{
|
1349 |
+
"src_case": {
|
1350 |
+
"q": 10,
|
1351 |
+
"k": 3,
|
1352 |
+
"v": 50,
|
1353 |
+
"expected_atol": 1e-11,
|
1354 |
+
"expected_rtol": 1e-11
|
1355 |
+
},
|
1356 |
+
"mp_result": 3.5303467191175695e-08
|
1357 |
+
},
|
1358 |
+
{
|
1359 |
+
"src_case": {
|
1360 |
+
"q": 10,
|
1361 |
+
"k": 3,
|
1362 |
+
"v": 20,
|
1363 |
+
"expected_atol": 1e-11,
|
1364 |
+
"expected_rtol": 1e-11
|
1365 |
+
},
|
1366 |
+
"mp_result": 3.121281850105421e-06
|
1367 |
+
},
|
1368 |
+
{
|
1369 |
+
"src_case": {
|
1370 |
+
"q": 10,
|
1371 |
+
"k": 3,
|
1372 |
+
"v": 120,
|
1373 |
+
"expected_atol": 1e-11,
|
1374 |
+
"expected_rtol": 1e-11
|
1375 |
+
},
|
1376 |
+
"mp_result": 1.1901591191700855e-09
|
1377 |
+
},
|
1378 |
+
{
|
1379 |
+
"src_case": {
|
1380 |
+
"q": 10,
|
1381 |
+
"k": 10,
|
1382 |
+
"v": 10,
|
1383 |
+
"expected_atol": 1e-11,
|
1384 |
+
"expected_rtol": 1e-11
|
1385 |
+
},
|
1386 |
+
"mp_result": 0.0006784051704217357
|
1387 |
+
},
|
1388 |
+
{
|
1389 |
+
"src_case": {
|
1390 |
+
"q": 10,
|
1391 |
+
"k": 10,
|
1392 |
+
"v": 3,
|
1393 |
+
"expected_atol": 1e-11,
|
1394 |
+
"expected_rtol": 1e-11
|
1395 |
+
},
|
1396 |
+
"mp_result": 0.011845582636101885
|
1397 |
+
},
|
1398 |
+
{
|
1399 |
+
"src_case": {
|
1400 |
+
"q": 10,
|
1401 |
+
"k": 10,
|
1402 |
+
"v": 20,
|
1403 |
+
"expected_atol": 1e-11,
|
1404 |
+
"expected_rtol": 1e-11
|
1405 |
+
},
|
1406 |
+
"mp_result": 3.844183552674918e-05
|
1407 |
+
},
|
1408 |
+
{
|
1409 |
+
"src_case": {
|
1410 |
+
"q": 10,
|
1411 |
+
"k": 10,
|
1412 |
+
"v": 100,
|
1413 |
+
"expected_atol": 1e-11,
|
1414 |
+
"expected_rtol": 1e-11
|
1415 |
+
},
|
1416 |
+
"mp_result": 3.215093171597309e-08
|
1417 |
+
},
|
1418 |
+
{
|
1419 |
+
"src_case": {
|
1420 |
+
"q": 10,
|
1421 |
+
"k": 10,
|
1422 |
+
"v": 50,
|
1423 |
+
"expected_atol": 1e-11,
|
1424 |
+
"expected_rtol": 1e-11
|
1425 |
+
},
|
1426 |
+
"mp_result": 5.125792577534542e-07
|
1427 |
+
},
|
1428 |
+
{
|
1429 |
+
"src_case": {
|
1430 |
+
"q": 10,
|
1431 |
+
"k": 10,
|
1432 |
+
"v": 120,
|
1433 |
+
"expected_atol": 1e-11,
|
1434 |
+
"expected_rtol": 1e-11
|
1435 |
+
},
|
1436 |
+
"mp_result": 1.7759015355532446e-08
|
1437 |
+
},
|
1438 |
+
{
|
1439 |
+
"src_case": {
|
1440 |
+
"q": 10,
|
1441 |
+
"k": 20,
|
1442 |
+
"v": 10,
|
1443 |
+
"expected_atol": 1e-11,
|
1444 |
+
"expected_rtol": 1e-11
|
1445 |
+
},
|
1446 |
+
"mp_result": 0.0017957646258393628
|
1447 |
+
},
|
1448 |
+
{
|
1449 |
+
"src_case": {
|
1450 |
+
"q": 10,
|
1451 |
+
"k": 20,
|
1452 |
+
"v": 3,
|
1453 |
+
"expected_atol": 1e-11,
|
1454 |
+
"expected_rtol": 1e-11
|
1455 |
+
},
|
1456 |
+
"mp_result": 0.018534407764819284
|
1457 |
+
},
|
1458 |
+
{
|
1459 |
+
"src_case": {
|
1460 |
+
"q": 10,
|
1461 |
+
"k": 20,
|
1462 |
+
"v": 20,
|
1463 |
+
"expected_atol": 1e-11,
|
1464 |
+
"expected_rtol": 1e-11
|
1465 |
+
},
|
1466 |
+
"mp_result": 0.00013316083413164858
|
1467 |
+
},
|
1468 |
+
{
|
1469 |
+
"src_case": {
|
1470 |
+
"q": 10,
|
1471 |
+
"k": 20,
|
1472 |
+
"v": 50,
|
1473 |
+
"expected_atol": 1e-11,
|
1474 |
+
"expected_rtol": 1e-11
|
1475 |
+
},
|
1476 |
+
"mp_result": 2.082489228991225e-06
|
1477 |
+
},
|
1478 |
+
{
|
1479 |
+
"src_case": {
|
1480 |
+
"q": 10,
|
1481 |
+
"k": 20,
|
1482 |
+
"v": 100,
|
1483 |
+
"expected_atol": 1e-11,
|
1484 |
+
"expected_rtol": 1e-11
|
1485 |
+
},
|
1486 |
+
"mp_result": 1.3444226792257012e-07
|
1487 |
+
},
|
1488 |
+
{
|
1489 |
+
"src_case": {
|
1490 |
+
"q": 10,
|
1491 |
+
"k": 20,
|
1492 |
+
"v": 120,
|
1493 |
+
"expected_atol": 1e-11,
|
1494 |
+
"expected_rtol": 1e-11
|
1495 |
+
},
|
1496 |
+
"mp_result": 7.446912854228521e-08
|
1497 |
+
}
|
1498 |
+
]
|
1499 |
+
}
|
venv/lib/python3.10/site-packages/scipy/stats/tests/test_axis_nan_policy.py
ADDED
@@ -0,0 +1,1188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Many scipy.stats functions support `axis` and `nan_policy` parameters.
|
2 |
+
# When the two are combined, it can be tricky to get all the behavior just
|
3 |
+
# right. This file contains a suite of common tests for scipy.stats functions
|
4 |
+
# that support `axis` and `nan_policy` and additional tests for some associated
|
5 |
+
# functions in stats._util.
|
6 |
+
|
7 |
+
from itertools import product, combinations_with_replacement, permutations
|
8 |
+
import re
|
9 |
+
import pickle
|
10 |
+
import pytest
|
11 |
+
|
12 |
+
import numpy as np
|
13 |
+
from numpy.testing import assert_allclose, assert_equal, suppress_warnings
|
14 |
+
from scipy import stats
|
15 |
+
from scipy.stats import norm # type: ignore[attr-defined]
|
16 |
+
from scipy.stats._axis_nan_policy import _masked_arrays_2_sentinel_arrays
|
17 |
+
from scipy._lib._util import AxisError
|
18 |
+
|
19 |
+
|
20 |
+
def unpack_ttest_result(res):
|
21 |
+
low, high = res.confidence_interval()
|
22 |
+
return (res.statistic, res.pvalue, res.df, res._standard_error,
|
23 |
+
res._estimate, low, high)
|
24 |
+
|
25 |
+
|
26 |
+
def _get_ttest_ci(ttest):
|
27 |
+
# get a function that returns the CI bounds of provided `ttest`
|
28 |
+
def ttest_ci(*args, **kwargs):
|
29 |
+
res = ttest(*args, **kwargs)
|
30 |
+
return res.confidence_interval()
|
31 |
+
return ttest_ci
|
32 |
+
|
33 |
+
|
34 |
+
axis_nan_policy_cases = [
|
35 |
+
# function, args, kwds, number of samples, number of outputs,
|
36 |
+
# ... paired, unpacker function
|
37 |
+
# args, kwds typically aren't needed; just showing that they work
|
38 |
+
(stats.kruskal, tuple(), dict(), 3, 2, False, None), # 4 samples is slow
|
39 |
+
(stats.ranksums, ('less',), dict(), 2, 2, False, None),
|
40 |
+
(stats.mannwhitneyu, tuple(), {'method': 'asymptotic'}, 2, 2, False, None),
|
41 |
+
(stats.wilcoxon, ('pratt',), {'mode': 'auto'}, 2, 2, True,
|
42 |
+
lambda res: (res.statistic, res.pvalue)),
|
43 |
+
(stats.wilcoxon, tuple(), dict(), 1, 2, True,
|
44 |
+
lambda res: (res.statistic, res.pvalue)),
|
45 |
+
(stats.wilcoxon, tuple(), {'mode': 'approx'}, 1, 3, True,
|
46 |
+
lambda res: (res.statistic, res.pvalue, res.zstatistic)),
|
47 |
+
(stats.gmean, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
48 |
+
(stats.hmean, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
49 |
+
(stats.pmean, (1.42,), dict(), 1, 1, False, lambda x: (x,)),
|
50 |
+
(stats.sem, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
51 |
+
(stats.iqr, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
52 |
+
(stats.kurtosis, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
53 |
+
(stats.skew, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
54 |
+
(stats.kstat, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
55 |
+
(stats.kstatvar, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
56 |
+
(stats.moment, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
57 |
+
(stats.moment, tuple(), dict(order=[1, 2]), 1, 2, False, None),
|
58 |
+
(stats.jarque_bera, tuple(), dict(), 1, 2, False, None),
|
59 |
+
(stats.ttest_1samp, (np.array([0]),), dict(), 1, 7, False,
|
60 |
+
unpack_ttest_result),
|
61 |
+
(stats.ttest_rel, tuple(), dict(), 2, 7, True, unpack_ttest_result),
|
62 |
+
(stats.ttest_ind, tuple(), dict(), 2, 7, False, unpack_ttest_result),
|
63 |
+
(_get_ttest_ci(stats.ttest_1samp), (0,), dict(), 1, 2, False, None),
|
64 |
+
(_get_ttest_ci(stats.ttest_rel), tuple(), dict(), 2, 2, True, None),
|
65 |
+
(_get_ttest_ci(stats.ttest_ind), tuple(), dict(), 2, 2, False, None),
|
66 |
+
(stats.mode, tuple(), dict(), 1, 2, True, lambda x: (x.mode, x.count)),
|
67 |
+
(stats.differential_entropy, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
68 |
+
(stats.variation, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
69 |
+
(stats.friedmanchisquare, tuple(), dict(), 3, 2, True, None),
|
70 |
+
(stats.brunnermunzel, tuple(), dict(), 2, 2, False, None),
|
71 |
+
(stats.mood, tuple(), {}, 2, 2, False, None),
|
72 |
+
(stats.shapiro, tuple(), {}, 1, 2, False, None),
|
73 |
+
(stats.ks_1samp, (norm().cdf,), dict(), 1, 4, False,
|
74 |
+
lambda res: (*res, res.statistic_location, res.statistic_sign)),
|
75 |
+
(stats.ks_2samp, tuple(), dict(), 2, 4, False,
|
76 |
+
lambda res: (*res, res.statistic_location, res.statistic_sign)),
|
77 |
+
(stats.kstest, (norm().cdf,), dict(), 1, 4, False,
|
78 |
+
lambda res: (*res, res.statistic_location, res.statistic_sign)),
|
79 |
+
(stats.kstest, tuple(), dict(), 2, 4, False,
|
80 |
+
lambda res: (*res, res.statistic_location, res.statistic_sign)),
|
81 |
+
(stats.levene, tuple(), {}, 2, 2, False, None),
|
82 |
+
(stats.fligner, tuple(), {'center': 'trimmed', 'proportiontocut': 0.01},
|
83 |
+
2, 2, False, None),
|
84 |
+
(stats.ansari, tuple(), {}, 2, 2, False, None),
|
85 |
+
(stats.entropy, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
86 |
+
(stats.entropy, tuple(), dict(), 2, 1, True, lambda x: (x,)),
|
87 |
+
(stats.skewtest, tuple(), dict(), 1, 2, False, None),
|
88 |
+
(stats.kurtosistest, tuple(), dict(), 1, 2, False, None),
|
89 |
+
(stats.normaltest, tuple(), dict(), 1, 2, False, None),
|
90 |
+
(stats.cramervonmises, ("norm",), dict(), 1, 2, False,
|
91 |
+
lambda res: (res.statistic, res.pvalue)),
|
92 |
+
(stats.cramervonmises_2samp, tuple(), dict(), 2, 2, False,
|
93 |
+
lambda res: (res.statistic, res.pvalue)),
|
94 |
+
(stats.epps_singleton_2samp, tuple(), dict(), 2, 2, False, None),
|
95 |
+
(stats.bartlett, tuple(), {}, 2, 2, False, None),
|
96 |
+
(stats.tmean, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
97 |
+
(stats.tvar, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
98 |
+
(stats.tmin, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
99 |
+
(stats.tmax, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
100 |
+
(stats.tstd, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
101 |
+
(stats.tsem, tuple(), {}, 1, 1, False, lambda x: (x,)),
|
102 |
+
(stats.circmean, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
103 |
+
(stats.circvar, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
104 |
+
(stats.circstd, tuple(), dict(), 1, 1, False, lambda x: (x,)),
|
105 |
+
(stats.f_oneway, tuple(), {}, 2, 2, False, None),
|
106 |
+
(stats.alexandergovern, tuple(), {}, 2, 2, False,
|
107 |
+
lambda res: (res.statistic, res.pvalue)),
|
108 |
+
(stats.combine_pvalues, tuple(), {}, 1, 2, False, None),
|
109 |
+
]
|
110 |
+
|
111 |
+
# If the message is one of those expected, put nans in
|
112 |
+
# appropriate places of `statistics` and `pvalues`
|
113 |
+
too_small_messages = {"The input contains nan", # for nan_policy="raise"
|
114 |
+
"Degrees of freedom <= 0 for slice",
|
115 |
+
"x and y should have at least 5 elements",
|
116 |
+
"Data must be at least length 3",
|
117 |
+
"The sample must contain at least two",
|
118 |
+
"x and y must contain at least two",
|
119 |
+
"division by zero",
|
120 |
+
"Mean of empty slice",
|
121 |
+
"Data passed to ks_2samp must not be empty",
|
122 |
+
"Not enough test observations",
|
123 |
+
"Not enough other observations",
|
124 |
+
"Not enough observations.",
|
125 |
+
"At least one observation is required",
|
126 |
+
"zero-size array to reduction operation maximum",
|
127 |
+
"`x` and `y` must be of nonzero size.",
|
128 |
+
"The exact distribution of the Wilcoxon test",
|
129 |
+
"Data input must not be empty",
|
130 |
+
"Window length (0) must be positive and less",
|
131 |
+
"Window length (1) must be positive and less",
|
132 |
+
"Window length (2) must be positive and less",
|
133 |
+
"skewtest is not valid with less than",
|
134 |
+
"kurtosistest requires at least 5",
|
135 |
+
"attempt to get argmax of an empty sequence",
|
136 |
+
"No array values within given limits",
|
137 |
+
"Input sample size must be greater than one.",}
|
138 |
+
|
139 |
+
# If the message is one of these, results of the function may be inaccurate,
|
140 |
+
# but NaNs are not to be placed
|
141 |
+
inaccuracy_messages = {"Precision loss occurred in moment calculation",
|
142 |
+
"Sample size too small for normal approximation."}
|
143 |
+
|
144 |
+
# For some functions, nan_policy='propagate' should not just return NaNs
|
145 |
+
override_propagate_funcs = {stats.mode}
|
146 |
+
|
147 |
+
# For some functions, empty arrays produce non-NaN results
|
148 |
+
empty_special_case_funcs = {stats.entropy}
|
149 |
+
|
150 |
+
def _mixed_data_generator(n_samples, n_repetitions, axis, rng,
|
151 |
+
paired=False):
|
152 |
+
# generate random samples to check the response of hypothesis tests to
|
153 |
+
# samples with different (but broadcastable) shapes and various
|
154 |
+
# nan patterns (e.g. all nans, some nans, no nans) along axis-slices
|
155 |
+
|
156 |
+
data = []
|
157 |
+
for i in range(n_samples):
|
158 |
+
n_patterns = 6 # number of distinct nan patterns
|
159 |
+
n_obs = 20 if paired else 20 + i # observations per axis-slice
|
160 |
+
x = np.ones((n_repetitions, n_patterns, n_obs)) * np.nan
|
161 |
+
|
162 |
+
for j in range(n_repetitions):
|
163 |
+
samples = x[j, :, :]
|
164 |
+
|
165 |
+
# case 0: axis-slice with all nans (0 reals)
|
166 |
+
# cases 1-3: axis-slice with 1-3 reals (the rest nans)
|
167 |
+
# case 4: axis-slice with mostly (all but two) reals
|
168 |
+
# case 5: axis slice with all reals
|
169 |
+
for k, n_reals in enumerate([0, 1, 2, 3, n_obs-2, n_obs]):
|
170 |
+
# for cases 1-3, need paired nansw to be in the same place
|
171 |
+
indices = rng.permutation(n_obs)[:n_reals]
|
172 |
+
samples[k, indices] = rng.random(size=n_reals)
|
173 |
+
|
174 |
+
# permute the axis-slices just to show that order doesn't matter
|
175 |
+
samples[:] = rng.permutation(samples, axis=0)
|
176 |
+
|
177 |
+
# For multi-sample tests, we want to test broadcasting and check
|
178 |
+
# that nan policy works correctly for each nan pattern for each input.
|
179 |
+
# This takes care of both simultaneously.
|
180 |
+
new_shape = [n_repetitions] + [1]*n_samples + [n_obs]
|
181 |
+
new_shape[1 + i] = 6
|
182 |
+
x = x.reshape(new_shape)
|
183 |
+
|
184 |
+
x = np.moveaxis(x, -1, axis)
|
185 |
+
data.append(x)
|
186 |
+
return data
|
187 |
+
|
188 |
+
|
189 |
+
def _homogeneous_data_generator(n_samples, n_repetitions, axis, rng,
|
190 |
+
paired=False, all_nans=True):
|
191 |
+
# generate random samples to check the response of hypothesis tests to
|
192 |
+
# samples with different (but broadcastable) shapes and homogeneous
|
193 |
+
# data (all nans or all finite)
|
194 |
+
data = []
|
195 |
+
for i in range(n_samples):
|
196 |
+
n_obs = 20 if paired else 20 + i # observations per axis-slice
|
197 |
+
shape = [n_repetitions] + [1]*n_samples + [n_obs]
|
198 |
+
shape[1 + i] = 2
|
199 |
+
x = np.ones(shape) * np.nan if all_nans else rng.random(shape)
|
200 |
+
x = np.moveaxis(x, -1, axis)
|
201 |
+
data.append(x)
|
202 |
+
return data
|
203 |
+
|
204 |
+
|
205 |
+
def nan_policy_1d(hypotest, data1d, unpacker, *args, n_outputs=2,
                  nan_policy='raise', paired=False, _no_deco=True, **kwds):
    """Reference implementation of `nan_policy` behavior for 1-d samples.

    Applies the requested policy to `data1d` by hand, then calls the
    undecorated `hypotest` and returns the unpacked result.
    """
    if nan_policy == 'raise':
        # any NaN anywhere in any sample is an error
        if any(np.isnan(sample).any() for sample in data1d):
            raise ValueError("The input contains nan values")

    elif (nan_policy == 'propagate'
            and hypotest not in override_propagate_funcs):
        # Returning NaNs is the right propagation behavior for every test
        # exercised here, but many tests don't propagate correctly on their
        # own (e.g. they treat np.nan like np.inf, which doesn't make sense
        # when ranks are involved), so emulate propagation here instead of
        # delegating it.
        if any(np.isnan(sample).any() for sample in data1d):
            return np.full(n_outputs, np.nan)

    elif nan_policy == 'omit':
        # drop NaNs -- for paired tests, drop whole observations in which
        # at least one sample has a NaN
        if paired:
            combined_mask = np.isnan(data1d[0])
            for sample in data1d[1:]:
                combined_mask |= np.isnan(sample)
            data1d = [sample[~combined_mask] for sample in data1d]
        else:
            data1d = [sample[~np.isnan(sample)] for sample in data1d]

    return unpacker(hypotest(*data1d, *args, _no_deco=_no_deco, **kwds))
|
235 |
+
|
236 |
+
|
237 |
+
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.parametrize(("hypotest", "args", "kwds", "n_samples", "n_outputs",
                          "paired", "unpacker"), axis_nan_policy_cases)
@pytest.mark.parametrize(("nan_policy"), ("propagate", "omit", "raise"))
@pytest.mark.parametrize(("axis"), (1,))
@pytest.mark.parametrize(("data_generator"), ("mixed",))
def test_axis_nan_policy_fast(hypotest, args, kwds, n_samples, n_outputs,
                              paired, unpacker, nan_policy, axis,
                              data_generator):
    # Fast spot-check: exercise `_axis_nan_policy_test` for a single axis (1)
    # and a single data pattern ('mixed'); the exhaustive combinations are
    # covered by the slow `test_axis_nan_policy_full`.
    _axis_nan_policy_test(hypotest, args, kwds, n_samples, n_outputs, paired,
                          unpacker, nan_policy, axis, data_generator)
|
249 |
+
|
250 |
+
|
251 |
+
@pytest.mark.slow
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.parametrize(("hypotest", "args", "kwds", "n_samples", "n_outputs",
                          "paired", "unpacker"), axis_nan_policy_cases)
@pytest.mark.parametrize(("nan_policy"), ("propagate", "omit", "raise"))
@pytest.mark.parametrize(("axis"), range(-3, 3))
@pytest.mark.parametrize(("data_generator"),
                         ("all_nans", "all_finite", "mixed"))
def test_axis_nan_policy_full(hypotest, args, kwds, n_samples, n_outputs,
                              paired, unpacker, nan_policy, axis,
                              data_generator):
    # Exhaustive check: all data patterns and every valid axis (negative and
    # positive) for each `nan_policy`. Marked slow; `test_axis_nan_policy_fast`
    # is the quick variant.
    _axis_nan_policy_test(hypotest, args, kwds, n_samples, n_outputs, paired,
                          unpacker, nan_policy, axis, data_generator)
|
265 |
+
|
266 |
+
|
267 |
+
def _axis_nan_policy_test(hypotest, args, kwds, n_samples, n_outputs, paired,
                          unpacker, nan_policy, axis, data_generator):
    """Compare vectorized `nan_policy` behavior against `nan_policy_1d`.

    Populates reference `statistics`/`pvalues` arrays one axis-slice at a
    time using the reference implementation (`nan_policy_1d` with
    `np.ndenumerate`), then checks that a single vectorized call to
    `hypotest` reproduces them -- or, for ``nan_policy='raise'`` with NaNs
    present, that it raises the expected error.
    """
    # Some hypothesis tests return a non-iterable that needs an `unpacker` to
    # extract the statistic and p-value. For those that don't:
    if not unpacker:
        def unpacker(res):
            return res

    rng = np.random.default_rng(0)

    # Generate multi-dimensional test data with all important combinations
    # of patterns of nans along `axis`
    n_repetitions = 3  # number of repetitions of each pattern
    data_gen_kwds = {'n_samples': n_samples, 'n_repetitions': n_repetitions,
                     'axis': axis, 'rng': rng, 'paired': paired}
    if data_generator == 'mixed':
        inherent_size = 6  # number of distinct types of patterns
        data = _mixed_data_generator(**data_gen_kwds)
    elif data_generator == 'all_nans':
        inherent_size = 2  # hard-coded in _homogeneous_data_generator
        data_gen_kwds['all_nans'] = True
        data = _homogeneous_data_generator(**data_gen_kwds)
    elif data_generator == 'all_finite':
        inherent_size = 2  # hard-coded in _homogeneous_data_generator
        data_gen_kwds['all_nans'] = False
        data = _homogeneous_data_generator(**data_gen_kwds)

    output_shape = [n_repetitions] + [inherent_size]*n_samples

    # To generate reference behavior to compare against, loop over the axis-
    # slices in data. Make indexing easier by moving `axis` to the end and
    # broadcasting all samples to the same shape.
    data_b = [np.moveaxis(sample, axis, -1) for sample in data]
    data_b = [np.broadcast_to(sample, output_shape + [sample.shape[-1]])
              for sample in data_b]
    statistics = np.zeros(output_shape)
    pvalues = np.zeros(output_shape)

    for i, _ in np.ndenumerate(statistics):
        data1d = [sample[i] for sample in data_b]
        with np.errstate(divide='ignore', invalid='ignore'):
            try:
                res1d = nan_policy_1d(hypotest, data1d, unpacker, *args,
                                      n_outputs=n_outputs,
                                      nan_policy=nan_policy,
                                      paired=paired, _no_deco=True, **kwds)

                # Eventually we'll check the results of a single, vectorized
                # call of `hypotest` against the arrays `statistics` and
                # `pvalues` populated using the reference `nan_policy_1d`.
                # But while we're at it, check the results of a 1D call to
                # `hypotest` against the reference `nan_policy_1d`.
                res1db = unpacker(hypotest(*data1d, *args,
                                           nan_policy=nan_policy, **kwds))
                assert_equal(res1db[0], res1d[0])
                if len(res1db) == 2:
                    assert_equal(res1db[1], res1d[1])

            # When there is not enough data in 1D samples, many existing
            # hypothesis tests raise errors instead of returning nans.
            # For vectorized calls, we put nans in the corresponding elements
            # of the output.
            except (RuntimeWarning, UserWarning, ValueError,
                    ZeroDivisionError) as e:

                # whatever it is, make sure same error is raised by both
                # `nan_policy_1d` and `hypotest`
                with pytest.raises(type(e), match=re.escape(str(e))):
                    nan_policy_1d(hypotest, data1d, unpacker, *args,
                                  n_outputs=n_outputs, nan_policy=nan_policy,
                                  paired=paired, _no_deco=True, **kwds)
                with pytest.raises(type(e), match=re.escape(str(e))):
                    hypotest(*data1d, *args, nan_policy=nan_policy, **kwds)

                # too-small input: the vectorized call is expected to emit
                # NaN for this slice
                if any([str(e).startswith(message)
                        for message in too_small_messages]):
                    res1d = np.full(n_outputs, np.nan)
                # known-inaccurate case: recompute with warnings suppressed
                elif any([str(e).startswith(message)
                          for message in inaccuracy_messages]):
                    with suppress_warnings() as sup:
                        sup.filter(RuntimeWarning)
                        sup.filter(UserWarning)
                        res1d = nan_policy_1d(hypotest, data1d, unpacker,
                                              *args, n_outputs=n_outputs,
                                              nan_policy=nan_policy,
                                              paired=paired, _no_deco=True,
                                              **kwds)
                else:
                    raise e
        statistics[i] = res1d[0]
        if len(res1d) == 2:
            pvalues[i] = res1d[1]

    # Perform a vectorized call to the hypothesis test.
    # If `nan_policy == 'raise'`, check that it raises the appropriate error.
    # If not, compare the output against `statistics` and `pvalues`.
    if nan_policy == 'raise' and not data_generator == "all_finite":
        message = 'The input contains nan values'
        with pytest.raises(ValueError, match=message):
            hypotest(*data, axis=axis, nan_policy=nan_policy, *args, **kwds)

    else:
        with suppress_warnings() as sup, \
                np.errstate(divide='ignore', invalid='ignore'):
            sup.filter(RuntimeWarning, "Precision loss occurred in moment")
            sup.filter(UserWarning, "Sample size too small for normal "
                                    "approximation.")
            res = unpacker(hypotest(*data, axis=axis, nan_policy=nan_policy,
                                    *args, **kwds))
        assert_allclose(res[0], statistics, rtol=1e-15)
        assert_equal(res[0].dtype, statistics.dtype)

        if len(res) == 2:
            assert_allclose(res[1], pvalues, rtol=1e-15)
            assert_equal(res[1].dtype, pvalues.dtype)
|
385 |
+
|
386 |
+
|
387 |
+
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.parametrize(("hypotest", "args", "kwds", "n_samples", "n_outputs",
                          "paired", "unpacker"), axis_nan_policy_cases)
@pytest.mark.parametrize(("nan_policy"), ("propagate", "omit", "raise"))
@pytest.mark.parametrize(("data_generator"),
                         ("all_nans", "all_finite", "mixed", "empty"))
def test_axis_nan_policy_axis_is_None(hypotest, args, kwds, n_samples,
                                      n_outputs, paired, unpacker, nan_policy,
                                      data_generator):
    """Check for correct behavior when ``axis=None`` (inputs are raveled)."""

    if not unpacker:
        def unpacker(res):
            return res

    rng = np.random.default_rng(0)

    if data_generator == "empty":
        data = [rng.random((2, 0)) for i in range(n_samples)]
    else:
        data = [rng.random((2, 20)) for i in range(n_samples)]

    if data_generator == "mixed":
        # sprinkle ~10% NaNs into otherwise finite data
        masks = [rng.random((2, 20)) > 0.9 for i in range(n_samples)]
        for sample, mask in zip(data, masks):
            sample[mask] = np.nan
    elif data_generator == "all_nans":
        data = [sample * np.nan for sample in data]

    data_raveled = [sample.ravel() for sample in data]

    if nan_policy == 'raise' and data_generator not in {"all_finite", "empty"}:
        message = 'The input contains nan values'

        # check for correct behavior whether or not data is 1d to begin with
        with pytest.raises(ValueError, match=message):
            hypotest(*data, axis=None, nan_policy=nan_policy,
                     *args, **kwds)
        with pytest.raises(ValueError, match=message):
            hypotest(*data_raveled, axis=None, nan_policy=nan_policy,
                     *args, **kwds)

    else:
        # behavior of reference implementation with 1d input, hypotest with 1d
        # input, and hypotest with Nd input should match, whether that means
        # that outputs are equal or they raise the same exception

        ea_str, eb_str, ec_str = None, None, None
        with np.errstate(divide='ignore', invalid='ignore'):
            try:
                res1da = nan_policy_1d(hypotest, data_raveled, unpacker, *args,
                                       n_outputs=n_outputs,
                                       nan_policy=nan_policy, paired=paired,
                                       _no_deco=True, **kwds)
            except (RuntimeWarning, ValueError, ZeroDivisionError) as ea:
                ea_str = str(ea)

            try:
                res1db = unpacker(hypotest(*data_raveled, *args,
                                           nan_policy=nan_policy, **kwds))
            except (RuntimeWarning, ValueError, ZeroDivisionError) as eb:
                eb_str = str(eb)

            try:
                res1dc = unpacker(hypotest(*data, *args, axis=None,
                                           nan_policy=nan_policy, **kwds))
            except (RuntimeWarning, ValueError, ZeroDivisionError) as ec:
                ec_str = str(ec)

            if ea_str or eb_str or ec_str:
                # NOTE(review): only `ea_str` is checked against
                # `too_small_messages` even if only eb/ec raised; the second
                # assertion below makes the three messages equal, so this is
                # presumably sufficient -- confirm.
                assert any([str(ea_str).startswith(message)
                            for message in too_small_messages])
                assert ea_str == eb_str == ec_str
            else:
                assert_equal(res1db, res1da)
                assert_equal(res1dc, res1da)
                for item in list(res1da) + list(res1db) + list(res1dc):
                    # Most functions naturally return NumPy numbers, which
                    # are drop-in replacements for the Python versions but with
                    # desirable attributes. Make sure this is consistent.
                    assert np.issubdtype(item.dtype, np.number)
|
468 |
+
|
469 |
+
# Test keepdims for:
# - single-output and multi-output functions (gmean and mannwhitneyu)
# - Axis negative, positive, None, and tuple
# - 1D with no NaNs
# - 1D with NaN propagation
# - Zero-sized output
@pytest.mark.parametrize("nan_policy", ("omit", "propagate"))
@pytest.mark.parametrize(
    ("hypotest", "args", "kwds", "n_samples", "unpacker"),
    ((stats.gmean, tuple(), dict(), 1, lambda x: (x,)),
     (stats.mannwhitneyu, tuple(), {'method': 'asymptotic'}, 2, None))
)
@pytest.mark.parametrize(
    ("sample_shape", "axis_cases"),
    (((2, 3, 3, 4), (None, 0, -1, (0, 2), (1, -1), (3, 1, 2, 0))),
     ((10, ), (0, -1)),
     ((20, 0), (0, 1)))
)
def test_keepdims(hypotest, args, kwds, n_samples, unpacker,
                  sample_shape, axis_cases, nan_policy):
    """Check that ``keepdims=True`` retains reduced axes with length 1."""
    if not unpacker:
        def unpacker(res):
            return res
    rng = np.random.default_rng(0)
    data = [rng.random(sample_shape) for _ in range(n_samples)]
    # NaN-contaminated copies to exercise `nan_policy` alongside `keepdims`
    nan_data = [sample.copy() for sample in data]
    nan_mask = [rng.random(sample_shape) < 0.2 for _ in range(n_samples)]
    for sample, mask in zip(nan_data, nan_mask):
        sample[mask] = np.nan
    for axis in axis_cases:
        # with keepdims, each reduced axis should have length 1 in the output
        expected_shape = list(sample_shape)
        if axis is None:
            expected_shape = np.ones(len(sample_shape))
        else:
            if isinstance(axis, int):
                expected_shape[axis] = 1
            else:
                for ax in axis:
                    expected_shape[ax] = 1
        expected_shape = tuple(expected_shape)
        res = unpacker(hypotest(*data, *args, axis=axis, keepdims=True,
                                **kwds))
        res_base = unpacker(hypotest(*data, *args, axis=axis, keepdims=False,
                                     **kwds))
        nan_res = unpacker(hypotest(*nan_data, *args, axis=axis,
                                    keepdims=True, nan_policy=nan_policy,
                                    **kwds))
        nan_res_base = unpacker(hypotest(*nan_data, *args, axis=axis,
                                         keepdims=False,
                                         nan_policy=nan_policy, **kwds))
        for r, r_base, rn, rn_base in zip(res, res_base, nan_res,
                                          nan_res_base):
            # squeezing the kept axes must reproduce the keepdims=False result
            assert r.shape == expected_shape
            r = np.squeeze(r, axis=axis)
            assert_equal(r, r_base)
            assert rn.shape == expected_shape
            rn = np.squeeze(rn, axis=axis)
            assert_equal(rn, rn_base)
|
528 |
+
|
529 |
+
|
530 |
+
@pytest.mark.parametrize(("fun", "nsamp"),
                         [(stats.kstat, 1),
                          (stats.kstatvar, 1)])
def test_hypotest_back_compat_no_axis(fun, nsamp):
    """Check backward compatibility for functions that historically had no
    `axis` parameter: by default they should ravel N-d input, matching both
    the undecorated call (``_no_deco=True``) and an explicitly raveled call.
    """
    m, n = 8, 9

    rng = np.random.default_rng(0)
    x = rng.random((nsamp, m, n))
    res = fun(*x)
    res2 = fun(*x, _no_deco=True)
    res3 = fun([xi.ravel() for xi in x])
    assert_equal(res, res2)
    assert_equal(res, res3)
|
543 |
+
|
544 |
+
|
545 |
+
@pytest.mark.parametrize(("axis"), (0, 1, 2))
def test_axis_nan_policy_decorated_positional_axis(axis):
    # Test for correct behavior of function decorated with
    # _axis_nan_policy_decorator whether `axis` is provided as positional or
    # keyword argument

    shape = (8, 9, 10)
    rng = np.random.default_rng(0)
    x = rng.random(shape)
    y = rng.random(shape)
    res1 = stats.mannwhitneyu(x, y, True, 'two-sided', axis)
    res2 = stats.mannwhitneyu(x, y, True, 'two-sided', axis=axis)
    assert_equal(res1, res2)

    # passing `axis` both positionally and by keyword must raise TypeError,
    # just as it would for an undecorated function
    message = "mannwhitneyu() got multiple values for argument 'axis'"
    with pytest.raises(TypeError, match=re.escape(message)):
        stats.mannwhitneyu(x, y, True, 'two-sided', axis, axis=axis)
|
562 |
+
|
563 |
+
|
564 |
+
def test_axis_nan_policy_decorated_positional_args():
    # Test for correct behavior of function decorated with
    # _axis_nan_policy_decorator when function accepts *args

    shape = (3, 8, 9, 10)
    rng = np.random.default_rng(0)
    x = rng.random(shape)
    x[0, 0, 0, 0] = np.nan
    stats.kruskal(*x)

    # a *args-style function has no named sample parameter, so `samples=`
    # must be rejected whether or not positional samples are also given
    message = "kruskal() got an unexpected keyword argument 'samples'"
    with pytest.raises(TypeError, match=re.escape(message)):
        stats.kruskal(samples=x)

    with pytest.raises(TypeError, match=re.escape(message)):
        stats.kruskal(*x, samples=x)
|
580 |
+
|
581 |
+
|
582 |
+
def test_axis_nan_policy_decorated_keyword_samples():
    # Test for correct behavior of function decorated with
    # _axis_nan_policy_decorator whether samples are provided as positional or
    # keyword arguments

    shape = (2, 8, 9, 10)
    rng = np.random.default_rng(0)
    x = rng.random(shape)
    x[0, 0, 0, 0] = np.nan
    res1 = stats.mannwhitneyu(*x)
    res2 = stats.mannwhitneyu(x=x[0], y=x[1])
    assert_equal(res1, res2)

    # supplying the same sample positionally and by keyword must raise
    message = "mannwhitneyu() got multiple values for argument"
    with pytest.raises(TypeError, match=re.escape(message)):
        stats.mannwhitneyu(*x, x=x[0], y=x[1])
|
598 |
+
|
599 |
+
|
600 |
+
@pytest.mark.parametrize(("hypotest", "args", "kwds", "n_samples", "n_outputs",
                          "paired", "unpacker"), axis_nan_policy_cases)
def test_axis_nan_policy_decorated_pickled(hypotest, args, kwds, n_samples,
                                           n_outputs, paired, unpacker):
    """Check that decorated functions survive a pickle round-trip and the
    unpickled function produces the same results."""
    if "ttest_ci" in hypotest.__name__:
        pytest.skip("Can't pickle functions defined within functions.")

    rng = np.random.default_rng(0)

    # Some hypothesis tests return a non-iterable that needs an `unpacker` to
    # extract the statistic and p-value. For those that don't:
    if not unpacker:
        def unpacker(res):
            return res

    data = rng.uniform(size=(n_samples, 2, 30))
    pickled_hypotest = pickle.dumps(hypotest)
    unpickled_hypotest = pickle.loads(pickled_hypotest)
    res1 = unpacker(hypotest(*data, *args, axis=-1, **kwds))
    res2 = unpacker(unpickled_hypotest(*data, *args, axis=-1, **kwds))
    assert_allclose(res1, res2, rtol=1e-12)
|
621 |
+
|
622 |
+
|
623 |
+
def test_check_empty_inputs():
    """Check `_check_empty_inputs` for single-sample inputs.

    For a non-empty sample it should return None; for an empty sample it
    should return a NaN-filled or empty array of the appropriate shape.
    `np.mean` serves as the shape reference because, like the decorated
    statistics, it "consumes" `axis` while preserving the other axes.
    (Multi-sample behavior is exercised elsewhere in this module.)
    """
    for ndim in range(5):
        # all combinations of dimension lengths 0/1/2 for `ndim` dimensions
        for dims in combinations_with_replacement([0, 1, 2], ndim):
            for ax in range(len(dims)):
                sample = np.zeros(dims)
                result = stats._axis_nan_policy._check_empty_inputs((sample,),
                                                                    ax)
                if result is None:
                    # non-empty input: nothing more to check
                    continue
                with np.testing.suppress_warnings() as sup:
                    sup.filter(RuntimeWarning, "Mean of empty slice.")
                    sup.filter(RuntimeWarning, "invalid value encountered")
                    expected = sample.mean(axis=ax)
                np.testing.assert_equal(result, expected)
|
643 |
+
|
644 |
+
|
645 |
+
def _check_arrays_broadcastable(arrays, axis):
|
646 |
+
# https://numpy.org/doc/stable/user/basics.broadcasting.html
|
647 |
+
# "When operating on two arrays, NumPy compares their shapes element-wise.
|
648 |
+
# It starts with the trailing (i.e. rightmost) dimensions and works its
|
649 |
+
# way left.
|
650 |
+
# Two dimensions are compatible when
|
651 |
+
# 1. they are equal, or
|
652 |
+
# 2. one of them is 1
|
653 |
+
# ...
|
654 |
+
# Arrays do not need to have the same number of dimensions."
|
655 |
+
# (Clarification: if the arrays are compatible according to the criteria
|
656 |
+
# above and an array runs out of dimensions, it is still compatible.)
|
657 |
+
# Below, we follow the rules above except ignoring `axis`
|
658 |
+
|
659 |
+
n_dims = max([arr.ndim for arr in arrays])
|
660 |
+
if axis is not None:
|
661 |
+
# convert to negative axis
|
662 |
+
axis = (-n_dims + axis) if axis >= 0 else axis
|
663 |
+
|
664 |
+
for dim in range(1, n_dims+1): # we'll index from -1 to -n_dims, inclusive
|
665 |
+
if -dim == axis:
|
666 |
+
continue # ignore lengths along `axis`
|
667 |
+
|
668 |
+
dim_lengths = set()
|
669 |
+
for arr in arrays:
|
670 |
+
if dim <= arr.ndim and arr.shape[-dim] != 1:
|
671 |
+
dim_lengths.add(arr.shape[-dim])
|
672 |
+
|
673 |
+
if len(dim_lengths) > 1:
|
674 |
+
return False
|
675 |
+
return True
|
676 |
+
|
677 |
+
|
678 |
+
@pytest.mark.slow
@pytest.mark.parametrize(("hypotest", "args", "kwds", "n_samples", "n_outputs",
                          "paired", "unpacker"), axis_nan_policy_cases)
def test_empty(hypotest, args, kwds, n_samples, n_outputs, paired, unpacker):
    """Check output shape when at least one input is empty.

    For broadcastable empty inputs, every output element should be NaN (or a
    special-cased empty value) with the shape `np.mean` would produce; for
    non-broadcastable inputs, both `_broadcast_concatenate` and the decorated
    function must raise the broadcasting ValueError.
    """
    if hypotest in override_propagate_funcs:
        reason = "Doesn't follow the usual pattern. Tested separately."
        pytest.skip(reason=reason)

    if unpacker is None:
        unpacker = lambda res: (res[0], res[1])  # noqa: E731

    def small_data_generator(n_samples, n_dims):

        def small_sample_generator(n_dims):
            # return all possible "small" arrays in up to n_dim dimensions
            for i in n_dims:
                # "small" means with size along dimension either 0 or 1
                for combo in combinations_with_replacement([0, 1, 2], i):
                    yield np.zeros(combo)

        # yield all possible combinations of small samples
        gens = [small_sample_generator(n_dims) for i in range(n_samples)]
        yield from product(*gens)

    n_dims = [2, 3]
    for samples in small_data_generator(n_samples, n_dims):

        # this test is only for arrays of zero size
        if not any(sample.size == 0 for sample in samples):
            continue

        max_axis = max(sample.ndim for sample in samples)

        # need to test for all valid values of `axis` parameter, too
        for axis in range(-max_axis, max_axis):

            try:
                # After broadcasting, all arrays are the same shape, so
                # the shape of the output should be the same as a single-
                # sample statistic. Use np.mean as a reference.
                concat = stats._stats_py._broadcast_concatenate(samples, axis)
                with np.testing.suppress_warnings() as sup:
                    sup.filter(RuntimeWarning, "Mean of empty slice.")
                    sup.filter(RuntimeWarning, "invalid value encountered")
                    expected = np.mean(concat, axis=axis) * np.nan

                # some functions return a defined value for empty input
                # instead of NaN; substitute it where NaN was expected
                if hypotest in empty_special_case_funcs:
                    empty_val = hypotest(*([[]]*len(samples)), *args, **kwds)
                    mask = np.isnan(expected)
                    expected[mask] = empty_val

                with np.testing.suppress_warnings() as sup:
                    # generated by f_oneway for too_small inputs
                    sup.filter(stats.DegenerateDataWarning)
                    res = hypotest(*samples, *args, axis=axis, **kwds)
                res = unpacker(res)

                for i in range(n_outputs):
                    assert_equal(res[i], expected)

            except ValueError:
                # confirm that the arrays truly are not broadcastable
                assert not _check_arrays_broadcastable(samples,
                                                       None if paired else axis)

                # confirm that _both_ `_broadcast_concatenate` and `hypotest`
                # produce this information.
                message = "Array shapes are incompatible for broadcasting."
                with pytest.raises(ValueError, match=message):
                    stats._stats_py._broadcast_concatenate(samples, axis,
                                                           paired)
                with pytest.raises(ValueError, match=message):
                    hypotest(*samples, *args, axis=axis, **kwds)
|
752 |
+
|
753 |
+
|
754 |
+
def test_masked_array_2_sentinel_array():
    """Check sentinel selection and conversion in
    `_masked_arrays_2_sentinel_arrays`: masked elements are replaced by a
    sentinel value not already present in the data, and non-masked inputs
    pass through unchanged."""
    # prepare arrays
    np.random.seed(0)
    A = np.random.rand(10, 11, 12)
    B = np.random.rand(12)
    mask = A < 0.5
    A = np.ma.masked_array(A, mask)

    # set arbitrary elements to special values
    # (these values might have been considered for use as sentinel values)
    max_float = np.finfo(np.float64).max
    max_float2 = np.nextafter(max_float, -np.inf)
    max_float3 = np.nextafter(max_float2, -np.inf)
    A[3, 4, 1] = np.nan
    A[4, 5, 2] = np.inf
    A[5, 6, 3] = max_float
    B[8] = np.nan
    B[7] = np.inf
    B[6] = max_float2

    # convert masked A to array with sentinel value, don't modify B
    out_arrays, sentinel = _masked_arrays_2_sentinel_arrays([A, B])
    A_out, B_out = out_arrays

    # check that good sentinel value was chosen (according to intended logic):
    # max_float and max_float2 occur in the data, so the next candidate down
    # (max_float3) should have been selected
    assert (sentinel != max_float) and (sentinel != max_float2)
    assert sentinel == max_float3

    # check that output arrays are as intended
    A_reference = A.data
    A_reference[A.mask] = sentinel
    np.testing.assert_array_equal(A_out, A_reference)
    assert B_out is B  # non-masked input is passed through by identity
|
787 |
+
|
788 |
+
|
789 |
+
def test_masked_dtype():
    # When _masked_arrays_2_sentinel_arrays was first added, it always
    # upcast the arrays to np.float64. After gh16662, check expected promotion
    # and that the expected sentinel is found.

    # these are important because the max of the promoted dtype is the first
    # candidate to be the sentinel value
    max16 = np.iinfo(np.int16).max
    max128c = np.finfo(np.complex128).max

    # a is a regular array, b has masked elements, and c has no masked elements
    a = np.array([1, 2, max16], dtype=np.int16)
    b = np.ma.array([1, 2, 1], dtype=np.int8, mask=[0, 1, 0])
    c = np.ma.array([1, 2, 1], dtype=np.complex128, mask=[0, 0, 0])

    # check integer masked -> sentinel conversion
    out_arrays, sentinel = _masked_arrays_2_sentinel_arrays([a, b])
    a_out, b_out = out_arrays
    assert sentinel == max16-1  # not max16 because max16 was in the data
    assert b_out.dtype == np.int16  # check expected promotion
    assert_allclose(b_out, [b[0], sentinel, b[-1]])  # check sentinel placement
    assert a_out is a  # not a masked array, so left untouched
    assert not isinstance(b_out, np.ma.MaskedArray)  # b became regular array

    # similarly with complex
    out_arrays, sentinel = _masked_arrays_2_sentinel_arrays([b, c])
    b_out, c_out = out_arrays
    assert sentinel == max128c  # max128c was not in the data
    assert b_out.dtype == np.complex128  # b got promoted
    assert_allclose(b_out, [b[0], sentinel, b[-1]])  # check sentinel placement
    assert not isinstance(b_out, np.ma.MaskedArray)  # b became regular array
    assert not isinstance(c_out, np.ma.MaskedArray)  # c became regular array

    # Also, check edge case when a sentinel value cannot be found in the data
    min8, max8 = np.iinfo(np.int8).min, np.iinfo(np.int8).max
    a = np.arange(min8, max8+1, dtype=np.int8)  # use all possible values
    mask1 = np.zeros_like(a, dtype=bool)
    mask0 = np.zeros_like(a, dtype=bool)

    # a masked value can be used as the sentinel
    mask1[1] = True
    a1 = np.ma.array(a, mask=mask1)
    out_arrays, sentinel = _masked_arrays_2_sentinel_arrays([a1])
    assert sentinel == min8+1

    # unless it's the smallest possible; skipped for simplicity (see code)
    mask0[0] = True
    a0 = np.ma.array(a, mask=mask0)
    message = "This function replaces masked elements with sentinel..."
    with pytest.raises(ValueError, match=message):
        _masked_arrays_2_sentinel_arrays([a0])

    # test that dtype is preserved in functions
    a = np.ma.array([1, 2, 3], mask=[0, 1, 0], dtype=np.float32)
    assert stats.gmean(a).dtype == np.float32
|
844 |
+
|
845 |
+
|
846 |
+
def test_masked_stat_1d():
    """Basic check of _axis_nan_policy_factory with 1-D masked samples:
    masked elements behave like omitted elements, and masked NaNs do not
    trigger the `nan_policy` machinery."""
    males = [19, 22, 16, 29, 24]
    females = [20, 11, 17, 12]
    expected = stats.mannwhitneyu(males, females)

    # an extra NaN, removed by nan_policy='omit', gives the same result
    with_nan = [20, 11, 17, np.nan, 12]
    res_omit = stats.mannwhitneyu(males, with_nan, nan_policy='omit')
    np.testing.assert_array_equal(res_omit, expected)

    # an extra (bogus) element hidden behind a mask gives the same result
    masked_extra = np.ma.masked_array([20, 11, 17, 1000, 12],
                                      mask=[False, False, False, True, False])
    res_masked = stats.mannwhitneyu(males, masked_extra)
    np.testing.assert_array_equal(res_masked, expected)

    # combining an omitted NaN with a masked element also matches
    omit_and_mask = np.ma.masked_array(
        [20, 11, 17, np.nan, 1000, 12],
        mask=[False, False, False, False, True, False])
    res_both = stats.mannwhitneyu(males, omit_and_mask, nan_policy='omit')
    np.testing.assert_array_equal(res_both, expected)

    # when the NaN itself is masked, every nan_policy gives the same result
    # because the NaN is removed before the policy is applied
    nan_masked = np.ma.masked_array(
        [20, 11, 17, np.nan, 1000, 12],
        mask=[False, False, False, True, True, False])
    res_propagate = stats.mannwhitneyu(males, nan_masked,
                                       nan_policy='propagate')
    res_raise = stats.mannwhitneyu(males, nan_masked, nan_policy='raise')
    np.testing.assert_array_equal(res_propagate, expected)
    np.testing.assert_array_equal(res_raise, expected)
|
879 |
+
|
880 |
+
|
881 |
+
@pytest.mark.parametrize(("axis"), range(-3, 3))
def test_masked_stat_3d(axis):
    # basic test of _axis_nan_policy_factory with 3D masked sample:
    # masking elements should be equivalent to setting them to NaN and
    # using nan_policy='omit', for broadcastable samples of mixed ndim
    np.random.seed(0)
    a = np.random.rand(3, 4, 5)
    b = np.random.rand(4, 5)
    c = np.random.rand(4, 1)

    mask_a = a < 0.1
    mask_c = [False, False, False, True]
    a_masked = np.ma.masked_array(a, mask=mask_a)
    c_masked = np.ma.masked_array(c, mask=mask_c)

    # NaN-based equivalents of the masked arrays
    a_nans = a.copy()
    a_nans[mask_a] = np.nan
    c_nans = c.copy()
    c_nans[mask_c] = np.nan

    res = stats.kruskal(a_nans, b, c_nans, nan_policy='omit', axis=axis)
    res2 = stats.kruskal(a_masked, b, c_masked, axis=axis)
    np.testing.assert_array_equal(res, res2)
|
902 |
+
|
903 |
+
|
904 |
+
def test_mixed_mask_nan_1():
    """Targeted test of `_axis_nan_policy_factory` with a 2D masked sample.

    Omitting elements via a mask must be equivalent to replacing them with
    NaN under ``nan_policy='omit'``; also exercises paired-sample sentinel
    value removal.
    """
    n_rows, n_cols = 3, 20
    axis = -1

    np.random.seed(0)
    x = np.random.rand(n_rows, n_cols)
    y = np.random.rand(n_rows, n_cols)
    bad_x1 = np.random.rand(n_rows, n_cols) < 0.2
    bad_x2 = np.random.rand(n_rows, n_cols) < 0.1
    bad_y1 = np.random.rand(n_rows, n_cols) < 0.15
    bad_y2 = np.random.rand(n_rows, n_cols) < 0.15
    bad_x1[2, :] = True  # an entire row of invalid entries

    # NaN-sentinel version: every invalid entry becomes NaN
    x_nans = x.copy()
    y_nans = y.copy()
    x_nans[bad_x1 | bad_x2] = np.nan
    y_nans[bad_y1 | bad_y2] = np.nan

    # mask the first set of invalid entries, NaN-out the second...
    x_m1 = np.ma.masked_array(x, mask=bad_x1)
    y_m1 = np.ma.masked_array(y, mask=bad_y1)
    x_m1[bad_x2] = np.nan
    y_m1[bad_y2] = np.nan

    # ...and vice versa
    x_m2 = np.ma.masked_array(x, mask=bad_x2)
    y_m2 = np.ma.masked_array(y, mask=bad_y2)
    x_m2[bad_x1] = np.nan
    y_m2[bad_y1] = np.nan

    # mask everything invalid; no NaNs remain visible
    x_m3 = np.ma.masked_array(x, mask=(bad_x1 | bad_x2))
    y_m3 = np.ma.masked_array(y, mask=(bad_y1 | bad_y2))

    expected = stats.wilcoxon(x_nans, y_nans, nan_policy='omit', axis=axis)
    results = [
        stats.wilcoxon(x_m1, y_m1, nan_policy='omit', axis=axis),
        stats.wilcoxon(x_m2, y_m2, nan_policy='omit', axis=axis),
        stats.wilcoxon(x_m3, y_m3, nan_policy='raise', axis=axis),
        stats.wilcoxon(x_m3, y_m3, nan_policy='propagate', axis=axis),
    ]

    for result in results:
        np.testing.assert_array_equal(result, expected)
|
949 |
+
|
950 |
+
|
951 |
+
def test_mixed_mask_nan_2():
    """Targeted test of `_axis_nan_policy_factory` with a 2D masked sample:
    check the expected interaction between masks and NaNs.
    """
    # Rows cover: [mixed nan/mask, all nans, all masked,
    #              unmasked nan, masked nan, unmasked non-nan]
    data = [[1, np.nan, 2], [np.nan, np.nan, np.nan], [1, 2, 3],
            [1, np.nan, 3], [1, np.nan, 3], [1, 2, 3]]
    mask = [[1, 0, 1], [0, 0, 0], [1, 1, 1],
            [0, 0, 0], [0, 1, 0], [0, 0, 0]]
    data_masked = np.ma.masked_array(data, mask=mask)
    other = [[4, 5, 6]]
    ref_partial = stats.ranksums([1, 3], [4, 5, 6])
    ref_full = stats.ranksums([1, 2, 3], [4, 5, 6])

    # nan_policy='omit':
    # every element is removed from the first three rows;
    # the middle element is removed from the fourth and fifth rows;
    # nothing is removed from the last row
    res = stats.ranksums(data_masked, other, nan_policy='omit', axis=-1)
    expected_stat = [np.nan, np.nan, np.nan,
                     ref_partial.statistic, ref_partial.statistic,
                     ref_full.statistic]
    expected_p = [np.nan, np.nan, np.nan,
                  ref_partial.pvalue, ref_partial.pvalue, ref_full.pvalue]
    np.testing.assert_array_equal(res.statistic, expected_stat)
    np.testing.assert_array_equal(res.pvalue, expected_p)

    # nan_policy='propagate':
    # NaNs propagate in the first, second, and fourth rows;
    # the mask removes every element of the third row;
    # the (masked) middle element is removed from the fifth row;
    # nothing is removed from the last row
    res = stats.ranksums(data_masked, other, nan_policy='propagate', axis=-1)
    expected_stat = [np.nan, np.nan, np.nan,
                     np.nan, ref_partial.statistic, ref_full.statistic]
    expected_p = [np.nan, np.nan, np.nan,
                  np.nan, ref_partial.pvalue, ref_full.pvalue]
    np.testing.assert_array_equal(res.statistic, expected_stat)
    np.testing.assert_array_equal(res.pvalue, expected_p)
|
991 |
+
|
992 |
+
|
993 |
+
def test_axis_None_vs_tuple():
    """`axis=None` must behave like a tuple containing every axis."""
    shape = (3, 8, 9, 10)
    rng = np.random.default_rng(0)
    samples = rng.random(shape)
    res_none = stats.kruskal(*samples, axis=None)
    res_tuple = stats.kruskal(*samples, axis=(0, 1, 2))
    np.testing.assert_array_equal(res_none, res_tuple)
|
1001 |
+
|
1002 |
+
|
1003 |
+
def test_axis_None_vs_tuple_with_broadcasting():
    """`axis=None` must match a tuple of all axes, which in turn must match
    raveling the (un-broadcast) arrays before passing them in.
    """
    rng = np.random.default_rng(0)
    x = rng.random((5, 1))
    y = rng.random((1, 5))
    x_b, y_b = np.broadcast_arrays(x, y)

    baseline = stats.mannwhitneyu(x.ravel(), y.ravel())

    # `axis=None` and the full-axis tuple ravel *without* broadcasting first
    assert stats.mannwhitneyu(x, y, axis=None) == baseline
    assert stats.mannwhitneyu(x, y, axis=(0, 1)) == baseline
    # raveling explicitly broadcast arrays gives a different result
    assert stats.mannwhitneyu(x_b.ravel(), y_b.ravel()) != baseline
|
1019 |
+
|
1020 |
+
|
1021 |
+
@pytest.mark.parametrize(("axis"),
                         list(permutations(range(-3, 3), 2)) + [(-4, 1)])
def test_other_axis_tuples(axis):
    """Check that `_axis_nan_policy_factory` treats all `axis` tuples as
    expected, including duplicate and out-of-bounds entries.
    """
    rng = np.random.default_rng(0)
    shape_x = (4, 5, 6)
    shape_y = (1, 6)
    x = rng.random(shape_x)
    y = rng.random(shape_y)
    axis_original = axis

    # normalize axis elements to non-negative values, then sort
    # (note: `3 + i`, not `i % 3` — an out-of-bounds `-4` must stay negative)
    axis = sorted(i if i >= 0 else 3 + i for i in axis)

    if len(set(axis)) != len(axis):
        message = "`axis` must contain only distinct elements"
        with pytest.raises(AxisError, match=re.escape(message)):
            stats.mannwhitneyu(x, y, axis=axis_original)
        return

    if axis[0] < 0 or axis[-1] > 2:
        message = "`axis` is out of bounds for array of dimension 3"
        with pytest.raises(AxisError, match=re.escape(message)):
            stats.mannwhitneyu(x, y, axis=axis_original)
        return

    res = stats.mannwhitneyu(x, y, axis=axis_original)

    # reference behavior: collapse the reduced axes manually
    not_axis = ({0, 1, 2} - set(axis)).pop()  # the one non-reduced axis

    shape_y_broadcasted = [1, 1, 6]
    shape_y_broadcasted[not_axis] = shape_x[not_axis]
    y_b = np.broadcast_to(y, shape_y_broadcasted)

    m = x.shape[not_axis]
    x_flat = np.reshape(np.moveaxis(x, axis, (1, 2)), (m, -1))
    y_flat = np.reshape(np.moveaxis(y_b, axis, (1, 2)), (m, -1))
    ref = stats.mannwhitneyu(x_flat, y_flat, axis=1)

    np.testing.assert_array_equal(res, ref)
|
1067 |
+
|
1068 |
+
|
1069 |
+
@pytest.mark.parametrize(
    ("weighted_fun_name, unpacker"),
    [
        ("gmean", lambda x: x),
        ("hmean", lambda x: x),
        ("pmean", lambda x: x),
        ("combine_pvalues", lambda x: (x.pvalue, x.statistic)),
    ],
)
def test_mean_mixed_mask_nan_weights(weighted_fun_name, unpacker):
    """Targeted test of `_axis_nan_policy_factory` with 2D masked samples
    passed as data *and* weights.

    Omitting elements via a mask and via NaNs with ``nan_policy='omit'``
    must be equivalent; also checks paired-sample sentinel value removal.
    """
    if weighted_fun_name == 'pmean':
        # pmean additionally needs a power; fix it so the signature matches
        def weighted_fun(a, **kwargs):
            return stats.pmean(a, p=0.42, **kwargs)
    else:
        weighted_fun = getattr(stats, weighted_fun_name)

    def func(*args, **kwargs):
        return unpacker(weighted_fun(*args, **kwargs))

    n_rows, n_cols = 3, 20
    axis = -1

    rng = np.random.default_rng(6541968121)
    a = rng.uniform(size=(n_rows, n_cols))
    b = rng.uniform(size=(n_rows, n_cols))
    mask_a1 = rng.uniform(size=(n_rows, n_cols)) < 0.2
    mask_a2 = rng.uniform(size=(n_rows, n_cols)) < 0.1
    mask_b1 = rng.uniform(size=(n_rows, n_cols)) < 0.15
    mask_b2 = rng.uniform(size=(n_rows, n_cols)) < 0.15
    mask_a1[2, :] = True  # one row entirely invalid

    # NaN-sentinel versions of both samples
    a_nans = a.copy()
    b_nans = b.copy()
    a_nans[mask_a1 | mask_a2] = np.nan
    b_nans[mask_b1 | mask_b2] = np.nan

    # mask the first set of invalid entries, NaN-out the second...
    a_masked1 = np.ma.masked_array(a, mask=mask_a1)
    b_masked1 = np.ma.masked_array(b, mask=mask_b1)
    a_masked1[mask_a2] = np.nan
    b_masked1[mask_b2] = np.nan

    # ...and vice versa
    a_masked2 = np.ma.masked_array(a, mask=mask_a2)
    b_masked2 = np.ma.masked_array(b, mask=mask_b2)
    a_masked2[mask_a1] = np.nan
    b_masked2[mask_b1] = np.nan

    # mask all invalid entries of each sample
    a_masked3 = np.ma.masked_array(a, mask=(mask_a1 | mask_a2))
    b_masked3 = np.ma.masked_array(b, mask=(mask_b1 | mask_b2))

    # mask every entry that is invalid in *either* sample
    mask_all = (mask_a1 | mask_a2 | mask_b1 | mask_b2)
    a_masked4 = np.ma.masked_array(a, mask=mask_all)
    b_masked4 = np.ma.masked_array(b, mask=mask_all)

    with np.testing.suppress_warnings() as sup:
        message = 'invalid value encountered'
        sup.filter(RuntimeWarning, message)
        res = func(a_nans, weights=b_nans, nan_policy="omit", axis=axis)
        res1 = func(a_masked1, weights=b_masked1, nan_policy="omit", axis=axis)
        res2 = func(a_masked2, weights=b_masked2, nan_policy="omit", axis=axis)
        res3 = func(a_masked3, weights=b_masked3, nan_policy="raise", axis=axis)
        res4 = func(a_masked3, weights=b_masked3, nan_policy="propagate",
                    axis=axis)
        # Would test with a_masked3/b_masked3, but there is a bug in
        # np.average that causes a bug in _no_deco mean with masked weights.
        # Would use np.ma.average, but that causes other problems.
        # See numpy/numpy#7330.
        if weighted_fun_name in {"hmean"}:
            weighted_fun_ma = getattr(stats.mstats, weighted_fun_name)
            res5 = weighted_fun_ma(a_masked4, weights=b_masked4,
                                   axis=axis, _no_deco=True)

    for result in (res1, res2, res3, res4):
        np.testing.assert_array_equal(result, res)
    if weighted_fun_name in {"hmean"}:
        # _no_deco mean returns a masked array; the last element was masked
        np.testing.assert_allclose(res5.compressed(), res[~np.isnan(res)])
|
1149 |
+
|
1150 |
+
|
1151 |
+
def test_raise_invalid_args_g17713():
    """Invalid call signatures must raise the usual `TypeError`s (gh-17713).

    Other cases are handled in:
    - test_axis_nan_policy_decorated_positional_axis (multiple values for arg)
    - test_axis_nan_policy_decorated_positional_args (unexpected kwd arg)
    """
    cases = [
        ("got an unexpected keyword argument",
         lambda: stats.gmean([1, 2, 3], invalid_arg=True)),
        (" got multiple values for argument",
         lambda: stats.gmean([1, 2, 3], a=True)),
        ("missing 1 required positional argument",
         lambda: stats.gmean()),
        ("takes from 1 to 4 positional arguments but 5 were given",
         lambda: stats.gmean([1, 2, 3], 0, float, [1, 1, 1], 10)),
    ]
    for message, bad_call in cases:
        with pytest.raises(TypeError, match=message):
            bad_call()
|
1170 |
+
|
1171 |
+
|
1172 |
+
@pytest.mark.parametrize('dtype', [np.int16, np.float32, np.complex128])
def test_array_like_input(dtype):
    """Check that `_axis_nan_policy`-decorated functions work with custom
    containers that are coercible to numeric arrays.

    `ArrLike` stores data plus a target dtype and converts on demand via
    `__array__`, which `np.asarray` invokes during coercion.
    """

    class ArrLike:
        def __init__(self, x, dtype):
            self._x = x
            self._dtype = dtype

        def __array__(self, dtype=None, copy=None):
            # Bug fix: convert the *stored* data. The original returned
            # `np.asarray(x, ...)`, capturing the enclosing test function's
            # local `x` through its closure, so the container ignored its
            # own `_x` and only worked by accident.
            return np.asarray(self._x, dtype=self._dtype)

    x = [1]*2 + [3, 4, 5]
    res = stats.mode(ArrLike(x, dtype=dtype))
    assert res.mode == 1
    assert res.count == 2
|