instance_id
stringlengths 12
57
| base_commit
stringlengths 40
40
| created_at
stringdate 2015-01-06 14:05:07
2025-04-29 17:56:51
| environment_setup_commit
stringlengths 40
40
| hints_text
stringlengths 0
158k
| patch
stringlengths 261
20.8k
| problem_statement
stringlengths 11
52.5k
| repo
stringlengths 7
53
| test_patch
stringlengths 280
206k
| meta
dict | version
stringclasses 463
values | install_config
dict | requirements
stringlengths 93
34k
⌀ | environment
stringlengths 772
20k
⌀ | FAIL_TO_PASS
sequencelengths 1
856
| FAIL_TO_FAIL
sequencelengths 0
536
| PASS_TO_PASS
sequencelengths 0
7.87k
| PASS_TO_FAIL
sequencelengths 0
92
| license_name
stringclasses 35
values | __index_level_0__
int64 11
21.4k
| num_tokens_patch
int64 103
4.99k
| before_filepaths
sequencelengths 0
14
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
TabViewer__tabview-165 | b73659ffa8469d132f91d7abf3c19e117fe8b145 | 2019-11-17 13:04:32 | b73659ffa8469d132f91d7abf3c19e117fe8b145 | firecat53: Happy to merge this. Can you please provide unit tests?
asparagii: Sure thing. Where should I put them? Everything in the same file? | diff --git a/tabview/tabview.py b/tabview/tabview.py
index c98188a..b7f441c 100644
--- a/tabview/tabview.py
+++ b/tabview/tabview.py
@@ -25,6 +25,11 @@ from textwrap import wrap
import unicodedata
import shlex
+if sys.version_info.major < 3:
+ from urlparse import urlparse
+else:
+ from urllib.parse import urlparse
+
if sys.version_info.major < 3:
# Python 2.7 shim
@@ -1356,7 +1361,8 @@ def view(data, enc=None, start_pos=(0, 0), column_width=20, column_gap=2,
while True:
try:
if isinstance(data, basestring):
- with open(data, 'rb') as fd:
+ parsed_path = parse_path(data)
+ with open(parsed_path, 'rb') as fd:
new_data = fd.readlines()
if info == "":
info = data
@@ -1395,3 +1401,8 @@ def view(data, enc=None, start_pos=(0, 0), column_width=20, column_gap=2,
finally:
if lc_all is not None:
locale.setlocale(locale.LC_ALL, lc_all)
+
+
+def parse_path(path):
+ parse_result = urlparse(path)
+ return parse_result.path
| File URI scheme not supported
File URI scheme as filename is not supported:
`$ tabview file://home/asparagii/test.csv` leads to a FileNotFoundException | TabViewer/tabview | diff --git a/test/test_tabview.py b/test/test_tabview.py
index b82c38e..91f4409 100644
--- a/test/test_tabview.py
+++ b/test/test_tabview.py
@@ -107,6 +107,26 @@ class TestTabviewUnits(unittest.TestCase):
i = str(i)
self.assertEqual(i, res[0][j])
+ def test_tabview_uri_parse(self):
+ # Strip 'file://' from uri (three slashes)
+ path = t.parse_path('file:///home/user/test.csv')
+ self.assertEqual(path, '/home/user/test.csv')
+
+ # Two slashes
+ path = t.parse_path('file://localhost/test.csv')
+ self.assertEqual(path, '/test.csv')
+
+ # Don't change if no 'file://' in string
+ path = t.parse_path('/home/user/test.csv')
+ self.assertEqual(path, '/home/user/test.csv')
+
+ # Don't change if relative path
+ path = t.parse_path('../test.csv')
+ self.assertEqual(path, '../test.csv')
+
+ path = t.parse_path('test.csv')
+ self.assertEqual(path, 'test.csv')
+
class TestTabviewIntegration(unittest.TestCase):
"""Integration tests for tabview. Run through the curses routines and some
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/TabViewer/tabview.git@b73659ffa8469d132f91d7abf3c19e117fe8b145#egg=tabview
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: tabview
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/tabview
| [
"test/test_tabview.py::TestTabviewUnits::test_tabview_uri_parse"
] | [
"test/test_tabview.py::TestTabviewIntegration::test_tabview_annotated_comment",
"test/test_tabview.py::TestTabviewIntegration::test_tabview_latin1",
"test/test_tabview.py::TestTabviewIntegration::test_tabview_list",
"test/test_tabview.py::TestTabviewIntegration::test_tabview_unicode",
"test/test_tabview.py::TestTabviewIntegration::test_tabview_windows_newlines"
] | [
"test/test_tabview.py::TestTabviewUnits::test_tabview_encoding_latin1",
"test/test_tabview.py::TestTabviewUnits::test_tabview_encoding_utf8",
"test/test_tabview.py::TestTabviewUnits::test_tabview_file_annotated_comment",
"test/test_tabview.py::TestTabviewUnits::test_tabview_file_latin1",
"test/test_tabview.py::TestTabviewUnits::test_tabview_file_unicode"
] | [] | MIT License | 5,830 | 328 | [
"tabview/tabview.py"
] |
sscpac__statick-157 | 7e56cf7aa79e2b6fa3af28e2a2194008d2d9562b | 2019-11-17 22:31:14 | 7e56cf7aa79e2b6fa3af28e2a2194008d2d9562b | codecov-io: # [Codecov](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=h1) Report
> Merging [#157](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=desc) into [master](https://codecov.io/gh/sscpac/statick/commit/7e56cf7aa79e2b6fa3af28e2a2194008d2d9562b?src=pr&el=desc) will **decrease** coverage by `0.06%`.
> The diff coverage is `94.11%`.
[](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #157 +/- ##
=========================================
- Coverage 94.36% 94.3% -0.07%
=========================================
Files 44 44
Lines 2149 2161 +12
=========================================
+ Hits 2028 2038 +10
- Misses 121 123 +2
```
| [Impacted Files](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [statick\_tool/plugins/tool/pyflakes\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9weWZsYWtlc190b29sX3BsdWdpbi5weQ==) | `97.18% <94.11%> (-2.82%)` | :arrow_down: |
| [statick\_tool/discovery\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL2Rpc2NvdmVyeV9wbHVnaW4ucHk=) | `89.47% <0%> (-0.53%)` | :arrow_down: |
| [statick\_tool/plugins/tool/make\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9tYWtlX3Rvb2xfcGx1Z2luLnB5) | `80.72% <0%> (-0.46%)` | :arrow_down: |
| [...k\_tool/plugins/discovery/cmake\_discovery\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvZGlzY292ZXJ5L2NtYWtlX2Rpc2NvdmVyeV9wbHVnaW4ucHk=) | `77.33% <0%> (-0.3%)` | :arrow_down: |
| [statick\_tool/plugins/tool/cppcheck\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9jcHBjaGVja190b29sX3BsdWdpbi5weQ==) | `83.9% <0%> (-0.19%)` | :arrow_down: |
| [...tatick\_tool/plugins/tool/clang\_tidy\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9jbGFuZ190aWR5X3Rvb2xfcGx1Z2luLnB5) | `95.31% <0%> (-0.08%)` | :arrow_down: |
| [statick\_tool/exceptions.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL2V4Y2VwdGlvbnMucHk=) | `98.19% <0%> (-0.04%)` | :arrow_down: |
| [statick\_tool/plugins/tool/cccc\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9jY2NjX3Rvb2xfcGx1Z2luLnB5) | `99.31% <0%> (-0.01%)` | :arrow_down: |
| [statick\_tool/tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3Rvb2xfcGx1Z2luLnB5) | `100% <0%> (ø)` | :arrow_up: |
| [...atick\_tool/plugins/tool/catkin\_lint\_tool\_plugin.py](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree#diff-c3RhdGlja190b29sL3BsdWdpbnMvdG9vbC9jYXRraW5fbGludF90b29sX3BsdWdpbi5weQ==) | `100% <0%> (ø)` | :arrow_up: |
| ... and [1 more](https://codecov.io/gh/sscpac/statick/pull/157/diff?src=pr&el=tree-more) | |
------
[Continue to review full report at Codecov](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=footer). Last update [7e56cf7...f92d3d3](https://codecov.io/gh/sscpac/statick/pull/157?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/statick_tool/plugins/tool/pyflakes_tool_plugin.py b/statick_tool/plugins/tool/pyflakes_tool_plugin.py
index 35f29e6..2eab8cb 100644
--- a/statick_tool/plugins/tool/pyflakes_tool_plugin.py
+++ b/statick_tool/plugins/tool/pyflakes_tool_plugin.py
@@ -55,18 +55,48 @@ class PyflakesToolPlugin(ToolPlugin):
issues = self.parse_output(total_output)
return issues
- def parse_output(self, total_output):
+ def parse_output(self, total_output): # pylint: disable=too-many-locals
"""Parse tool output and report issues."""
- pyflakes_re = r"(.+):(\d+):\s(.+)"
- parse = re.compile(pyflakes_re)
+ tool_re_first = r"(.+):(\d+):(\d+):\s(.+)"
+ parse_first = re.compile(tool_re_first)
+ tool_re_second = r"(.+):(\d+):\s(.+)"
+ parse_second = re.compile(tool_re_second)
+ tool_re_third = r"\s(.+)"
+ parse_third = re.compile(tool_re_third)
issues = []
+ filename = ''
+ line_number = 0
+ issue_type = ''
+ message = ''
for output in total_output:
+ first_line = True
+ found_match = False
for line in output.splitlines():
- match = parse.match(line)
- if match:
- issues.append(Issue(match.group(1), match.group(2),
- self.get_name(), self.get_name(),
- "5", match.group(3), None))
+ if first_line:
+ match = parse_first.match(line)
+ first_line = False
+ if match:
+ found_match = True
+ filename = match.group(1)
+ line_number = match.group(2)
+ issue_type = match.group(4)
+ else:
+ match = parse_second.match(line)
+ if match:
+ found_match = True
+ filename = match.group(1)
+ line_number = match.group(2)
+ issue_type = match.group(3)
+ else:
+ match = parse_third.match(line)
+ first_line = True
+ if match:
+ found_match = True
+ message = match.group(1)
+ if found_match:
+ issues.append(Issue(filename, line_number,
+ self.get_name(), issue_type,
+ "5", message, None))
return issues
| Support Python 3.8
According to [PEP-569](https://www.python.org/dev/peps/pep-0569/), Python 3.8 will be released on October 21, 2019. When that happens we should add it to the tox configuration. | sscpac/statick | diff --git a/tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py b/tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py
index d0b148f..5c1dd05 100644
--- a/tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py
+++ b/tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py
@@ -63,15 +63,15 @@ def test_pyflakes_tool_plugin_scan_valid():
def test_pyflakes_tool_plugin_parse_valid():
"""Verify that we can parse the normal output of pyflakes."""
pftp = setup_pyflakes_tool_plugin()
- output = "pyflakes_test.py:4: 'json' imported but unused"
+ output = "pyflakes_test.py:39:34: invalid syntax\nprint 'No files in %s' " \
+ "% (source_dir)"
issues = pftp.parse_output([output])
assert len(issues) == 1
assert issues[0].filename == 'pyflakes_test.py'
- assert issues[0].line_number == '4'
+ assert issues[0].line_number == '39'
assert issues[0].tool == 'pyflakes'
- assert issues[0].issue_type == 'pyflakes'
+ assert issues[0].issue_type == 'invalid syntax'
assert issues[0].severity == '5'
- assert issues[0].message == "'json' imported but unused"
def test_pyflakes_tool_plugin_parse_invalid():
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-flake8",
"pytest-isort"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
backports.tempfile==1.0
backports.weakref==1.0.post1
bandit==1.8.3
cmakelint==1.4.3
coverage==7.8.0
cpplint==2.0.0
dill==0.3.9
exceptiongroup==1.2.2
flake8==7.2.0
flawfinder==2.0.19
iniconfig==2.1.0
isort==6.0.1
lizard==1.17.23
markdown-it-py==3.0.0
mccabe==0.7.0
mdurl==0.1.2
mock==5.2.0
packaging==24.2
pathspec==0.12.1
pbr==6.1.1
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pydocstyle==6.3.0
pyflakes==3.3.1
Pygments==2.19.1
pylint==3.3.6
pylint-django==0.11.1
pylint-plugin-utils==0.8.2
pytest==8.3.5
pytest-cov==6.0.0
pytest-flake8==1.3.0
pytest-isort==4.0.0
PyYAML==6.0.2
rich==14.0.0
snowballstemmer==2.2.0
-e git+https://github.com/sscpac/statick.git@7e56cf7aa79e2b6fa3af28e2a2194008d2d9562b#egg=statick
stevedore==5.4.1
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
xmltodict==0.14.2
yamllint==1.37.0
Yapsy==1.12.2
| name: statick
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- backports-tempfile==1.0
- backports-weakref==1.0.post1
- bandit==1.8.3
- cmakelint==1.4.3
- coverage==7.8.0
- cpplint==2.0.0
- dill==0.3.9
- exceptiongroup==1.2.2
- flake8==7.2.0
- flawfinder==2.0.19
- iniconfig==2.1.0
- isort==6.0.1
- lizard==1.17.23
- markdown-it-py==3.0.0
- mccabe==0.7.0
- mdurl==0.1.2
- mock==5.2.0
- packaging==24.2
- pathspec==0.12.1
- pbr==6.1.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pydocstyle==6.3.0
- pyflakes==3.3.1
- pygments==2.19.1
- pylint==3.3.6
- pylint-django==0.11.1
- pylint-plugin-utils==0.8.2
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-flake8==1.3.0
- pytest-isort==4.0.0
- pyyaml==6.0.2
- rich==14.0.0
- snowballstemmer==2.2.0
- stevedore==5.4.1
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
- xmltodict==0.14.2
- yamllint==1.37.0
- yapsy==1.12.2
prefix: /opt/conda/envs/statick
| [
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_parse_valid"
] | [] | [
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_found",
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_scan_valid",
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_parse_invalid",
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_scan_calledprocesserror",
"tests/plugins/tool/pyflakes_tool_plugin/test_pyflakes_tool_plugin.py::test_pyflakes_tool_plugin_scan_oserror"
] | [] | Creative Commons Zero v1.0 Universal | 5,833 | 581 | [
"statick_tool/plugins/tool/pyflakes_tool_plugin.py"
] |
iterative__dvc-2809 | 38c210068ba5aeb70ad92a8c22e85832640d8ebd | 2019-11-17 23:01:36 | 38c210068ba5aeb70ad92a8c22e85832640d8ebd | casperdcl: @efiop @xliiv this is the sort of thing I intended. The only reason I haven't implement it before is I don't know how to test it (https://github.com/iterative/dvc/issues/1566#issuecomment-554794000) | diff --git a/dvc/remote/gs.py b/dvc/remote/gs.py
index c2fb4efc1..0ba106dbd 100644
--- a/dvc/remote/gs.py
+++ b/dvc/remote/gs.py
@@ -1,14 +1,17 @@
-from __future__ import unicode_literals
+from __future__ import unicode_literals, division
import logging
from datetime import timedelta
from functools import wraps
+import io
+import os.path
from funcy import cached_property
from dvc.config import Config
from dvc.exceptions import DvcException
from dvc.path_info import CloudURLInfo
+from dvc.progress import Tqdm
from dvc.remote.base import RemoteBASE
from dvc.scheme import Schemes
from dvc.utils.compat import FileNotFoundError # skipcq: PYL-W0622
@@ -20,25 +23,24 @@ def dynamic_chunk_size(func):
@wraps(func)
def wrapper(*args, **kwargs):
import requests
- from google.cloud.storage.blob import Blob, _DEFAULT_CHUNKSIZE
+ from google.cloud.storage.blob import Blob
- # Default chunk size for gs is 100M, which might be too much for
- # particular network (see [1]). So if we are getting ConnectionError,
- # we should try lowering the chunk size until we reach the minimum
- # allowed chunk size of 256K. Also note that `chunk_size` must be a
- # multiple of 256K per the API specification.
+ # `ConnectionError` may be due to too large `chunk_size`
+ # (see [#2572]) so try halving on error.
+ # Note: start with 40 * [default: 256K] = 10M.
+ # Note: must be multiple of 256K.
#
- # [1] https://github.com/iterative/dvc/issues/2572
+ # [#2572]: https://github.com/iterative/dvc/issues/2572
# skipcq: PYL-W0212
- multiplier = int(_DEFAULT_CHUNKSIZE / Blob._CHUNK_SIZE_MULTIPLE)
+ multiplier = 40
while True:
try:
# skipcq: PYL-W0212
chunk_size = Blob._CHUNK_SIZE_MULTIPLE * multiplier
return func(*args, chunk_size=chunk_size, **kwargs)
except requests.exceptions.ConnectionError:
- multiplier = int(multiplier / 2)
+ multiplier //= 2
if not multiplier:
raise
@@ -46,9 +48,32 @@ def dynamic_chunk_size(func):
@dynamic_chunk_size
-def _upload_to_bucket(bucket, from_file, to_info, **kwargs):
- blob = bucket.blob(to_info.path, **kwargs)
- blob.upload_from_filename(from_file)
+def _upload_to_bucket(
+ bucket,
+ from_file,
+ to_info,
+ chunk_size=None,
+ name=None,
+ no_progress_bar=True,
+):
+ blob = bucket.blob(to_info.path, chunk_size=chunk_size)
+ with Tqdm(
+ desc=name or to_info.path,
+ total=os.path.getsize(from_file),
+ bytes=True,
+ disable=no_progress_bar,
+ ) as pbar:
+ with io.open(from_file, mode="rb") as fobj:
+ raw_read = fobj.read
+
+ def read(size=chunk_size):
+ res = raw_read(size)
+ if res:
+ pbar.update(len(res))
+ return res
+
+ fobj.read = read
+ blob.upload_from_file(fobj)
class RemoteGS(RemoteBASE):
@@ -123,14 +148,34 @@ class RemoteGS(RemoteBASE):
paths = set(self._list_paths(path_info.bucket, path_info.path))
return any(path_info.path == path for path in paths)
- def _upload(self, from_file, to_info, **_kwargs):
+ def _upload(self, from_file, to_info, name=None, no_progress_bar=True):
bucket = self.gs.bucket(to_info.bucket)
- _upload_to_bucket(bucket, from_file, to_info)
+ _upload_to_bucket(
+ bucket,
+ from_file,
+ to_info,
+ name=name,
+ no_progress_bar=no_progress_bar,
+ )
- def _download(self, from_info, to_file, **_kwargs):
+ def _download(self, from_info, to_file, name=None, no_progress_bar=True):
bucket = self.gs.bucket(from_info.bucket)
blob = bucket.get_blob(from_info.path)
- blob.download_to_filename(to_file)
+ with Tqdm(
+ desc=name or from_info.path,
+ total=blob.size,
+ bytes=True,
+ disable=no_progress_bar,
+ ) as pbar:
+ with io.open(to_file, mode="wb") as fobj:
+ raw_write = fobj.write
+
+ def write(byte_string):
+ raw_write(byte_string)
+ pbar.update(len(byte_string))
+
+ fobj.write = write
+ blob.download_to_file(fobj)
def _generate_download_url(self, path_info, expires=3600):
expiration = timedelta(seconds=int(expires))
| gs: support progress callback
I may have missed an issue concerning this, but when I am `dvc push`ing to a google cloud remote, the progress bars that are currently displayed don't get updated progressively (They go from 0 when starting to 100 when finished).
It would be nice to have a dynamic progressbar with upload speed and remaining time estimation, especially when push huge sets of data. | iterative/dvc | diff --git a/tests/unit/remote/test_gs.py b/tests/unit/remote/test_gs.py
index 817b64c59..62923e41e 100644
--- a/tests/unit/remote/test_gs.py
+++ b/tests/unit/remote/test_gs.py
@@ -53,14 +53,4 @@ def test_dynamic_chunk_size():
with pytest.raises(requests.exceptions.ConnectionError):
upload()
- assert chunk_sizes == [
- 104857600,
- 52428800,
- 26214400,
- 13107200,
- 6553600,
- 3145728,
- 1572864,
- 786432,
- 262144,
- ]
+ assert chunk_sizes == [10485760, 5242880, 2621440, 1310720, 524288, 262144]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.68 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
asciimatics==1.14.0
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.9.115
botocore==1.12.253
cachetools==4.2.4
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.15.2
-e git+https://github.com/iterative/dvc.git@38c210068ba5aeb70ad92a8c22e85832640d8ebd#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==2.10.2
google-auth==1.35.0
google-cloud-core==1.7.3
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==6.0.5
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
Pillow==9.5.0
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==4.24.4
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.14.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
pyfiglet==0.8.post1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.2.1
schema==0.7.7
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
urllib3==1.25.11
wcwidth==0.2.13
Werkzeug==2.2.3
xmltodict==0.14.2
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- asciimatics==1.14.0
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.9.115
- botocore==1.12.253
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.15.2
- dvc==0.68.1+38c210
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==2.10.2
- google-auth==1.35.0
- google-cloud-core==1.7.3
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==6.0.5
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- pillow==9.5.0
- ply==3.11
- protobuf==4.24.4
- psutil==7.0.0
- pyarrow==0.14.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pyfiglet==0.8.post1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.2.1
- schema==0.7.7
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- urllib3==1.25.11
- wcwidth==0.2.13
- werkzeug==2.2.3
- xmltodict==0.14.2
prefix: /opt/conda/envs/dvc
| [
"tests/unit/remote/test_gs.py::test_dynamic_chunk_size"
] | [] | [
"tests/unit/remote/test_gs.py::TestRemoteGS::test_gs",
"tests/unit/remote/test_gs.py::TestRemoteGS::test_gs_no_credspath",
"tests/unit/remote/test_gs.py::TestRemoteGS::test_init"
] | [] | Apache License 2.0 | 5,834 | 1,215 | [
"dvc/remote/gs.py"
] |
altair-viz__altair-1794 | 1d80b5979a47b118db67c408d09237a9173ea455 | 2019-11-18 13:21:29 | 76b8fde8c2180927e1d013713b6d94db94c798d3 | diff --git a/altair/vegalite/v3/api.py b/altair/vegalite/v3/api.py
index 2b372f35..ceef45ef 100644
--- a/altair/vegalite/v3/api.py
+++ b/altair/vegalite/v3/api.py
@@ -1882,8 +1882,7 @@ class LayerChart(TopLevelMixin, _EncodingMixin, core.TopLevelLayerSpec):
if not selections or not self.layer:
return self
copy = self.copy()
- copy.layer = [chart.add_selection(*selections)
- for chart in copy.layer]
+ copy.layer[0] = copy.layer[0].add_selection(*selections)
return copy
| Calling add_selection() on a layered chart results in an invalid spec
Example:
```python
import altair as alt
import pandas as pd
df = pd.DataFrame({
'x': range(5),
'y1': [1, 3, 2, 4, 5],
'y2': [2, 1, 4, 5, 3]
})
alt.layer(
alt.Chart(df).mark_line().encode(x='x', y='y1'),
alt.Chart(df).mark_line().encode(x='x', y='y2')
).add_selection(
alt.selection_interval()
)
```
```err
JavaScript Error: Duplicate signal name: "selector012_x_1"
This usually means there's a typo in your chart specification. See the javascript console for the full traceback. | altair-viz/altair | diff --git a/altair/vegalite/v3/tests/test_api.py b/altair/vegalite/v3/tests/test_api.py
index caaa9fc8..8400b3c6 100644
--- a/altair/vegalite/v3/tests/test_api.py
+++ b/altair/vegalite/v3/tests/test_api.py
@@ -535,7 +535,15 @@ def test_facet_add_selections():
assert chart1.to_dict() == chart2.to_dict()
[email protected]('charttype', [alt.layer, alt.concat, alt.hconcat, alt.vconcat])
+def test_layer_add_selection():
+ base = alt.Chart('data.csv').mark_point()
+ selection = alt.selection_single()
+ chart1 = alt.layer(base.add_selection(selection), base)
+ chart2 = alt.layer(base, base).add_selection(selection)
+ assert chart1.to_dict() == chart2.to_dict()
+
+
[email protected]('charttype', [alt.concat, alt.hconcat, alt.vconcat])
def test_compound_add_selections(charttype):
base = alt.Chart('data.csv').mark_point()
selection = alt.selection_single()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 3.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
-e git+https://github.com/altair-viz/altair.git@1d80b5979a47b118db67c408d09237a9173ea455#egg=altair
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
commonmark==0.9.1
decorator==5.2.1
docutils==0.21.2
entrypoints==0.4
exceptiongroup==1.2.2
executing==2.2.0
flake8==7.2.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
ipython==8.18.1
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
m2r==0.3.1
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mccabe==0.7.0
mistune==0.8.4
numpy==2.0.2
packaging==24.2
pandas==2.2.3
parso==0.8.4
pexpect==4.9.0
pluggy==1.5.0
prompt_toolkit==3.0.50
ptyprocess==0.7.0
pure_eval==0.2.3
pycodestyle==2.13.0
pyflakes==3.3.2
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
recommonmark==0.7.1
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
tomli==2.2.1
toolz==1.0.0
traitlets==5.14.3
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
vega-datasets==0.9.0
wcwidth==0.2.13
zipp==3.21.0
| name: altair
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- commonmark==0.9.1
- decorator==5.2.1
- docutils==0.21.2
- entrypoints==0.4
- exceptiongroup==1.2.2
- executing==2.2.0
- flake8==7.2.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipython==8.18.1
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- m2r==0.3.1
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mistune==0.8.4
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- parso==0.8.4
- pexpect==4.9.0
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- recommonmark==0.7.1
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- tomli==2.2.1
- toolz==1.0.0
- traitlets==5.14.3
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- vega-datasets==0.9.0
- wcwidth==0.2.13
- zipp==3.21.0
prefix: /opt/conda/envs/altair
| [
"altair/vegalite/v3/tests/test_api.py::test_layer_add_selection"
] | [
"altair/vegalite/v3/tests/test_api.py::test_chart_data_types",
"altair/vegalite/v3/tests/test_api.py::test_chart_infer_types",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args0-kwargs0]",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args1-kwargs1]",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args2-kwargs2]",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args3-kwargs3]",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args4-kwargs4]",
"altair/vegalite/v3/tests/test_api.py::test_multiple_encodings[args5-kwargs5]",
"altair/vegalite/v3/tests/test_api.py::test_facet[None-chart]",
"altair/vegalite/v3/tests/test_api.py::test_facet[None-layer]",
"altair/vegalite/v3/tests/test_api.py::test_facet[None-facet_encoding]",
"altair/vegalite/v3/tests/test_api.py::test_facet[facet-chart]",
"altair/vegalite/v3/tests/test_api.py::test_facet[facet-layer]",
"altair/vegalite/v3/tests/test_api.py::test_facet[facet-facet_encoding]",
"altair/vegalite/v3/tests/test_api.py::test_facet[row-chart]",
"altair/vegalite/v3/tests/test_api.py::test_facet[row-layer]",
"altair/vegalite/v3/tests/test_api.py::test_facet[row-facet_encoding]",
"altair/vegalite/v3/tests/test_api.py::test_facet[column-chart]",
"altair/vegalite/v3/tests/test_api.py::test_facet[column-layer]",
"altair/vegalite/v3/tests/test_api.py::test_facet[column-facet_encoding]",
"altair/vegalite/v3/tests/test_api.py::test_facet_parse_data",
"altair/vegalite/v3/tests/test_api.py::test_transforms",
"altair/vegalite/v3/tests/test_api.py::test_LookupData",
"altair/vegalite/v3/tests/test_api.py::test_consolidate_datasets",
"altair/vegalite/v3/tests/test_api.py::test_data_property",
"altair/vegalite/v3/tests/test_api.py::test_layer_facet",
"altair/vegalite/v3/tests/test_api.py::test_resolve[layer]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[concat]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[facet]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[facet_encoding]",
"altair/vegalite/v3/tests/test_api.py::test_resolve[repeat]"
] | [
"altair/vegalite/v3/tests/test_api.py::test_chart_operations",
"altair/vegalite/v3/tests/test_api.py::test_selection_to_dict",
"altair/vegalite/v3/tests/test_api.py::test_selection_expression",
"altair/vegalite/v3/tests/test_api.py::test_facet_parse",
"altair/vegalite/v3/tests/test_api.py::test_selection",
"altair/vegalite/v3/tests/test_api.py::test_filter_transform_selection_predicates",
"altair/vegalite/v3/tests/test_api.py::test_resolve_methods",
"altair/vegalite/v3/tests/test_api.py::test_layer_encodings",
"altair/vegalite/v3/tests/test_api.py::test_add_selection",
"altair/vegalite/v3/tests/test_api.py::test_repeat_add_selections",
"altair/vegalite/v3/tests/test_api.py::test_facet_add_selections",
"altair/vegalite/v3/tests/test_api.py::test_compound_add_selections[concat]",
"altair/vegalite/v3/tests/test_api.py::test_compound_add_selections[hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_compound_add_selections[vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_selection_property",
"altair/vegalite/v3/tests/test_api.py::test_themes",
"altair/vegalite/v3/tests/test_api.py::test_chart_from_dict",
"altair/vegalite/v3/tests/test_api.py::test_consolidate_InlineData",
"altair/vegalite/v3/tests/test_api.py::test_deprecated_encodings",
"altair/vegalite/v3/tests/test_api.py::test_repeat",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data.json-layer]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data.json-hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data.json-vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data.json-concat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data1-layer]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data1-hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data1-vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_with_same_data[data1-concat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data.json-layer]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data.json-hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data.json-vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data.json-concat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data1-layer]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data1-hconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data1-vconcat]",
"altair/vegalite/v3/tests/test_api.py::test_subcharts_different_data[data1-concat]",
"altair/vegalite/v3/tests/test_api.py::test_layer_errors",
"altair/vegalite/v3/tests/test_api.py::test_sequence",
"altair/vegalite/v3/tests/test_api.py::test_graticule",
"altair/vegalite/v3/tests/test_api.py::test_sphere"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,837 | 178 | [
"altair/vegalite/v3/api.py"
] |
|
ucfopen__canvasapi-340 | 8bd85663a2a91cf2ec37de431257ab80ce8e7d57 | 2019-11-18 16:06:30 | 62aa3eb8045f4a5f5edeb16fafad3fdf44bff624 | coveralls:
[](https://coveralls.io/builds/27058680)
Coverage remained the same at 100.0% when pulling **499de4205b2c7e941b389efa27d52b3ed98a7cf0 on Thetwam:issue/303-submissions-key-missing** into **34ecd3ee222d74bf552d28339b6beff75ccd6c27 on ucfopen:develop**.
| diff --git a/canvasapi/submission.py b/canvasapi/submission.py
index 588a24a..15df324 100644
--- a/canvasapi/submission.py
+++ b/canvasapi/submission.py
@@ -179,17 +179,16 @@ class Submission(CanvasObject):
@python_2_unicode_compatible
class GroupedSubmission(CanvasObject):
def __init__(self, requester, attributes):
+ super(GroupedSubmission, self).__init__(requester, attributes)
+
try:
self.submissions = [
Submission(requester, submission)
for submission in attributes["submissions"]
]
- del attributes["submissions"]
except KeyError:
self.submissions = list()
- super(GroupedSubmission, self).__init__(requester, attributes)
-
def __str__(self):
return "{} submission(s) for User #{}".format(
len(self.submissions), self.user_id
| GroupedSubmission 'submissions' key:value deleted
# Describe the bug
In #280 we added the first instance of nesting a list of one type of object inside another. While this generally works well, there is a small side effect: The `'submissions'` key:value pair is removed from the `attributes` dict. This is potentially misleading, since one of the uses of calling `attributes` is to see what data was originally used to create the object (via `.to_json()`).
`'submissions'` being removed may give the (incorrect) impression that submissions were never returned from Canvas.
# To Reproduce
Steps to reproduce the behavior:
1. Create a `GroupedSubmission` object with nested submissions
2. Print the attributes of the created object (`print(grouped_submission.attributes)`)
3. Observe that there is no `'submissions'` key:value pair
# Expected behavior
`'submissions'` should be visible in the printed attributes, to assist in debugging.
# Environment information
- Python version (`python --version`)
3.7+
- CanvasAPI version (`pip show canvasapi`)
0.14.0
# Additional context
This is somewhat related to #56, as we'll need to be careful about deleting keys in the future.
| ucfopen/canvasapi | diff --git a/tests/test_submission.py b/tests/test_submission.py
index ffe0731..0bbe318 100644
--- a/tests/test_submission.py
+++ b/tests/test_submission.py
@@ -154,6 +154,29 @@ class TestGroupedSubmission(unittest.TestCase):
self.assertIsInstance(grouped_submission.submissions, list)
self.assertEqual(len(grouped_submission.submissions), 0)
+ def test__init__issue_303_regression(self):
+ """
+ Regression test for issue #303
+ https://github.com/ucfopen/canvasapi/issues/303
+ """
+ grouped_submission = GroupedSubmission(
+ self.canvas._Canvas__requester,
+ {
+ "user_id": 1,
+ "submissions": [
+ {
+ "id": 1,
+ "assignments_id": 1,
+ "user_id": 1,
+ "html_url": "https://example.com/courses/1/assignments/1/submissions/1",
+ "submission_type": "online_upload",
+ }
+ ],
+ },
+ )
+ self.assertTrue(hasattr(grouped_submission, "submissions"))
+ self.assertIn("submissions", grouped_submission.attributes)
+
# __str__()
def test__str__(self):
string = str(self.grouped_submission)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"flake8",
"requests-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/ucfopen/canvasapi.git@8bd85663a2a91cf2ec37de431257ab80ce8e7d57#egg=canvasapi
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytz==2025.2
requests==2.27.1
requests-mock==1.12.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: canvasapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytz==2025.2
- requests==2.27.1
- requests-mock==1.12.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/canvasapi
| [
"tests/test_submission.py::TestGroupedSubmission::test__init__issue_303_regression"
] | [] | [
"tests/test_submission.py::TestSubmission::test__str__",
"tests/test_submission.py::TestSubmission::test_create_submission_peer_review",
"tests/test_submission.py::TestSubmission::test_delete_submission_peer_review",
"tests/test_submission.py::TestSubmission::test_edit",
"tests/test_submission.py::TestSubmission::test_get_submission_peer_reviews",
"tests/test_submission.py::TestSubmission::test_upload_comment",
"tests/test_submission.py::TestSubmission::test_upload_comment_section",
"tests/test_submission.py::TestGroupedSubmission::test__init__no_submission_key",
"tests/test_submission.py::TestGroupedSubmission::test__str__"
] | [] | MIT License | 5,840 | 217 | [
"canvasapi/submission.py"
] |
hdmf-dev__hdmf-203 | 42026c5d7b30b7d176f75af7570b5e54c8ebadcd | 2019-11-18 19:42:03 | ee1684a8a4ba8a4d70fb5ba4e78e1998d92c8ba1 | codecov[bot]: # [Codecov](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=h1) Report
> :exclamation: No coverage uploaded for pull request base (`dev@42026c5`). [Click here to learn what that means](https://docs.codecov.io/docs/error-reference#section-missing-base-commit).
> The diff coverage is `66.66%`.
[](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## dev #203 +/- ##
======================================
Coverage ? 70.33%
======================================
Files ? 30
Lines ? 5956
Branches ? 1399
======================================
Hits ? 4189
Misses ? 1329
Partials ? 438
```
| [Impacted Files](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/hdmf/utils.py](https://codecov.io/gh/hdmf-dev/hdmf/pull/203/diff?src=pr&el=tree#diff-c3JjL2hkbWYvdXRpbHMucHk=) | `89.7% <66.66%> (ø)` | |
------
[Continue to review full report at Codecov](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=footer). Last update [42026c5...64e0c65](https://codecov.io/gh/hdmf-dev/hdmf/pull/203?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/src/hdmf/utils.py b/src/hdmf/utils.py
index 289c182..fd800c8 100644
--- a/src/hdmf/utils.py
+++ b/src/hdmf/utils.py
@@ -51,6 +51,8 @@ def __type_okay(value, argtype, allow_none=False):
return __is_int(value)
elif argtype == 'float':
return __is_float(value)
+ elif argtype == 'bool':
+ return __is_bool(value)
return argtype in [cls.__name__ for cls in value.__class__.__mro__]
elif isinstance(argtype, type):
if argtype == six.text_type:
@@ -61,6 +63,8 @@ def __type_okay(value, argtype, allow_none=False):
return __is_int(value)
elif argtype is float:
return __is_float(value)
+ elif argtype is bool:
+ return __is_bool(value)
return isinstance(value, argtype)
elif isinstance(argtype, tuple) or isinstance(argtype, list):
return any(__type_okay(value, i) for i in argtype)
@@ -100,6 +104,10 @@ def __is_float(value):
return any(isinstance(value, i) for i in SUPPORTED_FLOAT_TYPES)
+def __is_bool(value):
+ return isinstance(value, bool) or isinstance(value, np.bool_)
+
+
def __format_type(argtype):
if isinstance(argtype, str):
return argtype
| Getting TypeError: incorrect type for 'bool' attribute in extension
I’m having a problem creating an [extension](https://github.com/ben-dichter-consulting/ndx-labmetadata-giocomo), more specifically, to create an attribute of type `bool`.
On the list of valid primary dtypes, I get:
{'isodatetime', 'datetime64', 'region', 'int8', 'int16', 'bool', 'int', 'ascii', 'int32', 'text', 'bytes', 'int64', 'utf-8', 'double', 'utf', 'datetime', 'uint32', 'uint', 'uint64', 'uint16', 'object', 'numeric', 'float32', 'float', 'short', 'float64', 'long', 'uint8', 'utf8'}
Then I try to add an attribute of type `bool`:
```python
LabMetaData_ext.add_attribute(
name='high_pass_filtered',
doc='binary variable as to whether raw data was high-pass filtered or not',
dtype='bool',
shape=None,
)
```
[Here’s](https://github.com/ben-dichter-consulting/ndx-labmetadata-giocomo/blob/master/src/spec/create_extension_spec.py) the full code.
When I run [this test](https://github.com/ben-dichter-consulting/ndx-labmetadata-giocomo/blob/master/src/pynwb/ndx_labmetadata_giocomo/test.py), it apparently works well in saving the nwb file with the new extension, I can even open the file with HDFView, but fails opening the new file with NWBHDF5IO. The error:
```
Traceback (most recent call last):
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1207, in construct
obj.__init__(**kwargs)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 436, in func_call
raise_from(ExceptionType(msg), None)
File "<string>", line 3, in raise_from
TypeError: incorrect type for 'high_pass_filtered' (got 'bool_', expected 'bool')
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "test.py", line 28, in <module>
nwb = io.read()
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\backends\hdf5\h5tools.py", line 293, in read
return call_docval_func(super(HDF5IO, self).read, kwargs)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 327, in call_docval_func
return func(*fargs, **fkwargs)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\backends\io.py", line 35, in read
container = self.__manager.construct(f_builder)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 208, in construct
result = self.__type_map.construct(builder, self, None)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1712, in construct
return attr_map.construct(builder, build_manager, parent)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1175, in construct
subspecs = self.__get_subspec_values(builder, self.spec, manager)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1117, in __get_subspec_values
self.__get_sub_builders(groups, spec.groups, manager, ret)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1155, in __get_sub_builders
ret.update(self.__get_subspec_values(sub_builder, subspec, manager))
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1117, in __get_subspec_values
self.__get_sub_builders(groups, spec.groups, manager, ret)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1147, in __get_sub_builders
sub_builder = self.__flatten(sub_builder, subspec, manager)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1160, in __flatten
tmp = [manager.construct(b) for b in sub_builder]
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1160, in <listcomp>
tmp = [manager.construct(b) for b in sub_builder]
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 204, in construct
result = self.__type_map.construct(builder, self, parent)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1712, in construct
return attr_map.construct(builder, build_manager, parent)
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\utils.py", line 438, in func_call
return func(self, **parsed['args'])
File "C:\Users\Luiz\Anaconda3\envs\nwbn_conversion\lib\site-packages\hdmf\build\map.py", line 1210, in construct
raise_from(Exception(msg), ex)
File "<string>", line 3, in raise_from
Exception: Could not construct LabMetaData_ext object
```
I’m guessing the source is here?
```
TypeError: incorrect type for 'high_pass_filtered' (got 'bool_', expected 'bool')
```
Does anyone have a hint how to solve it?
##
Python Version: 3.7
Operating System: Windows
HDMF Version: 1.3.3
## Checklist
- [x ] Have you ensured the feature or change was not already [reported](https://github.com/hdmf-dev/hdmf/issues) ?
- [ x] Have you included a brief and descriptive title?
- [ x] Have you included a clear description of the problem you are trying to solve?
- [ x] Have you included a minimal code snippet that reproduces the issue you are encountering?
- [ x] Have you checked our [Contributing](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst) document?
| hdmf-dev/hdmf | diff --git a/tests/unit/build_tests/test_io_map.py b/tests/unit/build_tests/test_io_map.py
index ae839dd..1405364 100644
--- a/tests/unit/build_tests/test_io_map.py
+++ b/tests/unit/build_tests/test_io_map.py
@@ -701,6 +701,22 @@ class TestConvertDtype(unittest.TestCase):
self.assertTupleEqual(ret, match)
self.assertIs(ret[0].dtype.type, match[1])
+ def test_bool_spec(self):
+ spec_type = 'bool'
+ spec = DatasetSpec('an example dataset', spec_type, name='data')
+
+ value = np.bool_(True)
+ ret = ObjectMapper.convert_dtype(spec, value)
+ match = (value, np.bool_)
+ self.assertTupleEqual(ret, match)
+ self.assertIs(type(ret[0]), match[1])
+
+ value = True
+ ret = ObjectMapper.convert_dtype(spec, value)
+ match = (value, np.bool_)
+ self.assertTupleEqual(ret, match)
+ self.assertIs(type(ret[0]), match[1])
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/utils_test/test_docval.py b/tests/unit/utils_test/test_docval.py
index ebe23d1..5b711de 100644
--- a/tests/unit/utils_test/test_docval.py
+++ b/tests/unit/utils_test/test_docval.py
@@ -1,5 +1,6 @@
import unittest
from six import text_type
+import numpy as np
from hdmf.utils import docval, fmt_docval_args, get_docval, popargs
@@ -478,6 +479,19 @@ class TestDocValidator(unittest.TestCase):
with self.assertRaisesRegex(ValueError, r'Function __init__ has no docval arguments'):
get_docval(self.test_obj.__init__, 'arg3')
+ def test_bool_type(self):
+ @docval({'name': 'arg1', 'type': bool, 'doc': 'this is a bool'})
+ def method(self, **kwargs):
+ return popargs('arg1', kwargs)
+
+ res = method(self, arg1=True)
+ self.assertEqual(res, True)
+ self.assertIsInstance(res, bool)
+
+ res = method(self, arg1=np.bool_(True))
+ self.assertEqual(res, np.bool_(True))
+ self.assertIsInstance(res, np.bool_)
+
class TestDocValidatorChain(unittest.TestCase):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"tox",
"flake8",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
chardet==5.2.0
colorama==0.4.6
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.12.2
flake8==3.9.2
h5py==2.10.0
-e git+https://github.com/hdmf-dev/hdmf.git@42026c5d7b30b7d176f75af7570b5e54c8ebadcd#egg=hdmf
importlib-metadata==6.7.0
iniconfig==2.0.0
mccabe==0.6.1
numpy==1.17.2
packaging==24.0
pandas==0.25.1
platformdirs==4.0.0
pluggy==1.2.0
pycodestyle==2.7.0
pyflakes==2.3.1
pyproject-api==1.5.3
pytest==7.4.4
python-dateutil==2.9.0.post0
pytz==2025.2
ruamel.yaml==0.16.5
ruamel.yaml.clib==0.2.8
scipy==1.3.1
six==1.12.0
tomli==2.0.1
tox==4.8.0
typing_extensions==4.7.1
virtualenv==20.26.6
zipp==3.15.0
| name: hdmf
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.12.2
- flake8==3.9.2
- h5py==2.10.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- mccabe==0.6.1
- numpy==1.17.2
- packaging==24.0
- pandas==0.25.1
- platformdirs==4.0.0
- pluggy==1.2.0
- pycodestyle==2.7.0
- pyflakes==2.3.1
- pyproject-api==1.5.3
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- pytz==2025.2
- ruamel-yaml==0.16.5
- ruamel-yaml-clib==0.2.8
- scipy==1.3.1
- six==1.12.0
- tomli==2.0.1
- tox==4.8.0
- typing-extensions==4.7.1
- virtualenv==20.26.6
- zipp==3.15.0
prefix: /opt/conda/envs/hdmf
| [
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_bool_type"
] | [] | [
"tests/unit/build_tests/test_io_map.py::TestGetSubSpec::test_get_subspec_data_type_noname",
"tests/unit/build_tests/test_io_map.py::TestGetSubSpec::test_get_subspec_named",
"tests/unit/build_tests/test_io_map.py::TestTypeMap::test_get_map",
"tests/unit/build_tests/test_io_map.py::TestTypeMap::test_get_map_register",
"tests/unit/build_tests/test_io_map.py::TestTypeMap::test_get_map_unique_mappers",
"tests/unit/build_tests/test_io_map.py::TestMapStrings::test_build_1d",
"tests/unit/build_tests/test_io_map.py::TestMapStrings::test_build_dataio",
"tests/unit/build_tests/test_io_map.py::TestMapStrings::test_build_scalar",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_composition",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_composition_wrong_order",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_constructor",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_constructor_name",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_constructor_name_default_name",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_creation",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_creation_defaults",
"tests/unit/build_tests/test_io_map.py::TestDynamicContainer::test_dynamic_container_default_name",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNested::test_build",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNested::test_construct",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNested::test_default_mapping",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNested::test_default_mapping_keys",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNested::test_remap_keys",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNoNesting::test_build",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNoNesting::test_build_empty",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNoNesting::test_construct",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNoNesting::test_default_mapping",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperNoNesting::test_default_mapping_keys",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperContainer::test_default_mapping",
"tests/unit/build_tests/test_io_map.py::TestObjectMapperContainer::test_default_mapping_keys",
"tests/unit/build_tests/test_io_map.py::TestLinkedContainer::test_build_child_link",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_bool_spec",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_convert_higher_precision",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_keep_higher_precision",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_no_spec",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_numeric_spec",
"tests/unit/build_tests/test_io_map.py::TestConvertDtype::test_value_none",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_bad_shape",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_bad_type",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_catch_duplicate_names",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_all_kw_syntax",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_default",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_default_sub",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_default_sub_missing_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_kw_syntax",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_kwsyntax_sub",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_kwsyntax_sub_missing_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_kwsyntax_sub_nonetype_arg",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_pos_syntax",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_kw_pos_syntax_missing_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_pos_as_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_text_type_w_str",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add2_text_type_w_unicode",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add_missing_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_docval_add_sub",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_dup_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_args_dup_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_args_pos_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_args_pos_kw_ok",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_args_pos_only",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_args_pos_only_ok",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_kwarg",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_extra_kwargs_pos_kw",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_fmt_docval_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_all",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_missing_arg",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_missing_arg_of_many_ok",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_missing_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_none",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_none_arg",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_one_arg",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_get_docval_two_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_multi_shape",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_arg1_arg2",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_arg1_arg2_pos",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_arg1_no_arg2",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_arg1_pos_no_arg2",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_arg2_no_arg1",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_only_kw_no_args",
"tests/unit/utils_test/test_docval.py::TestDocValidator::test_unsupported_docval_term",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_invalid_unpack",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_invalid_unpack_default",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_none_unpack",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_none_unpack_default",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_other_unpack",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_other_unpack_default",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_valid_unpack",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_shape_valid_unpack_default",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_type_arg",
"tests/unit/utils_test/test_docval.py::TestDocValidatorChain::test_type_arg_wrong_type"
] | [] | BSD-3-Clause | 5,843 | 348 | [
"src/hdmf/utils.py"
] |
lundberg__respx-20 | 05747d93cdac6c9c91e909f63c95bd56410da15b | 2019-11-19 08:01:50 | 3e36da0d783a0bc143df9828a0e61c148f0fbc0c | diff --git a/respx/mock.py b/respx/mock.py
index 39d4176..c3f45e5 100644
--- a/respx/mock.py
+++ b/respx/mock.py
@@ -108,6 +108,7 @@ class HTTPXMock:
content: typing.Optional[ContentDataTypes] = None,
content_type: typing.Optional[str] = None,
headers: typing.Optional[HeaderTypes] = None,
+ pass_through: bool = False,
alias: typing.Optional[str] = None,
) -> RequestPattern:
"""
@@ -118,7 +119,9 @@ class HTTPXMock:
headers["Content-Type"] = content_type
response = ResponseTemplate(status_code, headers, content)
- pattern = RequestPattern(method, url, response, alias=alias)
+ pattern = RequestPattern(
+ method, url, response, pass_through=pass_through, alias=alias
+ )
self.add(pattern, alias=alias)
@@ -145,8 +148,8 @@ class HTTPXMock:
matched_response: typing.Optional[ResponseTemplate] = None
for i, pattern in enumerate(self._patterns):
- response = pattern.match(request)
- if not response:
+ match = pattern.match(request)
+ if not match:
continue
if found_index is not None:
@@ -156,38 +159,60 @@ class HTTPXMock:
found_index = i
matched_pattern = pattern
- matched_response = response
+
+ if isinstance(match, ResponseTemplate):
+ # Mock response
+ matched_response = match
+ elif isinstance(match, AsyncRequest):
+ # Pass-through request
+ matched_response = None
+ else:
+ raise ValueError(
+ (
+ "Matched request pattern must return either a "
+ 'ResponseTemplate or an AsyncResponse, got "{}"'
+ ).format(type(match))
+ )
return matched_pattern, matched_response
@contextmanager
- def _patch_backend(self, backend: ConcurrencyBackend) -> typing.Iterator[None]:
+ def _patch_backend(
+ self,
+ backend: ConcurrencyBackend,
+ request: AsyncRequest,
+ response: typing.Optional[ResponseTemplate],
+ ) -> typing.Iterator[None]:
patchers = []
- # Mock open_tcp_stream()
- patchers.append(
- asynctest.mock.patch.object(
- backend, "open_tcp_stream", self._open_tcp_stream_mock
- )
- )
+ if response is not None:
+ # 1. Patch request url with response for later pickup in patched backend
+ request.url = URLResponse(request.url, response)
- # Mock open_uds_stream()
- # TODO: Remove if-statement once httpx uds support is released
- if hasattr(backend, "open_uds_stream"): # pragma: nocover
+ # Mock open_tcp_stream()
patchers.append(
asynctest.mock.patch.object(
- backend, "open_uds_stream", self._open_uds_stream_mock
+ backend, "open_tcp_stream", self._open_tcp_stream_mock
)
)
- # Start patchers
+ # Mock open_uds_stream()
+ # TODO: Remove if-statement once httpx uds support is released
+ if hasattr(backend, "open_uds_stream"): # pragma: nocover
+ patchers.append(
+ asynctest.mock.patch.object(
+ backend, "open_uds_stream", self._open_uds_stream_mock
+ )
+ )
+
+ # 2. Start patchers
for patcher in patchers:
patcher.start()
try:
yield
finally:
- # Stop patchers
+ # 3. Stop patchers
for patcher in patchers:
patcher.start()
@@ -203,17 +228,17 @@ class HTTPXMock:
# 1. Match request against added patterns
pattern, template = self._match(request)
- # 2. Patch request url with response for later pickup in mocked backend methods
- request.url = URLResponse(request.url, template or ResponseTemplate())
+ if pattern is None:
+ template = ResponseTemplate()
- # 3. Patch client's backend and pass-through to _get_response
+ # 2. Patch client's backend and continue to original _get_response
try:
global _get_response
- with self._patch_backend(client.concurrency_backend):
+ with self._patch_backend(client.concurrency_backend, request, template):
response = None
response = await _get_response(client, request, **kwargs)
finally:
- # 4. Update stats
+ # 3. Update stats
if pattern:
pattern(request, response)
self.calls.append((request, response))
diff --git a/respx/models.py b/respx/models.py
index dc32211..5131247 100644
--- a/respx/models.py
+++ b/respx/models.py
@@ -74,6 +74,7 @@ class RequestPattern:
method: typing.Union[str, typing.Callable],
url: typing.Optional[typing.Union[str, typing.Pattern]],
response: ResponseTemplate,
+ pass_through: bool = False,
alias: typing.Optional[str] = None,
) -> None:
self._match_func: typing.Optional[typing.Callable] = None
@@ -81,10 +82,12 @@ class RequestPattern:
if callable(method):
self.method = None
self.url = None
+ self.pass_through = None
self._match_func = method
else:
self.method = method
self.url = url
+ self.pass_through = pass_through
self.response = response
self.alias = alias
@@ -106,10 +109,22 @@ class RequestPattern:
) -> None:
self._stats(request, response)
- def match(self, request: AsyncRequest) -> typing.Optional[ResponseTemplate]:
+ def match(
+ self, request: AsyncRequest
+ ) -> typing.Optional[typing.Union[AsyncRequest, ResponseTemplate]]:
+ """
+ Matches request with configured pattern;
+ custom matcher function or http method + url pattern.
+
+ Returns None for a non-matching pattern, mocked response for a match,
+ or input request for pass-through.
+ """
matches = False
url_params: Kwargs = {}
+ if self.pass_through:
+ return request
+
if self._match_func:
response = self.response.clone(context={"request": request})
return self._match_func(request, response)
| Pass-through requests
It would sometimes be useful to mark a pattern to pass-through the request to the original url.
**Ideas:**
```py
with respx.mock():
respx.get(...., pass_through=True)
# -- and/or --
respx.get(...., pass_through=pass_through_or_not)
def pass_through_or_not(request, **kwargs):
if some_awesome_logic:
return True
```
Relates to #16 | lundberg/respx | diff --git a/tests/test_mock.py b/tests/test_mock.py
index 6870958..9624c47 100644
--- a/tests/test_mock.py
+++ b/tests/test_mock.py
@@ -309,8 +309,8 @@ class HTTPXMockTestCase(asynctest.TestCase):
response.context["id"] = 123
return response
- with respx.HTTPXMock() as httpx_mock:
- httpx_mock.request(matcher, status_code=202, headers={"X-Ham": "Spam"})
+ with respx.HTTPXMock(assert_all_called=False) as httpx_mock:
+ request = httpx_mock.request(matcher, status_code=202, headers={"X-Ham": "Spam"})
response = httpx.get("https://foo/bar/")
self.assertEqual(response.status_code, 202)
@@ -319,6 +319,12 @@ class HTTPXMockTestCase(asynctest.TestCase):
httpx.Headers({"Content-Type": "text/plain", "X-Ham": "Spam"}),
)
self.assertEqual(response.text, "foobar #123")
+ self.assertTrue(request.called)
+ self.assertFalse(request.pass_through)
+
+ with self.assertRaises(ValueError):
+ httpx_mock.request(lambda req, res: "invalid")
+ httpx.get("https://ham/spam/")
def test_assert_all_called_fail(self):
with self.assertRaises(AssertionError):
@@ -355,6 +361,36 @@ class HTTPXMockTestCase(asynctest.TestCase):
self.assertTrue(request1.called)
self.assertTrue(request2.called)
+ def test_pass_through_with_arg(self):
+ with respx.mock():
+ request = respx.get("https://www.example.org/", pass_through=True)
+
+ with asynctest.mock.patch(
+ "asyncio.open_connection",
+ side_effect=ConnectionRefusedError("test request blocked"),
+ ) as open_connection:
+ with self.assertRaises(ConnectionRefusedError):
+ httpx.get("https://www.example.org/")
+
+ self.assertTrue(open_connection.called)
+ self.assertTrue(request.called)
+ self.assertTrue(request.pass_through)
+
+ def test_pass_through_with_custom_matcher(self):
+ with respx.mock():
+ request = respx.request(lambda request, response: request)
+
+ with asynctest.mock.patch(
+ "asyncio.open_connection",
+ side_effect=ConnectionRefusedError("test request blocked"),
+ ) as open_connection:
+ with self.assertRaises(ConnectionRefusedError):
+ httpx.get("https://www.example.org/")
+
+ self.assertTrue(open_connection.called)
+ self.assertTrue(request.called)
+ self.assertIsNone(request.pass_through)
+
async def test_stats(self, backend=None):
with respx.mock():
url = "https://foo/bar/1/"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-asyncio",
"pytest-cov",
"trio"
],
"pre_install": null,
"python": "3.8",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asynctest==0.13.0
attrs==25.3.0
certifi==2025.1.31
chardet==3.0.4
coverage==7.6.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
h11==0.8.1
h2==3.2.0
hpack==3.0.0
hstspreload==2025.1.1
httpx==0.7.6
hyperframe==5.2.0
idna==2.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
outcome==1.3.0.post0
packaging @ file:///croot/packaging_1720101850331/work
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.24.0
pytest-cov==5.0.0
-e git+https://github.com/lundberg/respx.git@05747d93cdac6c9c91e909f63c95bd56410da15b#egg=respx
rfc3986==1.5.0
sniffio==1.3.1
sortedcontainers==2.4.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
trio==0.27.0
| name: respx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py38h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.1=py38h06a4308_0
- pip=24.2=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py38h06a4308_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asynctest==0.13.0
- attrs==25.3.0
- certifi==2025.1.31
- chardet==3.0.4
- coverage==7.6.1
- h11==0.8.1
- h2==3.2.0
- hpack==3.0.0
- hstspreload==2025.1.1
- httpx==0.7.6
- hyperframe==5.2.0
- idna==2.10
- outcome==1.3.0.post0
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.24.0
- pytest-cov==5.0.0
- rfc3986==1.5.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- trio==0.27.0
prefix: /opt/conda/envs/respx
| [
"tests/test_mock.py::HTTPXMockTestCase::test_custom_matcher",
"tests/test_mock.py::HTTPXMockTestCase::test_pass_through_with_arg",
"tests/test_mock.py::HTTPXMockTestCase::test_pass_through_with_custom_matcher"
] | [] | [
"tests/test_mock.py::HTTPXMockTestCase::test_alias",
"tests/test_mock.py::HTTPXMockTestCase::test_api",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_disabled",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_fail",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_sucess",
"tests/test_mock.py::HTTPXMockTestCase::test_async_client",
"tests/test_mock.py::HTTPXMockTestCase::test_callable_content",
"tests/test_mock.py::HTTPXMockTestCase::test_content_type",
"tests/test_mock.py::HTTPXMockTestCase::test_exception",
"tests/test_mock.py::HTTPXMockTestCase::test_headers",
"tests/test_mock.py::HTTPXMockTestCase::test_http_methods",
"tests/test_mock.py::HTTPXMockTestCase::test_invalid_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_json_content",
"tests/test_mock.py::HTTPXMockTestCase::test_mock_contextmanager",
"tests/test_mock.py::HTTPXMockTestCase::test_mock_decorator",
"tests/test_mock.py::HTTPXMockTestCase::test_raising_content",
"tests/test_mock.py::HTTPXMockTestCase::test_raw_content",
"tests/test_mock.py::HTTPXMockTestCase::test_regex_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_repeated_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_stats",
"tests/test_mock.py::HTTPXMockTestCase::test_status_code",
"tests/test_mock.py::HTTPXMockTestCase::test_string_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_sync_client",
"tests/test_mock.py::HTTPXMockTestCase::test_trio_backend",
"tests/test_mock.py::HTTPXMockTestCase::test_unknown_url"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,847 | 1,523 | [
"respx/mock.py",
"respx/models.py"
] |
|
lundberg__respx-21 | 3e36da0d783a0bc143df9828a0e61c148f0fbc0c | 2019-11-19 11:03:45 | 3e36da0d783a0bc143df9828a0e61c148f0fbc0c | diff --git a/respx/mock.py b/respx/mock.py
index c3f45e5..fd62371 100644
--- a/respx/mock.py
+++ b/respx/mock.py
@@ -24,8 +24,11 @@ __all__ = ["HTTPXMock"]
class HTTPXMock:
- def __init__(self, assert_all_called: bool = True) -> None:
+ def __init__(
+ self, assert_all_called: bool = True, assert_all_mocked: bool = True
+ ) -> None:
self._assert_all_called = assert_all_called
+ self._assert_all_mocked = assert_all_mocked
self._patchers: typing.List[asynctest.mock._patch] = []
self._patterns: typing.List[RequestPattern] = []
self.aliases: typing.Dict[str, RequestPattern] = {}
@@ -93,7 +96,7 @@ class HTTPXMock:
def assert_all_called(self):
assert all(
(pattern.called for pattern in self._patterns)
- ), "not all requests called"
+ ), "RESPX: some mocked requests were not called!"
def add(self, pattern: RequestPattern, alias: typing.Optional[str] = None) -> None:
self._patterns.append(pattern)
@@ -143,29 +146,29 @@ class HTTPXMock:
) -> typing.Tuple[
typing.Optional[RequestPattern], typing.Optional[ResponseTemplate]
]:
- found_index: typing.Optional[int] = None
matched_pattern: typing.Optional[RequestPattern] = None
- matched_response: typing.Optional[ResponseTemplate] = None
+ matched_pattern_index: typing.Optional[int] = None
+ response: typing.Optional[ResponseTemplate] = None
for i, pattern in enumerate(self._patterns):
match = pattern.match(request)
if not match:
continue
- if found_index is not None:
+ if matched_pattern_index is not None:
# Multiple matches found, drop and use the first one
- self._patterns.pop(found_index)
+ self._patterns.pop(matched_pattern_index)
break
- found_index = i
matched_pattern = pattern
+ matched_pattern_index = i
if isinstance(match, ResponseTemplate):
# Mock response
- matched_response = match
+ response = match
elif isinstance(match, AsyncRequest):
# Pass-through request
- matched_response = None
+ response = None
else:
raise ValueError(
(
@@ -174,7 +177,17 @@ class HTTPXMock:
).format(type(match))
)
- return matched_pattern, matched_response
+ # Assert we always get a pattern match, if check is enabled
+ assert (
+ not self._assert_all_mocked
+ or self._assert_all_mocked
+ and matched_pattern is not None
+ ), f"RESPX: {request!r} not mocked!"
+
+ if matched_pattern is None:
+ response = ResponseTemplate()
+
+ return matched_pattern, response
@contextmanager
def _patch_backend(
@@ -226,15 +239,11 @@ class HTTPXMock:
and mocks client backend open stream methods.
"""
# 1. Match request against added patterns
- pattern, template = self._match(request)
-
- if pattern is None:
- template = ResponseTemplate()
+ pattern, _response = self._match(request)
# 2. Patch client's backend and continue to original _get_response
try:
- global _get_response
- with self._patch_backend(client.concurrency_backend, request, template):
+ with self._patch_backend(client.concurrency_backend, request, _response):
response = None
response = await _get_response(client, request, **kwargs)
finally:
| Raise on non-matching request
Add setting to raise an exception when a request is captured but not matching any added pattern, with the possibility to disable/enable the feature.
Decide what the default behaviour should be, raise or always mock responses.
What exception should be raised, `AssertionError` or `ConnectionError`?
**Idea**:
```py
with respx.HTTPXMock(assert_all_mocked=False):
respx.get("https://foo/bar/")
httpx.get("https://ham/spam/") # Would raise an AssertionError("non-matching")
``` | lundberg/respx | diff --git a/tests/test_mock.py b/tests/test_mock.py
index 9624c47..97fd913 100644
--- a/tests/test_mock.py
+++ b/tests/test_mock.py
@@ -106,9 +106,11 @@ class HTTPXMockTestCase(asynctest.TestCase):
self.assertFalse(foobar.called)
def test_unknown_url(self):
- with respx.mock():
+ with respx.HTTPXMock(
+ assert_all_called=False, assert_all_mocked=False
+ ) as httpx_mock:
url = "https://foo/bar/"
- foobar = respx.post(url) # Non-matching method
+ foobar = httpx_mock.post(url) # Non-matching method
response = httpx.get(url)
self.assertFalse(foobar.called)
@@ -118,8 +120,8 @@ class HTTPXMockTestCase(asynctest.TestCase):
)
self.assertEqual(response.text, "")
- self.assertEqual(len(respx.calls), 1)
- request, response = respx.calls[-1]
+ self.assertEqual(len(httpx_mock.calls), 1)
+ request, response = httpx_mock.calls[-1]
self.assertIsNotNone(request)
self.assertIsNotNone(response)
@@ -294,6 +296,7 @@ class HTTPXMockTestCase(asynctest.TestCase):
side_effect=ValueError("mock"),
):
url = "https://foo/bar/1/"
+ httpx_mock.get(url)
with self.assertRaises(ValueError):
httpx.get(url)
@@ -310,7 +313,9 @@ class HTTPXMockTestCase(asynctest.TestCase):
return response
with respx.HTTPXMock(assert_all_called=False) as httpx_mock:
- request = httpx_mock.request(matcher, status_code=202, headers={"X-Ham": "Spam"})
+ request = httpx_mock.request(
+ matcher, status_code=202, headers={"X-Ham": "Spam"}
+ )
response = httpx.get("https://foo/bar/")
self.assertEqual(response.status_code, 202)
@@ -361,6 +366,20 @@ class HTTPXMockTestCase(asynctest.TestCase):
self.assertTrue(request1.called)
self.assertTrue(request2.called)
+ def test_assert_all_mocked_fail(self):
+ with self.assertRaises(AssertionError):
+ with respx.HTTPXMock(assert_all_mocked=True) as httpx_mock:
+ httpx.get("https://foo/bar/")
+
+ self.assertEqual(len(httpx_mock.calls), 0)
+
+ def test_assert_all_mocked_disabled(self):
+ with respx.HTTPXMock(assert_all_mocked=False) as httpx_mock:
+ response = httpx.get("https://foo/bar/")
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(len(httpx_mock.calls), 1)
+
def test_pass_through_with_arg(self):
with respx.mock():
request = respx.get("https://www.example.org/", pass_through=True)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-asyncio",
"pytest-cov",
"trio"
],
"pre_install": null,
"python": "3.8",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asynctest==0.13.0
attrs==25.3.0
certifi==2025.1.31
chardet==3.0.4
coverage==7.6.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
h11==0.8.1
h2==3.2.0
hpack==3.0.0
hstspreload==2025.1.1
httpx==0.7.6
hyperframe==5.2.0
idna==2.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
outcome==1.3.0.post0
packaging @ file:///croot/packaging_1720101850331/work
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.24.0
pytest-cov==5.0.0
-e git+https://github.com/lundberg/respx.git@3e36da0d783a0bc143df9828a0e61c148f0fbc0c#egg=respx
rfc3986==1.5.0
sniffio==1.3.1
sortedcontainers==2.4.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
trio==0.27.0
| name: respx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py38h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.1=py38h06a4308_0
- pip=24.2=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py38h06a4308_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asynctest==0.13.0
- attrs==25.3.0
- certifi==2025.1.31
- chardet==3.0.4
- coverage==7.6.1
- h11==0.8.1
- h2==3.2.0
- hpack==3.0.0
- hstspreload==2025.1.1
- httpx==0.7.6
- hyperframe==5.2.0
- idna==2.10
- outcome==1.3.0.post0
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.24.0
- pytest-cov==5.0.0
- rfc3986==1.5.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- trio==0.27.0
prefix: /opt/conda/envs/respx
| [
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_mocked_disabled",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_mocked_fail",
"tests/test_mock.py::HTTPXMockTestCase::test_unknown_url"
] | [] | [
"tests/test_mock.py::HTTPXMockTestCase::test_alias",
"tests/test_mock.py::HTTPXMockTestCase::test_api",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_disabled",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_fail",
"tests/test_mock.py::HTTPXMockTestCase::test_assert_all_called_sucess",
"tests/test_mock.py::HTTPXMockTestCase::test_async_client",
"tests/test_mock.py::HTTPXMockTestCase::test_callable_content",
"tests/test_mock.py::HTTPXMockTestCase::test_content_type",
"tests/test_mock.py::HTTPXMockTestCase::test_custom_matcher",
"tests/test_mock.py::HTTPXMockTestCase::test_exception",
"tests/test_mock.py::HTTPXMockTestCase::test_headers",
"tests/test_mock.py::HTTPXMockTestCase::test_http_methods",
"tests/test_mock.py::HTTPXMockTestCase::test_invalid_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_json_content",
"tests/test_mock.py::HTTPXMockTestCase::test_mock_contextmanager",
"tests/test_mock.py::HTTPXMockTestCase::test_mock_decorator",
"tests/test_mock.py::HTTPXMockTestCase::test_pass_through_with_arg",
"tests/test_mock.py::HTTPXMockTestCase::test_pass_through_with_custom_matcher",
"tests/test_mock.py::HTTPXMockTestCase::test_raising_content",
"tests/test_mock.py::HTTPXMockTestCase::test_raw_content",
"tests/test_mock.py::HTTPXMockTestCase::test_regex_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_repeated_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_stats",
"tests/test_mock.py::HTTPXMockTestCase::test_status_code",
"tests/test_mock.py::HTTPXMockTestCase::test_string_url_pattern",
"tests/test_mock.py::HTTPXMockTestCase::test_sync_client",
"tests/test_mock.py::HTTPXMockTestCase::test_trio_backend"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,848 | 869 | [
"respx/mock.py"
] |
|
ReproNim__reproman-488 | a5da59d720baf3048a30000632b78e1eae215f5f | 2019-11-19 18:05:07 | b82a412ecccce33c885247dbd9464972b0e87c3f | diff --git a/reproman/resource/docker_container.py b/reproman/resource/docker_container.py
index 080d6e6..d9d0b2e 100644
--- a/reproman/resource/docker_container.py
+++ b/reproman/resource/docker_container.py
@@ -30,6 +30,16 @@ import logging
lgr = logging.getLogger('reproman.resource.docker_container')
+def _image_latest_default(image):
+ # Given the restricted character set for names, the presence of ":" or "@"
+ # should be a reliable indication of a tag or digest, respectively. See
+ # - https://docs.docker.com/engine/reference/commandline/tag/#extended-description
+ # - vendor/github.com/docker/distribution/reference/regexp.go
+ if ":" not in image and "@" not in image:
+ image += ":latest"
+ return image
+
+
@attr.s
class DockerContainer(Resource):
"""
@@ -43,8 +53,10 @@ class DockerContainer(Resource):
id = attrib()
type = attrib(default='docker-container')
- image = attrib(default='ubuntu:latest',
- doc="Docker base image ID from which to create the running instance")
+ image = attrib(
+ default='ubuntu:latest',
+ doc="Docker base image ID from which to create the running instance",
+ converter=_image_latest_default)
engine_url = attrib(default='unix:///var/run/docker.sock',
doc="Docker server URL where engine is listening for connections")
seccomp_unconfined = attrib(default=False,
| create: Default to :latest tag when pulling Docker image
`reproman create` callers that are familiar with the command-line interface to `docker pull` would reasonably expect that, if they omit a tag, the default `:latest` will be used. Instead we download all tags:
```
$ docker images busybox
REPOSITORY TAG IMAGE ID CREATED SIZE
$ reproman create bb -t docker-container -b image=busybox >notag-out 2>&1
$ wc -l notag-out
84 notag-out
$ grep -c "Download complete" notag-out
7
$ tail -n3 notag-out
2019-11-05 15:14:27,259 [INFO] Downloading [====================> ] 924.4kB/2.202MB
ERROR:
Interrupted by user while doing magic
$ docker images busybox
REPOSITORY TAG IMAGE ID CREATED SIZE
busybox 1-musl ff773d70e0ec 5 days ago 1.44MB
busybox 1-glibc 7636bfb4b772 5 days ago 5.2MB
busybox 1-uclibc 020584afccce 5 days ago 1.22MB
busybox 1-ubuntu d34ea343a882 3 years ago 4.35MB
busybox 1.21-ubuntu d34ea343a882 3 years ago 4.35MB
busybox 1.21.0-ubuntu d34ea343a882 3 years ago 4.35MB
busybox 1.23 a84c36ecc374 4 years ago 1.1MB
``` | ReproNim/reproman | diff --git a/reproman/resource/tests/test_docker_container.py b/reproman/resource/tests/test_docker_container.py
index 1a9b5a7..3e7238b 100644
--- a/reproman/resource/tests/test_docker_container.py
+++ b/reproman/resource/tests/test_docker_container.py
@@ -175,3 +175,12 @@ def test_container_exists(setup_ubuntu):
from ..docker_container import DockerContainer
assert DockerContainer.is_container_running(setup_ubuntu['name'])
assert not DockerContainer.is_container_running('foo')
+
+
[email protected]_no_docker_dependencies
+def test_image_name_latest_default():
+ from ..docker_container import DockerContainer
+ for img, expected in [("debian:buster", "debian:buster"),
+ ("busybox@ddeeaa", "busybox@ddeeaa"),
+ ("busybox", "busybox:latest")]:
+ assert DockerContainer(name="cname", image=img).image == expected
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[devel]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-devel.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
appdirs==1.4.4
attrs==25.3.0
babel==2.17.0
bcrypt==4.3.0
boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
coverage==7.8.0
cryptography==44.0.2
decorator==5.2.1
Deprecated==1.2.18
distro==1.9.0
docker==7.1.0
dockerpty==0.4.1
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
fabric==3.2.2
humanize==4.12.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
invoke==2.2.0
isodate==0.7.2
Jinja2==3.1.6
jmespath==1.0.1
line_profiler==4.2.0
MarkupSafe==3.0.2
nibabel==5.3.2
numpy==2.0.2
packaging==24.2
paramiko==3.5.1
pluggy==1.5.0
pycparser==2.22
pycrypto==2.6.1
Pygments==2.19.1
PyNaCl==1.5.0
pyOpenSSL==16.2.0
pyparsing==3.2.3
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
python-debian==1.0.1
pytz==2025.2
PyYAML==6.0.2
rdflib==7.1.4
-e git+https://github.com/ReproNim/reproman.git@a5da59d720baf3048a30000632b78e1eae215f5f#egg=reproman
reprozip==1.3
requests==2.32.3
rpaths==1.0.0
s3transfer==0.11.4
scp==0.15.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tqdm==4.67.1
typing_extensions==4.13.0
urllib3==1.26.20
usagestats==1.0.1
wrapt==1.17.2
zipp==3.21.0
| name: reproman
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- appdirs==1.4.4
- attrs==25.3.0
- babel==2.17.0
- bcrypt==4.3.0
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- coverage==7.8.0
- cryptography==44.0.2
- decorator==5.2.1
- deprecated==1.2.18
- distro==1.9.0
- docker==7.1.0
- dockerpty==0.4.1
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- fabric==3.2.2
- humanize==4.12.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- invoke==2.2.0
- isodate==0.7.2
- jinja2==3.1.6
- jmespath==1.0.1
- line-profiler==4.2.0
- markupsafe==3.0.2
- nibabel==5.3.2
- numpy==2.0.2
- packaging==24.2
- paramiko==3.5.1
- pluggy==1.5.0
- pycparser==2.22
- pycrypto==2.6.1
- pygments==2.19.1
- pynacl==1.5.0
- pyopenssl==16.2.0
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- python-debian==1.0.1
- pytz==2025.2
- pyyaml==6.0.2
- rdflib==7.1.4
- reprozip==1.3
- requests==2.32.3
- rpaths==1.0.0
- s3transfer==0.11.4
- scp==0.15.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tqdm==4.67.1
- typing-extensions==4.13.0
- urllib3==1.26.20
- usagestats==1.0.1
- wrapt==1.17.2
- zipp==3.21.0
prefix: /opt/conda/envs/reproman
| [
"reproman/resource/tests/test_docker_container.py::test_image_name_latest_default"
] | [] | [
"reproman/resource/tests/test_docker_container.py::test_dockercontainer_class"
] | [] | MIT License | 5,853 | 349 | [
"reproman/resource/docker_container.py"
] |
|
blue-yonder__tsfresh-599 | d81c57dc826c522484f0fd268144d189517df6cb | 2019-11-19 20:59:40 | d81c57dc826c522484f0fd268144d189517df6cb | diff --git a/setup.py b/setup.py
index be4e01e..9022dc0 100644
--- a/setup.py
+++ b/setup.py
@@ -25,6 +25,6 @@ setup(
long_description=long_description,
long_description_content_type="text/markdown",
setup_requires=["six", "setuptools_scm"] + sphinx,
- packages=find_packages(),
+ packages=find_packages(exclude=["tests.*", "tests"]),
install_requires=requirements,
)
diff --git a/tsfresh/utilities/dataframe_functions.py b/tsfresh/utilities/dataframe_functions.py
index 381cc71..8d9a59c 100644
--- a/tsfresh/utilities/dataframe_functions.py
+++ b/tsfresh/utilities/dataframe_functions.py
@@ -321,12 +321,14 @@ def _normalize_input_to_internal_representation(timeseries_container, column_id,
column_kind = "_variables"
column_value = "_values"
+ if not set(timeseries_container.columns) - {column_id}:
+ raise ValueError("There is no column with values in your data!")
+
timeseries_container.index.name = 'index'
timeseries_container = pd.melt(timeseries_container.reset_index(),
id_vars=['index', column_id],
value_name=column_value, var_name=column_kind)
timeseries_container = timeseries_container.set_index('index')
-
timeseries_container[column_sort] = np.tile(sort, (len(timeseries_container) // len(sort)))
# Check kind column
| the install process is globally installing a "tests" module (using version 0.12)
Oh no, you encountered a problem while using *tsfesh*.
We, the maintainers, are happy to help you. When opening an issue, please provide the following information to us:
1. Your operating system
Ubuntu 18
2. The version of *tsfresh* that you are using
0.12
3. The data on which the problem occurred (please do not upload 1000s of time series but try to boil the problem down to a small group or even a singular one)
4. A minimal code snippet which reproduces the problem/bug
5. Any reported errors or traceback
For questions, you can also use our [gitter chatroom](https://gitter.im/tsfresh/)
Question 3-5 don't apply as this is an installation issue.
This version is globally installing a ``tests`` module, thus polluting the global namespace of modules with its own tests.
Once tsfresh is installed, we find the following under ``.../lib/python3.7/site-packages/tests``:
```
├── fixtures.py
├── __init__.py
├── integrations
│ ├── examples
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ │ ├── __init__.cpython-37.pyc
│ │ │ ├── test_driftbif_simulation.cpython-37.pyc
│ │ │ ├── test_har_dataset.cpython-37.pyc
│ │ │ └── test_robot_execution_failures.cpython-37.pyc
│ │ ├── test_driftbif_simulation.py
│ │ ├── test_har_dataset.py
│ │ └── test_robot_execution_failures.py
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ ├── test_full_pipeline.cpython-37.pyc
│ │ ├── test_notebooks.cpython-37.pyc
│ │ └── test_relevant_feature_extraction.cpython-37.pyc
│ ├── test_full_pipeline.py
│ ├── test_notebooks.py
│ └── test_relevant_feature_extraction.py
├── __pycache__
│ ├── fixtures.cpython-37.pyc
│ └── __init__.cpython-37.pyc
└── units
├── feature_extraction
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ ├── test_extraction.cpython-37.pyc
│ │ ├── test_feature_calculations.cpython-37.pyc
│ │ └── test_settings.cpython-37.pyc
│ ├── test_extraction.py
│ ├── test_feature_calculations.py
│ └── test_settings.py
├── feature_selection
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ ├── test_benjamini_hochberg_test.cpython-37.pyc
│ │ ├── test_checks.cpython-37.pyc
│ │ ├── test_feature_significance.cpython-37.pyc
│ │ ├── test_relevance.cpython-37.pyc
│ │ ├── test_selection.cpython-37.pyc
│ │ └── test_significance_tests.cpython-37.pyc
│ ├── test_benjamini_hochberg_test.py
│ ├── test_checks.py
│ ├── test_feature_significance.py
│ ├── test_relevance.py
│ ├── test_selection.py
│ └── test_significance_tests.py
├── __init__.py
├── __pycache__
│ └── __init__.cpython-37.pyc
├── scripts
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ └── test_run_tsfresh.cpython-37.pyc
│ └── test_run_tsfresh.py
├── transformers
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ ├── test_feature_augmenter.cpython-37.pyc
│ │ ├── test_feature_selector.cpython-37.pyc
│ │ ├── test_per_column_imputer.cpython-37.pyc
│ │ └── test_relevant_feature_augmenter.cpython-37.pyc
│ ├── test_feature_augmenter.py
│ ├── test_feature_selector.py
│ ├── test_per_column_imputer.py
│ └── test_relevant_feature_augmenter.py
└── utilities
├── __init__.py
├── __pycache__
│ ├── __init__.cpython-37.pyc
│ ├── test_dataframe_functions.cpython-37.pyc
│ ├── test_distribution.cpython-37.pyc
│ └── test_string_manipilations.cpython-37.pyc
├── test_dataframe_functions.py
├── test_distribution.py
└── test_string_manipilations.py
```
| blue-yonder/tsfresh | diff --git a/tests/units/utilities/test_dataframe_functions.py b/tests/units/utilities/test_dataframe_functions.py
index f498ee0..8181a97 100644
--- a/tests/units/utilities/test_dataframe_functions.py
+++ b/tests/units/utilities/test_dataframe_functions.py
@@ -138,6 +138,15 @@ class NormalizeTestCase(TestCase):
self.assertRaises(ValueError, dataframe_functions._normalize_input_to_internal_representation, test_df,
None, None, None, "value")
+ test_df = pd.DataFrame([{"id": 0}])
+ self.assertRaises(ValueError, dataframe_functions._normalize_input_to_internal_representation, test_df,
+ "id", None, None, None)
+
+ test_df = pd.DataFrame([{"id": 0, "sort": 0}])
+ self.assertRaises(ValueError, dataframe_functions._normalize_input_to_internal_representation, test_df,
+ "id", "sort", None, None)
+
+
def test_wide_dataframe_order_preserved_with_sort_column(self):
""" verifies that the order of the sort column from a wide time series container is preserved
"""
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=4.4.0",
"pytest-cov>=2.6.1",
"pytest-xdist>=1.26.1",
"mock>=2.0.0",
"matplotlib>=2.0.0",
"seaborn>=0.7.1",
"ipython>=5.3.0",
"notebook>=4.4.1",
"pandas-datareader>=0.5.0",
"coveralls",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5.2",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
cloudpickle==2.2.1
contextvars==2.4
coverage==6.2
coveralls==3.3.1
cycler==0.11.0
dask==2021.3.0
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
distributed==2021.3.0
docopt==0.6.2
entrypoints==0.4
execnet==1.9.0
future==1.0.0
HeapDict==1.0.1
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
joblib==1.1.1
jsonschema==3.2.0
jupyter-client==7.1.2
jupyter-core==4.9.2
jupyterlab-pygments==0.1.2
kiwisolver==1.3.1
lxml==5.3.1
MarkupSafe==2.0.1
matplotlib==3.3.4
mistune==0.8.4
mock==5.2.0
msgpack==1.0.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==0.23.4
pandas-datareader==0.10.0
pandocfilters==1.5.1
parso==0.7.1
patsy==1.0.1
pexpect==4.9.0
pickleshare==0.7.5
Pillow==8.4.0
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
psutil==7.0.0
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
pyzmq==25.1.2
requests==2.27.1
scikit-learn==0.24.2
scipy==1.5.4
seaborn==0.11.2
Send2Trash==1.8.3
six==1.17.0
sortedcontainers==2.4.0
statsmodels==0.12.2
tblib==1.7.0
terminado==0.12.1
testpath==0.6.0
threadpoolctl==3.1.0
tomli==1.2.3
toolz==0.12.0
tornado==6.1
tqdm==4.64.1
traitlets==4.3.3
-e git+https://github.com/blue-yonder/tsfresh.git@d81c57dc826c522484f0fd268144d189517df6cb#egg=tsfresh
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
zict==2.1.0
zipp==3.6.0
| name: tsfresh
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- cloudpickle==2.2.1
- contextvars==2.4
- coverage==6.2
- coveralls==3.3.1
- cycler==0.11.0
- dask==2021.3.0
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- distributed==2021.3.0
- docopt==0.6.2
- entrypoints==0.4
- execnet==1.9.0
- future==1.0.0
- heapdict==1.0.1
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- joblib==1.1.1
- jsonschema==3.2.0
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- jupyterlab-pygments==0.1.2
- kiwisolver==1.3.1
- lxml==5.3.1
- markupsafe==2.0.1
- matplotlib==3.3.4
- mistune==0.8.4
- mock==5.2.0
- msgpack==1.0.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==0.23.4
- pandas-datareader==0.10.0
- pandocfilters==1.5.1
- parso==0.7.1
- patsy==1.0.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==8.4.0
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- psutil==7.0.0
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- pyzmq==25.1.2
- requests==2.27.1
- scikit-learn==0.24.2
- scipy==1.5.4
- seaborn==0.11.2
- send2trash==1.8.3
- six==1.17.0
- sortedcontainers==2.4.0
- statsmodels==0.12.2
- tblib==1.7.0
- terminado==0.12.1
- testpath==0.6.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- toolz==0.12.0
- tornado==6.1
- tqdm==4.64.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- zict==2.1.0
- zipp==3.6.0
prefix: /opt/conda/envs/tsfresh
| [
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_wrong_input"
] | [] | [
"tests/units/utilities/test_dataframe_functions.py::GetRangeValuesPerColumnTestCase::test_range_values_correct_with_even_length",
"tests/units/utilities/test_dataframe_functions.py::RestrictTestCase::test_restrict_dict",
"tests/units/utilities/test_dataframe_functions.py::GetRangeValuesPerColumnTestCase::test_ignores_non_finite_values",
"tests/units/utilities/test_dataframe_functions.py::RestrictTestCase::test_restrict_dataframe",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_with_wrong_input",
"tests/units/utilities/test_dataframe_functions.py::GetRangeValuesPerColumnTestCase::test_no_finite_values_yields_0",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_df_3",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_df_1",
"tests/units/utilities/test_dataframe_functions.py::ImputeTestCase::test_impute_zero",
"tests/units/utilities/test_dataframe_functions.py::CheckForNanTestCase::test_not_all_columns",
"tests/units/utilities/test_dataframe_functions.py::GetRangeValuesPerColumnTestCase::test_range_values_correct_with_uneven_length",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_assert_single_row",
"tests/units/utilities/test_dataframe_functions.py::CheckForNanTestCase::test_all_columns",
"tests/units/utilities/test_dataframe_functions.py::GetIDsTestCase::test_get_id__correct_DataFrame",
"tests/units/utilities/test_dataframe_functions.py::MakeForecastingFrameTestCase::test_make_forecasting_frame_range",
"tests/units/utilities/test_dataframe_functions.py::ImputeTestCase::test_impute_range",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_df_2",
"tests/units/utilities/test_dataframe_functions.py::RestrictTestCase::test_restrict_wrong",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_dict_rolling_maxshift_1",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_stacked_rolling",
"tests/units/utilities/test_dataframe_functions.py::GetIDsTestCase::test_get_id__correct_dict",
"tests/units/utilities/test_dataframe_functions.py::MakeForecastingFrameTestCase::test_make_forecasting_frame_list",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_wide_dataframe_order_preserved",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_warning_on_non_uniform_time_steps",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_dictionaries_one_row",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_dict_rolling",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_wide_dataframe_order_preserved_with_sort_column",
"tests/units/utilities/test_dataframe_functions.py::ImputeTestCase::test_toplevel_impute",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_positive_rolling",
"tests/units/utilities/test_dataframe_functions.py::MakeForecastingFrameTestCase::test_make_forecasting_frame_pdSeries",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_dictionaries_two_rows_sorted",
"tests/units/utilities/test_dataframe_functions.py::RollingTestCase::test_negative_rolling",
"tests/units/utilities/test_dataframe_functions.py::NormalizeTestCase::test_with_dictionaries_two_rows"
] | [] | MIT License | 5,854 | 350 | [
"setup.py",
"tsfresh/utilities/dataframe_functions.py"
] |
|
Alexei-Kornienko__schematics_to_swagger-7 | 3ddc537a8ed7682e9bb709ebd749b99d7ef09473 | 2019-11-20 22:11:16 | 3ddc537a8ed7682e9bb709ebd749b99d7ef09473 | diff --git a/schematics_to_swagger/__init__.py b/schematics_to_swagger/__init__.py
index d108f3f..d203de0 100644
--- a/schematics_to_swagger/__init__.py
+++ b/schematics_to_swagger/__init__.py
@@ -54,17 +54,24 @@ def _map_schematics_type(t):
def model_to_definition(model):
- fields = model.fields.items()
+ properties = {}
+ required = []
+
+ for field_name, field in model.fields.items():
+ if field_name.startswith(f'_{model.__name__}'):
+ continue # Exclude private fields
+ properties[field_name] = _map_schematics_type(field)
+ if getattr(field, 'required'):
+ required.append(field_name)
+
result_info = {
'type': 'object',
'title': model.__name__,
'description': model.__doc__,
- 'properties': {k: _map_schematics_type(v) for k, v in fields}
+ 'properties': properties
}
- required = [k for k, v in fields if getattr(v, 'required')]
if required:
result_info['required'] = required
-
return result_info
| Hide private model fields in swagger doc | Alexei-Kornienko/schematics_to_swagger | diff --git a/tests/models.py b/tests/models.py
index 5392711..7cd4582 100644
--- a/tests/models.py
+++ b/tests/models.py
@@ -16,3 +16,10 @@ class WeatherStats(Model):
last_report = types.ModelType(WeatherReport)
prev_reports = types.ListType(types.ModelType(WeatherReport))
date_list = types.ListType(types.DateTimeType())
+
+
+class WeatherPrivateData(Model):
+ """Some sample model with private field"""
+ city = types.StringType(max_length=50, metadata={'readOnly': True})
+ temperature = types.DecimalType(required=True)
+ __private_information = types.StringType(max_length=50)
diff --git a/tests/test_model.py b/tests/test_model.py
index ddeabe3..1ed6fba 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -53,6 +53,23 @@ WEATHER_STATS_DEF = {
}
},
}
+WEATHER_PRIVATE_DATA = {
+ 'title': 'WeatherPrivateData',
+ 'type': 'object',
+ 'description': 'Some sample model with private field',
+ 'properties': {
+ 'city': {
+ 'type': 'string',
+ 'maxLength': 50,
+ 'readOnly': True
+ },
+ 'temperature': {
+ 'type': 'number',
+ 'format': 'double'
+ }
+ },
+ 'required': ['temperature']
+}
def test_model_to_definition():
@@ -64,7 +81,8 @@ def test_model_to_definition():
def test_read_models_from_module():
expected = {
'WeatherReport': WEATHER_REPORT_DEFINITION,
- 'WeatherStats': WEATHER_STATS_DEF
+ 'WeatherStats': WEATHER_STATS_DEF,
+ 'WeatherPrivateData': WEATHER_PRIVATE_DATA
}
data = schematics_to_swagger.read_models_from_module(models)
assert expected == data
@@ -74,3 +92,9 @@ def test_compound_type():
expected = WEATHER_STATS_DEF
data = schematics_to_swagger.model_to_definition(models.WeatherStats)
assert expected == data
+
+
+def test_private_fields():
+ expected = WEATHER_PRIVATE_DATA
+ definition = schematics_to_swagger.model_to_definition(models.WeatherPrivateData)
+ assert expected == definition
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=3.6",
"pytest-cov",
"codecov",
"flake8"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
schematics==2.1.1
-e git+https://github.com/Alexei-Kornienko/schematics_to_swagger.git@3ddc537a8ed7682e9bb709ebd749b99d7ef09473#egg=schematics_to_swagger
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: schematics_to_swagger
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- schematics==2.1.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/schematics_to_swagger
| [
"tests/test_model.py::test_read_models_from_module",
"tests/test_model.py::test_private_fields"
] | [] | [
"tests/test_model.py::test_model_to_definition",
"tests/test_model.py::test_compound_type"
] | [] | MIT License | 5,859 | 291 | [
"schematics_to_swagger/__init__.py"
] |
|
MatterMiners__lapis-69 | 5e10d37d054ff87da91010260ab8ab8da52614c0 | 2019-11-21 13:27:31 | 1434d4a7e07fde54e2afc3cac7f142078400d887 | diff --git a/lapis/drone.py b/lapis/drone.py
index 7126242..0e6845a 100644
--- a/lapis/drone.py
+++ b/lapis/drone.py
@@ -127,7 +127,7 @@ class Drone(interfaces.Pool):
self._utilisation = self._allocation = None
- job_execution = scope.do(job.run())
+ job_execution = scope.do(job.run(self))
self.jobs += 1
try:
async with self.resources.claim(
@@ -154,7 +154,6 @@ class Drone(interfaces.Pool):
await instant
job_execution.cancel()
self.jobs -= 1
- job.drone = None
await self.scheduler.job_finished(job)
self._utilisation = self._allocation = None
self.scheduler.update_drone(self)
diff --git a/lapis/job.py b/lapis/job.py
index 934ceee..c4627e0 100644
--- a/lapis/job.py
+++ b/lapis/job.py
@@ -91,18 +91,23 @@ class Job(object):
return self.in_queue_until - self.in_queue_since
return float("Inf")
- async def run(self):
+ async def run(self, drone: "Drone"):
+ assert drone, "Jobs cannot run without a drone being assigned"
+ self.drone = drone
self.in_queue_until = time.now
self._success = None
await sampling_required.put(self)
try:
await (time + self.walltime)
except CancelTask:
+ self.drone = None
self._success = False
except BaseException:
+ self.drone = None
self._success = False
raise
else:
+ self.drone = None
self._success = True
await sampling_required.put(self)
| Jobs can run without a drone being assigned
Apparently jobs can run although not being properly assigned to a drone. We should take care that this cannot happen.
See:
> This happens in the unit tests... Same with the fileprovider in
> https://github.com/tfesenbecker/lapis/blob/53ebec5ea004e4f031a0f70102276fe0102d62e4/lapis/job.py#L96
_Originally posted by @tfesenbecker in https://github.com/MatterMiners/lapis/pull/53_ | MatterMiners/lapis | diff --git a/lapis_tests/__init__.py b/lapis_tests/__init__.py
index d0c54e8..722b3a2 100644
--- a/lapis_tests/__init__.py
+++ b/lapis_tests/__init__.py
@@ -43,3 +43,7 @@ class DummyScheduler:
@staticmethod
def update_drone(drone: Drone):
pass
+
+
+class DummyDrone:
+ pass
diff --git a/lapis_tests/test_job.py b/lapis_tests/test_job.py
index 3c75916..181bb1a 100644
--- a/lapis_tests/test_job.py
+++ b/lapis_tests/test_job.py
@@ -3,7 +3,7 @@ from usim import Scope, time
from lapis.drone import Drone
from lapis.job import Job
-from lapis_tests import via_usim, DummyScheduler
+from lapis_tests import via_usim, DummyScheduler, DummyDrone
class TestJob(object):
@@ -27,10 +27,11 @@ class TestJob(object):
@via_usim
async def test_run_job(self):
+ drone = DummyDrone()
job = Job(resources={"walltime": 50}, used_resources={"walltime": 10})
assert float("inf") == job.waiting_time
async with Scope() as scope:
- scope.do(job.run())
+ scope.do(job.run(drone))
assert 10 == time
assert 0 == job.waiting_time
assert job.successful
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==24.2.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
cfgv==3.3.1
click==8.1.8
cobald==0.13.0
distlib==0.3.9
entrypoints==0.4
exceptiongroup==1.2.2
filelock==3.12.2
flake8==5.0.4
identify==2.5.24
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/MatterMiners/lapis.git@5e10d37d054ff87da91010260ab8ab8da52614c0#egg=lapis_sim
mccabe==0.7.0
nodeenv==1.9.1
outcome==1.3.0.post0
packaging==24.0
platformdirs==4.0.0
pluggy==1.2.0
pre-commit==2.21.0
pycodestyle==2.9.1
pyflakes==2.5.0
pytest==7.4.4
PyYAML==6.0.1
sniffio==1.3.1
sortedcontainers==2.4.0
tomli==2.0.1
toposort==1.10
trio==0.22.2
typing_extensions==4.7.1
usim==0.4.0
virtualenv==20.26.6
zipp==3.15.0
| name: lapis
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==24.2.0
- cfgv==3.3.1
- click==8.1.8
- cobald==0.13.0
- distlib==0.3.9
- entrypoints==0.4
- exceptiongroup==1.2.2
- filelock==3.12.2
- flake8==5.0.4
- identify==2.5.24
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- lapis-sim==0.3.0
- mccabe==0.7.0
- nodeenv==1.9.1
- outcome==1.3.0.post0
- packaging==24.0
- platformdirs==4.0.0
- pluggy==1.2.0
- pre-commit==2.21.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest==7.4.4
- pyyaml==6.0.1
- sniffio==1.3.1
- sortedcontainers==2.4.0
- tomli==2.0.1
- toposort==1.10
- trio==0.22.2
- typing-extensions==4.7.1
- usim==0.4.0
- virtualenv==20.26.6
- zipp==3.15.0
prefix: /opt/conda/envs/lapis
| [
"lapis_tests/test_job.py::TestJob::test_run_job"
] | [] | [
"lapis_tests/test_job.py::TestJob::test_init",
"lapis_tests/test_job.py::TestJob::test_name",
"lapis_tests/test_job.py::TestJob::test_job_in_drone",
"lapis_tests/test_job.py::TestJob::test_nonmatching_job_in_drone",
"lapis_tests/test_job.py::TestJob::test_two_nonmatching_jobs",
"lapis_tests/test_job.py::TestJob::test_two_matching_jobs"
] | [] | MIT License | 5,862 | 436 | [
"lapis/drone.py",
"lapis/job.py"
] |
|
googlefonts__ufo2ft-354 | de173acf4c160395bf192d8364bef1f2325e7b13 | 2019-11-22 16:32:26 | de173acf4c160395bf192d8364bef1f2325e7b13 | diff --git a/Lib/ufo2ft/__init__.py b/Lib/ufo2ft/__init__.py
index df7d233..00a6303 100644
--- a/Lib/ufo2ft/__init__.py
+++ b/Lib/ufo2ft/__init__.py
@@ -51,6 +51,7 @@ def compileOTF(
inplace=False,
layerName=None,
skipExportGlyphs=None,
+ debugFeatureFile=None,
_tables=None,
):
"""Create FontTools CFF font from a UFO.
@@ -130,6 +131,7 @@ def compileOTF(
glyphSet=glyphSet,
featureWriters=featureWriters,
featureCompilerClass=featureCompilerClass,
+ debugFeatureFile=debugFeatureFile,
)
postProcessor = PostProcessor(otf, ufo, glyphSet=glyphSet)
@@ -157,6 +159,7 @@ def compileTTF(
inplace=False,
layerName=None,
skipExportGlyphs=None,
+ debugFeatureFile=None,
):
"""Create FontTools TrueType font from a UFO.
@@ -208,6 +211,7 @@ def compileTTF(
glyphSet=glyphSet,
featureWriters=featureWriters,
featureCompilerClass=featureCompilerClass,
+ debugFeatureFile=debugFeatureFile,
)
postProcessor = PostProcessor(otf, ufo, glyphSet=glyphSet)
@@ -229,6 +233,7 @@ def compileInterpolatableTTFs(
inplace=False,
layerNames=None,
skipExportGlyphs=None,
+ debugFeatureFile=None,
):
"""Create FontTools TrueType fonts from a list of UFOs with interpolatable
outlines. Cubic curves are converted compatibly to quadratic curves using
@@ -291,12 +296,15 @@ def compileInterpolatableTTFs(
# Only the default layer is likely to have all glyphs used in feature
# code.
if layerName is None:
+ if debugFeatureFile:
+ debugFeatureFile.write("\n### %s ###\n" % fontName)
compileFeatures(
ufo,
ttf,
glyphSet=glyphSet,
featureWriters=featureWriters,
featureCompilerClass=featureCompilerClass,
+ debugFeatureFile=debugFeatureFile,
)
postProcessor = PostProcessor(ttf, ufo, glyphSet=glyphSet)
@@ -327,6 +335,7 @@ def compileInterpolatableTTFsFromDS(
cubicConversionError=None,
reverseDirection=True,
inplace=False,
+ debugFeatureFile=None,
):
"""Create FontTools TrueType fonts from the DesignSpaceDocument UFO sources
with interpolatable outlines. Cubic curves are converted compatibly to
@@ -378,6 +387,7 @@ def compileInterpolatableTTFsFromDS(
inplace=inplace,
layerNames=layerNames,
skipExportGlyphs=skipExportGlyphs,
+ debugFeatureFile=debugFeatureFile,
)
if inplace:
@@ -400,6 +410,7 @@ def compileInterpolatableOTFsFromDS(
useProductionNames=None,
roundTolerance=None,
inplace=False,
+ debugFeatureFile=None,
):
"""Create FontTools CFF fonts from the DesignSpaceDocument UFO sources
with interpolatable outlines.
@@ -454,6 +465,7 @@ def compileInterpolatableOTFsFromDS(
overlapsBackend=None,
inplace=inplace,
skipExportGlyphs=skipExportGlyphs,
+ debugFeatureFile=debugFeatureFile,
_tables=SPARSE_OTF_MASTER_TABLES if source.layerName else None,
)
)
@@ -471,7 +483,12 @@ def compileInterpolatableOTFsFromDS(
def compileFeatures(
- ufo, ttFont=None, glyphSet=None, featureWriters=None, featureCompilerClass=None
+ ufo,
+ ttFont=None,
+ glyphSet=None,
+ featureWriters=None,
+ featureCompilerClass=None,
+ debugFeatureFile=None,
):
""" Compile OpenType Layout features from `ufo` into FontTools OTL tables.
If `ttFont` is None, a new TTFont object is created containing the new
@@ -485,6 +502,10 @@ def compileFeatures(
If skipExportGlyphs is provided (see description in the ``compile*``
functions), the feature compiler will prune groups (removing them if empty)
and kerning of the UFO of these glyphs. The feature file is left untouched.
+
+ `debugFeatureFile` can be a file or file-like object opened in text mode,
+ in which to dump the text content of the feature file, useful for debugging
+ auto-generated OpenType features like kern, mark, mkmk etc.
"""
if featureCompilerClass is None:
if any(
@@ -497,7 +518,13 @@ def compileFeatures(
featureCompiler = featureCompilerClass(
ufo, ttFont, glyphSet=glyphSet, featureWriters=featureWriters
)
- return featureCompiler.compile()
+ otFont = featureCompiler.compile()
+
+ if debugFeatureFile:
+ if hasattr(featureCompiler, "writeFeatures"):
+ featureCompiler.writeFeatures(debugFeatureFile)
+
+ return otFont
def compileVariableTTF(
@@ -513,6 +540,7 @@ def compileVariableTTF(
excludeVariationTables=(),
optimizeGvar=True,
inplace=False,
+ debugFeatureFile=None,
):
"""Create FontTools TrueType variable font from the DesignSpaceDocument UFO sources
with interpolatable outlines, using fontTools.varLib.build.
@@ -540,6 +568,7 @@ def compileVariableTTF(
cubicConversionError=cubicConversionError,
reverseDirection=reverseDirection,
inplace=inplace,
+ debugFeatureFile=debugFeatureFile,
)
logger.info("Building variable TTF font")
@@ -565,6 +594,7 @@ def compileVariableCFF2(
roundTolerance=None,
excludeVariationTables=(),
inplace=False,
+ debugFeatureFile=None,
):
"""Create FontTools CFF2 variable font from the DesignSpaceDocument UFO sources
with interpolatable outlines, using fontTools.varLib.build.
@@ -588,6 +618,7 @@ def compileVariableCFF2(
useProductionNames=False, # will rename glyphs after varfont is built
roundTolerance=roundTolerance,
inplace=inplace,
+ debugFeatureFile=debugFeatureFile,
)
logger.info("Building variable CFF2 font")
diff --git a/Lib/ufo2ft/featureCompiler.py b/Lib/ufo2ft/featureCompiler.py
index a1d8876..8fbc2e7 100644
--- a/Lib/ufo2ft/featureCompiler.py
+++ b/Lib/ufo2ft/featureCompiler.py
@@ -223,6 +223,10 @@ class FeatureCompiler(BaseFeatureCompiler):
# no featureWriters, simply read existing features' text
self.features = tounicode(self.ufo.features.text or "", "utf-8")
+ def writeFeatures(self, outfile):
+ if hasattr(self, "features"):
+ outfile.write(self.features)
+
def buildTables(self):
"""
Compile OpenType feature tables from the source.
| Support writing the final autogenerated feature file somewhere
Useful for debugging. Maybe a new parameter on all `compile*` functions that is a IOBuffer that the feature file is written to. Then fontmake can be wired up to pass something in. | googlefonts/ufo2ft | diff --git a/tests/integration_test.py b/tests/integration_test.py
index 83bfcb8..4b6de3d 100644
--- a/tests/integration_test.py
+++ b/tests/integration_test.py
@@ -1,4 +1,5 @@
from __future__ import print_function, division, absolute_import, unicode_literals
+import io
from fontTools.misc.py23 import *
from ufo2ft import (
compileOTF,
@@ -157,6 +158,14 @@ class IntegrationTest(object):
),
)
+ def test_debugFeatureFile(self, designspace):
+ tmp = io.StringIO()
+
+ varfont = compileVariableTTF(designspace, debugFeatureFile=tmp)
+
+ assert "### LayerFont-Regular ###" in tmp.getvalue()
+ assert "### LayerFont-Bold ###" in tmp.getvalue()
+
if __name__ == "__main__":
sys.exit(pytest.main(sys.argv))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 2.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=2.8",
"tox>=2.3"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | appdirs==1.4.4
attrs==22.2.0
booleanOperations==0.8.2
certifi==2021.5.30
compreffor==0.4.6.post1
cu2qu==1.6.5
defcon==0.6.0
distlib==0.3.9
filelock==3.4.1
fonttools==3.44.0
fs==2.4.16
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
lxml==4.9.4
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyclipper==1.3.0.post6
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
-e git+https://github.com/googlefonts/ufo2ft.git@de173acf4c160395bf192d8364bef1f2325e7b13#egg=ufo2ft
ufoLib2==0.3.2.post2
unicodedata2==16.0.0
virtualenv==20.17.1
zipp==3.6.0
| name: ufo2ft
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- appdirs==1.4.4
- attrs==22.2.0
- booleanoperations==0.8.2
- compreffor==0.4.6.post1
- cu2qu==1.6.5
- defcon==0.6.0
- distlib==0.3.9
- filelock==3.4.1
- fonttools==3.44.0
- fs==2.4.16
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- lxml==4.9.4
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyclipper==1.3.0.post6
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- ufolib2==0.3.2.post2
- unicodedata2==16.0.0
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/ufo2ft
| [
"tests/integration_test.py::IntegrationTest::test_debugFeatureFile[defcon]",
"tests/integration_test.py::IntegrationTest::test_debugFeatureFile[ufoLib2]"
] | [
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_CFF_pathops[defcon]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_pathops[defcon]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_CFF_pathops[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_pathops[ufoLib2]"
] | [
"tests/integration_test.py::IntegrationTest::test_TestFont_TTF[defcon]",
"tests/integration_test.py::IntegrationTest::test_TestFont_CFF[defcon]",
"tests/integration_test.py::IntegrationTest::test_included_features[defcon]",
"tests/integration_test.py::IntegrationTest::test_mti_features[defcon]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_CFF[defcon]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps[defcon]",
"tests/integration_test.py::IntegrationTest::test_interpolatableTTFs_lazy[defcon]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_none[defcon]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_specialize[defcon]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_subroutinize[defcon]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[defcon-None]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[defcon-True]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[defcon-False]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[defcon-None]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[defcon-True]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[defcon-False]",
"tests/integration_test.py::IntegrationTest::test_TestFont_TTF[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_TestFont_CFF[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_included_features[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_mti_features[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps_CFF[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_removeOverlaps[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_interpolatableTTFs_lazy[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_none[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_specialize[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_optimizeCFF_subroutinize[ufoLib2]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[ufoLib2-None]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[ufoLib2-True]",
"tests/integration_test.py::IntegrationTest::test_compileVariableTTF[ufoLib2-False]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[ufoLib2-None]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[ufoLib2-True]",
"tests/integration_test.py::IntegrationTest::test_compileVariableCFF2[ufoLib2-False]"
] | [] | MIT License | 5,867 | 1,705 | [
"Lib/ufo2ft/__init__.py",
"Lib/ufo2ft/featureCompiler.py"
] |
|
astanin__python-tabulate-21 | e7daa576ff444f95c560b18ef0bb22b3b1b67b57 | 2019-11-26 02:53:34 | 3f0757e117ed2ca1171bbf84b61793f353d67282 | diff --git a/tabulate.py b/tabulate.py
index 92164fb..99b6118 100755
--- a/tabulate.py
+++ b/tabulate.py
@@ -1423,7 +1423,11 @@ def tabulate(
has_invisible = re.search(_invisible_codes, plain_text)
enable_widechars = wcwidth is not None and WIDE_CHARS_MODE
- if tablefmt in multiline_formats and _is_multiline(plain_text):
+ if (
+ not isinstance(tablefmt, TableFormat)
+ and tablefmt in multiline_formats
+ and _is_multiline(plain_text)
+ ):
tablefmt = multiline_formats.get(tablefmt, tablefmt)
is_multiline = True
else:
| Custom TableFormat gives: TypeError: unhashable type: 'list'
Cloned tabulate from master (433dfc69f2abba1c463763d33e1fb3bcdd3afe37) and tried to use custom TableFormat:
Script:
```
from tabulate import tabulate, TableFormat, Line, DataRow
tablefmt = TableFormat(
lineabove = Line("", "-", " ", ""),
linebelowheader = Line("", "-", " ", ""),
linebetweenrows = None,
linebelow = Line("", "-", " ", ""),
headerrow = DataRow("", " ", ""),
datarow = DataRow("", " ", ""),
padding = 0,
with_header_hide = ["lineabove", "linebelow"])
rows = [
['foo', 'bar'],
['baz', 'qux'] ]
print(tabulate(rows, headers=['A', 'B'], tablefmt=tablefmt))
```
Output:
```
Traceback (most recent call last):
File "<stdin>", line 17, in <module>
File "/home/woky/work/test/venv/lib/python3.7/site-packages/tabulate.py", line 1268, in tabulate
if tablefmt in multiline_formats and _is_multiline(plain_text):
TypeError: unhashable type: 'list'
```
(I'm not the original issue submitter - this is copied from https://bitbucket.org/astanin/python-tabulate/issues/156/custom-tableformat-gives-typeerror) | astanin/python-tabulate | diff --git a/test/test_regression.py b/test/test_regression.py
index 8cdfcb2..e79aad8 100644
--- a/test/test_regression.py
+++ b/test/test_regression.py
@@ -4,7 +4,7 @@
from __future__ import print_function
from __future__ import unicode_literals
-from tabulate import tabulate, _text_type, _long_type
+from tabulate import tabulate, _text_type, _long_type, TableFormat, Line, DataRow
from common import assert_equal, assert_in, SkipTest
@@ -365,3 +365,21 @@ def test_empty_pipe_table_with_columns():
expected = "\n".join(["| Col1 | Col2 |", "|--------|--------|"])
result = tabulate(table, headers, tablefmt="pipe")
assert_equal(result, expected)
+
+
+def test_custom_tablefmt():
+ "Regression: allow custom TableFormat that specifies with_header_hide (github issue #20)"
+ tablefmt = TableFormat(
+ lineabove=Line("", "-", " ", ""),
+ linebelowheader=Line("", "-", " ", ""),
+ linebetweenrows=None,
+ linebelow=Line("", "-", " ", ""),
+ headerrow=DataRow("", " ", ""),
+ datarow=DataRow("", " ", ""),
+ padding=0,
+ with_header_hide=["lineabove", "linebelow"],
+ )
+ rows = [["foo", "bar"], ["baz", "qux"]]
+ expected = "\n".join(["A B", "--- ---", "foo bar", "baz qux"])
+ result = tabulate(rows, headers=["A", "B"], tablefmt=tablefmt)
+ assert_equal(result, expected)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_git_commit_hash"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[widechars]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"wcwidth",
"pytest"
],
"pre_install": null,
"python": "3.8",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1720101850331/work
pluggy @ file:///tmp/build/80754af9/pluggy_1648042571233/work
pytest @ file:///croot/pytest_1717793244625/work
-e git+https://github.com/astanin/python-tabulate.git@e7daa576ff444f95c560b18ef0bb22b3b1b67b57#egg=tabulate
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
wcwidth==0.2.13
| name: python-tabulate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py38h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.1=py38h06a4308_0
- pip=24.2=py38h06a4308_0
- pluggy=1.0.0=py38h06a4308_1
- pytest=7.4.4=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py38h06a4308_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- wcwidth==0.2.13
prefix: /opt/conda/envs/python-tabulate
| [
"test/test_regression.py::test_custom_tablefmt"
] | [] | [
"test/test_regression.py::test_ansi_color_in_table_cells",
"test/test_regression.py::test_alignment_of_colored_cells",
"test/test_regression.py::test_iter_of_iters_with_headers",
"test/test_regression.py::test_datetime_values",
"test/test_regression.py::test_simple_separated_format",
"test/test_regression.py::test_simple_separated_format_with_headers",
"test/test_regression.py::test_column_type_of_bytestring_columns",
"test/test_regression.py::test_numeric_column_headers",
"test/test_regression.py::test_88_256_ANSI_color_codes",
"test/test_regression.py::test_column_with_mixed_value_types",
"test/test_regression.py::test_latex_escape_special_chars",
"test/test_regression.py::test_isconvertible_on_set_values",
"test/test_regression.py::test_ansi_color_for_decimal_numbers",
"test/test_regression.py::test_alignment_of_decimal_numbers_with_ansi_color",
"test/test_regression.py::test_long_integers",
"test/test_regression.py::test_colorclass_colors",
"test/test_regression.py::test_mix_normal_and_wide_characters",
"test/test_regression.py::test_align_long_integers",
"test/test_regression.py::test_boolean_columns",
"test/test_regression.py::test_ansi_color_bold_and_fgcolor",
"test/test_regression.py::test_empty_table_with_keys_as_header",
"test/test_regression.py::test_escape_empty_cell_in_first_column_in_rst",
"test/test_regression.py::test_ragged_rows",
"test/test_regression.py::test_empty_pipe_table_with_columns"
] | [] | MIT License | 5,891 | 178 | [
"tabulate.py"
] |
|
fitbenchmarking__fitbenchmarking-320 | be3361d4fd7a34a8a2fcfa7a8510908accf9222a | 2019-11-27 12:33:32 | be3361d4fd7a34a8a2fcfa7a8510908accf9222a | diff --git a/fitbenchmarking/fitting/misc.py b/fitbenchmarking/fitting/misc.py
index 73ccd9e5..3120ebfc 100644
--- a/fitbenchmarking/fitting/misc.py
+++ b/fitbenchmarking/fitting/misc.py
@@ -20,10 +20,8 @@ def compute_chisq(actual, fitted, errors=None):
"""
r = fitted - actual
if errors is not None:
- weighted_r = np.multiply(errors, r)
- chi_sq = np.dot(r, weighted_r)
- else:
- chi_sq = np.dot(r, r)
+ r = r / errors
+ chi_sq = np.dot(r, r)
return chi_sq
@@ -46,8 +44,10 @@ def create_result_entry(problem, status, chi_sq, runtime, minimizer,
"""
if 'fitFunction' in ini_function_def:
- ini_function_def = ini_function_def.replace('fitFunction', problem.equation)
- fin_function_def = fin_function_def.replace('fitFunction', problem.equation)
+ ini_function_def = ini_function_def.replace(
+ 'fitFunction', problem.equation)
+ fin_function_def = fin_function_def.replace(
+ 'fitFunction', problem.equation)
# Create empty fitting result object
result = fitbm_result.FittingResult()
| Wrong `chi_sq` value produced
**Description of the error**
Noticed by @AndrewLister-STFC in the review of #316 that the `chi_sq` computed in `utils/fitting/misc.py` is incorrect.
**Describe the expected result**
Fixed reported `chi_sq` value
| fitbenchmarking/fitbenchmarking | diff --git a/fitbenchmarking/fitting/tests/test_misc.py b/fitbenchmarking/fitting/tests/test_misc.py
index b74458e2..1363554a 100644
--- a/fitbenchmarking/fitting/tests/test_misc.py
+++ b/fitbenchmarking/fitting/tests/test_misc.py
@@ -79,7 +79,7 @@ class FitMiscTests(unittest.TestCase):
errors = np.array([5, 0.1, 0.5])
chi_sq = compute_chisq(actual, calculated, errors)
- chi_sq_expected = 9.9
+ chi_sq_expected = 436.04
self.assertEqual(chi_sq_expected, chi_sq)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gfortran lcov libblas-dev liblapack-dev"
],
"python": "2.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bumps==0.9.3
certifi==2021.5.30
chardet==3.0.4
charset-normalizer==2.0.12
coverage==6.2
coveralls==3.3.1
cycler==0.11.0
DataProperty==0.46.4
DFOGN==1.0.2
docopt==0.6.2
docutils==0.18.1
-e git+https://github.com/fitbenchmarking/fitbenchmarking.git@be3361d4fd7a34a8a2fcfa7a8510908accf9222a#egg=FitBenchmarking
idna==3.10
iminuit==2.16.0
importlib-metadata==4.8.3
iniconfig==1.1.1
kiwisolver==1.3.1
lxml==5.3.1
matplotlib==2.2.5
mbstrdecoder==0.8.4
msgfy==0.0.7
numpy==1.16.6
packaging==21.3
pandas==0.24.2
pathvalidate==0.29.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytablewriter==0.46.1
pytest==7.0.1
pytest-cov==4.0.0
python-coveralls==2.9.3
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
sasmodels==1.0.9
scipy==1.2.3
six==1.17.0
tabledata==0.10.4
tinycc==1.1
tomli==1.2.3
typepy==0.6.6
typing_extensions==4.1.1
urllib3==1.23
zipp==3.6.0
| name: fitbenchmarking
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bumps==0.9.3
- chardet==3.0.4
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==3.3.1
- cycler==0.11.0
- dataproperty==0.46.4
- dfogn==1.0.2
- docopt==0.6.2
- docutils==0.18.1
- idna==3.10
- iminuit==2.16.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- kiwisolver==1.3.1
- lxml==5.3.1
- matplotlib==2.2.5
- mbstrdecoder==0.8.4
- msgfy==0.0.7
- numpy==1.16.6
- packaging==21.3
- pandas==0.24.2
- pathvalidate==0.29.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytablewriter==0.46.1
- pytest==7.0.1
- pytest-cov==4.0.0
- python-coveralls==2.9.3
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- sasmodels==1.0.9
- scipy==1.2.3
- six==1.17.0
- tabledata==0.10.4
- tinycc==1.1
- tomli==1.2.3
- typepy==0.6.6
- typing-extensions==4.1.1
- urllib3==1.23
- zipp==3.6.0
prefix: /opt/conda/envs/fitbenchmarking
| [
"fitbenchmarking/fitting/tests/test_misc.py::FitMiscTests::test_compute_chisq_errors"
] | [] | [
"fitbenchmarking/fitting/tests/test_misc.py::FitMiscTests::test_compute_chisq_no_errors",
"fitbenchmarking/fitting/tests/test_misc.py::FitMiscTests::test_createResultEntry"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,902 | 315 | [
"fitbenchmarking/fitting/misc.py"
] |
|
conan-io__conan-6143 | d42ec055d459489c184b160cafbd3f200ceb6d41 | 2019-11-27 16:18:50 | 8d2139334957dd396b44aad80b69af33f0a55ac0 | diff --git a/conans/client/cmd/export_pkg.py b/conans/client/cmd/export_pkg.py
index 09a98959c..a4fc16f26 100644
--- a/conans/client/cmd/export_pkg.py
+++ b/conans/client/cmd/export_pkg.py
@@ -4,7 +4,6 @@ from conans.client import packager
from conans.client.graph.graph_manager import load_deps_info
from conans.errors import ConanException
from conans.model.conan_file import get_env_context_manager
-from conans.model.manifest import FileTreeManifest
from conans.model.ref import PackageReference
from conans.util.files import rmdir
diff --git a/conans/client/cmd/uploader.py b/conans/client/cmd/uploader.py
index 95e08ac6d..23c5e3599 100644
--- a/conans/client/cmd/uploader.py
+++ b/conans/client/cmd/uploader.py
@@ -1,7 +1,6 @@
import os
import stat
import tarfile
-import threading
import time
from collections import defaultdict
from multiprocessing.pool import ThreadPool
@@ -97,8 +96,8 @@ class CmdUpload(object):
self._output.info("Uploading to remote '{}':".format(remote.name))
def upload_ref(ref_conanfile_prefs):
- ref, conanfile, prefs = ref_conanfile_prefs
- self._upload_ref(conanfile, ref, prefs, retry, retry_wait,
+ _ref, _conanfile, _prefs = ref_conanfile_prefs
+ self._upload_ref(_conanfile, _ref, _prefs, retry, retry_wait,
integrity_check, policy, remote, upload_recorder, remotes)
self._upload_thread_pool.map(upload_ref,
@@ -124,6 +123,9 @@ class CmdUpload(object):
if package_id or check_valid_ref(reference_or_pattern):
# Upload package
ref = ConanFileReference.loads(reference_or_pattern)
+ if ref.revision and not self._cache.config.revisions_enabled:
+ raise ConanException("Revisions not enabled in the client, specify a "
+ "reference without revision")
refs = [ref, ]
confirm = True
else:
@@ -141,6 +143,9 @@ class CmdUpload(object):
for ref in refs:
metadata = self._cache.package_layout(ref).load_metadata()
+ if ref.revision and ref.revision != metadata.recipe.revision:
+ raise ConanException("Recipe revision {} does not match the one stored in the cache {}"
+ .format(ref.revision, metadata.recipe.revision))
ref = ref.copy_with_rev(metadata.recipe.revision)
remote = remotes.selected
if remote:
@@ -181,16 +186,20 @@ class CmdUpload(object):
"no packages can be uploaded" % str(ref))
prefs = []
# Gather all the complete PREFS with PREV
- for package_id in packages_ids:
+ for package in packages_ids:
+ package_id, prev = package.split("#") if "#" in package else (package, None)
if package_id not in metadata.packages:
raise ConanException("Binary package %s:%s not found"
% (str(ref), package_id))
+ if prev and prev != metadata.packages[package_id].revision:
+ raise ConanException("Binary package %s:%s#%s not found"
+ % (str(ref), package_id, prev))
# Filter packages that don't match the recipe revision
if self._cache.config.revisions_enabled and ref.revision:
rec_rev = metadata.packages[package_id].recipe_revision
if ref.revision != rec_rev:
self._output.warn("Skipping package '%s', it doesn't belong to the"
- " current recipe revision" % package_id)
+ " current recipe revision" % package_id)
continue
package_revision = metadata.packages[package_id].revision
assert package_revision is not None, "PREV cannot be None to upload"
@@ -390,8 +399,7 @@ class CmdUpload(object):
% (pref, pref.ref, pref.id))
tgz_path = os.path.join(package_folder, PACKAGE_TGZ_NAME)
if is_dirty(tgz_path):
- self._output.warn("%s: Removing %s, marked as dirty"
- % (str(pref), PACKAGE_TGZ_NAME))
+ self._output.warn("%s: Removing %s, marked as dirty" % (str(pref), PACKAGE_TGZ_NAME))
os.remove(tgz_path)
clean_dirty(tgz_path)
# Get all the files in that directory
@@ -429,7 +437,7 @@ class CmdUpload(object):
except NotFoundException:
# This is weird, the manifest still not there, better upload everything
self._output.warn("The remote recipe doesn't have the 'conanmanifest.txt' "
- "file and will be uploaded: '{}'".format(ref))
+ "file and will be uploaded: '{}'".format(ref))
return files_to_upload, deleted
if remote_manifest == local_manifest:
@@ -477,7 +485,7 @@ class CmdUpload(object):
diff = read_manifest.difference(expected_manifest)
for fname, (h1, h2) in diff.items():
self._output.warn("Mismatched checksum '%s' (manifest: %s, file: %s)"
- % (fname, h1, h2))
+ % (fname, h1, h2))
if PACKAGE_TGZ_NAME in files:
try:
diff --git a/conans/client/command.py b/conans/client/command.py
index 1198096e6..a6a80cbf2 100644
--- a/conans/client/command.py
+++ b/conans/client/command.py
@@ -403,8 +403,12 @@ class Command(object):
else:
reference = repr(pref.ref)
if pref.ref.user is None:
- reference += "@"
- packages_list = [pref.id]
+ if pref.ref.revision:
+ reference = "%s/%s@#%s" % (pref.ref.name, pref.ref.version, pref.ref.revision)
+ else:
+ reference += "@"
+ pkgref = "{}#{}".format(pref.id, pref.revision) if pref.revision else pref.id
+ packages_list = [pkgref]
if args.package:
raise ConanException("Use a full package reference (preferred) or the `--package`"
" command argument, but not both.")
@@ -1365,7 +1369,7 @@ class Command(object):
raise ConanException("'--query' argument cannot be used together with '--package'")
else:
reference = repr(pref.ref)
- package_id = pref.id
+ package_id = "{}#{}".format(pref.id, pref.revision) if pref.revision else pref.id
if args.package:
raise ConanException("Use a full package reference (preferred) or the `--package`"
diff --git a/conans/client/conan_api.py b/conans/client/conan_api.py
index 2d3df0d10..866e73206 100644
--- a/conans/client/conan_api.py
+++ b/conans/client/conan_api.py
@@ -430,6 +430,9 @@ class ConanAPIV1(object):
# Install packages without settings (fixed ids or all)
if check_valid_ref(reference):
ref = ConanFileReference.loads(reference)
+ if ref.revision and not self.app.config.revisions_enabled:
+ raise ConanException("Revisions not enabled in the client, specify a "
+ "reference without revision")
if packages and ref.revision is None:
for package_id in packages:
if "#" in package_id:
diff --git a/conans/client/conf/__init__.py b/conans/client/conf/__init__.py
index a403ec861..55ceb97eb 100644
--- a/conans/client/conf/__init__.py
+++ b/conans/client/conf/__init__.py
@@ -1,6 +1,5 @@
import os
-from six.moves import urllib
from six.moves.configparser import ConfigParser, NoSectionError
from conans.errors import ConanException
diff --git a/conans/client/rest/uploader_downloader.py b/conans/client/rest/uploader_downloader.py
index a903d1d2f..19ee7f234 100644
--- a/conans/client/rest/uploader_downloader.py
+++ b/conans/client/rest/uploader_downloader.py
@@ -7,7 +7,7 @@ from conans.util import progress_bar
from conans.client.rest import response_to_str
from conans.errors import AuthenticationException, ConanConnectionError, ConanException, \
NotFoundException, ForbiddenException, RequestErrorException
-from conans.util.files import mkdir, save_append, sha1sum, to_file_bytes
+from conans.util.files import mkdir, sha1sum, to_file_bytes
from conans.util.log import logger
from conans.util.tracer import log_download
@@ -59,11 +59,11 @@ class FileUploader(object):
file_name = os.path.basename(abs_path)
description = "Uploading {}".format(file_name)
- def load_in_chunks(file, size):
+ def load_in_chunks(_file, size):
"""Lazy function (generator) to read a file piece by piece.
Default chunk size: 1k."""
while True:
- chunk = file.read(size)
+ chunk = _file.read(size)
if not chunk:
break
yield chunk
diff --git a/conans/model/settings.py b/conans/model/settings.py
index 2815d9663..6b9075727 100644
--- a/conans/model/settings.py
+++ b/conans/model/settings.py
@@ -233,7 +233,10 @@ class Settings(object):
@staticmethod
def loads(text):
- return Settings(yaml.safe_load(text) or {})
+ try:
+ return Settings(yaml.safe_load(text) or {})
+ except (yaml.YAMLError, AttributeError) as ye:
+ raise ConanException("Invalid settings.yml format: {}".format(ye))
def validate(self):
for field in self.fields:
diff --git a/conans/server/store/disk_adapter.py b/conans/server/store/disk_adapter.py
index 38df83d50..88dd49596 100644
--- a/conans/server/store/disk_adapter.py
+++ b/conans/server/store/disk_adapter.py
@@ -4,7 +4,6 @@ import fasteners
from conans.client.tools.env import no_op
from conans.errors import NotFoundException
-from conans.server.store.server_store import REVISIONS_FILE
from conans.util.files import decode_text, md5sum, path_exists, relative_dirs, rmdir
| [feature] add PREV to export-pkg --json output
I want to be sure I am uploading the correct package after a build on my CI server. If I understand package revisions correctly, then I need to provide a full reference to the `conan upload` command, specifying the PREV (e.g. `lib/1.0@conan/stable#RREV:PACKAGE_ID#PREV`), in case any other package revisions still exist in the build server's local cache.
I am not able to find this PREV hash in any --json output before the `conan upload ...` command. Can this be added? | conan-io/conan | diff --git a/conans/test/functional/build_helpers/cmake_targets_test.py b/conans/test/functional/build_helpers/cmake_targets_test.py
index 48348fcd1..c46f382d5 100644
--- a/conans/test/functional/build_helpers/cmake_targets_test.py
+++ b/conans/test/functional/build_helpers/cmake_targets_test.py
@@ -5,7 +5,6 @@ import unittest
from nose.plugins.attrib import attr
from conans.test.utils.tools import TestClient
-from conans.util.files import load
conanfile_py = """
from conans import ConanFile
diff --git a/conans/test/functional/command/alias_test.py b/conans/test/functional/command/alias_test.py
index 407f5d2e1..fff02c951 100644
--- a/conans/test/functional/command/alias_test.py
+++ b/conans/test/functional/command/alias_test.py
@@ -6,7 +6,6 @@ from parameterized.parameterized import parameterized
from conans.client.tools.files import replace_in_file
from conans.test.utils.tools import TestClient, TestServer
-from conans.util.files import load
class ConanAliasTest(unittest.TestCase):
@@ -158,7 +157,7 @@ class Pkg(ConanFile):
"""
client.save({"conanfile.py": consumer})
client.run("info . --graph=file.dot")
- graphfile = load(os.path.join(client.current_folder, "file.dot"))
+ graphfile = client.load("file.dot")
self.assertIn('"CB/0.1@user/testing" -> {"CA/0.1@user/testing"}', graphfile)
self.assertTrue(('"CD/0.1@user/testing" -> {"CA/0.1@user/testing" "CB/0.1@user/testing"}' in graphfile) or
('"CD/0.1@user/testing" -> {"CB/0.1@user/testing" "CA/0.1@user/testing"}' in graphfile))
@@ -211,7 +210,7 @@ class Pkg(ConanFile):
"""
client.save({"conanfile.py": consumer})
client.run("info . --graph=file.dot")
- graphfile = load(os.path.join(client.current_folder, "file.dot"))
+ graphfile = client.load("file.dot")
self.assertIn('"CM/0.1@user/testing" -> {"CL/0.1@user/testing"}', graphfile)
self.assertTrue(('"CL/0.1@user/testing" -> {"CK/0.1@user/testing" "CH/0.1@user/testing"}' in graphfile) or
('"CL/0.1@user/testing" -> {"CH/0.1@user/testing" "CK/0.1@user/testing"}' in graphfile))
@@ -255,7 +254,7 @@ class Pkg(ConanFile):
{"conanfile.txt": "[requires]\nLibA/latest@user/testing\nLibB/latest@user/testing"},
clean_first=True)
client.run("info conanfile.txt --graph=file.dot")
- graphfile = load(os.path.join(client.current_folder, "file.dot"))
+ graphfile = client.load("file.dot")
self.assertIn('"LibA/0.1@user/testing" -> {"LibC/0.1@user/testing"}', graphfile)
self.assertIn('"LibB/0.1@user/testing" -> {"LibC/0.1@user/testing"}', graphfile)
self.assertIn('"LibC/0.1@user/testing" -> {"LibD/0.1@user/testing"}', graphfile)
@@ -314,7 +313,7 @@ class Pkg(ConanFile):
client.save({"conanfile.txt": "[requires]\nLibA/latest@user/testing\nLibB/latest@user/testing"},
clean_first=True)
client.run("info conanfile.txt --graph=file.dot")
- graphfile = load(os.path.join(client.current_folder, "file.dot"))
+ graphfile = client.load("file.dot")
self.assertIn('"LibA/0.1@user/testing" -> {"LibC/0.1@user/testing"}', graphfile)
self.assertIn('"LibB/0.1@user/testing" -> {"LibC/0.1@user/testing"}', graphfile)
self.assertIn('"LibC/0.1@user/testing" -> {"LibD/0.1@user/testing"}', graphfile)
@@ -363,7 +362,7 @@ class Pkg(ConanFile):
client.save({"conanfile.txt": "[requires]\nLibA/[~0.1]@user/testing\nLibB/[~0.1]@user/testing"},
clean_first=True)
client.run("info conanfile.txt --graph=file.dot")
- graphfile = load(os.path.join(client.current_folder, "file.dot"))
+ graphfile = client.load("file.dot")
self.assertIn('"LibA/sha1@user/testing" -> {"LibC/sha1@user/testing"}', graphfile)
self.assertIn('"LibB/sha1@user/testing" -> {"LibC/sha1@user/testing"}', graphfile)
self.assertIn('"LibC/sha1@user/testing" -> {"LibD/sha1@user/testing"}', graphfile)
diff --git a/conans/test/functional/command/create_test.py b/conans/test/functional/command/create_test.py
index cdddd77fe..d6af0afb7 100644
--- a/conans/test/functional/command/create_test.py
+++ b/conans/test/functional/command/create_test.py
@@ -32,7 +32,7 @@ PkgB/0.1@user/testing
PkgA/0.1@user/testing"""
client.save({"conanfile.txt": conanfile}, clean_first=True)
client.run("install . -g txt -g cmake")
- text = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
+ text = client.load("conanbuildinfo.txt")
txt = ";".join(text.splitlines())
self.assertIn("[libs];LibB;LibA", txt)
cmake = client.load("conanbuildinfo.cmake")
diff --git a/conans/test/functional/command/download_test.py b/conans/test/functional/command/download_test.py
index d133bf640..423a16bc6 100644
--- a/conans/test/functional/command/download_test.py
+++ b/conans/test/functional/command/download_test.py
@@ -5,6 +5,7 @@ from collections import OrderedDict
from conans.model.ref import ConanFileReference
from conans.test.utils.tools import (TestClient, TestServer, NO_SETTINGS_PACKAGE_ID, TurboTestClient,
GenConanfile)
+from conans.util.env_reader import get_env
from conans.util.files import load
@@ -220,3 +221,75 @@ class Pkg(ConanFile):
client.run("download pkg/1.0@")
self.assertIn("pkg/1.0: Downloading pkg/1.0:%s" % NO_SETTINGS_PACKAGE_ID, client.out)
self.assertIn("pkg/1.0: Package installed %s" % NO_SETTINGS_PACKAGE_ID, client.out)
+
+ @unittest.skipIf(get_env("TESTING_REVISIONS_ENABLED", False), "No sense with revs")
+ def download_revs_disabled_with_rrev_test(self):
+ # https://github.com/conan-io/conan/issues/6106
+ client = TestClient(revisions_enabled=False)
+ client.run("download pkg/1.0@user/channel#fakerevision", assert_error=True)
+ self.assertIn(
+ "ERROR: Revisions not enabled in the client, specify a reference without revision",
+ client.out)
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def download_revs_enabled_with_fake_rrev_test(self):
+ client = TestClient(default_server_user=True, revisions_enabled=True)
+ client.save({"conanfile.py": GenConanfile()})
+ client.run("create . pkg/1.0@user/channel")
+ client.run("upload * --all --confirm")
+ client.run("remove * -f")
+ client.run("download pkg/1.0@user/channel#fakerevision", assert_error=True)
+ self.assertIn("ERROR: Recipe not found: 'pkg/1.0@user/channel'", client.out)
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def download_revs_enabled_with_rrev_test(self):
+ ref = ConanFileReference.loads("pkg/1.0@user/channel")
+ client = TurboTestClient(default_server_user=True, revisions_enabled=True)
+ pref = client.create(ref, conanfile=GenConanfile())
+ client.run("upload pkg/1.0@user/channel --all --confirm")
+ # create new revision from recipe
+ client.create(ref, conanfile=GenConanfile().with_build_msg("new revision"))
+ client.run("upload pkg/1.0@user/channel --all --confirm")
+ client.run("remove * -f")
+ client.run("download pkg/1.0@user/channel#{}".format(pref.ref.revision))
+ self.assertIn("pkg/1.0@user/channel: Package installed {}".format(pref.id), client.out)
+ search_result = client.search("pkg/1.0@user/channel --revisions")[0]
+ self.assertIn(pref.ref.revision, search_result["revision"])
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def download_revs_enabled_with_rrev_no_user_channel_test(self):
+ ref = ConanFileReference.loads("pkg/1.0@")
+ servers = {"default": TestServer([("*/*@*/*", "*")], [("*/*@*/*", "*")],
+ users={"user": "password"})}
+ client = TurboTestClient(servers=servers, revisions_enabled=True,
+ users={"default": [("user", "password")]})
+ pref = client.create(ref, conanfile=GenConanfile())
+ client.run("upload pkg/1.0@ --all --confirm")
+ # create new revision from recipe
+ client.create(ref, conanfile=GenConanfile().with_build_msg("new revision"))
+ client.run("upload pkg/1.0@ --all --confirm")
+ client.run("remove * -f")
+ client.run("download pkg/1.0@#{}".format(pref.ref.revision))
+ self.assertIn("pkg/1.0: Package installed {}".format(pref.id), client.out)
+ search_result = client.search("pkg/1.0@ --revisions")[0]
+ self.assertIn(pref.ref.revision, search_result["revision"])
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def download_revs_enabled_with_prev_test(self):
+ # https://github.com/conan-io/conan/issues/6106
+ ref = ConanFileReference.loads("pkg/1.0@user/channel")
+ client = TurboTestClient(default_server_user=True, revisions_enabled=True)
+ pref = client.create(ref, conanfile=GenConanfile())
+ client.run("upload pkg/1.0@user/channel --all --confirm")
+ client.create(ref, conanfile=GenConanfile().with_build_msg("new revision"))
+ client.run("upload pkg/1.0@user/channel --all --confirm")
+ client.run("remove * -f")
+ client.run("download pkg/1.0@user/channel#{}:{}#{}".format(pref.ref.revision,
+ pref.id,
+ pref.revision))
+ self.assertIn("pkg/1.0@user/channel: Package installed {}".format(pref.id), client.out)
+ search_result = client.search("pkg/1.0@user/channel --revisions")[0]
+ self.assertIn(pref.ref.revision, search_result["revision"])
+ search_result = client.search(
+ "pkg/1.0@user/channel#{}:{} --revisions".format(pref.ref.revision, pref.id))[0]
+ self.assertIn(pref.revision, search_result["revision"])
diff --git a/conans/test/functional/command/info_test.py b/conans/test/functional/command/info_test.py
index 435c8d021..1c6eb29b5 100644
--- a/conans/test/functional/command/info_test.py
+++ b/conans/test/functional/command/info_test.py
@@ -217,7 +217,7 @@ class MyTest(ConanFile):
save(viscss_path, "")
client.save({"conanfile.txt": ""})
client.run("info . --graph=file.html")
- html = load(os.path.join(client.current_folder, "file.html"))
+ html = client.load("file.html")
self.assertIn("<body>", html)
self.assertNotIn("cloudflare", html)
self.assertIn(visjs_path, html)
@@ -437,7 +437,7 @@ class MyTest(ConanFile):
self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable "
"--json=file.json")
self.assertEqual('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
- load(os.path.join(self.client.current_folder, "file.json")))
+ self.client.load("file.json"))
self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable --json")
self.assertIn('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
diff --git a/conans/test/functional/command/inspect_test.py b/conans/test/functional/command/inspect_test.py
index 97cfc33ad..731bb13a5 100644
--- a/conans/test/functional/command/inspect_test.py
+++ b/conans/test/functional/command/inspect_test.py
@@ -4,7 +4,6 @@ import textwrap
import unittest
from conans.test.utils.tools import TestClient, TestServer
-from conans.util.files import load
class ConanInspectTest(unittest.TestCase):
@@ -70,7 +69,7 @@ class Pkg(ConanFile):
self.assertIn("name: MyPkg", client.out)
self.assertIn("version: 1.2.3", client.out)
client.run("inspect . -a=version -a=name --json=file.json")
- contents = load(os.path.join(client.current_folder, "file.json"))
+ contents = client.load("file.json")
self.assertIn('"version": "1.2.3"', contents)
self.assertIn('"name": "MyPkg"', contents)
@@ -131,7 +130,7 @@ default_options:
""")
client.run("inspect . -a=version -a=name -a=options -a=default_options --json=file.json")
- contents = load(os.path.join(client.current_folder, "file.json"))
+ contents = client.load("file.json")
json_contents = json.loads(contents)
self.assertEqual(json_contents["version"], None)
self.assertEqual(json_contents["name"], None)
diff --git a/conans/test/functional/command/install_test.py b/conans/test/functional/command/install_test.py
index 0a78b2517..0cc01c220 100644
--- a/conans/test/functional/command/install_test.py
+++ b/conans/test/functional/command/install_test.py
@@ -11,7 +11,7 @@ from conans.paths import CONANFILE, CONANFILE_TXT, CONANINFO
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.test.utils.tools import NO_SETTINGS_PACKAGE_ID
from conans.test.utils.tools import TestClient, TestServer, GenConanfile
-from conans.util.files import load, mkdir, rmdir
+from conans.util.files import mkdir, rmdir
class InstallTest(unittest.TestCase):
@@ -463,10 +463,10 @@ class TestConan(ConanFile):
client.out) # Test "from local cache" output message
client.run("install . --build=missing -s os=Macos -s os_build=Macos "
"--install-folder=os_dir")
- conaninfo = load(os.path.join(client.current_folder, "win_dir/conaninfo.txt"))
+ conaninfo = client.load("win_dir/conaninfo.txt")
self.assertIn("os=Windows", conaninfo)
self.assertNotIn("os=Macos", conaninfo)
- conaninfo = load(os.path.join(client.current_folder, "os_dir/conaninfo.txt"))
+ conaninfo = client.load("os_dir/conaninfo.txt")
self.assertNotIn("os=Windows", conaninfo)
self.assertIn("os=Macos", conaninfo)
diff --git a/conans/test/functional/command/json_output_test.py b/conans/test/functional/command/json_output_test.py
index ef6c72d5c..fc7b276d8 100644
--- a/conans/test/functional/command/json_output_test.py
+++ b/conans/test/functional/command/json_output_test.py
@@ -21,7 +21,7 @@ class JsonOutputTest(unittest.TestCase):
files = cpp_hello_conan_files("CC", "1.0", build=False)
self.client.save(files, clean_first=True)
self.client.run("create . private_user/channel --json=myfile.json")
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertFalse(my_json["error"])
tmp = ConanFileReference.loads(my_json["installed"][0]["recipe"]["id"])
self.assertEqual(str(tmp), "CC/1.0@private_user/channel")
@@ -38,7 +38,7 @@ class JsonOutputTest(unittest.TestCase):
self.client.run("upload CC/1.0@private_user/channel -c")
self.client.run("remove '*' -f")
self.client.run("install CC/1.0@private_user/channel --json=myfile.json --build missing ")
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
the_time_str = my_json["installed"][0]["recipe"]["time"]
self.assertIn("T", the_time_str) # Weak validation of the ISO 8601
@@ -54,7 +54,7 @@ class JsonOutputTest(unittest.TestCase):
self.client.run("upload CC/1.0@private_user/channel --all -c")
self.client.run("remove '*' -f")
self.client.run("install CC/1.0@private_user/channel --json=myfile.json")
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertFalse(my_json["error"])
self.assertEqual(my_json["installed"][0]["recipe"]["id"], "CC/1.0@private_user/channel")
@@ -67,7 +67,7 @@ class JsonOutputTest(unittest.TestCase):
# Force build
self.client.run("remove '*' -f")
self.client.run("install CC/1.0@private_user/channel --json=myfile.json --build")
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertFalse(my_json["error"])
self.assertEqual(my_json["installed"][0]["recipe"]["id"], "CC/1.0@private_user/channel")
@@ -81,7 +81,7 @@ class JsonOutputTest(unittest.TestCase):
# Missing recipe
self.client.run("install CC/1.0@private_user/channel --json=myfile.json", assert_error=True)
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertTrue(my_json["error"])
self.assertEqual(len(my_json["installed"]), 1)
self.assertFalse(my_json["installed"][0]["recipe"]["downloaded"])
@@ -96,7 +96,7 @@ class JsonOutputTest(unittest.TestCase):
self.client.run("upload CC/1.0@private_user/channel -c")
self.client.run("remove '*' -f")
self.client.run("install CC/1.0@private_user/channel --json=myfile.json", assert_error=True)
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertTrue(my_json["error"])
self.assertEqual(len(my_json["installed"]), 1)
@@ -118,7 +118,7 @@ class JsonOutputTest(unittest.TestCase):
self.client.save(files, clean_first=True)
self.client.run("create . private_user/channel --json=myfile.json ", assert_error=True)
- my_json = json.loads(load(os.path.join(self.client.current_folder, "myfile.json")))
+ my_json = json.loads(self.client.load("myfile.json"))
self.assertTrue(my_json["error"])
self.assertEqual(my_json["installed"][0]["packages"][0]["error"]["type"], "building")
self.assertIsNone(my_json["installed"][0]["packages"][0]["error"]["remote"])
@@ -162,7 +162,7 @@ AA*: CC/1.0@private_user/channel
deps=["AA/1.0@private_user/channel"], build=False)
self.client.save(files, clean_first=True)
self.client.run("install . --profile mybr --json=myfile.json --build AA --build BB")
- my_json = load(os.path.join(self.client.current_folder, "myfile.json"))
+ my_json = self.client.load("myfile.json")
my_json = json.loads(my_json)
self.assertTrue(my_json["installed"][0]["recipe"]["dependency"])
@@ -193,7 +193,7 @@ AA*: CC/1.0@private_user/channel
""")
self.client.save({'conanfile.py': conanfile})
self.client.run("create . name/version@user/channel --json=myfile.json")
- my_json = load(os.path.join(self.client.current_folder, "myfile.json"))
+ my_json = self.client.load("myfile.json")
my_json = json.loads(my_json)
# Nodes with cpp_info
diff --git a/conans/test/functional/command/new_test.py b/conans/test/functional/command/new_test.py
index aa24be7a9..b4b5b3da7 100644
--- a/conans/test/functional/command/new_test.py
+++ b/conans/test/functional/command/new_test.py
@@ -19,7 +19,7 @@ class NewTest(unittest.TestCase):
""")
save(os.path.join(client.cache_folder, "templates/mytemplate.py"), template1)
client.run("new hello/0.1 --template=mytemplate.py")
- conanfile = load(os.path.join(client.current_folder, "conanfile.py"))
+ conanfile = client.load("conanfile.py")
self.assertIn("class HelloConan(ConanFile):", conanfile)
self.assertIn('name = "hello"', conanfile)
self.assertIn('version = "0.1"', conanfile)
@@ -33,7 +33,7 @@ class NewTest(unittest.TestCase):
save(os.path.join(client.cache_folder, "templates", "subfolder", "mytemplate.py"),
template2)
client.run("new hello/0.1 -m=subfolder/mytemplate.py")
- conanfile = load(os.path.join(client.current_folder, "conanfile.py"))
+ conanfile = client.load("conanfile.py")
self.assertIn("class HelloConan(ConanFile):", conanfile)
self.assertIn('version = "fixed"', conanfile)
@@ -47,7 +47,7 @@ class NewTest(unittest.TestCase):
full_path = os.path.join(tmp, "templates", "subfolder", "mytemplate.py")
save(full_path, template2)
client.run('new hello/0.1 --template="%s"' % full_path)
- conanfile = load(os.path.join(client.current_folder, "conanfile.py"))
+ conanfile = client.load("conanfile.py")
self.assertIn("class HelloConan(ConanFile):", conanfile)
self.assertIn('version = "fixed"', conanfile)
diff --git a/conans/test/functional/command/remove_test.py b/conans/test/functional/command/remove_test.py
index 54772ecfd..7999da879 100644
--- a/conans/test/functional/command/remove_test.py
+++ b/conans/test/functional/command/remove_test.py
@@ -1,6 +1,5 @@
import os
import platform
-import textwrap
import unittest
import six
diff --git a/conans/test/functional/command/search_test.py b/conans/test/functional/command/search_test.py
index 866d9ec7f..907b8698c 100644
--- a/conans/test/functional/command/search_test.py
+++ b/conans/test/functional/command/search_test.py
@@ -382,7 +382,7 @@ helloTest/1.4.10@myuser/stable""".format(remote)
def search_html_table_test(self):
self.client.run("search Hello/1.4.10@myuser/testing --table=table.html")
- html = load(os.path.join(self.client.current_folder, "table.html"))
+ html = self.client.load("table.html")
self.assertIn("<h1>Hello/1.4.10@myuser/testing</h1>", html)
self.assertIn("<td>Linux gcc 4.5 (libstdc++11)</td>", html)
self.assertIn("<td>Windows Visual Studio 8.1</td>", html)
@@ -394,7 +394,7 @@ helloTest/1.4.10@myuser/stable""".format(remote)
self._copy_to_server(self.client.cache, self.servers["search_able"].server_store)
self.client.run("search Hello/1.4.10@myuser/testing -r=all --table=table.html")
- html = load(os.path.join(self.client.current_folder, "table.html"))
+ html = self.client.load("table.html")
self.assertIn("<h1>Hello/1.4.10@myuser/testing</h1>", html)
self.assertIn("<h2>'local':</h2>", html)
diff --git a/conans/test/functional/command/source_test.py b/conans/test/functional/command/source_test.py
index 1e978c6fa..bb57e2d3f 100644
--- a/conans/test/functional/command/source_test.py
+++ b/conans/test/functional/command/source_test.py
@@ -5,7 +5,7 @@ import six
from conans.paths import BUILD_INFO, CONANFILE
from conans.test.utils.tools import TestClient
-from conans.util.files import load, mkdir
+from conans.util.files import mkdir
class SourceTest(unittest.TestCase):
@@ -278,4 +278,4 @@ class ConanLib(ConanFile):
client.run("source .")
self.assertIn("conanfile.py: Configuring sources in", client.out)
self.assertIn("conanfile.py: Running source!", client.out)
- self.assertEqual("Hello World", load(os.path.join(client.current_folder, "file1.txt")))
+ self.assertEqual("Hello World", client.load("file1.txt"))
diff --git a/conans/test/functional/command/upload_test.py b/conans/test/functional/command/upload_test.py
index 5925d54f5..9a7e95a69 100644
--- a/conans/test/functional/command/upload_test.py
+++ b/conans/test/functional/command/upload_test.py
@@ -826,3 +826,46 @@ class Pkg(ConanFile):
client.run("user -c")
client.run("upload Hello0/1.2.1@user/testing --all -r default")
self.assertIn("Uploaded conan recipe 'Hello0/1.2.1@user/testing' to 'default'", client.out)
+
+ @unittest.skipIf(get_env("TESTING_REVISIONS_ENABLED", False), "No sense with revs")
+ def upload_with_rev_revs_disabled_test(self):
+ client = TestClient(default_server_user=True, revisions_enabled=False)
+ client.run("upload pkg/1.0@user/channel#fakerevision --confirm", assert_error=True)
+ self.assertIn(
+ "ERROR: Revisions not enabled in the client, specify a reference without revision",
+ client.out)
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def upload_with_recipe_revision_test(self):
+ ref = ConanFileReference.loads("pkg/1.0@user/channel")
+ client = TurboTestClient(default_server_user=True, revisions_enabled=True)
+ pref = client.create(ref, conanfile=GenConanfile())
+ client.run("upload pkg/1.0@user/channel#fakerevision --confirm", assert_error=True)
+ self.assertIn("ERROR: Recipe revision fakerevision does not match the one stored in the cache {}".
+ format(pref.ref.revision), client.out)
+
+ client.run("upload pkg/1.0@user/channel#{} --confirm".format(pref.ref.revision))
+ search_result = client.search("pkg/1.0@user/channel --revisions -r default")[0]
+ self.assertIn(pref.ref.revision, search_result["revision"])
+
+ @unittest.skipUnless(get_env("TESTING_REVISIONS_ENABLED", False), "Only revisions")
+ def upload_with_package_revision_test(self):
+ ref = ConanFileReference.loads("pkg/1.0@user/channel")
+ client = TurboTestClient(default_server_user=True, revisions_enabled=True)
+ pref = client.create(ref, conanfile=GenConanfile())
+ client.run(
+ "upload pkg/1.0@user/channel#{}:{}#fakeprev --confirm".format(pref.ref.revision, pref.id),
+ assert_error=True)
+ self.assertIn(
+ "ERROR: Binary package pkg/1.0@user/channel:{}#fakeprev not found".format(pref.id),
+ client.out)
+
+ client.run(
+ "upload pkg/1.0@user/channel#{}:{}#{} --confirm".format(pref.ref.revision, pref.id,
+ pref.revision))
+ search_result = client.search("pkg/1.0@user/channel --revisions -r default")[0]
+ self.assertIn(pref.ref.revision, search_result["revision"])
+ search_result = client.search(
+ "pkg/1.0@user/channel#{}:{} --revisions -r default".format(pref.ref.revision, pref.id))[
+ 0]
+ self.assertIn(pref.revision, search_result["revision"])
diff --git a/conans/test/functional/configuration/invalid_settings_test.py b/conans/test/functional/configuration/invalid_settings_test.py
new file mode 100644
index 000000000..8dd30e2f6
--- /dev/null
+++ b/conans/test/functional/configuration/invalid_settings_test.py
@@ -0,0 +1,28 @@
+import os
+import textwrap
+import unittest
+
+from conans.test.utils.tools import TestClient
+
+
+class SettingsLoadTestCase(unittest.TestCase):
+ def test_invalid_settings(self):
+ client = TestClient()
+ client.save({os.path.join(client.cache_folder, 'settings.yml'): """your buggy file"""})
+ client.run("new -b hello/1.0")
+ client.run("install .", assert_error=True)
+ self.assertIn("ERROR: Invalid settings.yml format", client.out)
+
+ def test_invalid_yaml(self):
+ client = TestClient()
+ client.save({os.path.join(client.cache_folder, 'settings.yml'):
+ textwrap.dedent("""
+ Almost:
+ - a
+ - valid
+ yaml
+ """)})
+ client.run("new -b hello/1.0")
+ client.run("install .", assert_error=True)
+ self.assertIn("ERROR: Invalid settings.yml format: while parsing a block mapping",
+ client.out)
diff --git a/conans/test/functional/configuration/profile_test.py b/conans/test/functional/configuration/profile_test.py
index 8bdc5d5ae..6c43ce23d 100644
--- a/conans/test/functional/configuration/profile_test.py
+++ b/conans/test/functional/configuration/profile_test.py
@@ -53,11 +53,11 @@ class ProfileTest(unittest.TestCase):
env=[("A_VAR", "A_VALUE"), ("PREPEND_VAR", ["new_path", "other_path"])],
package_env={"Hello0": [("OTHER_VAR", "2")]})
self.client.run("install . -pr envs -g virtualenv")
- content = load(os.path.join(self.client.current_folder, "activate.sh"))
+ content = self.client.load("activate.sh")
self.assertIn(":".join(["PREPEND_VAR=\"new_path\"", "\"other_path\""]) +
"${PREPEND_VAR+:$PREPEND_VAR}", content)
if platform.system() == "Windows":
- content = load(os.path.join(self.client.current_folder, "activate.bat"))
+ content = self.client.load("activate.bat")
self.assertIn(";".join(["PREPEND_VAR=new_path", "other_path", "%PREPEND_VAR%"]),
content)
@@ -213,13 +213,13 @@ class ProfileTest(unittest.TestCase):
self.client.save(files)
self.client.run("export . lasote/stable")
self.client.run("install . --build missing -pr vs_12_86")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
for setting, value in profile_settings.items():
self.assertIn("%s=%s" % (setting, value), info)
# Try to override some settings in install command
self.client.run("install . --build missing -pr vs_12_86 -s compiler.version=14")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
for setting, value in profile_settings.items():
if setting != "compiler.version":
self.assertIn("%s=%s" % (setting, value), info)
@@ -237,7 +237,7 @@ class ProfileTest(unittest.TestCase):
package_settings=package_settings)
# Try to override some settings in install command
self.client.run("install . --build missing -pr vs_12_86_Hello0_gcc -s compiler.version=14")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("compiler=gcc", info)
self.assertIn("compiler.libcxx=libstdc++11", info)
@@ -248,7 +248,7 @@ class ProfileTest(unittest.TestCase):
package_settings=package_settings)
# Try to override some settings in install command
self.client.run("install . --build missing -pr vs_12_86_Hello0_gcc -s compiler.version=14")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("compiler=Visual Studio", info)
self.assertNotIn("compiler.libcxx", info)
@@ -260,7 +260,7 @@ class ProfileTest(unittest.TestCase):
# Try to override some settings in install command
self.client.run("install . --build missing -pr vs_12_86_Hello0_gcc"
" -s compiler.version=14 -s Hello0:compiler.libcxx=libstdc++")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("compiler=gcc", info)
self.assertNotIn("compiler.libcxx=libstdc++11", info)
self.assertIn("compiler.libcxx=libstdc++", info)
@@ -287,7 +287,7 @@ class ProfileTest(unittest.TestCase):
package_settings=package_settings)
# Try to override some settings in install command
self.client.run("install . lasote/testing -pr myprofile")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("compiler=gcc", info)
self.assertIn("compiler.libcxx=libstdc++11", info)
self.assertIn("compiler.version=4.8", info)
@@ -298,7 +298,7 @@ class ProfileTest(unittest.TestCase):
package_settings=package_settings)
# Try to override some settings in install command
self.client.run("install . lasote/testing -pr myprofile")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("compiler=Visual Studio", info)
self.assertIn("compiler.runtime=MD", info)
self.assertIn("compiler.version=12", info)
@@ -314,7 +314,7 @@ class ProfileTest(unittest.TestCase):
self.client.save(files)
self.client.run("install . --build missing -pr vs_12_86")
- info = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ info = self.client.load("conaninfo.txt")
self.assertIn("language=1", info)
self.assertIn("static=False", info)
diff --git a/conans/test/functional/environment/apply_environment_test.py b/conans/test/functional/environment/apply_environment_test.py
index 6e8b6420d..2aae83cf5 100644
--- a/conans/test/functional/environment/apply_environment_test.py
+++ b/conans/test/functional/environment/apply_environment_test.py
@@ -283,7 +283,7 @@ virtualrunenv
ext = "bat" if platform.system() == "Windows" else "sh"
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "activate_run.%s" % ext)))
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "deactivate_run.%s" % ext)))
- activate_contents = load(os.path.join(client.current_folder, "activate_run.%s" % ext))
+ activate_contents = client.load("activate_run.%s" % ext)
self.assertIn("PATH", activate_contents)
self.assertIn("LD_LIBRARY_PATH", activate_contents)
@@ -427,8 +427,8 @@ class HelloConan(ConanFile):
ext = "bat" if platform.system() == "Windows" else "sh"
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "activate.%s" % ext)))
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "deactivate.%s" % ext)))
- activate_contents = load(os.path.join(client.current_folder, "activate.%s" % ext))
- deactivate_contents = load(os.path.join(client.current_folder, "deactivate.%s" % ext))
+ activate_contents = client.load("activate.%s" % ext)
+ deactivate_contents = client.load("deactivate.%s" % ext)
self.assertNotIn("bad value", activate_contents)
if platform.system() == "Windows":
self.assertIn("var1=good value", activate_contents)
@@ -665,7 +665,7 @@ virtualenv
"""
client.save({"conanfile.txt": conanfile}, clean_first=True)
client.run("install .")
- info = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
+ info = client.load("conanbuildinfo.txt")
info = info.replace("\r\n", "\n")
self.assertIn("""
[ENV_libA]
@@ -673,10 +673,10 @@ PATH=["path_from_A"]
[ENV_libB]
PATH=["path_from_B"]""", info)
if platform.system() != "Windows":
- activate = load(os.path.join(client.current_folder, "activate.sh"))
+ activate = client.load("activate.sh")
self.assertIn('PATH="path_from_A":"path_from_B"${PATH+:$PATH}', activate)
else:
- activate = load(os.path.join(client.current_folder, "activate.bat"))
+ activate = client.load("activate.bat")
self.assertIn('PATH=path_from_A;path_from_B;%PATH%', activate)
def check_conaninfo_completion_test(self):
diff --git a/conans/test/functional/generators/cmake_multi_test.py b/conans/test/functional/generators/cmake_multi_test.py
index 96ea70bf9..44960b64a 100644
--- a/conans/test/functional/generators/cmake_multi_test.py
+++ b/conans/test/functional/generators/cmake_multi_test.py
@@ -344,7 +344,7 @@ class CMakeMultiSyntaxTest(unittest.TestCase):
"""
Check conan_basic_setup() interface is the same one for cmake and cmake_multi generators
"""
- conanbuildinfo = load(os.path.join(self.client.current_folder, "conanbuildinfo.cmake"))
+ conanbuildinfo = self.client.load("conanbuildinfo.cmake")
conanbuildinfo_multi = load(os.path.join(self.client.current_folder,
"conanbuildinfo_multi.cmake"))
expected = "set(options TARGETS NO_OUTPUT_DIRS SKIP_RPATH KEEP_RPATHS SKIP_STD SKIP_FPIC)"
diff --git a/conans/test/functional/generators/cmake_paths_test.py b/conans/test/functional/generators/cmake_paths_test.py
index fb4ae6e5d..90eb923c0 100644
--- a/conans/test/functional/generators/cmake_paths_test.py
+++ b/conans/test/functional/generators/cmake_paths_test.py
@@ -5,7 +5,6 @@ import unittest
from conans.model.ref import ConanFileReference, PackageReference
from conans.test.utils.tools import TestClient, NO_SETTINGS_PACKAGE_ID, TurboTestClient, GenConanfile
-from conans.util.files import load
class CMakePathsGeneratorTest(unittest.TestCase):
@@ -19,7 +18,7 @@ class CMakePathsGeneratorTest(unittest.TestCase):
client.run("install {} -g cmake_paths".format(ref2))
pfolder1 = client.cache.package_layout(pref1.ref).package(pref1).replace("\\", "/")
pfolder2 = client.cache.package_layout(pref2.ref).package(pref2).replace("\\", "/")
- contents = load(os.path.join(client.current_folder, "conan_paths.cmake"))
+ contents = client.load("conan_paths.cmake")
expected = 'set(CONAN_LIB2_ROOT "{pfolder2}")\r\n' \
'set(CONAN_LIB1_ROOT "{pfolder1}")\r\n' \
'set(CMAKE_MODULE_PATH "{pfolder2}/"\r\n\t\t\t"{pfolder1}/" ' \
diff --git a/conans/test/functional/generators/cmake_test.py b/conans/test/functional/generators/cmake_test.py
index da1c6b9b2..75a8704c2 100644
--- a/conans/test/functional/generators/cmake_test.py
+++ b/conans/test/functional/generators/cmake_test.py
@@ -5,7 +5,6 @@ import unittest
from nose.plugins.attrib import attr
-from conans import load
from conans.client.tools import replace_in_file
from conans.model.ref import ConanFileReference
from conans.test.utils.tools import TestClient, GenConanfile, TurboTestClient
diff --git a/conans/test/functional/generators/custom_generator_test.py b/conans/test/functional/generators/custom_generator_test.py
index 0293cf73b..b9b0da0f6 100644
--- a/conans/test/functional/generators/custom_generator_test.py
+++ b/conans/test/functional/generators/custom_generator_test.py
@@ -1,4 +1,3 @@
-import os
import textwrap
import unittest
@@ -6,7 +5,6 @@ from conans.model.ref import ConanFileReference
from conans.paths import CONANFILE, CONANFILE_TXT
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.test.utils.tools import TestClient, TestServer
-from conans.util.files import load
generator = """
from conans.model import Generator
@@ -93,7 +91,7 @@ class CustomGeneratorTest(unittest.TestCase):
files = {CONANFILE_TXT: consumer}
client.save(files, clean_first=True)
client.run("install . --build")
- generated = load(os.path.join(client.current_folder, "customfile.gen"))
+ generated = client.load("customfile.gen")
self.assertEqual(generated, "My custom generator content")
# Test retrieval from remote
@@ -102,7 +100,7 @@ class CustomGeneratorTest(unittest.TestCase):
client.save(files)
client.run("install . --build")
- generated = load(os.path.join(client.current_folder, "customfile.gen"))
+ generated = client.load("customfile.gen")
self.assertEqual(generated, "My custom generator content")
def multifile_test(self):
@@ -121,7 +119,7 @@ class CustomGeneratorTest(unittest.TestCase):
"Property 'filename' not used",
client.out)
for i in (1, 2):
- generated = load(os.path.join(client.current_folder, "file%d.gen" % i))
+ generated = client.load("file%d.gen" % i)
self.assertEqual(generated, "CustomContent%d" % i)
def export_template_generator_test(self):
@@ -146,7 +144,7 @@ class MyCustomGeneratorWithTemplatePackage(ConanFile):
client.run("create . gen/0.1@user/stable")
client.run("install gen/0.1@user/stable -g=MyCustomTemplateGenerator")
- generated = load(os.path.join(client.current_folder, "customfile.gen"))
+ generated = client.load("customfile.gen")
self.assertEqual(generated, "Template: Hello")
def install_folder_test(self):
@@ -169,6 +167,6 @@ class MyCustomGeneratorWithTemplatePackage(ConanFile):
client.save({CONANFILE: templated_generator, "mytemplate.txt": "Template: %s"})
client.run("create . gen/0.1@user/stable")
client.run("install gen/0.1@user/stable -g=MyGenerator")
- generated = load(os.path.join(client.current_folder, "customfile.gen"))
+ generated = client.load("customfile.gen")
self.assertEqual(generated, client.current_folder)
diff --git a/conans/test/functional/generators/generators_test.py b/conans/test/functional/generators/generators_test.py
index e278d073a..649558059 100644
--- a/conans/test/functional/generators/generators_test.py
+++ b/conans/test/functional/generators/generators_test.py
@@ -5,7 +5,6 @@ import unittest
from conans.model.graph_info import GRAPH_INFO_FILE
from conans.test.utils.tools import NO_SETTINGS_PACKAGE_ID, TestClient
-from conans.util.files import load
from conans.model.graph_lock import LOCKFILE
@@ -102,7 +101,7 @@ qmake
client.save({"conanfile.txt": base}, clean_first=True)
client.run("install . --build")
- qmake = load(os.path.join(client.current_folder, "conanbuildinfo.pri"))
+ qmake = client.load("conanbuildinfo.pri")
self.assertIn("CONAN_RESDIRS += ", qmake)
self.assertEqual(qmake.count("CONAN_LIBS += "), 1)
self.assertIn("CONAN_LIBS_PKG_RELEASE += -lhellor", qmake)
@@ -139,7 +138,7 @@ qmake
client.save({"conanfile.txt": base}, clean_first=True)
client.run("install . --build")
- qmake = load(os.path.join(client.current_folder, "conanbuildinfo.pri"))
+ qmake = client.load("conanbuildinfo.pri")
self.assertIn("CONAN_RESDIRS += ", qmake)
self.assertEqual(qmake.count("CONAN_LIBS += "), 1)
self.assertIn("CONAN_LIBS_PKG_NAME_WORLD_RELEASE += -lhellor", qmake)
diff --git a/conans/test/functional/generators/json_test.py b/conans/test/functional/generators/json_test.py
index 5aebd68b5..0c4c7d357 100644
--- a/conans/test/functional/generators/json_test.py
+++ b/conans/test/functional/generators/json_test.py
@@ -3,7 +3,7 @@ import os
import textwrap
import unittest
-from conans.test.utils.tools import TestClient, load
+from conans.test.utils.tools import TestClient
class JsonTest(unittest.TestCase):
@@ -24,7 +24,7 @@ class HelloConan(ConanFile):
"header.h": ""})
client.run("create . Hello/0.1@lasote/testing")
client.run("install Hello/0.1@lasote/testing -g json")
- conan_json = load(os.path.join(client.current_folder, "conanbuildinfo.json"))
+ conan_json = client.load("conanbuildinfo.json")
data = json.loads(conan_json)
self.assertEqual(data["deps_env_info"]["MY_ENV_VAR"], "foo")
@@ -55,7 +55,7 @@ class HelloConan(ConanFile):
client.run("create . Hello/0.1@lasote/testing " + settings)
client.run("install Hello/0.1@lasote/testing -g json " + settings)
- conan_json = load(os.path.join(client.current_folder, "conanbuildinfo.json"))
+ conan_json = client.load("conanbuildinfo.json")
data = json.loads(conan_json)
settings_data = data["settings"]
@@ -135,7 +135,7 @@ class HelloConan(ConanFile):
client.run("create . Hello/0.1@lasote/testing")
client.run("install Hello/0.1@lasote/testing -g json")
- my_json = load(os.path.join(client.current_folder, "conanbuildinfo.json"))
+ my_json = client.load("conanbuildinfo.json")
my_json = json.loads(my_json)
self.assertListEqual(my_json["dependencies"][0]["libs"], ["LIB1"])
self.assertListEqual(my_json["dependencies"][0]["system_libs"], ["SYSTEM_LIB1"])
diff --git a/conans/test/functional/generators/make_test.py b/conans/test/functional/generators/make_test.py
index c04103204..afeb8af82 100644
--- a/conans/test/functional/generators/make_test.py
+++ b/conans/test/functional/generators/make_test.py
@@ -4,7 +4,7 @@ import unittest
from nose.plugins.attrib import attr
-from conans.client.tools import chdir, replace_in_file
+from conans.client.tools import replace_in_file
from conans.test.utils.tools import TestClient
diff --git a/conans/test/functional/generators/premake_test.py b/conans/test/functional/generators/premake_test.py
index 53576b937..7f1fbd447 100644
--- a/conans/test/functional/generators/premake_test.py
+++ b/conans/test/functional/generators/premake_test.py
@@ -44,7 +44,7 @@ class PremakeGeneratorTest(unittest.TestCase):
def test_generate_basic_setup_release(self):
self.client.run("install . -s build_type=Release -s arch=x86_64 --build missing")
self.client.run_command("premake5 vs2017")
- sln_content = load(os.path.join(self.client.current_folder, "example.sln"))
+ sln_content = self.client.load("example.sln")
self.assertIn("Release|x64", sln_content)
self.assertNotIn("Debug|Win32", sln_content)
self.assertNotIn("Debug|x64", sln_content)
@@ -52,7 +52,7 @@ class PremakeGeneratorTest(unittest.TestCase):
def test_generate_basic_setup_debug_32bit(self):
self.client.run("install . -s build_type=Debug -s arch=x86 --build missing")
self.client.run_command("premake5 vs2017")
- sln_content = load(os.path.join(self.client.current_folder, "example.sln"))
+ sln_content = self.client.load("example.sln")
self.assertIn("Debug|Win32", sln_content)
self.assertNotIn("Release|Win32", sln_content)
self.assertNotIn("Release|x64", sln_content)
diff --git a/conans/test/functional/generators/virtualbuildenv_test.py b/conans/test/functional/generators/virtualbuildenv_test.py
index 30bad8d8d..489844b6d 100644
--- a/conans/test/functional/generators/virtualbuildenv_test.py
+++ b/conans/test/functional/generators/virtualbuildenv_test.py
@@ -21,7 +21,7 @@ class TestConan(ConanFile):
client.save({"conanfile.py": conanfile})
client.run('install . -g virtualbuildenv -s os=Windows -s compiler="Visual Studio"'
' -s compiler.runtime=MD -s compiler.version=15')
- bat = load(os.path.join(client.current_folder, "activate_build.bat"))
+ bat = client.load("activate_build.bat")
self.assertIn("SET UseEnv=True", bat)
self.assertIn('SET CL=-MD -DNDEBUG -O2 -Ob2 %CL%', bat)
diff --git a/conans/test/functional/generators/visual_studio_test.py b/conans/test/functional/generators/visual_studio_test.py
index 1162e87df..e1a24808d 100644
--- a/conans/test/functional/generators/visual_studio_test.py
+++ b/conans/test/functional/generators/visual_studio_test.py
@@ -7,7 +7,6 @@ import unittest
from nose.plugins.attrib import attr
-from conans import load
from conans.test.utils.tools import TestClient
from conans.test.utils.visual_project_files import get_vs_project_files
diff --git a/conans/test/functional/graph/half_diamond_test.py b/conans/test/functional/graph/half_diamond_test.py
index 514c450fa..b2ccc47bb 100644
--- a/conans/test/functional/graph/half_diamond_test.py
+++ b/conans/test/functional/graph/half_diamond_test.py
@@ -49,5 +49,5 @@ class HalfDiamondTest(unittest.TestCase):
self.client.run("install . --build missing")
self.assertIn("conanfile.py (Hello2/0.1): Generated conaninfo.txt",
self.client.out)
- conaninfo = load(os.path.join(self.client.current_folder, "conaninfo.txt"))
+ conaninfo = self.client.load("conaninfo.txt")
self.assertEqual(1, conaninfo.count("Hello0/0.1@lasote/stable"))
diff --git a/conans/test/functional/graph/private_deps_test.py b/conans/test/functional/graph/private_deps_test.py
index 0ee32a437..716984706 100644
--- a/conans/test/functional/graph/private_deps_test.py
+++ b/conans/test/functional/graph/private_deps_test.py
@@ -32,14 +32,14 @@ class Pkg(ConanFile):
client.save({"conanfile.py": conanfile.replace("pass", "requires='PkgA/0.1@user/channel'")})
client.run("install . -g=cmake")
self.assertIn("PkgC/0.1@user/channel:%s - Cache" % NO_SETTINGS_PACKAGE_ID, client.out)
- conanbuildinfo = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
+ conanbuildinfo = client.load("conanbuildinfo.txt")
self.assertIn("[libs];PkgA;PkgC", ";".join(conanbuildinfo.splitlines()))
self.assertIn("PkgC/0.1/user/channel/package", conanbuildinfo)
self.assertIn("[includedirs_PkgC]", conanbuildinfo)
conanbuildinfo = client.load("conanbuildinfo.cmake")
self.assertIn("set(CONAN_LIBS PkgA PkgC ${CONAN_LIBS})", conanbuildinfo)
client.run("info . --graph=file.html")
- html = load(os.path.join(client.current_folder, "file.html"))
+ html = client.load("file.html")
self.assertEqual(1, html.count("label: 'PkgC/0.1', shape: 'box'"))
def test_private_regression_skip(self):
diff --git a/conans/test/functional/graph/version_range_error_test.py b/conans/test/functional/graph/version_range_error_test.py
index fc5580347..2d69c8b39 100644
--- a/conans/test/functional/graph/version_range_error_test.py
+++ b/conans/test/functional/graph/version_range_error_test.py
@@ -1,6 +1,5 @@
import unittest
-from conans.model.ref import ConanFileReference
from conans.paths import CONANFILE
from conans.test.utils.tools import TestClient, GenConanfile
diff --git a/conans/test/functional/graph/version_ranges_diamond_test.py b/conans/test/functional/graph/version_ranges_diamond_test.py
index b37ccbd2a..e691938b2 100644
--- a/conans/test/functional/graph/version_ranges_diamond_test.py
+++ b/conans/test/functional/graph/version_ranges_diamond_test.py
@@ -1,4 +1,3 @@
-import os
import textwrap
import unittest
from collections import OrderedDict
diff --git a/conans/test/functional/graph_lock/graph_lock_test.py b/conans/test/functional/graph_lock/graph_lock_test.py
index f28d7ce88..653997755 100644
--- a/conans/test/functional/graph_lock/graph_lock_test.py
+++ b/conans/test/functional/graph_lock/graph_lock_test.py
@@ -65,7 +65,7 @@ class GraphLockCustomFilesTest(unittest.TestCase):
def _check_lock(self, ref_b, rev_b=""):
ref_b = repr(ConanFileReference.loads(ref_b))
- lock_file = load(os.path.join(self.client.current_folder, "custom.lock"))
+ lock_file = self.client.load("custom.lock")
lock_file_json = json.loads(lock_file)
self.assertEqual(lock_file_json["version"], LOCKFILE_VERSION)
self.assertEqual(2, len(lock_file_json["graph_lock"]["nodes"]))
diff --git a/conans/test/functional/old/conanfile_extend_test.py b/conans/test/functional/old/conanfile_extend_test.py
index 18531b43b..b2c039413 100644
--- a/conans/test/functional/old/conanfile_extend_test.py
+++ b/conans/test/functional/old/conanfile_extend_test.py
@@ -1,8 +1,6 @@
-import os
import unittest
from conans.test.utils.tools import TestClient
-from conans.util.files import load
class ConanfileExtendTest(unittest.TestCase):
diff --git a/conans/test/functional/old/path_exists_test.py b/conans/test/functional/old/path_exists_test.py
index a13560c23..e31c70e7d 100644
--- a/conans/test/functional/old/path_exists_test.py
+++ b/conans/test/functional/old/path_exists_test.py
@@ -26,7 +26,7 @@ class PathExistsTest(unittest.TestCase):
self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
self.client.save({"conanfile.txt": "[requires]\nHello0/0.1@lasote/stable\n[generators]\ntxt"})
self.client.run("install . --build missing ")
- build_info = load(os.path.join(self.client.current_folder, "conanbuildinfo.txt"))
+ build_info = self.client.load("conanbuildinfo.txt")
self.assertIn("helloHello0", build_info)
self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
diff --git a/conans/test/functional/old/sysroot_test.py b/conans/test/functional/old/sysroot_test.py
index 01b5d16ff..c93402502 100644
--- a/conans/test/functional/old/sysroot_test.py
+++ b/conans/test/functional/old/sysroot_test.py
@@ -40,6 +40,6 @@ class Pkg(ConanFile):
# Install conanfile and check conaninfo.txt
client.run("install .")
- bili = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
+ bili = client.load("conanbuildinfo.txt")
self.assertIn(os.linesep.join(["[sysroot_sysroot]", "HelloSysRoot"]), bili)
self.assertIn(os.linesep.join(["[sysroot]", "HelloSysRoot"]), bili)
diff --git a/conans/test/functional/old/user_info_test.py b/conans/test/functional/old/user_info_test.py
index c84c2f201..5e65d9ee6 100644
--- a/conans/test/functional/old/user_info_test.py
+++ b/conans/test/functional/old/user_info_test.py
@@ -58,7 +58,7 @@ class MyConanfile(ConanFile):
client.run('install reuse/0.1@lasote/stable --build -g txt')
# Assert generator TXT
- txt_contents = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
+ txt_contents = client.load("conanbuildinfo.txt")
self.assertIn("[USER_LIB_A]%sVAR1=2" % os.linesep, txt_contents)
self.assertIn("[USER_LIB_B]%sVAR1=2%sVAR2=3" % (os.linesep, os.linesep), txt_contents)
self.assertIn("[USER_LIB_C]%sVAR1=2" % os.linesep, txt_contents)
diff --git a/conans/test/functional/package_id/compatible_test.py b/conans/test/functional/package_id/compatible_test.py
index be42a12ea..652360501 100644
--- a/conans/test/functional/package_id/compatible_test.py
+++ b/conans/test/functional/package_id/compatible_test.py
@@ -4,7 +4,6 @@ import unittest
from conans.model.ref import ConanFileReference
from conans.test.utils.tools import TestClient, GenConanfile
-from conans.util.env_reader import get_env
class CompatibleIDsTest(unittest.TestCase):
diff --git a/conans/test/functional/python_requires/python_package_reuse_test.py b/conans/test/functional/python_requires/python_package_reuse_test.py
index d19350e1f..73bd372af 100644
--- a/conans/test/functional/python_requires/python_package_reuse_test.py
+++ b/conans/test/functional/python_requires/python_package_reuse_test.py
@@ -5,7 +5,7 @@ from conans.model.info import ConanInfo
from conans.paths import BUILD_INFO, CONANFILE
from conans.test.utils.test_files import temp_folder
from conans.test.utils.tools import TestClient, TestServer
-from conans.util.files import load, save
+from conans.util.files import save
conanfile = """from conans import ConanFile
diff --git a/conans/test/functional/python_requires/python_requires_test.py b/conans/test/functional/python_requires/python_requires_test.py
index 5fb3e879f..c3f774b4c 100644
--- a/conans/test/functional/python_requires/python_requires_test.py
+++ b/conans/test/functional/python_requires/python_requires_test.py
@@ -9,7 +9,6 @@ from conans.model.ref import ConanFileReference
from conans.paths import CONANFILE
from conans.test.utils.tools import TestClient, TestServer, \
NO_SETTINGS_PACKAGE_ID, create_local_git_repo, GenConanfile
-from conans.util.files import load
class PythonExtendTest(unittest.TestCase):
@@ -250,7 +249,7 @@ class MyConanfileBase(source.SourceBuild, package.PackageInfo):
""")
client.save({"conanfile.py": conanfile})
client.run("install . pkg/0.1@user/channel")
- lockfile = load(os.path.join(client.current_folder, "conan.lock"))
+ lockfile = client.load("conan.lock")
self.assertIn("base/1.0@user/channel#e41727b922c6ae54b216a58442893f3a", lockfile)
self.assertIn("helper/1.0@user/channel#98457e1f8d9174ed053747634ce0ea1a", lockfile)
client.run("source .")
diff --git a/conans/test/integration/build_id_test.py b/conans/test/integration/build_id_test.py
index a0ba2e36d..a70e4e4b5 100644
--- a/conans/test/integration/build_id_test.py
+++ b/conans/test/integration/build_id_test.py
@@ -118,13 +118,13 @@ class BuildIdTest(unittest.TestCase):
client.out)
self.assertIn("Building my code!", client.out)
self.assertIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
# Package Windows Release, it will reuse the previous build
client.run('install . -s os=Windows -s build_type=Release')
self.assertNotIn("Building my code!", client.out)
self.assertIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
# Now Linux Debug
@@ -133,13 +133,13 @@ class BuildIdTest(unittest.TestCase):
client.out)
self.assertIn("Building my code!", client.out)
self.assertIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
# Linux Release must build again, as it is not affected by build_id()
client.run('install . -s os=Linux -s build_type=Release')
self.assertIn("Building my code!", client.out)
self.assertIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
self._check_conaninfo(client)
@@ -149,13 +149,13 @@ class BuildIdTest(unittest.TestCase):
client.run('install . -s os=Windows -s build_type=Debug')
self.assertNotIn("Building my code!", client.out)
self.assertIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
# Windows Release
client.run('install . -s os=Windows -s build_type=Release')
self.assertNotIn("Building my code!", client.out)
self.assertIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
# Now Linux
client.run('install . -s os=Linux -s build_type=Debug')
@@ -163,12 +163,12 @@ class BuildIdTest(unittest.TestCase):
client.out)
self.assertIn("Building my code!", client.out)
self.assertIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
client.run('install . -s os=Linux -s build_type=Release')
self.assertIn("Building my code!", client.out)
self.assertIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
self._check_conaninfo(client)
@@ -177,23 +177,23 @@ class BuildIdTest(unittest.TestCase):
client.run('install . -s os=Windows -s build_type=Debug')
self.assertNotIn("Building my code!", client.out)
self.assertNotIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
client.run('install . -s os=Windows -s build_type=Release')
self.assertNotIn("Building my code!", client.out)
self.assertNotIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
# Now Linux
client.run('install . -s os=Linux -s build_type=Debug')
self.assertNotIn("Building my code!", client.out)
self.assertNotIn("Packaging Debug!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Debug file1", content)
client.run('install . -s os=Linux -s build_type=Release')
self.assertNotIn("Building my code!", client.out)
self.assertNotIn("Packaging Release!", client.out)
- content = load(os.path.join(client.current_folder, "file1.txt"))
+ content = client.load("file1.txt")
self.assertEqual("Release file1", content)
self._check_conaninfo(client)
diff --git a/conans/test/integration/order_libs_test.py b/conans/test/integration/order_libs_test.py
index 425f45279..bb4c069b2 100644
--- a/conans/test/integration/order_libs_test.py
+++ b/conans/test/integration/order_libs_test.py
@@ -110,9 +110,9 @@ class HelloReuseConan(ConanFile):
expected_libs = ['SDL2_ttf', 'freeType', 'SDL2', 'rt', 'pthread', 'dl',
'BZip2', 'LibPNG', 'm', 'ZLib']
- conanbuildinfo = load(os.path.join(self.client.current_folder, "conanbuildinfo.txt"))
+ conanbuildinfo = self.client.load("conanbuildinfo.txt")
libs = os.linesep.join(expected_libs)
self.assertIn(libs, conanbuildinfo)
- conanbuildinfo = load(os.path.join(self.client.current_folder, "conanbuildinfo.cmake"))
+ conanbuildinfo = self.client.load("conanbuildinfo.cmake")
libs = " ".join(expected_libs)
self.assertIn(libs, conanbuildinfo)
diff --git a/conans/test/unittests/client/cmd/export_test.py b/conans/test/unittests/client/cmd/export_test.py
index a62322c25..aa2f3acc5 100644
--- a/conans/test/unittests/client/cmd/export_test.py
+++ b/conans/test/unittests/client/cmd/export_test.py
@@ -5,7 +5,6 @@ from collections import namedtuple
from conans.client.cmd.export import _replace_scm_data_in_conanfile
from conans.client.loader import _parse_conanfile
-from conans.client.tools import chdir
from conans.model.ref import ConanFileReference
from conans.model.scm import SCMData
from conans.test.utils.test_files import temp_folder
diff --git a/conans/test/unittests/client/generators/cmake_paths_test.py b/conans/test/unittests/client/generators/cmake_paths_test.py
index ea09657e3..f6afdfc54 100644
--- a/conans/test/unittests/client/generators/cmake_paths_test.py
+++ b/conans/test/unittests/client/generators/cmake_paths_test.py
@@ -1,6 +1,5 @@
import os
import unittest
-from collections import namedtuple
from conans.client.generators.cmake_paths import CMakePathsGenerator
from conans.model.build_info import CppInfo
diff --git a/conans/test/unittests/client/generators/cmake_test.py b/conans/test/unittests/client/generators/cmake_test.py
index 6acbb10dc..3acbd6e9a 100644
--- a/conans/test/unittests/client/generators/cmake_test.py
+++ b/conans/test/unittests/client/generators/cmake_test.py
@@ -1,11 +1,8 @@
import os
-import platform
import re
-import textwrap
import unittest
import six
-from parameterized import parameterized
from conans.client.build.cmake_flags import CMakeDefinitionsBuilder
from conans.client.conf import default_settings_yml
@@ -20,9 +17,8 @@ from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.test_files import temp_folder
from conans.test.utils.tools import TestBufferConanOutput
-from conans.test.utils.tools import TestClient
from conans.util.files import save
-from conans.test.utils.tools import TestClient, GenConanfile
+
class _MockSettings(object):
build_type = None
diff --git a/conans/test/unittests/tools/files_patch_test.py b/conans/test/unittests/tools/files_patch_test.py
index 4508f64c0..4dd65bf16 100644
--- a/conans/test/unittests/tools/files_patch_test.py
+++ b/conans/test/unittests/tools/files_patch_test.py
@@ -7,8 +7,7 @@ from parameterized.parameterized import parameterized
from conans.client.graph.python_requires import ConanPythonRequire
from conans.client.loader import ConanFileLoader
from conans.test.utils.test_files import temp_folder
-from conans.test.utils.tools import TestClient, TestBufferConanOutput,\
- test_profile
+from conans.test.utils.tools import TestClient, TestBufferConanOutput, test_profile
from conans.util.files import save, load
base_conanfile = '''
@@ -82,8 +81,7 @@ class ToolsFilesPatchTest(unittest.TestCase):
client.save({"conanfile.py": conanfile,
"example.patch": patch})
client.run("source .")
- self.assertEqual(load(os.path.join(client.current_folder, "newfile")),
- "New file!")
+ self.assertEqual(client.load("newfile"), "New file!")
def test_patch_strip_delete(self):
conanfile = dedent("""
@@ -249,10 +247,10 @@ class ToolsFilesPatchTest(unittest.TestCase):
"bar.txt": bar})
client.run("install .")
client.run("build .")
- bar_content = load(os.path.join(client.current_folder, "bar.txt"))
+ bar_content = client.load("bar.txt")
self.assertIn(dedent("""Yo no creo en brujas, pero que las hay, las hay
"""), bar_content)
- foo_content = load(os.path.join(client.current_folder, "foo.txt"))
+ foo_content = client.load("foo.txt")
self.assertIn(dedent("""For us, there is no spring.
Just the wind that smells fresh before the storm."""), foo_content)
self.assertIn("Running build()", client.out)
@@ -314,5 +312,5 @@ Z""")
"Jamroot": source})
client.run("install .")
client.run("build .")
- content = load(os.path.join(client.current_folder, "Jamroot"))
+ content = client.load("Jamroot")
self.assertIn(expected, content)
diff --git a/conans/test/unittests/util/conanfile_tools_test.py b/conans/test/unittests/util/conanfile_tools_test.py
index e58cacf78..d6bc33db6 100644
--- a/conans/test/unittests/util/conanfile_tools_test.py
+++ b/conans/test/unittests/util/conanfile_tools_test.py
@@ -36,8 +36,7 @@ class Pkg(ConanFile):
client.save({"conanfile.py": conanfile,
"myfile.txt": "World"})
client.run("source .")
- self.assertEqual("WorldHello",
- load(os.path.join(client.current_folder, "myfile.txt")))
+ self.assertEqual("WorldHello", client.load("myfile.txt"))
def test_untar(self):
tmp_dir = temp_folder()
diff --git a/conans/test/utils/tools.py b/conans/test/utils/tools.py
index aa9e3f82e..5c9369142 100644
--- a/conans/test/utils/tools.py
+++ b/conans/test/utils/tools.py
@@ -10,7 +10,6 @@ import sys
import tempfile
import threading
import unittest
-import textwrap
import uuid
from collections import Counter, OrderedDict
from contextlib import contextmanager
@@ -1236,7 +1235,8 @@ class TurboTestClient(TestClient):
def create(self, ref, conanfile=GenConanfile(), args=None, assert_error=False):
if conanfile:
self.save({"conanfile.py": conanfile})
- self.run("create . {} {} --json {}".format(ref.full_str(),
+ full_str = "{}@".format(ref.full_str()) if not ref.user else ref.full_str()
+ self.run("create . {} {} --json {}".format(full_str,
args or "", self.tmp_json_name),
assert_error=assert_error)
rrev = self.cache.package_layout(ref).recipe_revision()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 8
} | 1.20 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"nose-cov",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"conans/requirements.txt",
"conans/requirements_server.txt",
"conans/requirements_dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
beautifulsoup4==4.12.3
bottle==0.12.25
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
colorama==0.4.5
-e git+https://github.com/conan-io/conan.git@d42ec055d459489c184b160cafbd3f200ceb6d41#egg=conan
cov-core==1.15.0
coverage==4.2
deprecation==2.0.7
dill==0.3.4
distro==1.1.0
fasteners==0.19
future==0.18.3
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
isort==5.10.1
Jinja2==2.11.3
lazy-object-proxy==1.7.1
MarkupSafe==2.0.1
mccabe==0.7.0
mock==1.3.0
node-semver==0.6.1
nose==1.3.7
nose-cov==1.6
packaging==21.3
parameterized==0.8.1
patch-ng==1.17.2
pbr==6.1.1
platformdirs==2.4.0
pluggy==1.0.0
pluginbase==0.7
py==1.11.0
Pygments==2.14.0
PyJWT==1.7.1
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==5.4.1
requests==2.27.1
six==1.12.0
soupsieve==2.3.2.post1
tomli==1.2.3
tqdm==4.64.1
typed-ast==1.5.5
typing_extensions==4.1.1
urllib3==1.26.20
waitress==2.0.0
WebOb==1.8.9
WebTest==2.0.35
wrapt==1.16.0
zipp==3.6.0
| name: conan
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- beautifulsoup4==4.12.3
- bottle==0.12.25
- charset-normalizer==2.0.12
- codecov==2.1.13
- colorama==0.4.5
- cov-core==1.15.0
- coverage==4.2
- deprecation==2.0.7
- dill==0.3.4
- distro==1.1.0
- fasteners==0.19
- future==0.18.3
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- isort==5.10.1
- jinja2==2.11.3
- lazy-object-proxy==1.7.1
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==1.3.0
- node-semver==0.6.1
- nose==1.3.7
- nose-cov==1.6
- packaging==21.3
- parameterized==0.8.1
- patch-ng==1.17.2
- pbr==6.1.1
- platformdirs==2.4.0
- pluggy==1.0.0
- pluginbase==0.7
- py==1.11.0
- pygments==2.14.0
- pyjwt==1.7.1
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==5.4.1
- requests==2.27.1
- six==1.12.0
- soupsieve==2.3.2.post1
- tomli==1.2.3
- tqdm==4.64.1
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- waitress==2.0.0
- webob==1.8.9
- webtest==2.0.35
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/conan
| [
"conans/test/functional/configuration/invalid_settings_test.py::SettingsLoadTestCase::test_invalid_settings",
"conans/test/functional/configuration/invalid_settings_test.py::SettingsLoadTestCase::test_invalid_yaml"
] | [
"conans/test/functional/command/alias_test.py::ConanAliasTest::test_basic_test",
"conans/test/functional/command/create_test.py::CreateTest::test_build_folder_handling_test",
"conans/test/functional/command/create_test.py::CreateTest::test_error_create_name_version",
"conans/test/functional/command/info_test.py::InfoTest::test_common_attributes",
"conans/test/functional/command/info_test.py::InfoTest::test_cwd",
"conans/test/functional/command/info_test.py::InfoTest::test_full_attributes",
"conans/test/functional/command/info_test.py::InfoTest::test_json_info_outputs",
"conans/test/functional/command/install_test.py::InstallTest::test_four_subfolder_install",
"conans/test/functional/command/json_output_test.py::JsonOutputTest::test_errors",
"conans/test/functional/command/json_output_test.py::JsonOutputTest::test_json_create_multiconfig",
"conans/test/functional/command/json_output_test.py::JsonOutputTest::test_json_generation",
"conans/test/functional/command/json_output_test.py::JsonOutputTest::test_simple_fields",
"conans/test/functional/command/search_test.py::SearchTest::test_search_with_none_user_channel",
"conans/test/functional/command/source_test.py::SourceTest::test_conanfile_removed",
"conans/test/functional/command/upload_test.py::UploadTest::test_concurrent_upload",
"conans/test/functional/command/upload_test.py::UploadTest::test_no_remote_recipe_manifest",
"conans/test/functional/command/upload_test.py::UploadTest::test_upload_force",
"conans/test/functional/configuration/profile_test.py::ProfileTest::test_package_test",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_complex_deps_propagation",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_complex_deps_propagation_append",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_complex_deps_propagation_override",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_conan_info_cache_and_priority",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_conaninfo_filtered",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_override_simple",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_override_simple2",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_package_env_working",
"conans/test/functional/environment/apply_environment_test.py::ConanEnvTest::test_run_env",
"conans/test/functional/generators/cmake_test.py::CMakeGeneratorTest::test_no_check_compiler",
"conans/test/functional/generators/generators_test.py::GeneratorsTest::test_base",
"conans/test/functional/generators/generators_test.py::GeneratorsTest::test_error",
"conans/test/functional/generators/generators_test.py::GeneratorsTest::test_qmake",
"conans/test/functional/generators/generators_test.py::GeneratorsTest::test_qmake_hyphen_dot",
"conans/test/functional/generators/generators_test.py::GeneratorsTest::test_srcdirs",
"conans/test/functional/generators/json_test.py::JsonTest::test_multiconfig",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_multiple_private_skip",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_own_private_skip",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_private_dont_skip",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_private_regression_skip",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_private_skip",
"conans/test/functional/graph/private_deps_test.py::PrivateBinariesTest::test_transitive_private",
"conans/test/functional/graph_lock/graph_lock_test.py::GraphLockCustomFilesTest::test",
"conans/test/functional/graph_lock/graph_lock_test.py::GraphLockWarningsTestCase::test_override",
"conans/test/functional/graph_lock/graph_lock_test.py::GraphLockBuildRequireErrorTestCase::test",
"conans/test/functional/graph_lock/graph_lock_test.py::GraphLockModifyConanfileTestCase::test",
"conans/test/functional/old/conanfile_extend_test.py::ConanfileExtendTest::test_base",
"conans/test/functional/old/conanfile_extend_test.py::ConanfileExtendTest::test_txt",
"conans/test/functional/old/path_exists_test.py::PathExistsTest::test_conanfile_not_found",
"conans/test/functional/old/sysroot_test.py::SysrootTest::test",
"conans/test/functional/old/user_info_test.py::UserInfoTest::test_user_info_propagation",
"conans/test/functional/python_requires/python_requires_test.py::PythonExtendTest::test_with_python_requires",
"conans/test/functional/python_requires/python_requires_test.py::PythonRequiresNestedTest::test_python_requires_with_alias_0",
"conans/test/functional/python_requires/python_requires_test.py::PythonRequiresNestedTest::test_python_requires_with_alias_1",
"conans/test/functional/python_requires/python_requires_test.py::PythonRequiresNestedTest::test_python_requires_with_alias_2",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_add_new_file",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_error_patch",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_fuzzy_patch",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_new_delete",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_new_strip",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_strip_delete",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_strip_delete_no_folder",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_strip_new"
] | [
"conans/test/functional/command/alias_test.py::ConanAliasTest::test_alias_different_name",
"conans/test/functional/command/alias_test.py::ConanAliasTest::test_not_override_package",
"conans/test/functional/command/download_test.py::DownloadTest::test_download_pattern",
"conans/test/functional/command/download_test.py::DownloadTest::test_download_with_full_reference_and_p",
"conans/test/functional/command/download_test.py::DownloadTest::test_download_with_package_and_recipe_args",
"conans/test/functional/command/download_test.py::DownloadTest::test_download_wrong_id",
"conans/test/functional/command/inspect_test.py::ConanInspectTest::test_default_options_list",
"conans/test/functional/command/inspect_test.py::ConanInspectTest::test_inspect_filled_attributes",
"conans/test/functional/command/inspect_test.py::ConanInspectTest::test_mixed_options_instances",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_default_options_dict",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_default_options_list",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_incompatible_commands",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_invalid_field",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_no_field_or_multiple",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_options_dict",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_private_field",
"conans/test/functional/command/inspect_test.py::InspectRawTest::test_settings",
"conans/test/functional/command/remove_test.py::RemoveTest::test_remove_any_package_channel",
"conans/test/functional/command/remove_test.py::RemoveTest::test_remove_any_package_version",
"conans/test/functional/command/remove_test.py::RemoveTest::test_remove_any_package_version_channel",
"conans/test/functional/command/search_test.py::SearchOutdatedTest::test_exception_client_without_revs",
"conans/test/functional/command/search_test.py::SearchRemoteAllTestCase::test_search_by_name",
"conans/test/functional/command/upload_test.py::UploadTest::test_upload_not_existing",
"conans/test/functional/configuration/profile_test.py::ProfileTest::test_profile_relative_cwd",
"conans/test/functional/configuration/profile_test.py::ProfileAggregationTest::test_create",
"conans/test/functional/configuration/profile_test.py::ProfileAggregationTest::test_export_pkg",
"conans/test/functional/configuration/profile_test.py::ProfileAggregationTest::test_info",
"conans/test/functional/configuration/profile_test.py::ProfileAggregationTest::test_install",
"conans/test/unittests/client/cmd/export_test.py::ReplaceSCMDataInConanfileTest::test_conanfile_after_recipe",
"conans/test/unittests/client/cmd/export_test.py::ReplaceSCMDataInConanfileTest::test_conanfile_after_scm",
"conans/test/unittests/client/cmd/export_test.py::ReplaceSCMDataInConanfileTest::test_conanfile_after_scm_and_recipe",
"conans/test/unittests/client/cmd/export_test.py::ReplaceSCMDataInConanfileTest::test_conanfile_none",
"conans/test/unittests/tools/files_patch_test.py::test_profile",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_from_file_0",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_from_file_1",
"conans/test/unittests/tools/files_patch_test.py::ToolsFilesPatchTest::test_patch_from_str",
"conans/test/unittests/util/conanfile_tools_test.py::ConanfileToolsTest::test_replace_in_file",
"conans/test/unittests/util/conanfile_tools_test.py::ConanfileToolsTest::test_untar",
"conans/test/utils/tools.py::test_profile"
] | [] | MIT License | 5,905 | 2,463 | [
"conans/client/cmd/export_pkg.py",
"conans/client/cmd/uploader.py",
"conans/client/command.py",
"conans/client/conan_api.py",
"conans/client/conf/__init__.py",
"conans/client/rest/uploader_downloader.py",
"conans/model/settings.py",
"conans/server/store/disk_adapter.py"
] |
|
arvkevi__kneed-53 | cb35c9b21ac551eb3af5fa1372745e6bd57f084d | 2019-11-27 16:42:04 | ca7ca3e14f85fd0a2bdeb4260a39133903151e11 | diff --git a/kneed/knee_locator.py b/kneed/knee_locator.py
index 82412bc..c8c1954 100644
--- a/kneed/knee_locator.py
+++ b/kneed/knee_locator.py
@@ -81,12 +81,12 @@ class KneeLocator(object):
# Step 4: Identify local maxima/minima
# local maxima
- self.maxima_indices = argrelextrema(self.y_difference, np.greater)[0]
+ self.maxima_indices = argrelextrema(self.y_difference, np.greater_equal)[0]
self.x_difference_maxima = self.x_difference[self.maxima_indices]
self.y_difference_maxima = self.y_difference[self.maxima_indices]
# local minima
- self.minima_indices = argrelextrema(self.y_difference, np.less)[0]
+ self.minima_indices = argrelextrema(self.y_difference, np.less_equal)[0]
self.x_difference_minima = self.x_difference[self.minima_indices]
self.y_difference_minima = self.y_difference[self.minima_indices]
diff --git a/kneed/version.py b/kneed/version.py
index 3d18726..dd9b22c 100644
--- a/kneed/version.py
+++ b/kneed/version.py
@@ -1,1 +1,1 @@
-__version__ = "0.5.0"
+__version__ = "0.5.1"
| KneeLocator fails if there are flat extrema
This simple example fails:
```python
from kneed import KneeLocator
x = [0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0]
y = [1, 0.787701317715959, 0.7437774524158126, 0.6559297218155198, 0.5065885797950219, 0.36749633967789164, 0.2547584187408492, 0.16251830161054173, 0.10395314787701318, 0.06734992679355783, 0.043923865300146414, 0.027818448023426062, 0.01903367496339678, 0.013177159590043924, 0.010248901903367497, 0.007320644216691069, 0.005856515373352855, 0.004392386530014641]
k = KneeLocator(x, y, curve='convex', direction='decreasing')
```
Output:
`UserWarning: No knee/elbow found`
However, if we obtain the normalized knee plot, it is clear that there is a "flat optimum".

It seems that the algorithm should be able to find that point between 0.4 and 0.5.
I've been able to workaround this issue by modifying `knee_locator.py` in the calculation of `self.maxima_indices` and `self.minima_indices`, by using `np.greater_equal` and `np.less_equal` rather than `np.great` and `np.less`, but I'm not sure if this is a proper solution.
Thanks! | arvkevi/kneed | diff --git a/tests/test_sample.py b/tests/test_sample.py
index f82e9e8..8c3f160 100644
--- a/tests/test_sample.py
+++ b/tests/test_sample.py
@@ -151,3 +151,19 @@ def test_list_input():
x, y = dg.figure2()
kl = KneeLocator(x.tolist(), y.tolist(), S=1.0, curve='concave', interp_method='polynomial')
assert math.isclose(kl.knee, 0.22, rel_tol=0.05)
+
+
+def test_flat_maxima():
+ """The global maxima has a sequentially equal value in the difference curve"""
+ x = [0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0]
+ y = [1, 0.787701317715959, 0.7437774524158126, 0.6559297218155198, 0.5065885797950219, 0.36749633967789164,
+ 0.2547584187408492, 0.16251830161054173, 0.10395314787701318, 0.06734992679355783, 0.043923865300146414,
+ 0.027818448023426062, 0.01903367496339678, 0.013177159590043924, 0.010248901903367497, 0.007320644216691069,
+ 0.005856515373352855, 0.004392386530014641]
+ # When S=0.0 the first local maximum is found.
+ kl = KneeLocator(x, y, curve='convex', direction='decreasing', S=0.0)
+ assert math.isclose(kl.knee, 1.0, rel_tol=0.05)
+
+ # When S=1.0 the global maximum is found.
+ kl = KneeLocator(x, y, curve='convex', direction='decreasing', S=1.0)
+ assert math.isclose(kl.knee, 8.0, rel_tol=0.05)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 2
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=5.0.1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
cycler==0.11.0
exceptiongroup==1.2.2
fonttools==4.38.0
importlib-metadata==6.7.0
iniconfig==2.0.0
joblib==1.3.2
kiwisolver==1.4.5
-e git+https://github.com/arvkevi/kneed.git@cb35c9b21ac551eb3af5fa1372745e6bd57f084d#egg=kneed
matplotlib==3.5.3
numpy==1.21.6
packaging==24.0
Pillow==9.5.0
pluggy==1.2.0
pyparsing==3.1.4
pytest==7.4.4
python-dateutil==2.9.0.post0
scikit-learn==1.0.2
scipy==1.7.3
six==1.17.0
threadpoolctl==3.1.0
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: kneed
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cycler==0.11.0
- exceptiongroup==1.2.2
- fonttools==4.38.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- joblib==1.3.2
- kiwisolver==1.4.5
- matplotlib==3.5.3
- numpy==1.21.6
- packaging==24.0
- pillow==9.5.0
- pluggy==1.2.0
- pyparsing==3.1.4
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- scikit-learn==1.0.2
- scipy==1.7.3
- six==1.17.0
- threadpoolctl==3.1.0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/kneed
| [
"tests/test_sample.py::test_flat_maxima"
] | [] | [
"tests/test_sample.py::test_figure2[interp1d]",
"tests/test_sample.py::test_figure2[polynomial]",
"tests/test_sample.py::test_NoisyGaussian[interp1d]",
"tests/test_sample.py::test_NoisyGaussian[polynomial]",
"tests/test_sample.py::test_concave_increasing[interp1d]",
"tests/test_sample.py::test_concave_increasing[polynomial]",
"tests/test_sample.py::test_concave_decreasing[interp1d]",
"tests/test_sample.py::test_concave_decreasing[polynomial]",
"tests/test_sample.py::test_convex_increasing[interp1d]",
"tests/test_sample.py::test_convex_increasing[polynomial]",
"tests/test_sample.py::test_convex_decreasing[interp1d]",
"tests/test_sample.py::test_convex_decreasing[polynomial]",
"tests/test_sample.py::test_concave_increasing_truncated[interp1d]",
"tests/test_sample.py::test_concave_increasing_truncated[polynomial]",
"tests/test_sample.py::test_concave_decreasing_truncated[interp1d]",
"tests/test_sample.py::test_concave_decreasing_truncated[polynomial]",
"tests/test_sample.py::test_convex_increasing_truncated[interp1d]",
"tests/test_sample.py::test_convex_increasing_truncated[polynomial]",
"tests/test_sample.py::test_convex_decreasing_truncated[interp1d]",
"tests/test_sample.py::test_convex_decreasing_truncated[polynomial]",
"tests/test_sample.py::test_convex_decreasing_bumpy[interp1d-26]",
"tests/test_sample.py::test_convex_decreasing_bumpy[polynomial-28]",
"tests/test_sample.py::test_gamma_online_offline[True-482]",
"tests/test_sample.py::test_gamma_online_offline[False-22]",
"tests/test_sample.py::test_sensitivity",
"tests/test_sample.py::test_sine",
"tests/test_sample.py::test_list_input"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,906 | 341 | [
"kneed/knee_locator.py",
"kneed/version.py"
] |
|
coverahealth__dataspec-42 | 70c2a947f0df85649b4e20f6c26e3f6df10838cd | 2019-11-27 21:50:56 | 2fba98a61212d5d731551f1aed0d07f4a887feb8 | diff --git a/src/dataspec/base.py b/src/dataspec/base.py
index e9364a9..7f07262 100644
--- a/src/dataspec/base.py
+++ b/src/dataspec/base.py
@@ -486,6 +486,22 @@ class ObjectSpec(DictSpec):
raise TypeError("Cannot use a default conformer for an Object")
+def _enum_conformer(e: EnumMeta) -> Conformer:
+ """Create a conformer for Enum types which accepts Enum instances, Enum values,
+ and Enum names."""
+
+ def conform_enum(v) -> Union[EnumMeta, Invalid]:
+ try:
+ return e(v)
+ except ValueError:
+ try:
+ return e[v]
+ except KeyError:
+ return INVALID
+
+ return conform_enum
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class SetSpec(Spec):
tag: Tag
@@ -668,7 +684,7 @@ def make_spec( # pylint: disable=inconsistent-return-statements
return SetSpec(
tag or pred.__name__,
frozenset(chain.from_iterable([mem, mem.name, mem.value] for mem in pred)),
- conformer=conformer or pred,
+ conformer=conformer or _enum_conformer(pred),
)
elif isinstance(pred, tuple):
return TupleSpec.from_val(tag, pred, conformer=conformer)
| Enum conformers fail to conform valid Enum values
Using the example from #35:
```python
from enum import Enum
from dataspec import s
class PhoneType(Enum):
HOME = "Home"
MOBILE = "Mobile"
OFFICE = "Office"
phone_type = s.any(
"phone_type",
s(
{"H", "M", "O", ""},
conformer=lambda v: {
"H": PhoneType.HOME,
"M": PhoneType.MOBILE,
"O": PhoneType.OFFICE,
}.get(v, ""),
),
PhoneType,
)
```
```
>>> phone_type.is_valid("HOME")
True
>>> phone_type.conform("HOME")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/chris/Projects/dataspec/src/dataspec/base.py", line 161, in conform
return self.conform_valid(v)
File "/Users/chris/Projects/dataspec/src/dataspec/base.py", line 155, in conform_valid
return self.conformer(v)
File "/Users/chris/Projects/dataspec/src/dataspec/factories.py", line 115, in _conform_any
conformed = spec.conform_valid(e)
File "/Users/chris/Projects/dataspec/src/dataspec/base.py", line 155, in conform_valid
return self.conformer(v)
File "/Users/chris/.pyenv/versions/3.6.6/lib/python3.6/enum.py", line 291, in __call__
return cls.__new__(cls, value)
File "/Users/chris/.pyenv/versions/3.6.6/lib/python3.6/enum.py", line 533, in __new__
return cls._missing_(value)
File "/Users/chris/.pyenv/versions/3.6.6/lib/python3.6/enum.py", line 546, in _missing_
raise ValueError("%r is not a valid %s" % (value, cls.__name__))
ValueError: 'HOME' is not a valid PhoneType
```
The problem is that we use the enum itself as the conformer, but Enums used as functions only accept the _values_ of the enum, not the _names_. | coverahealth/dataspec | diff --git a/tests/test_base.py b/tests/test_base.py
index 7261475..f0a45c6 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -526,6 +526,8 @@ class TestEnumSetSpec:
assert not enum_spec.is_valid(None)
def test_enum_spec_conformation(self, enum_spec: Spec):
+ assert self.YesNo.YES == enum_spec.conform("YES")
+ assert self.YesNo.NO == enum_spec.conform("NO")
assert self.YesNo.YES == enum_spec.conform("Yes")
assert self.YesNo.NO == enum_spec.conform("No")
assert self.YesNo.YES == enum_spec.conform(self.YesNo.YES)
@@ -533,6 +535,10 @@ class TestEnumSetSpec:
assert INVALID is enum_spec.conform("Maybe")
assert INVALID is enum_spec.conform(None)
+ # Testing the last branch of the conformer
+ assert INVALID is enum_spec.conform_valid("Maybe")
+ assert INVALID is enum_spec.conform_valid(None)
+
class TestTupleSpecValidation:
@pytest.fixture
diff --git a/tests/test_factories.py b/tests/test_factories.py
index ee64c61..0b271ee 100644
--- a/tests/test_factories.py
+++ b/tests/test_factories.py
@@ -115,6 +115,7 @@ class TestAnySpecConformation:
)
def test_conformation_failure(self, spec: Spec, v):
assert INVALID is spec.conform(v)
+ assert INVALID is spec.conform_valid(v)
@pytest.fixture
def tag_spec(self) -> Spec:
@@ -141,6 +142,7 @@ class TestAnySpecConformation:
)
def test_tagged_conformation_failure(self, tag_spec: Spec, v):
assert INVALID is tag_spec.conform(v)
+ assert INVALID is tag_spec.conform_valid(v)
class TestAnySpecWithOuterConformation:
@@ -163,6 +165,7 @@ class TestAnySpecWithOuterConformation:
)
def test_conformation_failure(self, spec: Spec, v):
assert INVALID is spec.conform(v)
+ assert INVALID is spec.conform_valid(v)
@pytest.fixture
def tag_spec(self) -> Spec:
@@ -190,6 +193,7 @@ class TestAnySpecWithOuterConformation:
)
def test_tagged_conformation_failure(self, tag_spec: Spec, v):
assert INVALID is tag_spec.conform(v)
+ assert INVALID is tag_spec.conform_valid(v)
@pytest.mark.parametrize(
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt",
"requirements.dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==24.2.0
black==23.3.0
bleach==6.0.0
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
cryptography==44.0.2
-e git+https://github.com/coverahealth/dataspec.git@70c2a947f0df85649b4e20f6c26e3f6df10838cd#egg=dataspec
distlib==0.3.9
docutils==0.20.1
exceptiongroup==1.2.2
filelock==3.12.2
idna==3.10
importlib-metadata==6.7.0
importlib-resources==5.12.0
iniconfig==2.0.0
isort==5.11.5
jaraco.classes==3.2.3
jeepney==0.9.0
keyring==24.1.1
markdown-it-py==2.2.0
mdurl==0.1.2
more-itertools==9.1.0
mypy-extensions==1.0.0
packaging==24.0
pathspec==0.11.2
phonenumbers==9.0.2
pkginfo==1.10.0
platformdirs==4.0.0
pluggy==1.2.0
pycparser==2.21
Pygments==2.17.2
pyproject-api==1.5.3
pytest==7.4.4
python-dateutil==2.9.0.post0
readme-renderer==37.3
requests==2.31.0
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==13.8.1
SecretStorage==3.3.3
six==1.17.0
tomli==2.0.1
tox==4.8.0
twine==4.0.2
typed-ast==1.5.5
typing_extensions==4.7.1
urllib3==2.0.7
virtualenv==20.26.6
webencodings==0.5.1
zipp==3.15.0
| name: dataspec
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==24.2.0
- black==23.3.0
- bleach==6.0.0
- cachetools==5.5.2
- cffi==1.15.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- cryptography==44.0.2
- dataspec==0.2.2
- distlib==0.3.9
- docutils==0.20.1
- exceptiongroup==1.2.2
- filelock==3.12.2
- idna==3.10
- importlib-metadata==6.7.0
- importlib-resources==5.12.0
- iniconfig==2.0.0
- isort==5.11.5
- jaraco-classes==3.2.3
- jeepney==0.9.0
- keyring==24.1.1
- markdown-it-py==2.2.0
- mdurl==0.1.2
- more-itertools==9.1.0
- mypy-extensions==1.0.0
- packaging==24.0
- pathspec==0.11.2
- phonenumbers==9.0.2
- pkginfo==1.10.0
- platformdirs==4.0.0
- pluggy==1.2.0
- pycparser==2.21
- pygments==2.17.2
- pyproject-api==1.5.3
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- readme-renderer==37.3
- requests==2.31.0
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==13.8.1
- secretstorage==3.3.3
- six==1.17.0
- tomli==2.0.1
- tox==4.8.0
- twine==4.0.2
- typed-ast==1.5.5
- typing-extensions==4.7.1
- urllib3==2.0.7
- virtualenv==20.26.6
- webencodings==0.5.1
- zipp==3.15.0
prefix: /opt/conda/envs/dataspec
| [
"tests/test_base.py::TestEnumSetSpec::test_enum_spec_conformation"
] | [] | [
"tests/test_base.py::TestCollSpecValidation::test_error_details[v0-path0]",
"tests/test_base.py::TestCollSpecValidation::test_error_details[v1-path1]",
"tests/test_base.py::TestCollSpecValidation::test_error_details[v2-path2]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_min_minlength[-1]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_min_minlength[-100]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_int_minlength[-0.5]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_int_minlength[0.5]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_int_minlength[2.71]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec[coll1]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec_failure[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec_failure[coll1]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec_failure[coll2]",
"tests/test_base.py::TestCollSpecValidation::TestMinlengthValidation::test_minlength_spec_failure[coll3]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_min_maxlength[-1]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_min_maxlength[-100]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_int_maxlength[-0.5]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_int_maxlength[0.5]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_int_maxlength[2.71]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec[coll1]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec[coll2]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec_failure[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec_failure[coll1]",
"tests/test_base.py::TestCollSpecValidation::TestMaxlengthValidation::test_maxlength_spec_failure[coll2]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_min_count[-1]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_min_count[-100]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_int_count[-0.5]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_int_count[0.5]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_int_count[2.71]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_maxlength_spec[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_spec_failure[coll0]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_spec_failure[coll1]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_spec_failure[coll2]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_spec_failure[coll3]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_spec_failure[coll4]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts0]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts1]",
"tests/test_base.py::TestCollSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts2]",
"tests/test_base.py::TestCollSpecValidation::test_minlength_and_maxlength_agreement",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation[frozenset]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation[list]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation[set]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation[tuple]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation_failure[frozenset]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation_failure[list]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation_failure[set]",
"tests/test_base.py::TestCollSpecValidation::TestKindValidation::test_kind_validation_failure[tuple]",
"tests/test_base.py::TestCollSpecConformation::test_coll_conformation",
"tests/test_base.py::TestCollSpecConformation::test_set_coll_conformation",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec[d0]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec[d1]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec[d2]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[None]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[a",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[0]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[3.14]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[True]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[False]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d6]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d7]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d8]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d9]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d10]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d11]",
"tests/test_base.py::TestDictSpecValidation::test_dict_spec_failure[d12]",
"tests/test_base.py::TestDictSpecValidation::test_error_details[v0-path0]",
"tests/test_base.py::TestDictSpecValidation::test_error_details[v1-path1]",
"tests/test_base.py::TestDictSpecValidation::test_error_details[v2-path2]",
"tests/test_base.py::TestDictSpecConformation::test_dict_conformation",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec[o0]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec[o1]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec[o2]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec_failure[o0]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec_failure[o1]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec_failure[o2]",
"tests/test_base.py::TestObjectSpecValidation::test_obj_spec_failure[o3]",
"tests/test_base.py::TestSetSpec::test_set_spec",
"tests/test_base.py::TestSetSpec::test_set_spec_conformation",
"tests/test_base.py::TestEnumSetSpec::test_enum_spec",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec[row0]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec[row1]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec[row2]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[None]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[a",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[0]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[3.14]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[True]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[False]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row6]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row7]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row8]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row9]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row10]",
"tests/test_base.py::TestTupleSpecValidation::test_tuple_spec_failure[row11]",
"tests/test_base.py::TestTupleSpecValidation::test_error_details[v0-path0]",
"tests/test_base.py::TestTupleSpecValidation::test_error_details[v1-path1]",
"tests/test_base.py::TestTupleSpecConformation::test_tuple_conformation",
"tests/test_base.py::TestTupleSpecConformation::test_namedtuple_conformation",
"tests/test_base.py::TestTypeSpec::test_typecheck[bool-vals0]",
"tests/test_base.py::TestTypeSpec::test_typecheck[bytes-vals1]",
"tests/test_base.py::TestTypeSpec::test_typecheck[dict-vals2]",
"tests/test_base.py::TestTypeSpec::test_typecheck[float-vals3]",
"tests/test_base.py::TestTypeSpec::test_typecheck[int-vals4]",
"tests/test_base.py::TestTypeSpec::test_typecheck[list-vals5]",
"tests/test_base.py::TestTypeSpec::test_typecheck[set-vals6]",
"tests/test_base.py::TestTypeSpec::test_typecheck[str-vals7]",
"tests/test_base.py::TestTypeSpec::test_typecheck[tuple-vals8]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[bool]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[bytes]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[dict]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[float]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[int]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[list]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[set]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[str]",
"tests/test_base.py::TestTypeSpec::test_typecheck_failure[tuple]",
"tests/test_factories.py::TestAllSpecValidation::test_all_validation[c5a28680-986f-4f0d-8187-80d1fbe22059]",
"tests/test_factories.py::TestAllSpecValidation::test_all_validation[3BE59FF6-9C75-4027-B132-C9792D84547D]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[6281d852-ef4d-11e9-9002-4c327592fea9]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[0e8d7ceb-56e8-36d2-9b54-ea48d4bdea3f]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[10988ff4-136c-5ca7-ab35-a686a56c22c4]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[50]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[abcde]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[ABCDe]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[51]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[3.14]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[None]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[v10]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[v11]",
"tests/test_factories.py::TestAllSpecValidation::test_all_failure[v12]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[yes-YesNo.YES]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[Yes-YesNo.YES]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[yES-YesNo.YES]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[YES-YesNo.YES]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[no-YesNo.NO]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[No-YesNo.NO]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[nO-YesNo.NO]",
"tests/test_factories.py::TestAllSpecConformation::test_all_spec_conformation[NO-YesNo.NO]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation[50]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation[51]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation[3.14]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation_failure[None]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation_failure[v1]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation_failure[v2]",
"tests/test_factories.py::TestAnySpecValidation::test_any_validation_failure[v3]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation[5-50]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation[5-51]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation[3.14-3.14]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation[-10--10]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[None]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[v1]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[v2]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[v3]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[500x]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[Just",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[500]",
"tests/test_factories.py::TestAnySpecConformation::test_conformation_failure[byteword]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation[expected0-5]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation[expected1-5]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation[expected2-3.14]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation[expected3--10]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[None]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[v1]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[v2]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[v3]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[500x]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[Just",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[500]",
"tests/test_factories.py::TestAnySpecConformation::test_tagged_conformation_failure[byteword]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation[10-50]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation[10-51]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation[8.14-3.14]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation[-5--10]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[None]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[v1]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[v2]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[v3]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[500x]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[Just",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[500]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_conformation_failure[byteword]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation[expected0-5]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation[expected1-5]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation[expected2-3.14]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation[expected3--10]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[None]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[v1]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[v2]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[v3]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[500x]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[Just",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[500]",
"tests/test_factories.py::TestAnySpecWithOuterConformation::test_tagged_conformation_failure[byteword]",
"tests/test_factories.py::test_is_any[None]",
"tests/test_factories.py::test_is_any[25]",
"tests/test_factories.py::test_is_any[3.14]",
"tests/test_factories.py::test_is_any[3j]",
"tests/test_factories.py::test_is_any[v4]",
"tests/test_factories.py::test_is_any[v5]",
"tests/test_factories.py::test_is_any[v6]",
"tests/test_factories.py::test_is_any[v7]",
"tests/test_factories.py::test_is_any[abcdef]",
"tests/test_factories.py::TestBoolValidation::test_bool[True]",
"tests/test_factories.py::TestBoolValidation::test_bool[False]",
"tests/test_factories.py::TestBoolValidation::test_bool_failure[1]",
"tests/test_factories.py::TestBoolValidation::test_bool_failure[0]",
"tests/test_factories.py::TestBoolValidation::test_bool_failure[]",
"tests/test_factories.py::TestBoolValidation::test_bool_failure[a",
"tests/test_factories.py::TestBoolValidation::test_is_false",
"tests/test_factories.py::TestBoolValidation::test_is_true_failure[False]",
"tests/test_factories.py::TestBoolValidation::test_is_true_failure[1]",
"tests/test_factories.py::TestBoolValidation::test_is_true_failure[0]",
"tests/test_factories.py::TestBoolValidation::test_is_true_failure[]",
"tests/test_factories.py::TestBoolValidation::test_is_true_failure[a",
"tests/test_factories.py::TestBoolValidation::test_is_true",
"tests/test_factories.py::TestBytesSpecValidation::test_is_bytes[]",
"tests/test_factories.py::TestBytesSpecValidation::test_is_bytes[a",
"tests/test_factories.py::TestBytesSpecValidation::test_is_bytes[\\xf0\\x9f\\x98\\x8f]",
"tests/test_factories.py::TestBytesSpecValidation::test_is_bytes[v3]",
"tests/test_factories.py::TestBytesSpecValidation::test_is_bytes[v4]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[25]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[None]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[3.14]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[v3]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[v4]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[]",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[a",
"tests/test_factories.py::TestBytesSpecValidation::test_not_is_bytes[\\U0001f60f]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_min_count[-1]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_min_count[-100]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_int_count[-0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_int_count[0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_int_count[2.71]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec[xxx]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec[xxy]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec[773]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec[833]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec_failure[]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec_failure[x]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec_failure[xx]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec_failure[xxxx]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_spec_failure[xxxxx]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_and_minlength_or_maxlength_agreement[opts0]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_and_minlength_or_maxlength_agreement[opts1]",
"tests/test_factories.py::TestBytesSpecValidation::TestLengthValidation::test_length_and_minlength_or_maxlength_agreement[opts2]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_min_minlength[-1]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_min_minlength[-100]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_int_minlength[-0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_int_minlength[0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_int_minlength[2.71]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_minlength[abcde]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_minlength[abcdef]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[None]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[25]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[3.14]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[v3]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[v4]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[a]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[ab]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[abc]",
"tests/test_factories.py::TestBytesSpecValidation::TestMinlengthSpec::test_is_not_minlength[abcd]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_min_maxlength[-1]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_min_maxlength[-100]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_int_maxlength[-0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_int_maxlength[0.5]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_int_maxlength[2.71]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[a]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[ab]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[abc]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[abcd]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_maxlength[abcde]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[None]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[25]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[3.14]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[v3]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[v4]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[abcdef]",
"tests/test_factories.py::TestBytesSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[abcdefg]",
"tests/test_factories.py::TestBytesSpecValidation::test_minlength_and_maxlength_agreement",
"tests/test_factories.py::TestEmailSpecValidation::test_invalid_email_specs[spec_kwargs0]",
"tests/test_factories.py::TestEmailSpecValidation::test_invalid_email_specs[spec_kwargs1]",
"tests/test_factories.py::TestEmailSpecValidation::test_invalid_email_specs[spec_kwargs2]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs0]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs1]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs2]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs3]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs4]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_email_str[spec_kwargs5]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs0]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs1]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs2]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs3]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs4]",
"tests/test_factories.py::TestEmailSpecValidation::test_is_not_email_str[spec_kwargs5]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst[v0]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[None]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[25]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[3.14]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[3j]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v4]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v5]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v6]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v7]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[abcdef]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v9]",
"tests/test_factories.py::TestInstSpecValidation::test_is_inst_failure[v10]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_inst_spec[2003-01-14",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_inst_spec[0994-12-31",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[994-12-31]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[2000-13-20]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[1984-09-32]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[84-10-4]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[23:18:22]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[11:40:72]",
"tests/test_factories.py::TestInstSpecValidation::TestFormatSpec::test_is_not_inst_spec[06:89:13]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec[v0]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec[v1]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec[v2]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec[v3]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec_failure[v0]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec_failure[v1]",
"tests/test_factories.py::TestInstSpecValidation::TestBeforeSpec::test_before_spec_failure[v2]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec[v0]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec[v1]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec[v2]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec[v3]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec_failure[v0]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec_failure[v1]",
"tests/test_factories.py::TestInstSpecValidation::TestAfterSpec::test_after_spec_failure[v2]",
"tests/test_factories.py::TestInstSpecValidation::test_before_after_agreement",
"tests/test_factories.py::TestInstSpecValidation::TestIsAwareSpec::test_aware_spec",
"tests/test_factories.py::TestInstSpecValidation::TestIsAwareSpec::test_aware_spec_failure",
"tests/test_factories.py::TestDateSpecValidation::test_is_date[v0]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date[v1]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[None]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[25]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[3.14]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[3j]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[v4]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[v5]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[v6]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[v7]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[abcdef]",
"tests/test_factories.py::TestDateSpecValidation::test_is_date_failure[v9]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_date_spec[2003-01-14-parsed0]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_date_spec[0994-12-31-parsed1]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_not_date_spec[994-12-31]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_not_date_spec[2000-13-20]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_not_date_spec[1984-09-32]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_is_not_date_spec[84-10-4]",
"tests/test_factories.py::TestDateSpecValidation::TestFormatSpec::test_date_spec_with_time_fails",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec[v0]",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec[v1]",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec[v2]",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec[v3]",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec_failure[v0]",
"tests/test_factories.py::TestDateSpecValidation::TestBeforeSpec::test_before_spec_failure[v1]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec[v0]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec[v1]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec[v2]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec_failure[v0]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec_failure[v1]",
"tests/test_factories.py::TestDateSpecValidation::TestAfterSpec::test_after_spec_failure[v2]",
"tests/test_factories.py::TestDateSpecValidation::test_before_after_agreement",
"tests/test_factories.py::TestDateSpecValidation::TestIsAwareSpec::test_aware_spec",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time[v0]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[None]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[25]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[3.14]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[3j]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v4]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v5]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v6]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v7]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[abcdef]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v9]",
"tests/test_factories.py::TestTimeSpecValidation::test_is_time_failure[v10]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_is_time_spec[01:41:16-parsed0]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_is_time_spec[08:00:00-parsed1]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_is_not_time_spec[23:18:22]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_is_not_time_spec[11:40:72]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_is_not_time_spec[06:89:13]",
"tests/test_factories.py::TestTimeSpecValidation::TestFormatSpec::test_time_spec_with_date_fails",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec[v0]",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec[v1]",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec[v2]",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec_failure[v0]",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec_failure[v1]",
"tests/test_factories.py::TestTimeSpecValidation::TestBeforeSpec::test_before_spec_failure[v2]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec[v0]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec[v1]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec[v2]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec_failure[v0]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec_failure[v1]",
"tests/test_factories.py::TestTimeSpecValidation::TestAfterSpec::test_after_spec_failure[v2]",
"tests/test_factories.py::TestTimeSpecValidation::test_before_after_agreement",
"tests/test_factories.py::TestTimeSpecValidation::TestIsAwareSpec::test_aware_spec",
"tests/test_factories.py::TestTimeSpecValidation::TestIsAwareSpec::test_aware_spec_failure",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003-09-25T10:49:41-datetime_obj0]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003-09-25T10:49-datetime_obj1]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003-09-25T10-datetime_obj2]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003-09-25-datetime_obj3]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[20030925T104941-datetime_obj4]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[20030925T1049-datetime_obj5]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[20030925T10-datetime_obj6]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[20030925-datetime_obj7]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003-09-25",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[19760704-datetime_obj9]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[0099-01-01T00:00:00-datetime_obj10]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[0031-01-01T00:00:00-datetime_obj11]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[20080227T21:26:01.123456789-datetime_obj12]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[0003-03-04-datetime_obj13]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[950404",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[Thu",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[199709020908-datetime_obj17]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[19970902090807-datetime_obj18]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[09-25-2003-datetime_obj19]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[25-09-2003-datetime_obj20]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10-09-2003-datetime_obj21]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10-09-03-datetime_obj22]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003.09.25-datetime_obj23]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[09.25.2003-datetime_obj24]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[25.09.2003-datetime_obj25]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10.09.2003-datetime_obj26]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10.09.03-datetime_obj27]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003/09/25-datetime_obj28]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[09/25/2003-datetime_obj29]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[25/09/2003-datetime_obj30]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10/09/2003-datetime_obj31]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10/09/03-datetime_obj32]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[2003",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[09",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[25",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[10",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[03",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[Wed,",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[1996.July.10",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[July",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[7",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[4",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[7-4-76-datetime_obj48]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[0:01:02",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[Mon",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[04.04.95",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[Jan",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[3rd",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[5th",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[1st",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[13NOV2017-datetime_obj57]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation[December.0031.30-datetime_obj58]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[abcde]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[Tue",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[5]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[3.14]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[None]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[v6]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[v7]",
"tests/test_factories.py::TestInstStringSpecValidation::test_inst_str_validation_failure[v8]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[2003-09-25T10:49:41-datetime_obj0]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[2003-09-25T10:49-datetime_obj1]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[2003-09-25T10-datetime_obj2]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[2003-09-25-datetime_obj3]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[20030925T104941-datetime_obj4]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[20030925T1049-datetime_obj5]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[20030925T10-datetime_obj6]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[20030925-datetime_obj7]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[2003-09-25",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[19760704-datetime_obj9]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[0099-01-01T00:00:00-datetime_obj10]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[0031-01-01T00:00:00-datetime_obj11]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[20080227T21:26:01.123456789-datetime_obj12]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[0003-03-04-datetime_obj13]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation[950404",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[abcde]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[Tue",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[5]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[3.14]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[None]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[v6]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[v7]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[v8]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[Thu",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[199709020908]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[19970902090807]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[09-25-2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[25-09-2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10-09-2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10-09-03]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[2003.09.25]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[09.25.2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[25.09.2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10.09.2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10.09.03]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[2003/09/25]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[09/25/2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[25/09/2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10/09/2003]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10/09/03]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[2003",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[09",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[25",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[10",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[03",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[Wed,",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[1996.July.10",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[July",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[7",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[4",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[7-4-76]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[0:01:02",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[Mon",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[04.04.95",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[Jan",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[3rd",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[5th",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[1st",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[13NOV2017]",
"tests/test_factories.py::TestInstStringSpecValidation::test_iso_inst_str_validation_failure[December.0031.30]",
"tests/test_factories.py::test_nilable[None]",
"tests/test_factories.py::test_nilable[]",
"tests/test_factories.py::test_nilable[a",
"tests/test_factories.py::TestNumSpecValidation::test_is_num[-3]",
"tests/test_factories.py::TestNumSpecValidation::test_is_num[25]",
"tests/test_factories.py::TestNumSpecValidation::test_is_num[3.14]",
"tests/test_factories.py::TestNumSpecValidation::test_is_num[-2.72]",
"tests/test_factories.py::TestNumSpecValidation::test_is_num[-33]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[4j]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[6j]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[a",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[\\U0001f60f]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[None]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[v6]",
"tests/test_factories.py::TestNumSpecValidation::test_not_is_num[v7]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_above_min[5]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_above_min[6]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_above_min[100]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_above_min[300.14]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_above_min[5.83838828283]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[None]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[-50]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[4.9]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[4]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[0]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[3.14]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[v6]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[v7]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[a]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[ab]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[abc]",
"tests/test_factories.py::TestNumSpecValidation::TestMinSpec::test_is_not_above_min[abcd]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[-50]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[4.9]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[4]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[0]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[3.14]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_below_max[5]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[None]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[6]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[100]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[300.14]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[5.83838828283]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[v5]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[v6]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[a]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[ab]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[abc]",
"tests/test_factories.py::TestNumSpecValidation::TestMaxSpec::test_is_not_below_max[abcd]",
"tests/test_factories.py::TestNumSpecValidation::test_min_and_max_agreement",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[US]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[Us]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[uS]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[us]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[GB]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[Gb]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[gB]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[gb]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[DE]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[dE]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_regions[De]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[USA]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[usa]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[america]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[ZZ]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[zz]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_regions[FU]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[9175555555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[+19175555555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[(917)",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[917-555-5555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[1-917-555-5555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[917.555.5555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_valid_phone_number[917",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[None]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[-50]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[4.9]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[4]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[0]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[3.14]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[v6]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[v7]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[917555555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[+1917555555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[(917)",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[917-555-555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[1-917-555-555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[917.555.555]",
"tests/test_factories.py::TestPhoneNumberStringSpecValidation::test_invalid_phone_number[917",
"tests/test_factories.py::TestStringSpecValidation::test_is_str[]",
"tests/test_factories.py::TestStringSpecValidation::test_is_str[a",
"tests/test_factories.py::TestStringSpecValidation::test_is_str[\\U0001f60f]",
"tests/test_factories.py::TestStringSpecValidation::test_not_is_str[25]",
"tests/test_factories.py::TestStringSpecValidation::test_not_is_str[None]",
"tests/test_factories.py::TestStringSpecValidation::test_not_is_str[3.14]",
"tests/test_factories.py::TestStringSpecValidation::test_not_is_str[v3]",
"tests/test_factories.py::TestStringSpecValidation::test_not_is_str[v4]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_min_count[-1]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_min_count[-100]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_int_count[-0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_int_count[0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_int_count[2.71]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec[xxx]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec[xxy]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec[773]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec[833]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec_failure[]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec_failure[x]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec_failure[xx]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec_failure[xxxx]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_spec_failure[xxxxx]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts0]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts1]",
"tests/test_factories.py::TestStringSpecValidation::TestCountValidation::test_count_and_minlength_or_maxlength_agreement[opts2]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_min_minlength[-1]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_min_minlength[-100]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_int_minlength[-0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_int_minlength[0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_int_minlength[2.71]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_minlength[abcde]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_minlength[abcdef]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[None]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[25]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[3.14]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[v3]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[v4]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[a]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[ab]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[abc]",
"tests/test_factories.py::TestStringSpecValidation::TestMinlengthSpec::test_is_not_minlength[abcd]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_min_maxlength[-1]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_min_maxlength[-100]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_int_maxlength[-0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_int_maxlength[0.5]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_int_maxlength[2.71]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[a]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[ab]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[abc]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[abcd]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_maxlength[abcde]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[None]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[25]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[3.14]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[v3]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[v4]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[abcdef]",
"tests/test_factories.py::TestStringSpecValidation::TestMaxlengthSpec::test_is_not_maxlength[abcdefg]",
"tests/test_factories.py::TestStringSpecValidation::test_minlength_and_maxlength_agreement",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?0-10017]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?0-10017-3332]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?0-37779]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?0-37779-2770]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?0-00000]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?1-10017]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?1-10017-3332]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?1-37779]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?1-37779-2770]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_zipcode[\\\\d{5}(-\\\\d{4})?1-00000]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-None]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-25]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-3.14]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-v3]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-v4]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-abcdef]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-abcdefg]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-100017]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?0-10017-383]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-None]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-25]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-3.14]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-v3]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-v4]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-abcdef]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-abcdefg]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-100017]",
"tests/test_factories.py::TestStringSpecValidation::TestRegexSpec::test_is_not_zipcode[\\\\d{5}(-\\\\d{4})?1-10017-383]",
"tests/test_factories.py::TestStringSpecValidation::test_regex_and_format_agreement[opts0]",
"tests/test_factories.py::TestStringSpecValidation::test_regex_and_format_agreement[opts1]",
"tests/test_factories.py::TestStringSpecValidation::test_regex_and_format_agreement[opts2]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_date_str[2019-10-12]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_date_str[1945-09-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_date_str[1066-10-14]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[None]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[25]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[3.14]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[v3]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[v4]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[abcdef]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[abcdefg]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[100017]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[10017-383]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[1945-9-2]",
"tests/test_factories.py::TestStringFormatValidation::TestISODateFormat::test_is_not_date_str[430-10-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[2019-10-12T18:03:50.617-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[1945-09-02T18:03:50.617-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[1066-10-14T18:03:50.617-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[2019-10-12]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[1945-09-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_datetime_str[1066-10-14]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[None]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[25]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[3.14]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[v3]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[v4]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[abcdef]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[abcdefg]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[100017]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[10017-383]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[1945-9-2]",
"tests/test_factories.py::TestStringFormatValidation::TestISODatetimeFormat::test_is_not_datetime_str[430-10-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18.335]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18.335-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03.335]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03.335-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50.617]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50.617-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50.617332]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_time_str[18:03:50.617332-00:00]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[None]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[25]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[3.14]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[v3]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[v4]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[abcdef]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[abcdefg]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[100017]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[10017-383]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[1945-9-2]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[430-10-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[2019-10-12]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[1945-09-02]",
"tests/test_factories.py::TestStringFormatValidation::TestISOTimeFormat::test_is_not_time_str[1066-10-14]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_uuid_str[91d7e5f0-7567-4569-a61d-02ed57507f47]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_uuid_str[91d7e5f075674569a61d02ed57507f47]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_uuid_str[06130510-83A5-478B-B65C-6A8DC2104E2F]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_uuid_str[0613051083A5478BB65C6A8DC2104E2F]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[None]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[25]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[3.14]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[v3]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[v4]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[abcdef]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[abcdefg]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[100017]",
"tests/test_factories.py::TestStringFormatValidation::TestUUIDFormat::test_is_not_uuid_str[10017-383]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_specs[spec_kwargs0]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_specs[spec_kwargs1]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_specs[spec_kwargs2]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_specs[spec_kwargs3]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_spec_argument_types[spec_kwargs0]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url_spec_argument_types[spec_kwargs1]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[None]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[25]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[3.14]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[v3]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[v4]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[v5]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[v6]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[]",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_url[//[coverahealth.com]",
"tests/test_factories.py::TestURLSpecValidation::test_valid_query_str",
"tests/test_factories.py::TestURLSpecValidation::test_invalid_query_str",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs0]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs1]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs2]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs3]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs4]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs5]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs6]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs7]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs8]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs9]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs10]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs11]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs12]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs13]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs14]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs15]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs16]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs17]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs18]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs19]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs20]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs21]",
"tests/test_factories.py::TestURLSpecValidation::test_is_url_str[spec_kwargs22]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs0]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs1]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs2]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs3]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs4]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs5]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs6]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs7]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs8]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs9]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs10]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs11]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs12]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs13]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs14]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs15]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs16]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs17]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs18]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs19]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs20]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs21]",
"tests/test_factories.py::TestURLSpecValidation::test_is_not_url_str[spec_kwargs22]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation[6281d852-ef4d-11e9-9002-4c327592fea9]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation[0e8d7ceb-56e8-36d2-9b54-ea48d4bdea3f]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation[c5a28680-986f-4f0d-8187-80d1fbe22059]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation[3BE59FF6-9C75-4027-B132-C9792D84547D]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation[10988ff4-136c-5ca7-ab35-a686a56c22c4]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[50]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[abcde]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[ABCDe]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[51]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[3.14]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[None]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[v7]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[v8]",
"tests/test_factories.py::TestUUIDSpecValidation::test_uuid_validation_failure[v9]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_invalid_uuid_version_spec[versions0]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_invalid_uuid_version_spec[versions1]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_invalid_uuid_version_spec[versions2]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation[6281d852-ef4d-11e9-9002-4c327592fea9]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation[c5a28680-986f-4f0d-8187-80d1fbe22059]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation[3BE59FF6-9C75-4027-B132-C9792D84547D]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[0e8d7ceb-56e8-36d2-9b54-ea48d4bdea3f]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[10988ff4-136c-5ca7-ab35-a686a56c22c4]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[50]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[abcde]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[ABCDe]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[51]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[3.14]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[None]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[v9]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[v10]",
"tests/test_factories.py::TestUUIDSpecValidation::TestUUIDVersionSpecValidation::test_uuid_validation_failure[v11]"
] | [] | MIT License | 5,913 | 333 | [
"src/dataspec/base.py"
] |
|
mroberge__hydrofunctions-61 | bf1f296f4a2e31a0df9bd07860bbfa90e71e7890 | 2019-11-28 03:59:33 | bf1f296f4a2e31a0df9bd07860bbfa90e71e7890 | diff --git a/hydrofunctions/charts.py b/hydrofunctions/charts.py
index 8e41052..aba1b8e 100644
--- a/hydrofunctions/charts.py
+++ b/hydrofunctions/charts.py
@@ -20,7 +20,7 @@ from matplotlib.ticker import NullFormatter
import numpy as np
-def flow_duration(Qdf, xscale='logit', yscale='log', ylabel='Stream Discharge (m³/s)', symbol='.', legend=True):
+def flow_duration(Qdf, xscale='logit', yscale='log', ylabel='Stream Discharge (m³/s)', symbol='.', legend=True, legend_loc='best', title=''):
"""Creates a flow duration chart from a dataframe of discharges.
Args:
@@ -56,6 +56,17 @@ def flow_duration(Qdf, xscale='logit', yscale='log', ylabel='Stream Discharge (m
legend (bool, default: True): whether the legend should be plotted.
+ legend_loc (str, default: 'best'): the location of the legend.
+
+ * 'best': Automatically choose the option below with the least overlap.
+ * 'upper left', 'upper right', 'lower left', 'lower right': place the legend at the corresponding corner of the axes/figure.
+ * 'upper center', 'lower center', 'center left', 'center right': place the legend at the center of the corresponding edge of the axes/figure.
+ * 'center': place the legend at the center of the axes/figure.
+ * The location can also be a 2-tuple giving the coordinates of the lower-left corner of the legend in axes coordinates.
+
+ title (str, default: ''): text to use as a figure title. If no text
+ is provided, no title will be created (default).
+
Returns:
fig (matplotlib.figure.Figure):
a matplotlib figure. This will plot immediately in a Jupyter
@@ -76,14 +87,16 @@ def flow_duration(Qdf, xscale='logit', yscale='log', ylabel='Stream Discharge (m
ax.set_yscale(yscale)
ax.set_ylabel(ylabel)
if legend:
- ax.legend()
+ ax.legend(loc=legend_loc)
+ if title:
+ ax.title.set_text(title)
# A pyplot bug causes a valueError value if the xlabel is set.
#ax.set_xlabel('Probability of Exceedence')
ax.xaxis.set_minor_formatter(NullFormatter())
return fig, ax
-def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s)'):
+def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s)', legend=True, legend_loc='best', title=''):
"""Creates a chart to illustrate annual and diurnal cycles.
This chart will use the pandas groupby method to plot the mean and median
@@ -138,6 +151,19 @@ def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s
y_label (str): The label for the y axis.
+ legend (bool, default: True): whether the legend should be plotted.
+
+ legend_loc (str, default: 'best'): the location of the legend.
+
+ * 'best': Automatically choose the option below with the least overlap.
+ * 'upper left', 'upper right', 'lower left', 'lower right': place the legend at the corresponding corner of the axes/figure.
+ * 'upper center', 'lower center', 'center left', 'center right': place the legend at the center of the corresponding edge of the axes/figure.
+ * 'center': place the legend at the center of the axes/figure.
+ * The location can also be a 2-tuple giving the coordinates of the lower-left corner of the legend in axes coordinates.
+
+ title (str, default: ''): text to use as a figure title. If no text
+ is provided, no title will be created (default).
+
Returns:
fig (matplotlib.figure.Figure):
a matplotlib figure. This will plot immediately in a Jupyter
@@ -187,9 +213,7 @@ def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s
cycleby = Qseries.index.hour
x_label = ' (hour of the day)'
else:
- print("The cycle label '", cycle, "' is not recognized as an option. Using cycle='diurnal' instead.")
- cycleby = Qseries.index.hour
- x_label = ' (hour of the day)'
+ raise ValueError("The cycle label '", cycle, "' is not recognized as an option.")
if compare is None:
# Don't make a comparison plot.
@@ -269,7 +293,8 @@ def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s
# axs[i].xaxis.set_major_formatter(matplotlib.dates.DateFormatter('%H'))
# Set the legend on either the ax or fig.
- axs[0].legend(loc='best', fancybox=True, framealpha=0.5)
+ if legend:
+ axs[0].legend(loc=legend_loc, fancybox=True, framealpha=0.5)
# fig.legend(loc='upper center', shadow=True, frameon=True, fancybox=True, framealpha=0.5)
# Get the yaxis limits, set bottom to zero.
@@ -278,5 +303,7 @@ def cycleplot(Qseries, cycle='diurnal', compare=None, y_label='Discharge (ft³/s
axs[0].set_ylabel(y_label)
axs[0].set_xlabel('Time' + x_label)
plt.tight_layout()
+ if title:
+ fig.suptitle(title)
return fig, axs
| Add ability to modify the legend and titles
* HydroFunctions version: 0.1.8dev
* Python version: 3.7
### Description
Hydrofunction's built-in charts should have the ability to set a title or create a legend.
### Potential Solutions:
1) add new parameters `legend` and `title`
- `legend` default could be `False`; otherwise you could provide a value for legend.
- This approach might require a `legend_loc` parameter too.
- `title` could be set to `False` or text.
2) Use **kwargs and pass these on
3) What does Pandas do?? | mroberge/hydrofunctions | diff --git a/tests/test_charts.py b/tests/test_charts.py
index 9219578..a74174d 100644
--- a/tests/test_charts.py
+++ b/tests/test_charts.py
@@ -34,18 +34,28 @@ class TestFlowDuration(unittest.TestCase):
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
+ actual_legend = actual_ax.get_legend()
+ actual_legend_loc = actual_legend._loc
+ actual_title = actual_ax.get_title()
self.assertEqual(actual_xscale, 'logit')
self.assertEqual(actual_yscale, 'log')
self.assertEqual(actual_ylabel, 'Stream Discharge (m³/s)')
self.assertEqual(actual_marker, '.')
+ self.assertTrue(actual_legend)
+ self.assertEqual(actual_legend_loc, 0) # '0' is internal code for 'best'.
+ self.assertEqual(actual_title, '')
def test_charts_flowduration_accepts_params(self):
expected = pd.DataFrame(data=dummy)
params = {'xscale': 'linear',
'yscale': 'linear',
'ylabel': 'test value',
- 'symbol': ','}
+ 'symbol': ',',
+ 'legend': False,
+ 'legend_loc': 'center',
+ 'title': 'Test Title',
+ }
actual_fig, actual_ax = charts.flow_duration(expected, **params)
@@ -53,12 +63,19 @@ class TestFlowDuration(unittest.TestCase):
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
+ actual_legend = actual_ax.get_legend()
+ # There is no legend in this test, so there is no legend property.
+ #actual_legend_loc = actual_legend._loc
+ actual_title = actual_ax.get_title()
self.assertEqual(actual_xscale, 'linear')
self.assertEqual(actual_yscale, 'linear')
self.assertEqual(actual_ylabel, 'test value')
self.assertEqual(actual_marker, ',')
-
+ self.assertIsNone(actual_legend)
+ # There is no legend, so there is no legend location property.
+ #self.assertEqual(actual_legend_loc, 10) # 'center' is equal to 10.
+ self.assertEqual(actual_title, 'Test Title')
class TestCyclePlot(unittest.TestCase):
@@ -80,15 +97,21 @@ class TestCyclePlot(unittest.TestCase):
def test_charts_cycleplot_parts(self):
expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
- actual_fig, actual_ax = charts.cycleplot(expected_df)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, legend_loc='center', title='test title')
actual_xscale = actual_ax[0].xaxis.get_scale()
actual_yscale = actual_ax[0].yaxis.get_scale()
actual_ylabel = actual_ax[0].yaxis.get_label_text()
+ actual_legend = actual_ax[0].get_legend()
+ actual_legend_loc = actual_legend._loc
+ actual_title = actual_fig._suptitle.get_text() # unofficial title accessor! A little wonky.
self.assertEqual(actual_xscale, 'linear')
self.assertEqual(actual_yscale, 'linear')
self.assertEqual(actual_ylabel, 'Discharge (ft³/s)')
+ self.assertTrue(actual_legend)
+ self.assertEqual(actual_legend_loc, 10) # '10' is internal code for legend(loc = 'center')
+ self.assertEqual(actual_title, 'test title')
def test_charts_cycleplot_compare_month(self):
expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
@@ -102,5 +125,46 @@ class TestCyclePlot(unittest.TestCase):
self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+ def test_charts_cycleplot_cycle_annual(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'annual')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_annualdate(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'annual-date')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_annualmonth(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'annual-month')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_weekly(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'weekly')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_diurnalsmallest(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'diurnal-smallest')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_diurnalhour(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'diurnal-hour')
+ self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
+ self.assertIsInstance(actual_ax[0], matplotlib.axes.Axes)
+
+ def test_charts_cycleplot_cycle_nonsense_raises_ValueError(self):
+ expected_df, expected_dict = hf.extract_nwis_df(test_json, interpolate=False)
+ with self.assertRaises(ValueError):
+ actual_fig, actual_ax = charts.cycleplot(expected_df, 'nonsense')
+
if __name__ == '__main__':
unittest.main(verbosity=2)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | .0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==24.2.0
Babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.13.3
bleach==6.0.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==7.2.7
cycler==0.11.0
debugpy==1.7.0
decorator==5.1.1
defusedxml==0.7.1
docutils==0.19
entrypoints==0.4
exceptiongroup==1.2.2
execnet==2.0.2
fastjsonschema==2.21.1
fonttools==4.38.0
-e git+https://github.com/mroberge/hydrofunctions.git@bf1f296f4a2e31a0df9bd07860bbfa90e71e7890#egg=hydrofunctions
idna==3.10
imagesize==1.4.1
importlib-metadata==6.7.0
importlib-resources==5.12.0
iniconfig==2.0.0
ipykernel==6.16.2
ipython==7.34.0
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.17.3
jupyter_client==7.4.9
jupyter_core==4.12.0
jupyterlab-pygments==0.2.2
kiwisolver==1.4.5
MarkupSafe==2.1.5
matplotlib==3.5.3
matplotlib-inline==0.1.6
mistune==3.0.2
nbclient==0.7.4
nbconvert==7.6.0
nbformat==5.8.0
nbsphinx==0.9.7
nest-asyncio==1.6.0
numpy==1.21.6
packaging==24.0
pandas==1.3.5
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pickleshare==0.7.5
Pillow==9.5.0
pkgutil_resolve_name==1.3.10
pluggy==1.2.0
prompt_toolkit==3.0.48
psutil==7.0.0
ptyprocess==0.7.0
pyarrow==12.0.1
Pygments==2.17.2
pyparsing==3.1.4
pyrsistent==0.19.3
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==26.2.1
requests==2.31.0
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.4.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tinycss2==1.2.1
tomli==2.0.1
tornado==6.2
traitlets==5.9.0
typing_extensions==4.7.1
urllib3==2.0.7
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.15.0
| name: hydrofunctions
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==24.2.0
- babel==2.14.0
- backcall==0.2.0
- beautifulsoup4==4.13.3
- bleach==6.0.0
- charset-normalizer==3.4.1
- coverage==7.2.7
- cycler==0.11.0
- debugpy==1.7.0
- decorator==5.1.1
- defusedxml==0.7.1
- docutils==0.19
- entrypoints==0.4
- exceptiongroup==1.2.2
- execnet==2.0.2
- fastjsonschema==2.21.1
- fonttools==4.38.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==6.7.0
- importlib-resources==5.12.0
- iniconfig==2.0.0
- ipykernel==6.16.2
- ipython==7.34.0
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.17.3
- jupyter-client==7.4.9
- jupyter-core==4.12.0
- jupyterlab-pygments==0.2.2
- kiwisolver==1.4.5
- markupsafe==2.1.5
- matplotlib==3.5.3
- matplotlib-inline==0.1.6
- mistune==3.0.2
- nbclient==0.7.4
- nbconvert==7.6.0
- nbformat==5.8.0
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- numpy==1.21.6
- packaging==24.0
- pandas==1.3.5
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==9.5.0
- pkgutil-resolve-name==1.3.10
- pluggy==1.2.0
- prompt-toolkit==3.0.48
- psutil==7.0.0
- ptyprocess==0.7.0
- pyarrow==12.0.1
- pygments==2.17.2
- pyparsing==3.1.4
- pyrsistent==0.19.3
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==26.2.1
- requests==2.31.0
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.4.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tinycss2==1.2.1
- tomli==2.0.1
- tornado==6.2
- traitlets==5.9.0
- typing-extensions==4.7.1
- urllib3==2.0.7
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.15.0
prefix: /opt/conda/envs/hydrofunctions
| [
"tests/test_charts.py::TestFlowDuration::test_charts_flowduration_accepts_params",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_nonsense_raises_ValueError",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_parts"
] | [
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_diurnalsmallest"
] | [
"tests/test_charts.py::TestFlowDuration::test_charts_flowduration_defaults",
"tests/test_charts.py::TestFlowDuration::test_charts_flowduration_exists",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_compare_month",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_annual",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_annualdate",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_annualmonth",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_annualweek",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_diurnalhour",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_cycle_weekly",
"tests/test_charts.py::TestCyclePlot::test_charts_cycleplot_exists",
"tests/test_charts.py::TestCyclePlot::test_charts_groupby_not_object_dtype"
] | [] | MIT License | 5,916 | 1,336 | [
"hydrofunctions/charts.py"
] |
|
fitbenchmarking__fitbenchmarking-329 | 6a41e646051513f80570287b96796f879534b072 | 2019-11-28 17:37:08 | 6a41e646051513f80570287b96796f879534b072 | diff --git a/fitbenchmarking/fitting/controllers/dfogn_controller.py b/fitbenchmarking/fitting/controllers/dfogn_controller.py
index 328f62ae..ae6fc1cc 100644
--- a/fitbenchmarking/fitting/controllers/dfogn_controller.py
+++ b/fitbenchmarking/fitting/controllers/dfogn_controller.py
@@ -31,8 +31,7 @@ class DFOGNController(Controller):
self._pinit = np.asarray(self.initial_params)
def _prediction_error(self, p):
- f = self.data_y - self.problem.eval_f(x=self.data_x,
- params=p,
+ f = self.data_y - self.problem.eval_f(params=p,
function_id=self.function_id)
if self.use_errors:
f = f/self.data_e
@@ -59,7 +58,6 @@ class DFOGNController(Controller):
will be read from.
"""
if self.success:
- self.results = self.problem.eval_f(x=self.data_x,
- params=self._popt,
+ self.results = self.problem.eval_f(params=self._popt,
function_id=self.function_id)
self.final_params = self._popt
diff --git a/fitbenchmarking/fitting/controllers/minuit_controller.py b/fitbenchmarking/fitting/controllers/minuit_controller.py
index cf5fe91a..640aefdb 100644
--- a/fitbenchmarking/fitting/controllers/minuit_controller.py
+++ b/fitbenchmarking/fitting/controllers/minuit_controller.py
@@ -43,8 +43,7 @@ class MinuitController(Controller):
errordef=1)
def _prediction_error(self, p):
- f = self.problem.eval_f(x=self.data_x,
- params=p,
+ f = self.problem.eval_f(params=p,
function_id=self.function_id)
f = f - self.data_y
if self.use_errors:
@@ -68,7 +67,6 @@ class MinuitController(Controller):
self.success = (self._popt is not None)
if self.success:
- self.results = self.problem.eval_f(x=self.data_x,
- params=self._popt,
+ self.results = self.problem.eval_f(params=self._popt,
function_id=self.function_id)
self.final_params = self._popt
diff --git a/fitbenchmarking/fitting/controllers/ralfit_controller.py b/fitbenchmarking/fitting/controllers/ralfit_controller.py
index 951ab5af..8765e089 100644
--- a/fitbenchmarking/fitting/controllers/ralfit_controller.py
+++ b/fitbenchmarking/fitting/controllers/ralfit_controller.py
@@ -44,8 +44,7 @@ class RALFitController(Controller):
raise RuntimeError("An undefined RALFit minmizer was selected")
def _prediction_error(self, p):
- f = self.problem.eval_f(x=self.data_x,
- params=p,
+ f = self.problem.eval_f(params=p,
function_id=self.function_id)
f = f - self.data_y
if self.use_errors:
@@ -76,7 +75,6 @@ class RALFitController(Controller):
will be read from.
"""
if self.success:
- self.results = self.problem.eval_f(x=self.data_x,
- params=self._popt,
+ self.results = self.problem.eval_f(params=self._popt,
function_id=self.function_id)
self.final_params = self._popt
diff --git a/fitbenchmarking/fitting/controllers/scipy_controller.py b/fitbenchmarking/fitting/controllers/scipy_controller.py
index 33d1f955..38e5beb6 100644
--- a/fitbenchmarking/fitting/controllers/scipy_controller.py
+++ b/fitbenchmarking/fitting/controllers/scipy_controller.py
@@ -50,7 +50,6 @@ class ScipyController(Controller):
will be read from.
"""
if self.success:
- self.results = self.problem.eval_f(x=self.data_x,
- params=self._popt,
+ self.results = self.problem.eval_f(params=self._popt,
function_id=self.function_id)
self.final_params = self._popt
diff --git a/fitbenchmarking/parsing/fitting_problem.py b/fitbenchmarking/parsing/fitting_problem.py
index 43c05c14..b7365f22 100644
--- a/fitbenchmarking/parsing/fitting_problem.py
+++ b/fitbenchmarking/parsing/fitting_problem.py
@@ -50,16 +50,16 @@ class FittingProblem:
# Executable param pairs
self.functions = None
- def eval_f(self, x, params, function_id):
+ def eval_f(self, params, function_id, x=None):
"""
Function evaluation method
- :param x: x data values
- :type x: numpy array
:param params: parameter value(s)
:type params: list
:param function_id: The index of the function in functions
:type function_id: int
+ :param x: x data values or None, if None this uses self.data_x
+ :type x: numpy array
:return: y data values evaluated from the function of the problem
:rtype: numpy array
@@ -67,6 +67,9 @@ class FittingProblem:
if self.functions is None:
raise AttributeError('Cannot call function before setting'
+ 'functions in object.')
+
+ if x is None:
+ x = self.data_x
function = self.functions[function_id][0]
return function(x, *params)
| Add a default value for x in FittingProblem.eval_f
Every evaluation of this is going to occur at self.data_x.
This already wrapped by some controllers to give a function which only takes parameters as an argument.
We should make self.data_x a default, which will accommodate all of the controllers | fitbenchmarking/fitbenchmarking | diff --git a/fitbenchmarking/parsing/tests/test_fitting_problem.py b/fitbenchmarking/parsing/tests/test_fitting_problem.py
index f97a8e4b..b9cca333 100644
--- a/fitbenchmarking/parsing/tests/test_fitting_problem.py
+++ b/fitbenchmarking/parsing/tests/test_fitting_problem.py
@@ -70,6 +70,10 @@ class TestFittingProblem(TestCase):
params=[5],
function_id=1)
self.assertTrue(all(eval_result == np.array([16])))
+ fitting_problem.data_x = np.array([20, 21, 22])
+ eval_result = fitting_problem.eval_f(params=[5],
+ function_id=0)
+ self.assertTrue(all(eval_result == np.array([25, 26, 27])))
def test_eval_starting_params(self):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 5
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gfortran lcov libblas-dev liblapack-dev"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bumps==0.9.3
certifi==2021.5.30
chardet==3.0.4
charset-normalizer==2.0.12
configparser==5.2.0
coverage==6.2
coveralls==3.3.1
cycler==0.11.0
DataProperty==0.46.4
DFOGN==1.0.2
docopt==0.6.2
docutils==0.18.1
-e git+https://github.com/fitbenchmarking/fitbenchmarking.git@6a41e646051513f80570287b96796f879534b072#egg=FitBenchmarking
idna==3.10
iminuit==2.16.0
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
kiwisolver==1.3.1
lxml==5.3.1
MarkupSafe==2.0.1
matplotlib==2.2.5
mbstrdecoder==0.8.4
msgfy==0.0.7
numpy==1.16.6
packaging==21.3
pandas==0.24.2
pathvalidate==0.29.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytablewriter==0.46.1
pytest==7.0.1
pytest-cov==4.0.0
python-coveralls==2.9.3
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
sasmodels==1.0.9
scipy==1.2.3
six==1.17.0
tabledata==0.10.4
tinycc==1.1
tomli==1.2.3
typepy==0.6.6
typing_extensions==4.1.1
urllib3==1.23
zipp==3.6.0
| name: fitbenchmarking
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bumps==0.9.3
- chardet==3.0.4
- charset-normalizer==2.0.12
- configparser==5.2.0
- coverage==6.2
- coveralls==3.3.1
- cycler==0.11.0
- dataproperty==0.46.4
- dfogn==1.0.2
- docopt==0.6.2
- docutils==0.18.1
- idna==3.10
- iminuit==2.16.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- kiwisolver==1.3.1
- lxml==5.3.1
- markupsafe==2.0.1
- matplotlib==2.2.5
- mbstrdecoder==0.8.4
- msgfy==0.0.7
- numpy==1.16.6
- packaging==21.3
- pandas==0.24.2
- pathvalidate==0.29.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytablewriter==0.46.1
- pytest==7.0.1
- pytest-cov==4.0.0
- python-coveralls==2.9.3
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- sasmodels==1.0.9
- scipy==1.2.3
- six==1.17.0
- tabledata==0.10.4
- tinycc==1.1
- tomli==1.2.3
- typepy==0.6.6
- typing-extensions==4.1.1
- urllib3==1.23
- zipp==3.6.0
prefix: /opt/conda/envs/fitbenchmarking
| [
"fitbenchmarking/parsing/tests/test_fitting_problem.py::TestFittingProblem::test_eval_f"
] | [] | [
"fitbenchmarking/parsing/tests/test_fitting_problem.py::TestFittingProblem::test_eval_starting_params",
"fitbenchmarking/parsing/tests/test_fitting_problem.py::TestFittingProblem::test_get_function_def",
"fitbenchmarking/parsing/tests/test_fitting_problem.py::TestFittingProblem::test_verify_problem"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,918 | 1,306 | [
"fitbenchmarking/fitting/controllers/dfogn_controller.py",
"fitbenchmarking/fitting/controllers/minuit_controller.py",
"fitbenchmarking/fitting/controllers/ralfit_controller.py",
"fitbenchmarking/fitting/controllers/scipy_controller.py",
"fitbenchmarking/parsing/fitting_problem.py"
] |
|
iterative__dvc-2866 | bdfeba8f9be3de53ccb6a419099e84cf9e0969f8 | 2019-11-29 08:20:36 | 1ed1a89b9fe4d56c6fcf775308d4dfadce9bb789 | pared: Note:
If we will agree that is how we want to handle s3, I will have to prepare change to docs about `push` and `pull`.
shcheklein: sorry guys, missed the discussion in the ticket. @pared could you please summarize the changes, show a gif on how would it look like?
My 2cs - it feels a bit that we are over-architecting this if we try to analyze the system in advance. Why don't we just handle this gracefully and show a message in logs/screen that "you are hitting the limit blah - blah ..."?
pared: @shcheklein the whole implementation is a result of our discussion in the original issue.
We wanted to achieve two goals:
1. Prevent `Too many open files` from occurring
2. The default jobs number is pretty high (16) and we would like it to stay, to make default experience better.
The problem here is that we cannot for 100% say that this error will occur, which heavily depends on the size of particular files and speed of the network. We know that this error might occur when the number of jobs exceeds some (possible to estimate) number. However, user might be aware of that and purposefully increases the number of jobs because he knows that for some reason (super-fast network) this does not apply to him. So what I want to introduce in this change is:
1. Decrease automatically default number of jobs if user did not provide `jobs`, just to make his life easier, example:
https://asciinema.org/a/286748
(added print that its actually decreasing)
2. If everything seems fine, just use default jobs number:
https://asciinema.org/a/286749
(also this print will not be there)
3. In case of user defining `jobs` specifically, warn him:
https://asciinema.org/a/286750
Suor: I don't like this approach. This is:
- fragile, if something is happening in the bg, like backup or torrents) then a user will still get "too many files"
- bothers user with some warnings
- still stops on errors, this makes it non-reliable to run automatically unless you use conservative `jobs`.
So I would prefer dvc to retry on errors, adjust jobs automatically. Ideally we won't need `jobs` param at all.
pared: @Suor I don't like the retrying approach, because you cannot determine when and if the operation will fail. So it is possible, that someone will get an error after a few minutes of operation execution. And I think that would be really bad because the user would need to reduce number of jobs/increase fds limit, retry and hope for the best. I think that could be frustrating if faced a few times.
shcheklein: @pared thanks for the clarifications! Even though I tend to agree with @Suor that our future direction should be aggressive (and do not fail, but adjust itself dynamically), I think we should move forward (especially since it was discussed by a few members of the team) with this and move @Suor 's general suggestion to a separate discussion and approach holistically.
@efiop just to clarify (since I missed the discussion), what's your take on this?
efiop: > fragile, if something is happening in the bg, like backup or torrents) then a user will still get "too many files"
@Suor ulimit is per-session, your torrents (aye-aye, captain Alex! ⚓️ 🚢 🔫 ☠️) and backups are in a different session, so they won't affect your dvc pull.
> bothers user with some warnings
> still stops on errors, this makes it non-reliable to run automatically unless you use conservative jobs.
Agreed on these two. The warning can be replaced with a post-crash one (see part about it below).
> So I would prefer dvc to retry on errors, adjust jobs automatically. Ideally we won't need jobs param at all.
Ideally yes, but that is a different ticket. With ulimit, it is hard to retry, as you can't really be sure if it won't fail again or after 10 minutes of wait time. I haven't tested it, so not 100% sure if that would be suitable for us or not, I guess it depends on where we are going to catch that exception.
@pared We've agreed to definitely catch that exception in `main.py` and show a meaningful warning to start with, right? I don't think anyone has any objections about that part that should come as a separate PR.
pared: @Suor @shcheklein @efiop
Ok, so to sum it up:
1. Lets create new issue where we will handle OsError 24 and post-fail communication, and maybe automatic adjustment of jobs, post fail
2. Modify current change so that it does not log warnings, but leave adjusting default jobs number be.
How does it sound?
pared: Note: I think I did not initially understand @Suor point of view, and imagined it as following workflow:
push -> fail -> push again (manually).
I was wrong here, right? In the end, we should let it fail and retry (automatically) upon "Too many open files" error. That should not penalize the whole operation, because push/pull operation as a whole is not success or fail operation, some of the files should be pushed/pulled.
Suor: @pared the retry thing may be definitely separated and might need to be handled along with retries for other network related reasons.
We can merge this as is or remove warning, both are fine with me.
@pared yes, I meant automatic retries.
efiop: @pared It still feels like catching the error and printing a message to increase the ulimit brings more value here. I'm worried about automatic adjustment like this because it is a lot of heuristics to handle. Maybe it is indeed the only way to do that, but it feels like if I was the user, if dvc told me "increase your ulimit" it would be more useful than the warning and the auto-throttle. Especially since we are talking mostly about mac, where ulimit for files is simply ridiculously low, so it makes total sense to just make user increase it.
pared: It seems that handling exceptions coming from parallel execution of tasks is causing some confusion.
1. We have ugly `try/ catch Exception` in [`_download_file`](https://github.com/iterative/dvc/blob/6e22769f3d8c940a5f2cf7e9855a8370ad4c2772/dvc/remote/base.py#L617) and in [`upload`](https://github.com/iterative/dvc/blob/6e22769f3d8c940a5f2cf7e9855a8370ad4c2772/dvc/remote/base.py#L540)
2. Till now we used to throw generic `[Download/Upload]Error` after all operations if some of them failed
3. I think that on `Too many open files` we could raise exception immediately.
4. Trying to introduce separate `Too many open files` error handling in `RemoteBASE` caused me to scatter logic responsible for error handling between `RemoteBASE` and `RemoteLOCAL` which obfuscates what is actually happening with the errors.
That brings me to the conclusion that we might need to introduce some kind of error handler to whom we could delegate logic related to error handling.
@iterative/engineering What do you think about this idea?
efiop: @pared So for example, `_download` from RemoteS3 would catch its specific boto3.ClientError and maybe some other stuff that is not critical and so other workers can proceed working, while we in `download` in RemoteBASE would catch some generic issues (not sure which ones from the top of my head) and main.py would catch OSError for ulimit issue, right?
pared: @efiop yes, that's what I have in mind.
I also can't come up some common errors for all remotes, but handling "Unspecified" exceptions is common to all classes (raise `[Upload/ Download]Error` after all operations are performed)
efiop: @pared Should be quite straight forward. E.g. for s3 it is ClientError, for other remotes it is also something specific that we can get straight from the docs. There will be learning curve though, but I think that is reasonable for the sake of proper error handling and not this current lazy "except Exception"(my fault originally).
pared: @efiop I agree that there will be a learning curve, though I believe that the current way of handling exceptions also requires some insight.
efiop: @pared Agreed. Ok, so on a quick google for upload/download:
s3 - ClientError
azure - ClientException
gs - ClientError
etc
So looks like it is pretty standardized for those clouds, which is pretty nice. As a first step, it would be reasonable to count those as recoverable ones (meaning that they would be logged and `return 1`, same as we do right now with all exceptions in workers) and the rest consider non-recoverable, that way we could catch ulimit error in main.py, as discussed earlier. What do you think?
pared: @efiop I would actually leave error as recoverable by default, and implement logic for those that we think should fail the whole operation, that seems to go along with logic we applied until this issue came up.
efiop: @pared Right, that is what you are basically doing right now. Makes sense. Seems like we agree on the throttling magic being too fragile, so maybe let's repurpose this PR to only properly handle the ulimit issue? If so, one thing I would do is to not create that exception, but rather catch that as an OSError in main.py and `logger.error()` it instead with that message. Doesn't look like we really need a special exception for it. | diff --git a/dvc/main.py b/dvc/main.py
index 86e1a9e0d..89c9dc775 100644
--- a/dvc/main.py
+++ b/dvc/main.py
@@ -1,6 +1,7 @@
"""Main entry point for dvc CLI."""
from __future__ import unicode_literals
+import errno
import logging
from dvc import analytics
@@ -64,6 +65,10 @@ def main(argv=None):
"unicode is not supported in DVC for Python 2 "
"(end-of-life January 1, 2020), please upgrade to Python 3"
)
+ elif isinstance(exc, OSError) and exc.errno == errno.EMFILE:
+ logger.exception(
+ "too many open files, please increase your `ulimit`"
+ )
else:
logger.exception("unexpected error")
ret = 255
diff --git a/dvc/remote/base.py b/dvc/remote/base.py
index f72e56127..57a1846a6 100644
--- a/dvc/remote/base.py
+++ b/dvc/remote/base.py
@@ -1,4 +1,7 @@
from __future__ import unicode_literals
+
+import errno
+
from dvc.utils.compat import basestring, FileNotFoundError, str, urlparse
import itertools
@@ -516,6 +519,16 @@ class RemoteBASE(object):
return
self._save_file(path_info, checksum)
+ def _handle_transfer_exception(
+ self, from_info, to_info, exception, operation
+ ):
+ if isinstance(exception, OSError) and exception.errno == errno.EMFILE:
+ raise exception
+
+ msg = "failed to {} '{}' to '{}'".format(operation, from_info, to_info)
+ logger.exception(msg)
+ return 1
+
def upload(self, from_info, to_info, name=None, no_progress_bar=False):
if not hasattr(self, "_upload"):
raise RemoteActionNotImplemented("upload", self.scheme)
@@ -537,10 +550,10 @@ class RemoteBASE(object):
name=name,
no_progress_bar=no_progress_bar,
)
- except Exception:
- msg = "failed to upload '{}' to '{}'"
- logger.exception(msg.format(from_info, to_info))
- return 1 # 1 fail
+ except Exception as e:
+ return self._handle_transfer_exception(
+ from_info, to_info, e, "upload"
+ )
return 0
@@ -614,10 +627,10 @@ class RemoteBASE(object):
self._download(
from_info, tmp_file, name=name, no_progress_bar=no_progress_bar
)
- except Exception:
- msg = "failed to download '{}' to '{}'"
- logger.exception(msg.format(from_info, to_info))
- return 1 # 1 fail
+ except Exception as e:
+ return self._handle_transfer_exception(
+ from_info, to_info, e, "download"
+ )
move(tmp_file, to_info, mode=file_mode)
| 'Errno 24 - Too many open files' on dvc push
### Version information
* DVC version: 0.58.1
* Platform: MacOS 10.14.6
* Method of installation: pip within a conda environment
### Description
When pushing to S3 a directory of ~100 files that have been added to DVC, I observe an Errno 24 error from the dvc process.
It looks like dvc is trying to open more files than the OS allows. Checking the file handles on for the dvc process I get:
```
$ lsof -p $DVC_PID | wc -l
412
```
Looking at the OS limits, a process is limited to having 256 open files.
```
$ ulimit -a
core file size (blocks, -c) 0
data seg size (kbytes, -d) unlimited
file size (blocks, -f) unlimited
max locked memory (kbytes, -l) unlimited
max memory size (kbytes, -m) unlimited
open files (-n) 256
pipe size (512 bytes, -p) 1
stack size (kbytes, -s) 8192
cpu time (seconds, -t) unlimited
max user processes (-u) 4256
virtual memory (kbytes, -v) unlimited
```
A workaround for this is to increase the max files per process to a larger number (say 4096) by running something like `ulimit -n 4096`, but I wonder if the ideal solution is for DVC to work within the OS configured limits by default?
Edit: Updated wording of workaround | iterative/dvc | diff --git a/tests/func/test_remote.py b/tests/func/test_remote.py
index 23f318861..f3140b5f3 100644
--- a/tests/func/test_remote.py
+++ b/tests/func/test_remote.py
@@ -1,14 +1,17 @@
+import errno
import os
import shutil
import configobj
+import pytest
from mock import patch
from dvc.config import Config
from dvc.main import main
from dvc.path_info import PathInfo
-from dvc.remote import RemoteLOCAL
+from dvc.remote import RemoteLOCAL, RemoteConfig
from dvc.remote.base import RemoteBASE
+from dvc.utils.compat import fspath
from tests.basic_env import TestDvc
from tests.remotes import get_local_url, get_local_storagepath
@@ -253,3 +256,21 @@ def test_partial_push_n_pull(dvc_repo, repo_dir, caplog):
def get_last_exc(caplog):
_, exc, _ = caplog.records[-2].exc_info
return exc
+
+
+def test_raise_on_too_many_open_files(tmp_dir, dvc, tmp_path_factory, mocker):
+ storage = tmp_path_factory.mktemp("test_remote_base")
+ remote_config = RemoteConfig(dvc.config)
+ remote_config.add("local_remote", fspath(storage), default=True)
+
+ tmp_dir.dvc_gen({"file": "file content"})
+
+ mocker.patch.object(
+ RemoteLOCAL,
+ "_upload",
+ side_effect=OSError(errno.EMFILE, "Too many open files"),
+ )
+
+ with pytest.raises(OSError) as e:
+ dvc.push()
+ assert e.errno == errno.EMFILE
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 2
} | 0.77 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@bdfeba8f9be3de53ccb6a419099e84cf9e0969f8#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==6.0.5
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive==1.3.1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.77.3+bdfeba
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==6.0.5
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive==1.3.1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_remote.py::test_raise_on_too_many_open_files"
] | [] | [
"tests/func/test_remote.py::TestRemote::test",
"tests/func/test_remote.py::TestRemote::test_overwrite",
"tests/func/test_remote.py::TestRemote::test_referencing_other_remotes",
"tests/func/test_remote.py::TestRemote::test_relative_path",
"tests/func/test_remote.py::TestRemoteRemoveDefault::test",
"tests/func/test_remote.py::TestRemoteRemove::test",
"tests/func/test_remote.py::TestRemoteDefault::test",
"tests/func/test_remote.py::test_show_default",
"tests/func/test_remote.py::TestRemoteShouldHandleUppercaseRemoteName::test",
"tests/func/test_remote.py::test_large_dir_progress",
"tests/func/test_remote.py::test_dir_checksum_should_be_key_order_agnostic",
"tests/func/test_remote.py::test_partial_push_n_pull"
] | [] | Apache License 2.0 | 5,921 | 719 | [
"dvc/main.py",
"dvc/remote/base.py"
] |
iterative__dvc-2873 | 30848306e86d97e4baf7486c045d5f2fe6e26877 | 2019-11-30 13:36:42 | 516e3b14bcedbfd3f81dff2380cd8bd4135b6a2c | diff --git a/dvc/remote/s3.py b/dvc/remote/s3.py
index 8013f9bb9..8dd2bdf48 100644
--- a/dvc/remote/s3.py
+++ b/dvc/remote/s3.py
@@ -5,6 +5,7 @@ import logging
import os
import threading
+from botocore.exceptions import ClientError
from funcy import cached_property, wrap_prop
from dvc.config import Config
@@ -207,10 +208,26 @@ class RemoteS3(RemoteBASE):
def list_cache_paths(self):
return self._list_paths(self.path_info)
+ def isfile(self, path_info):
+ if path_info.path.endswith("/"):
+ return False
+
+ try:
+ self.s3.head_object(Bucket=path_info.bucket, Key=path_info.path)
+ except ClientError as exc:
+ if exc.response["Error"]["Code"] != "404":
+ raise
+ return False
+
+ return True
+
def exists(self, path_info):
- dir_path = path_info / ""
- fname = next(self._list_paths(path_info, max_items=1), "")
- return path_info.path == fname or fname.startswith(dir_path.path)
+ """Check if the blob exists. If it does not exist,
+ it could be a part of a directory path.
+
+ eg: if `data/file.txt` exists, check for `data` should return True
+ """
+ return self.isfile(path_info) or self.isdir(path_info)
def makedirs(self, path_info):
# We need to support creating empty directories, which means
@@ -279,7 +296,7 @@ class RemoteS3(RemoteBASE):
)
def walk_files(self, path_info, max_items=None):
- for fname in self._list_paths(path_info, max_items):
+ for fname in self._list_paths(path_info / "", max_items):
if fname.endswith("/"):
continue
| Cannot add file having name with substring of a folder as prefix in s3
### Steps to reproduce
1. Upload two files in s3: `folder/data/data.csv` and` folder/datasets.md`.
2. Setup remotes and caches.
```sh
dvc remote add -f s3 s3://dvc-temp/folder
dvc remote add -f cache remote://s3/cache
dvc config cache.s3 cache
```
3. `dvc run -d remote://s3/data 'echo hello world'`
### Outcome
```
Running command:
echo hello world
hello world
ERROR: unexpected error - '/folder/datasets.md' does not start with '/folder/data'
```
#### Version
```
$ dvc version
Python version: 3.6.6
Platform: Linux-5.3.12-arch1-1-x86_64-with-arch
Binary: False
Package: None
Filesystem type (cache directory): ('ext4', '/dev/sda9')
Filesystem type (workspace): ('ext4', '/dev/sda9')
```
#### Script to reproduce
```sh
#! /usr/bin/env bash
export AWS_ACCESS_KEY_ID='testing'
export AWS_SECRET_ACCESS_KEY='testing'
export AWS_SECURITY_TOKEN='testing'
export AWS_SESSION_TOKEN='testing'
moto_server s3 &> /dev/null &
python -c '
import boto3
session = boto3.session.Session()
s3 = session.client("s3", endpoint_url="http://localhost:5000")
s3.create_bucket(Bucket="dvc-temp")
s3.put_object(Bucket="dvc-temp", Key="folder/data/data.csv")
s3.put_object(Bucket="dvc-temp", Key="folder/datasets.md", Body="### Datasets")
'
temp=$(mktemp -d)
cd $temp
dvc init --no-scm
dvc remote add -f s3 s3://dvc-temp/folder
dvc remote modify s3 endpointurl http://localhost:5000
dvc remote add -f cache remote://s3/cache
dvc config cache.s3 cache
dvc run -d remote://s3/data 'echo hello world'
```
#### Analysis:
1. This is due to `walk_files` implementation in `RemoteS3` looking via prefix instead of `/<prefix`> to walk files. Either, `walk_files` should get directory path or should just append it itself.
https://github.com/iterative/dvc/blob/0404a2324e497667a8b7d0ab0bd2b37db8c97e4c/dvc/remote/s3.py#L282
Or, I'd prefer it to be handled when collecting the directory.
https://github.com/iterative/dvc/blob/caa67c725e1e351ed122bdad17db0f29a8e73c39/dvc/remote/base.py#L196
2. Again, the logic of `exists` looks flawed. Say, you have `data/subdir-file.txt` and `data/subdir/1` files. When adding `data/subdir`, the first result could be `subdir-file.txt` which matches `startswith`, therefore, the `exists()` will return True, but in reality, `subdir` does not exist.
So, the function should check if it's a directory, and should loop through all results of `_list_paths()` till it finds the exact match (not sure, how expensive this will be).
https://github.com/iterative/dvc/blob/caa67c725e1e351ed122bdad17db0f29a8e73c39/dvc/remote/s3.py#L208-L211 | iterative/dvc | diff --git a/tests/func/test_api.py b/tests/func/test_api.py
index a970d3cd5..ddf101479 100644
--- a/tests/func/test_api.py
+++ b/tests/func/test_api.py
@@ -75,7 +75,7 @@ def run_dvc(*argv):
@pytest.mark.parametrize("remote_url", remote_params, indirect=True)
-def test_get_url(remote_url, repo_dir, dvc_repo):
+def test_get_url(repo_dir, dvc_repo, remote_url):
run_dvc("remote", "add", "-d", "upstream", remote_url)
dvc_repo.add(repo_dir.FOO)
@@ -84,7 +84,7 @@ def test_get_url(remote_url, repo_dir, dvc_repo):
@pytest.mark.parametrize("remote_url", remote_params, indirect=True)
-def test_get_url_external(remote_url, dvc_repo, erepo):
+def test_get_url_external(dvc_repo, erepo, remote_url):
_set_remote_url_and_commit(erepo.dvc, remote_url)
# Using file url to force clone to tmp repo
@@ -94,7 +94,7 @@ def test_get_url_external(remote_url, dvc_repo, erepo):
@pytest.mark.parametrize("remote_url", all_remote_params, indirect=True)
-def test_open(remote_url, repo_dir, dvc_repo):
+def test_open(repo_dir, dvc_repo, remote_url):
run_dvc("remote", "add", "-d", "upstream", remote_url)
dvc_repo.add(repo_dir.FOO)
run_dvc("push")
@@ -107,7 +107,7 @@ def test_open(remote_url, repo_dir, dvc_repo):
@pytest.mark.parametrize("remote_url", all_remote_params, indirect=True)
-def test_open_external(remote_url, dvc_repo, erepo):
+def test_open_external(dvc_repo, erepo, remote_url):
erepo.dvc.scm.checkout("branch")
_set_remote_url_and_commit(erepo.dvc, remote_url)
erepo.dvc.scm.checkout("master")
@@ -127,7 +127,7 @@ def test_open_external(remote_url, dvc_repo, erepo):
@pytest.mark.parametrize("remote_url", all_remote_params, indirect=True)
-def test_missing(remote_url, repo_dir, dvc_repo):
+def test_missing(repo_dir, dvc_repo, remote_url):
run_dvc("add", repo_dir.FOO)
run_dvc("remote", "add", "-d", "upstream", remote_url)
diff --git a/tests/unit/remote/test_s3.py b/tests/unit/remote/test_s3.py
index 49fc3dbb8..bb0abe594 100644
--- a/tests/unit/remote/test_s3.py
+++ b/tests/unit/remote/test_s3.py
@@ -13,10 +13,12 @@ def remote():
├── data
│ ├── alice
│ ├── alpha
+ │ ├── subdir-file.txt
│ └── subdir
│ ├── 1
│ ├── 2
│ └── 3
+ ├── data1.txt
├── empty_dir
├── empty_file
└── foo
@@ -26,6 +28,7 @@ def remote():
s3 = remote.s3
s3.create_bucket(Bucket="bucket")
+ s3.put_object(Bucket="bucket", Key="data1.txt", Body=b"")
s3.put_object(Bucket="bucket", Key="empty_dir/")
s3.put_object(Bucket="bucket", Key="empty_file", Body=b"")
s3.put_object(Bucket="bucket", Key="foo", Body=b"foo")
@@ -34,6 +37,9 @@ def remote():
s3.put_object(Bucket="bucket", Key="data/subdir/1", Body=b"1")
s3.put_object(Bucket="bucket", Key="data/subdir/2", Body=b"2")
s3.put_object(Bucket="bucket", Key="data/subdir/3", Body=b"3")
+ s3.put_object(
+ Bucket="bucket", Key="data/subdir-file.txt", Body=b"subdir"
+ )
yield remote
@@ -66,6 +72,7 @@ def test_exists(remote):
(True, "data/subdir/1"),
(False, "data/al"),
(False, "foo/"),
+ (True, "data1.txt"),
]
for expected, path in test_cases:
@@ -76,9 +83,11 @@ def test_walk_files(remote):
files = [
remote.path_info / "data/alice",
remote.path_info / "data/alpha",
+ remote.path_info / "data/subdir-file.txt",
remote.path_info / "data/subdir/1",
remote.path_info / "data/subdir/2",
remote.path_info / "data/subdir/3",
+ remote.path_info / "data1.txt",
remote.path_info / "empty_file",
remote.path_info / "foo",
]
@@ -109,3 +118,23 @@ def test_makedirs(remote):
assert not remote.exists(empty_dir)
remote.makedirs(empty_dir)
assert remote.exists(empty_dir)
+
+
+def test_isfile(remote):
+ test_cases = [
+ (False, "empty_dir/"),
+ (True, "empty_file"),
+ (True, "foo"),
+ (True, "data/alice"),
+ (True, "data/alpha"),
+ (True, "data/subdir/1"),
+ (True, "data/subdir/2"),
+ (True, "data/subdir/3"),
+ (False, "data/subdir/empty_dir/"),
+ (False, "data/subdir/1/"),
+ (False, "something-that-does-not-exist"),
+ (False, "empty_dir"),
+ ]
+
+ for expected, path in test_cases:
+ assert remote.isfile(remote.path_info / path) == expected
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.71 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
asciimatics==1.14.0
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.9.115
botocore==1.12.253
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.15.2
-e git+https://github.com/iterative/dvc.git@30848306e86d97e4baf7486c045d5f2fe6e26877#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==6.0.5
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
Pillow==9.5.0
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.14.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive==1.3.1
pyfiglet==0.8.post1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.2.1
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.25.11
voluptuous==0.14.1
wcwidth==0.2.13
Werkzeug==2.2.3
xmltodict==0.14.2
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- asciimatics==1.14.0
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.9.115
- botocore==1.12.253
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.15.2
- dvc==0.71.0+308483
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==6.0.5
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- pillow==9.5.0
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.14.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive==1.3.1
- pyfiglet==0.8.post1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.2.1
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.25.11
- voluptuous==0.14.1
- wcwidth==0.2.13
- werkzeug==2.2.3
- xmltodict==0.14.2
prefix: /opt/conda/envs/dvc
| [
"tests/unit/remote/test_s3.py::test_exists",
"tests/unit/remote/test_s3.py::test_isfile"
] | [] | [
"tests/unit/remote/test_s3.py::test_isdir",
"tests/unit/remote/test_s3.py::test_walk_files",
"tests/unit/remote/test_s3.py::test_copy_preserve_etag_across_buckets",
"tests/unit/remote/test_s3.py::test_makedirs"
] | [] | Apache License 2.0 | 5,927 | 460 | [
"dvc/remote/s3.py"
] |
|
mystor__git-revise-53 | 162ac90cba010a0f90f2290bf267d130466417d6 | 2019-12-01 19:07:56 | 114a08196382fa37386da52bf58678819bdb9544 | anordal: I changed the wording a little in the conflict file, but I wasn't so sure about it. What do you think?
* `incoming` → `this`: Short for "You are editing this commit".
* `new parent`/`old parent` → `new basis`/`old basis`: Parent obviously means parent commit in this case, but basis can stand on its own. It is the basis for the change.
mystor: > I changed the wording a little in the conflict file, but wasn't too sure about it. What do you think?
>
> * `incoming` → `this`: Short for "You are editing this commit".
"incoming" definitely wasn't the best name for it, but I'm not sure about "this" either. How about "current"?
> * `new parent`/`old parent` → `new basis`/`old basis`: To say it without jargon, this is the "basis for the change".
I think I prefer the old wording. I find "basis" quite jargony as well, and "parent" is a more common word, also used by git. In my experience it hasn't been a source of confusion for people using git-revise. | diff --git a/gitrevise/merge.py b/gitrevise/merge.py
index 56caeb4..c0171d0 100644
--- a/gitrevise/merge.py
+++ b/gitrevise/merge.py
@@ -31,7 +31,7 @@ def rebase(commit: Commit, parent: Commit) -> Commit:
tree = merge_trees(
Path("/"),
- ("new parent", "old parent", "incoming"),
+ (parent.summary(), commit.parent().summary(), commit.summary()),
parent.tree(),
commit.parent().tree(),
commit.tree(),
@@ -185,9 +185,9 @@ def merge_blobs(
"merge-file",
"-q",
"-p",
- f"-L{path} ({labels[0]})",
- f"-L{path} ({labels[1]})",
- f"-L{path} ({labels[2]})",
+ f"-L{path} (new parent): {labels[0]}",
+ f"-L{path} (old parent): {labels[1]}",
+ f"-L{path} (current): {labels[2]}",
str(tmpdir / "current"),
str(tmpdir / "base"),
str(tmpdir / "other"),
@@ -201,7 +201,8 @@ def merge_blobs(
# At this point, we know that there are merge conflicts to resolve.
# Prompt to try and trigger manual resolution.
- print(f"Merge conflict for '{path}'")
+ print(f"Conflict in applying '{labels[2]}'")
+ print(f" Path: '{path}'")
if input(" Edit conflicted file? (Y/n) ").lower() == "n":
raise MergeConflict("user aborted")
| Feature request: Say which patch failed when editing a conflict
A useful feature of `git rebase -i` is that it says which commit failed to apply when there is a conflict:
Could not apply badbeef... Commit bla bla
To me, this is essential information, because the editing is after all not of the end result, but some intermediate commit – you need to know which commit you're editing. Even if you can guess it, you don't want to take chances here, because of how easily this can make a tangled mess of the commits.
Git rebase also repeats the commit title inside the editable git conflict (whereas revise just puts "incoming" here). That would also suffice, though I really don't like the userfriendlyness of standard git conflicts. Thus why I suggest printing a message in plain English.
Perhaps the git conflict is the better place for this info, but then I would say the real problem is the readability of standard git conflicts: They look too much like a conflict between equals, and presents which two commits it came from, when what matters is which one commit you're editing. They really should just say `You are editing commit "Commit bla bla"`. If this is fixable, I would say you don't need the message, because then, a preview would hint even better to the user whether to edit or abort. | mystor/git-revise | diff --git a/tests/test_fixup.py b/tests/test_fixup.py
index 8730f22..406d311 100644
--- a/tests/test_fixup.py
+++ b/tests/test_fixup.py
@@ -115,12 +115,12 @@ def test_fixup_nonhead_conflict(basic_repo):
with ed.next_file() as f:
assert f.equals_dedent(
f"""\
- <<<<<<< {os.sep}file1 (new parent)
+ <<<<<<< {os.sep}file1 (new parent): commit1
Hello, World!
How are things?
=======
conflict
- >>>>>>> {os.sep}file1 (incoming)
+ >>>>>>> {os.sep}file1 (current): <git index>
"""
)
f.replace_dedent("conflict1\n")
@@ -128,13 +128,13 @@ def test_fixup_nonhead_conflict(basic_repo):
with ed.next_file() as f:
assert f.equals_dedent(
f"""\
- <<<<<<< {os.sep}file1 (new parent)
+ <<<<<<< {os.sep}file1 (new parent): commit1
conflict1
=======
Hello, World!
Oops, gotta add a new line!
How are things?
- >>>>>>> {os.sep}file1 (incoming)
+ >>>>>>> {os.sep}file1 (current): commit2
"""
)
f.replace_dedent("conflict2\n")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
-e git+https://github.com/mystor/git-revise.git@162ac90cba010a0f90f2290bf267d130466417d6#egg=git_revise
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: git-revise
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/git-revise
| [
"tests/test_fixup.py::test_fixup_nonhead_conflict"
] | [] | [
"tests/test_fixup.py::test_fixup_head",
"tests/test_fixup.py::test_fixup_nonhead",
"tests/test_fixup.py::test_fixup_head_msg",
"tests/test_fixup.py::test_fixup_nonhead_msg",
"tests/test_fixup.py::test_fixup_head_editor",
"tests/test_fixup.py::test_fixup_nonhead_editor",
"tests/test_fixup.py::test_autosquash_nonhead"
] | [] | MIT License | 5,934 | 406 | [
"gitrevise/merge.py"
] |
cgarwood__python-openzwave-mqtt-31 | aa4ba281c9750e9cdd4e07afcaf18dbc741b6a78 | 2019-12-02 04:49:36 | aa4ba281c9750e9cdd4e07afcaf18dbc741b6a78 | diff --git a/openzwavemqtt/models/command_class.py b/openzwavemqtt/models/command_class.py
index 0b702fd..fac4637 100644
--- a/openzwavemqtt/models/command_class.py
+++ b/openzwavemqtt/models/command_class.py
@@ -15,6 +15,8 @@ class OZWCommandClass(OZWNodeChildBase):
EVENT_CHANGED = EVENT_COMMAND_CLASS_CHANGED
EVENT_REMOVED = EVENT_COMMAND_CLASS_REMOVED
+ PLURAL_NAME = "commandclasses"
+
@property
def instance(self) -> int:
"""Return Instance."""
diff --git a/openzwavemqtt/models/instance.py b/openzwavemqtt/models/instance.py
index e404484..a6c7a35 100644
--- a/openzwavemqtt/models/instance.py
+++ b/openzwavemqtt/models/instance.py
@@ -49,14 +49,13 @@ class OZWInstance(ZWaveBase):
"statistics": OZWInstanceStatistics,
}
- def send_message(self, topic, payload=""):
- instance_id = self.id
+ def send_command(self, command: str, payload: str = ""):
topic_prefix = self.options.topic_prefix
- full_topic = f"{topic_prefix}{instance_id}/command/{topic}/"
- self.options.sent_message(full_topic, payload)
+ full_topic = f"{topic_prefix}{self.id}/command/{command}/"
+ self.options.send_command(full_topic, payload)
def add_node(self, secure=False):
- self.send_message("addnode", {"secure": secure})
+ self.send_command("addnode", {"secure": secure})
def cancel_controller_command(self):
- self.send_message("cancelcontrollercommand")
+ self.send_command("cancelcontrollercommand")
diff --git a/openzwavemqtt/models/node.py b/openzwavemqtt/models/node.py
index 2831349..3ade28b 100644
--- a/openzwavemqtt/models/node.py
+++ b/openzwavemqtt/models/node.py
@@ -199,6 +199,17 @@ class OZWNode(ZWaveBase):
"""Return Neighbors."""
return self.data.get("Neighbors")
+ @property
+ def values(self):
+ """Return list of OZWValue child items."""
+ _values = []
+ for instance in self.collections["instance"]:
+ for cc in instance.collections["commandclass"]:
+ for value in cc.collections["value"]:
+ _values.append(value)
+
+ return _values
+
def create_collections(self):
"""Create collections that Node supports."""
return {
diff --git a/openzwavemqtt/options.py b/openzwavemqtt/options.py
index f8e8e03..041d147 100644
--- a/openzwavemqtt/options.py
+++ b/openzwavemqtt/options.py
@@ -5,10 +5,10 @@ from typing import Callable
class OZWOptions:
def __init__(
self,
- sent_message: Callable[[str, dict], None],
+ send_message: Callable[[str, dict], None],
topic_prefix: str = "OpenZWave/",
):
- self.sent_message = sent_message
+ self.send_message = send_message
self.topic_prefix = topic_prefix
self.listeners = {}
| Rename option `sent_message` to `send_message`
It is a typo 🤦♂ | cgarwood/python-openzwave-mqtt | diff --git a/test/models/test_value.py b/test/models/test_value.py
index 478074e..e6e53d9 100644
--- a/test/models/test_value.py
+++ b/test/models/test_value.py
@@ -23,6 +23,9 @@ def test_value_events(mgr):
assert events[0].value == "yo"
assert events[0].parent.id == "4"
+ # Test OZWNode.values shortcut
+ assert mgr.get_instance("1").get_node("2").values[0].id == "3"
+
# Listen for value changed
mgr.options.listen(EVENT_VALUE_CHANGED, events.append)
mgr.mock_receive_json(
@@ -34,10 +37,7 @@ def test_value_events(mgr):
# Show how to use collection helpers
assert (
- mgr.get_instance("1")
- .get_node("2")
- .get_instance("1")
- .get_commandclass("4")
+ list(mgr.get_instance("1").get_node("2").get_instance("1").commandclasses())[0]
.get_value("3")
.value
== "yo2"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"pip install -U pip",
"pip install -U pytest"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///croot/attrs_1668696182826/work
certifi @ file:///croot/certifi_1671487769961/work/certifi
exceptiongroup==1.2.2
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1671697413597/work
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pytest==7.4.4
-e git+https://github.com/cgarwood/python-openzwave-mqtt.git@aa4ba281c9750e9cdd4e07afcaf18dbc741b6a78#egg=python_openzwave_mqtt
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
zipp @ file:///croot/zipp_1672387121353/work
| name: python-openzwave-mqtt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- pip==24.0
- pytest==7.4.4
prefix: /opt/conda/envs/python-openzwave-mqtt
| [
"test/models/test_value.py::test_value_events"
] | [] | [] | [] | Apache License 2.0 | 5,935 | 779 | [
"openzwavemqtt/models/command_class.py",
"openzwavemqtt/models/instance.py",
"openzwavemqtt/models/node.py",
"openzwavemqtt/options.py"
] |
|
cgarwood__python-openzwave-mqtt-32 | 399ee0aabb9fb814d636829483165ec949ed3983 | 2019-12-02 04:55:27 | 399ee0aabb9fb814d636829483165ec949ed3983 | diff --git a/openzwavemqtt/base.py b/openzwavemqtt/base.py
index 350ef4a..1d2f426 100644
--- a/openzwavemqtt/base.py
+++ b/openzwavemqtt/base.py
@@ -31,7 +31,7 @@ class ItemCollection:
def process_message(self, topic: Deque[str], message: dict):
"""Process a new message."""
- item_id = topic.popleft()
+ item_id = int(topic.popleft())
item = self.collection.get(item_id)
added = False
@@ -103,7 +103,7 @@ class ZWaveBase(ABC):
options: OZWOptions,
parent: Optional["ZWaveBase"],
topic_part: str,
- item_id: Optional[str],
+ item_id: Optional[int],
):
"""Initialize the model."""
# Runtime options
diff --git a/openzwavemqtt/models/node.py b/openzwavemqtt/models/node.py
index 3ade28b..4eb7f50 100644
--- a/openzwavemqtt/models/node.py
+++ b/openzwavemqtt/models/node.py
@@ -1,5 +1,5 @@
"""Model for Node."""
-from typing import List
+from typing import Iterable, List, TYPE_CHECKING
from ..base import ZWaveBase, ItemCollection
from ..const import EVENT_NODE_ADDED, EVENT_NODE_CHANGED, EVENT_NODE_REMOVED
@@ -7,6 +7,9 @@ from ..const import EVENT_NODE_ADDED, EVENT_NODE_CHANGED, EVENT_NODE_REMOVED
from .node_statistics import OZWNodeStatistics
from .node_instance import OZWNodeInstance
+if TYPE_CHECKING:
+ from .value import OZWValue
+
class OZWNode(ZWaveBase):
@@ -199,16 +202,15 @@ class OZWNode(ZWaveBase):
"""Return Neighbors."""
return self.data.get("Neighbors")
- @property
- def values(self):
- """Return list of OZWValue child items."""
- _values = []
- for instance in self.collections["instance"]:
- for cc in instance.collections["commandclass"]:
- for value in cc.collections["value"]:
- _values.append(value)
-
- return _values
+ def values(self) -> Iterable["OZWValue"]:
+ """Iterate over all OZWValue child items."""
+ # pylint: disable=no-member
+ return (
+ value
+ for instance in self.instances()
+ for cc in instance.commandclasses()
+ for value in cc.values()
+ )
def create_collections(self):
"""Create collections that Node supports."""
| Convert item_id to int
All IDs are ints in OZW. We now extract the ID from the topic and pass them in as strins (`item_id`) to the constructor of `ZWaveBase`. We should convert these to integers as data posted to OZW topics will also refer to nodes as integers. Makes it easier on the consumer of this lib.
Conversion should happen in `ItemCollection` (base.py) where it instantiates the model: `self.item_class(…)`. | cgarwood/python-openzwave-mqtt | diff --git a/test/models/test_instance_statistics.py b/test/models/test_instance_statistics.py
index d2cbffe..e64722f 100644
--- a/test/models/test_instance_statistics.py
+++ b/test/models/test_instance_statistics.py
@@ -26,7 +26,7 @@ def test_statistics(mgr):
mgr.mock_receive_json("OpenZWave/1", {})
mgr.mock_receive_json("OpenZWave/1/statistics/", RESPONSE_JSON)
- statistics = mgr.get_instance("1").get_statistics()
+ statistics = mgr.get_instance(1).get_statistics()
assert statistics.sof_count == 148
assert statistics.read_count == 147
diff --git a/test/models/test_node_statistics.py b/test/models/test_node_statistics.py
index 2ace4ad..1606b1b 100644
--- a/test/models/test_node_statistics.py
+++ b/test/models/test_node_statistics.py
@@ -38,9 +38,9 @@ def test_statistics(mgr):
mgr.mock_receive_json("OpenZWave/1", {})
mgr.mock_receive_json("OpenZWave/1/node/2", {})
mgr.mock_receive_json("OpenZWave/1/node/2/statistics/", RESPONSE_JSON)
- statistics = mgr.get_instance("1").get_node("2").get_statistics()
+ statistics = mgr.get_instance(1).get_node(2).get_statistics()
assert statistics.ack_channel == 0
assert statistics.average_response_rtt == 47
assert statistics.average_request_rtt == 31
assert statistics.send_count == 10
- assert statistics.parent.id == "2"
+ assert statistics.parent.id == 2
diff --git a/test/models/test_value.py b/test/models/test_value.py
index e6e53d9..51854ab 100644
--- a/test/models/test_value.py
+++ b/test/models/test_value.py
@@ -19,12 +19,12 @@ def test_value_events(mgr):
"OpenZWave/1/node/2/instance/1/commandclass/4/value/3", {"Value": "yo"}
)
assert len(events) == 1
- assert events[0].id == "3"
+ assert events[0].id == 3
assert events[0].value == "yo"
- assert events[0].parent.id == "4"
+ assert events[0].parent.id == 4
# Test OZWNode.values shortcut
- assert mgr.get_instance("1").get_node("2").values[0].id == "3"
+ assert list(mgr.get_instance(1).get_node(2).values())[0].id == 3
# Listen for value changed
mgr.options.listen(EVENT_VALUE_CHANGED, events.append)
@@ -32,13 +32,13 @@ def test_value_events(mgr):
"OpenZWave/1/node/2/instance/1/commandclass/4/value/3", {"Value": "yo2"}
)
assert len(events) == 2
- assert events[0].id == "3"
+ assert events[0].id == 3
assert events[0].value == "yo2"
# Show how to use collection helpers
assert (
- list(mgr.get_instance("1").get_node("2").get_instance("1").commandclasses())[0]
- .get_value("3")
+ list(mgr.get_instance(1).get_node(2).get_instance(1).commandclasses())[0]
+ .get_value(3)
.value
== "yo2"
)
@@ -47,4 +47,4 @@ def test_value_events(mgr):
mgr.options.listen(EVENT_VALUE_REMOVED, events.append)
mgr.receive_message("OpenZWave/1/node/2/instance/1/commandclass/4/value/3", "")
assert len(events) == 3
- assert events[0].id == "3"
+ assert events[0].id == 3
diff --git a/test/test_base.py b/test/test_base.py
index a684063..a810dc0 100644
--- a/test/test_base.py
+++ b/test/test_base.py
@@ -45,11 +45,11 @@ def test_direct_collection(level1, caplog):
level1.process_message(deque(), {"info": 1})
level1.process_message(deque(["2"]), {"info": 1})
level1.process_message(deque(["2", "3"]), {"hello": 1})
- assert level1.get_level2("2").get_level3("3").hello == 1
+ assert level1.get_level2(2).get_level3(3).hello == 1
# Only works on numbers
level1.process_message(deque(["2", "a"]), {"hello": 1})
- assert level1.get_level2("2").get_level3("a") is None
+ assert level1.get_level2(2).get_level3("a") is None
assert "cannot process message" in caplog.text
@@ -59,17 +59,17 @@ def test_pending_messages(level1, options):
# Only message for level3 has been received, level2 is none
level1.process_message(deque(["2", "3"]), {"hello": 1})
- assert level1.get_level2("2") is None
+ assert level1.get_level2(2) is None
assert events == []
# Message for level2, level3 received, level1 still None
level1.process_message(deque(["2"]), {"hello": 1})
- assert level1.get_level2("2") is None
+ assert level1.get_level2(2) is None
assert events == []
# Level 1 receives data, process all child messages.
level1.process_message(deque(), {"info": 1})
- assert level1.get_level2("2").get_level3("3").hello == 1
+ assert level1.get_level2(2).get_level3(3).hello == 1
assert events == ["level2_added", "level3_added"]
@@ -111,11 +111,11 @@ def test_topic(options):
level1.process_message(deque(["2", "3", "statistics"]), {"hello": 1})
assert (
- level1.get_level2("2").get_level3("3").get_level4("4").topic
+ level1.get_level2(2).get_level3(3).get_level4(4).topic
== "OpenZWave/2/3/level4/4"
)
assert (
- level1.get_level2("2").get_level3("3").get_statistics().topic
+ level1.get_level2(2).get_level3(3).get_statistics().topic
== "OpenZWave/2/3/statistics"
)
@@ -126,9 +126,7 @@ def test_automatic_collections(level1):
level1.process_message(deque(["2", "3"]), {"hello": 1})
# Test overridden using PLURAL_NAME
- assert list(level1.level_twos()) == [level1.get_level2("2")]
+ assert list(level1.level_twos()) == [level1.get_level2(2)]
# Test default name
- assert list(level1.get_level2("2").level3s()) == [
- level1.get_level2("2").get_level3("3")
- ]
+ assert list(level1.get_level2(2).level3s()) == [level1.get_level2(2).get_level3(3)]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///croot/attrs_1668696182826/work
certifi @ file:///croot/certifi_1671487769961/work/certifi
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1671697413597/work
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pytest==7.1.2
-e git+https://github.com/cgarwood/python-openzwave-mqtt.git@399ee0aabb9fb814d636829483165ec949ed3983#egg=python_openzwave_mqtt
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
zipp @ file:///croot/zipp_1672387121353/work
| name: python-openzwave-mqtt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/python-openzwave-mqtt
| [
"test/models/test_instance_statistics.py::test_statistics",
"test/models/test_node_statistics.py::test_statistics",
"test/models/test_value.py::test_value_events",
"test/test_base.py::test_direct_collection",
"test/test_base.py::test_pending_messages",
"test/test_base.py::test_topic",
"test/test_base.py::test_automatic_collections"
] | [] | [
"test/test_base.py::test_recursive_remove"
] | [] | Apache License 2.0 | 5,936 | 610 | [
"openzwavemqtt/base.py",
"openzwavemqtt/models/node.py"
] |
|
ModellingWebLab__cellmlmanip-167 | fc8ab9ff9b9712aab0ffe98eb376e8cba5de61c9 | 2019-12-02 11:41:36 | fc8ab9ff9b9712aab0ffe98eb376e8cba5de61c9 | diff --git a/cellmlmanip/model.py b/cellmlmanip/model.py
index 1167056..5e014f6 100644
--- a/cellmlmanip/model.py
+++ b/cellmlmanip/model.py
@@ -198,13 +198,13 @@ class Model(object):
rhs_units, self.units.ureg.get_base_units(rhs_units)
)
- def get_equations_for(self, symbols, lexicographical_sort=True, recurse=True, strip_units=True):
+ def get_equations_for(self, symbols, recurse=True, strip_units=True):
"""Get all equations for a given collection of symbols.
- Results are sorted in topographical order.
- :param symbols: the symbols to get the equations for
- :param lexicographical_sort: indicates whether the result is sorted in lexicographical order first
- :param recurse: indicates whether to recurse the equation graph, or to return only the top level equations
+ Results are sorted first by dependencies, then by variable name.
+
+ :param symbols: the symbols to get the equations for.
+ :param recurse: indicates whether to recurse the equation graph, or to return only the top level equations.
:param strip_units: if ``True``, all ``sympy.Dummy`` objects representing number with units will be replaced
with ordinary sympy number objects.
"""
@@ -215,10 +215,7 @@ class Model(object):
graph = self.graph
# Get sorted list of symbols
- if lexicographical_sort:
- sorted_symbols = nx.lexicographical_topological_sort(graph, key=str)
- else:
- sorted_symbols = nx.topological_sort(graph)
+ sorted_symbols = nx.lexicographical_topological_sort(graph, key=str)
# Create set of symbols for which we require equations
required_symbols = set()
@@ -506,7 +503,20 @@ class Model(object):
# And replace the equation with one with the rhs subbed with sympy.Number objects
if subs_dict:
- graph.nodes[node]['equation'] = sympy.Eq(equation.lhs, equation.rhs.subs(subs_dict))
+ # Update rhs
+ rhs = equation.rhs.subs(subs_dict)
+
+ # Check if simplification removed dependencies on other variables, and if so remove the corresponding
+ # edges.
+ refs = self.find_symbols_and_derivatives([rhs])
+ edges = list(graph.in_edges(equation.lhs))
+ for edge in edges:
+ ref = edge[0]
+ if ref not in refs:
+ graph.remove_edge(ref, equation.lhs)
+
+ # Replace equation
+ graph.nodes[node]['equation'] = sympy.Eq(equation.lhs, rhs)
# Cache graph and return
self._graph_with_sympy_numbers = graph
| More robust checking of `get_equations_for`
This arises from #162. It seems that the simplification that happens when `strip_units=True` can result in invalid edges remaining. We need to add more robust tests, and then fix issues revealed; hopefully just by removing edges from `graph_with_sympy_numbers` when it's created!
Ideally we'd have a simple model containing easy to verify test cases:
- Equations something like:
1. `dy1/dt = number`
2. `dy2/dt = var1`
3. `dy3/dt = var2 * (number + dy1/dt)`
4. `var1 = (5 - 5) * var3`
5. `var2 = 23 + var4`
6. `var4 = -23`
7. `var3 = number / number`
8. `var5 = var4 + var3`
- Test cases:
- [x] `g_e_f([dy2/dt], strip_units=False)` -> `[eq7, eq4, eq2]`
- [x] `g_e_f([dy2/dt], strip_units=True)` -> `[eq4, eq2]` (simplification sets eq4 as `var1=0`)
- [x] `g_e_f([dy3/dt], strip_units=True)` -> `[eq1, eq6, eq5, eq3]` (because the simplification is only within a single equation, not across the system)
- [x] `g_e_f([var5], lexicographical_sort=False)` -> `[eq6, eq7, eq8]` (assuming document order if not sorting by lhs name? Not sure if we can guarantee any order though!)
- [x] `g_e_f([var5], lexicographical_sort=True)` -> `[eq7, eq6, eq8]`
- [x] Extra cases to test `recurse=False`! | ModellingWebLab/cellmlmanip | diff --git a/tests/test_model_functions.py b/tests/test_model_functions.py
index 58dd7a7..0cff687 100644
--- a/tests/test_model_functions.py
+++ b/tests/test_model_functions.py
@@ -4,7 +4,7 @@ import pytest
import sympy as sp
from cellmlmanip import parser
-from cellmlmanip.model import VariableDummy
+from cellmlmanip.model import Model, VariableDummy
from . import shared
@@ -154,93 +154,123 @@ class TestModelFunctions():
'Derivative(_Ca_handling_by_the_SR$F2, _environment$time), '\
'Derivative(_Ca_handling_by_the_SR$F3, _environment$time)]'
- # also tested by model_units
- def test_get_equations_for(self, basic_model):
- """ Tests Model.get_equations_for() works correctly.
- Note: the basic model has 1 equation dsv11/dt = 1 which is not
- related to a symbol and so has no equations that can be retrieved
- by symbol
+ def test_get_equations_for(self):
+ """
+ Tests Model.get_equations_for().
"""
- symbol_a = basic_model.get_symbol_by_cmeta_id("sv11")
- equation = basic_model.get_equations_for([symbol_a])
- assert len(equation) == 0
-
- symbol_t = basic_model.get_symbol_by_name("environment$time")
-
- equation1 = basic_model.get_equations_for([symbol_t])
- assert len(equation1) == 0
-
- # also tested by model_units
- def test_get_equations_for_1(self, aslanidi_model):
- """ Tests Model.get_equations_for() works correctly. """
-
- symbol_a = aslanidi_model.get_symbol_by_ontology_term(shared.OXMETA, "membrane_capacitance")
- equation = aslanidi_model.get_equations_for([symbol_a])
- assert len(equation) == 1
- assert equation[0].lhs == symbol_a
- assert equation[0].rhs == 5e-5
-
- def test_get_equations_for_2(self, hh_model):
- """ Tests Model.get_equations_for() works correctly. """
-
- # Test get_equations_for with topgraphical lexicographical ordering
-
- # Get ordered equations
- membrane_fast_sodium_current = hh_model.get_symbol_by_ontology_term(shared.OXMETA,
- 'membrane_fast_sodium_current')
- equations = hh_model.get_equations_for([membrane_fast_sodium_current])
- top_level_equations = hh_model.get_equations_for([membrane_fast_sodium_current], recurse=False)
-
- # There should be 4 in this model
- assert len(equations) == 4
-
- # There should be 3 top level (non recursed) in this model
- assert len(top_level_equations) == 3
-
- # Expected equations
- ER = sp.Eq(sp.Dummy('membrane$E_R'), sp.numbers.Float(-75.0))
- ENa = sp.Eq(sp.Dummy('sodium_channel$E_Na'),
- sp.add.Add(sp.Dummy('membrane$E_R'), sp.numbers.Float(115.0)))
- gNa = sp.Eq(sp.Dummy('sodium_channel$g_Na'), sp.numbers.Float(120.0))
- iNa = sp.Eq(sp.Dummy('sodium_channel$i_Na'),
- sp.Dummy('sodium_channel_m_gate$m') ** 3.0 * sp.Dummy('sodium_channel$g_Na') *
- sp.Dummy('sodium_channel_h_gate$h') * (sp.Dummy('membrane$V') -
- sp.Dummy('sodium_channel$E_Na')))
-
- # Get order as strings, for easy comparison
- ER, ENa, gNa, iNa = str(ER), str(ENa), str(gNa), str(iNa)
- expected_order = [ER, ENa, gNa, iNa]
-
- # Check equations against expected equations
- equations = [str(eq) for eq in equations]
- assert equations == expected_order
-
- # Check topologically (but not lexicographically) ordered equations
- unordered_equations = hh_model.get_equations_for([membrane_fast_sodium_current], False)
- unordered_equations = [str(eq) for eq in unordered_equations]
-
- # Each equation should be both in the ordered and unordered equations
- assert set(unordered_equations) == set(equations)
-
- # ER should come before ENa
- assert unordered_equations.index(ER) < unordered_equations.index(ENa)
-
- # ENa and gNa should come before iNa
- assert unordered_equations.index(ENa) < unordered_equations.index(iNa)
- assert unordered_equations.index(gNa) < unordered_equations.index(iNa)
-
- def test_get_equations_for_with_dummies(self, hh_model):
-
- # Tests using get_equations_for without replacing dummies with sp numbers
- # Get ordered equations
- ina = hh_model.get_symbol_by_ontology_term(shared.OXMETA, 'membrane_fast_sodium_current')
- equations = hh_model.get_equations_for([ina], recurse=False, strip_units=False)
-
- for eq in equations:
- if eq.lhs.name == 'sodium_channel$g_Na':
- assert isinstance(eq.rhs, sp.Dummy)
- break
+ m = Model('simplification')
+ u = 'dimensionless'
+ t = m.add_variable('t', u)
+ y1 = m.add_variable('y1', u, initial_value=10)
+ y2 = m.add_variable('y2', u, initial_value=20)
+ y3 = m.add_variable('y3', u, initial_value=30)
+ v1 = m.add_variable('v1', u)
+ v2 = m.add_variable('v2', u)
+ v3 = m.add_variable('v3', u)
+ v4 = m.add_variable('v4', u)
+ v5 = m.add_variable('v5', u)
+ a1 = m.add_variable('a1', u)
+
+ # dy1/dt = 1
+ m.add_equation(sp.Eq(sp.Derivative(y1, t), m.add_number(1, u)))
+ # dy2/dt = v1 --> Doesn't simplify, reference to v1 is maintained
+ m.add_equation(sp.Eq(sp.Derivative(y2, t), v1))
+ # dy3/dt = v2 * (2 + dy1/dt)
+ m.add_equation(sp.Eq(sp.Derivative(y3, t), sp.Mul(v2, sp.Add(m.add_number(2, u), sp.Derivative(y1, t)))))
+ # v1 = (5 - 5) * v3 --> Simplifies to 0
+ m.add_equation(sp.Eq(v1, sp.Mul(sp.Add(m.add_number(5, u), m.add_number(-5, u)), v3)))
+ # v2 = 23 + v4 --> Doesn't simplify, reference to v4 is maintained
+ m.add_equation(sp.Eq(v2, sp.Add(m.add_number(23, u), v4)))
+ # v3 = 2 / 3
+ m.add_equation(sp.Eq(v3, sp.Mul(m.add_number(2, u), sp.Pow(m.add_number(3, u), sp.S.NegativeOne))))
+ # v4 = -23
+ m.add_equation(sp.Eq(v4, m.add_number(-23, u)))
+ # v5 = v3 + v4
+ m.add_equation(sp.Eq(v5, sp.Add(v3, v4)))
+ # a1 = v5 + v2 + v1 + t
+ m.add_equation(sp.Eq(a1, sp.Add(v5, v2, v1, t)))
+
+ # Simplified equations
+ e_v1 = sp.Eq(v1, sp.Number(0))
+ e_v2 = sp.Eq(v2, sp.Add(v4, sp.Number(23)))
+ e_v3 = sp.Eq(v3, sp.Number(2 / 3))
+ e_v4 = sp.Eq(v4, sp.Number(-23))
+ e_v5 = sp.Eq(v5, sp.Add(v3, v4))
+ e_a1 = sp.Eq(a1, sp.Add(v1, v2, v5, t))
+
+ d_y1 = sp.Derivative(y1, t)
+ d_y2 = sp.Derivative(y2, t)
+ d_y3 = sp.Derivative(y3, t)
+
+ e_y1 = sp.Eq(d_y1, sp.Number(1))
+ e_y2 = sp.Eq(d_y2, v1)
+ e_y3 = sp.Eq(d_y3, sp.Mul(v2, sp.Add(sp.Number(2), d_y1)))
+
+ # v1 with simplification: [v1=0] (simplified)
+ eqs = m.get_equations_for([v1])
+ assert eqs[0] == e_v1
+ assert len(eqs) == 1
+
+ # v1 without simplification: [v3=2/3, v1=(5-5)*v3]
+ eqs = m.get_equations_for([v1], strip_units=False)
+ assert eqs[0] == m.graph.nodes[v3]['equation']
+ assert eqs[1] == m.graph.nodes[v1]['equation']
+ assert len(eqs) == 2
+
+ # dy1/dt with simplification: [dy1/dt=1]
+ eqs = m.get_equations_for([d_y1])
+ assert eqs[0] == e_y1
+ assert len(eqs) == 1
+
+ # dy2/dt with simplification: [v1=0, dy2/dt=v1]
+ eqs = m.get_equations_for([d_y2])
+ assert eqs[0] == e_v1
+ assert eqs[1] == e_y2
+ assert len(eqs) == 2
+
+ # dy2/dt without simplification: [v3=2/3, v1=(5-5)*v3, dy2/dt=v1]
+ eqs = m.get_equations_for([d_y2], strip_units=False)
+ assert eqs[0] == m.graph.nodes[v3]['equation']
+ assert eqs[1] == m.graph.nodes[v1]['equation']
+ assert eqs[2] == m.graph.nodes[d_y2]['equation']
+ assert len(eqs) == 3
+
+ # dy3/dt with simpification: [dy1/dt=1, v4=-23, v2=v4+23, dy2/dt=v2*(2+dy1/dt)]
+ eqs = m.get_equations_for([d_y3])
+ assert e_y3 in eqs
+ assert e_y1 in eqs
+ assert e_v2 in eqs
+ assert e_v4 in eqs
+ assert len(eqs) == 4
+
+ # a1 with simplification: [v1=0, v3=2/3, v4=-23, v2=v4+23, v5=v3+v4, a1=v1+v2+v5]
+ eqs = m.get_equations_for([a1])
+ assert eqs[0] == e_v1
+ assert eqs[1] == e_v3
+ assert eqs[2] == e_v4
+ assert eqs[3] == e_v2
+ assert eqs[4] == e_v5
+ assert eqs[5] == e_a1
+ assert len(eqs) == 6
+
+ # a1 with only one level of recursion
+ eqs = m.get_equations_for([a1], recurse=False)
+ assert eqs[0] == e_v1
+ assert eqs[1] == e_v2
+ assert eqs[2] == e_v5
+ assert eqs[3] == e_a1
+ assert len(eqs) == 4
+
+ # Multiple input symbols: [d_y1=1, v1=0, d_y2=v1, v4=-23, v2=23+v4, d_y3=v2*(2+d_y1)]
+ eqs = m.get_equations_for([d_y1, d_y2, d_y3])
+ assert eqs[0] == e_y1
+ assert eqs[1] == e_v1
+ assert eqs[2] == e_y2
+ assert eqs[3] == e_v4
+ assert eqs[4] == e_v2
+ assert eqs[5] == e_y3
+ assert len(eqs) == 6
def test_get_value(self, aslanidi_model):
""" Tests Model.get_value() works correctly. """
diff --git a/tests/test_model_units.py b/tests/test_model_units.py
index 6db3b26..79f184c 100644
--- a/tests/test_model_units.py
+++ b/tests/test_model_units.py
@@ -12,25 +12,6 @@ class TestModelUnits:
symbol = simple_units_model.get_symbol_by_cmeta_id("b")
assert symbol.is_Symbol
- def test_equations(self, simple_units_model):
- """ Tests the Model.get_equations_for function."""
- symbol_a = simple_units_model.get_symbol_by_cmeta_id("a")
- equation = simple_units_model.get_equations_for([symbol_a])
- assert len(equation) == 1
- assert equation[0].lhs == symbol_a
- assert equation[0].rhs == 1.0
-
- def test_equations_2(self, simple_units_model):
- """ Tests the Model.get_equations_for function. """
- symbol_a = simple_units_model.get_symbol_by_cmeta_id("a")
- symbol_b = simple_units_model.get_symbol_by_cmeta_id("b")
- equation = simple_units_model.get_equations_for([symbol_b])
- assert len(equation) == 2
- assert equation[0].lhs == symbol_a
- assert equation[0].rhs == 1.0
- assert equation[1].lhs == symbol_b
- assert equation[1].rhs == 2.0 / symbol_a
-
def test_units(self, simple_units_model):
""" Tests units read and calculated from a model. """
symbol_a = simple_units_model.get_symbol_by_cmeta_id("a")
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"isort",
"codecov"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/ModellingWebLab/cellmlmanip.git@fc8ab9ff9b9712aab0ffe98eb376e8cba5de61c9#egg=cellmlmanip
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
decorator==4.4.0
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.6.0
isort==5.10.1
lxml==4.4.1
mccabe==0.7.0
mpmath==1.1.0
networkx==2.3
packaging==21.3
Pint==0.9
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==2.4.2
pytest==7.0.1
pytest-cov==4.0.0
rdflib==4.2.2
requests==2.27.1
six==1.12.0
sympy==1.4
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: cellmlmanip
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- decorator==4.4.0
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.6.0
- isort==5.10.1
- lxml==4.4.1
- mccabe==0.7.0
- mpmath==1.1.0
- networkx==2.3
- packaging==21.3
- pint==0.9
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==2.4.2
- pytest==7.0.1
- pytest-cov==4.0.0
- rdflib==4.2.2
- requests==2.27.1
- six==1.12.0
- sympy==1.4
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/cellmlmanip
| [
"tests/test_model_functions.py::TestModelFunctions::test_get_equations_for"
] | [] | [
"tests/test_model_functions.py::TestModelFunctions::test_graph_property",
"tests/test_model_functions.py::TestModelFunctions::test_graph_for_dae",
"tests/test_model_functions.py::TestModelFunctions::test_get_state_symbols",
"tests/test_model_functions.py::TestModelFunctions::test_get_state_symbols2",
"tests/test_model_functions.py::TestModelFunctions::test_get_free_variable_symbol",
"tests/test_model_functions.py::TestModelFunctions::test_get_free_variable_symbol_1",
"tests/test_model_functions.py::TestModelFunctions::test_get_initial_value",
"tests/test_model_functions.py::TestModelFunctions::test_get_derivative_symbols",
"tests/test_model_functions.py::TestModelFunctions::test_get_derivative_symbols2",
"tests/test_model_functions.py::TestModelFunctions::test_get_value",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_cmeta_id",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_cmeta_id_2",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_name",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_ontology_term",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbols_by_rdf",
"tests/test_model_functions.py::TestModelFunctions::test_add_equation",
"tests/test_model_functions.py::TestModelFunctions::test_set_equation",
"tests/test_model_functions.py::TestModelFunctions::test_set_equation2",
"tests/test_model_functions.py::TestModelFunctions::test_add_number",
"tests/test_model_functions.py::TestModelFunctions::test_add_unit",
"tests/test_model_functions.py::TestModelFunctions::test_add_variable",
"tests/test_model_functions.py::TestModelFunctions::test_find_symbols_and_derivatives",
"tests/test_model_functions.py::TestModelFunctions::test_find_symbols_and_derivatives2",
"tests/test_model_functions.py::TestModelFunctions::test_connect_variables",
"tests/test_model_functions.py::TestModelFunctions::test_connect_variable2",
"tests/test_model_units.py::TestModelUnits::test_symbols",
"tests/test_model_units.py::TestModelUnits::test_units",
"tests/test_model_units.py::TestModelUnits::test_bad_units"
] | [] | BSD 3-Clause License | 5,937 | 653 | [
"cellmlmanip/model.py"
] |
|
ModellingWebLab__cellmlmanip-171 | 599758456d9040a944be0b0603e258d51d3d1c63 | 2019-12-02 18:14:03 | 599758456d9040a944be0b0603e258d51d3d1c63 | diff --git a/cellmlmanip/model.py b/cellmlmanip/model.py
index 5e014f6..cf611dd 100644
--- a/cellmlmanip/model.py
+++ b/cellmlmanip/model.py
@@ -112,9 +112,11 @@ class Model(object):
:return: A :class:`VariableDummy` object.
"""
+ # Check for clashes
if name in self._name_to_symbol:
raise ValueError('Variable %s already exists.' % name)
+ # Add variable
self._name_to_symbol[name] = var = VariableDummy(
name=name,
units=self.units.get_quantity(units),
@@ -125,6 +127,9 @@ class Model(object):
cmeta_id=cmeta_id,
)
+ # Invalidate cached graphs
+ self._invalidate_cache()
+
return var
def connect_variables(self, source_name: str, target_name: str):
@@ -141,45 +146,48 @@ class Model(object):
source = self._name_to_symbol[source_name]
target = self._name_to_symbol[target_name]
- # If the source variable has already been assigned a final symbol
- if source.assigned_to:
-
- if target.assigned_to:
- raise ValueError('Target already assigned to %s before assignment to %s' %
- (target.assigned_to, source.assigned_to))
-
- # If source/target variable is in the same unit
- if source.units == target.units:
- # Direct substitution is possible
- target.assigned_to = source.assigned_to
- # everywhere the target variable is used, replace with source variable
- for index, equation in enumerate(self.equations):
- self.equations[index] = equation.xreplace({target: source.assigned_to})
- # Otherwise, this connection requires a conversion
- else:
- # Get the scaling factor required to convert source units to target units
- factor = self.units.convert_to(1 * source.units, target.units).magnitude
+ # If the source variable has not been assigned a symbol, we can't make this connection
+ if not source.assigned_to:
+ logger.info('The source variable has not been assigned to a symbol '
+ '(i.e. expecting a connection): %s ⟶ %s',
+ target.name, source.name)
+ return False
+
+ # If target is already assigned this is an error
+ if target.assigned_to:
+ raise ValueError('Target already assigned to %s before assignment to %s' %
+ (target.assigned_to, source.assigned_to))
+
+ # If source/target variable is in the same unit
+ if source.units == target.units:
+ # Direct substitution is possible
+ target.assigned_to = source.assigned_to
+ # everywhere the target variable is used, replace with source variable
+ for index, equation in enumerate(self.equations):
+ self.equations[index] = equation.xreplace({target: source.assigned_to})
+
+ # Otherwise, this connection requires a conversion
+ else:
+ # Get the scaling factor required to convert source units to target units
+ factor = self.units.convert_to(1 * source.units, target.units).magnitude
- # Dummy to represent this factor in equations, having units for conversion
- factor_dummy = self.add_number(factor, str(target.units / source.units))
+ # Dummy to represent this factor in equations, having units for conversion
+ factor_dummy = self.add_number(factor, str(target.units / source.units))
- # Add an equations making the connection with the required conversion
- self.equations.append(sympy.Eq(target, source.assigned_to * factor_dummy))
+ # Add an equations making the connection with the required conversion
+ self.equations.append(sympy.Eq(target, source.assigned_to * factor_dummy))
- logger.info('Connection req. unit conversion: %s', self.equations[-1])
+ logger.info('Connection req. unit conversion: %s', self.equations[-1])
- # The assigned symbol for this variable is itself
- target.assigned_to = target
+ # The assigned symbol for this variable is itself
+ target.assigned_to = target
- logger.debug('Updated target: %s', target)
+ logger.debug('Updated target: %s', target)
- return True
+ # Invalidate cached graphs
+ self._invalidate_cache()
- # The source variable has not been assigned a symbol, so we can't make this connection
- logger.info('The source variable has not been assigned to a symbol '
- '(i.e. expecting a connection): %s ⟶ %s',
- target.name, source.name)
- return False
+ return True
def add_rdf(self, rdf: str):
""" Takes an RDF string and stores it in the model's RDF graph. """
@@ -569,36 +577,16 @@ class Model(object):
symbols |= self.find_symbols_and_derivatives(expr.args)
return symbols
- def set_equation(self, lhs, rhs):
+ def remove_equation(self, equation):
"""
- Adds an equation defining the variable named in ``lhs``, or replaces an existing one.
-
- As with :meth:`add_equation()` the LHS must be either a variable symbol or a derivative, and all numbers and
- variable symbols used in ``lhs`` and ``rhs`` must have been obtained from this model, e.g. via
- :meth:`add_number()`, :meth:`add_variable()`, or :meth:`get_symbol_by_ontology_term()`.
+ Removes an equation from the model.
- :param lhs: An LHS expression (either a symbol or a derivative).
- :param rhs: The new RHS expression for this variable.
+ :param equation: The equation to remove.
"""
- # Get variable symbol named in the lhs
- lhs_symbol = lhs
- if lhs_symbol.is_Derivative:
- lhs_symbol = lhs_symbol.free_symbols.pop()
- assert isinstance(lhs_symbol, VariableDummy)
-
- # Check if the variable named in the lhs already has an equation
- i_existing = None
- for i, eq in enumerate(self.equations):
- symbol = eq.lhs.free_symbols.pop() if eq.lhs.is_Derivative else eq.lhs
- if symbol == lhs_symbol:
- i_existing = i
- break
-
- # Add or replace equation
- if i_existing is None:
- self.equations.append(sympy.Eq(lhs, rhs))
- else:
- self.equations[i_existing] = sympy.Eq(lhs, rhs)
+ try:
+ self.equations.remove(equation)
+ except ValueError:
+ raise KeyError('Equation not found in model ' + str(equation))
# Invalidate cached equation graphs
self._invalidate_cache()
| Replace a variable's equation
Given a variable, we need a method to set/replace its RHS
From FC, I can read equations (with variables specified as ontology terms) and units (using an FC unit syntax).
In a two-way communication approach, I could then:
- ask the cellmlmanip model for the symbols for each term
- ask the cellmlmanip model for the unit objects corresponding to my units
- pass in an equation with symbols pointing to model variables and literals as dummies containing the model's unit vars
Buuuut, that would mean fc talking to cellmlmanip through cg, so would probably prefer a one-way approach
- give cellmlmanip a sympy equation with variables as ontology terms and units in some syntax that `pint` understands?
- cellmlmanip then replaces the ontology terms and units
So then we'd need to write some mini-spec for how cellmlmanip expects variable symbols and units?
| ModellingWebLab/cellmlmanip | diff --git a/tests/test_model_functions.py b/tests/test_model_functions.py
index 0cff687..3a98f46 100644
--- a/tests/test_model_functions.py
+++ b/tests/test_model_functions.py
@@ -359,35 +359,8 @@ class TestModelFunctions():
assert eqn[2].lhs == symbol2
assert eqn[2].rhs == sp.Add(symbol, symbol1)
- def test_set_equation(self, local_model):
- """ Tests the Model.set_equation method.
- """
- model = local_model
- assert len(model.equations) == 1
- # so we are adding
- # newvar2 = newvar + newvar1
- # but need to also add newvar1 = 2; newvar = 2 in order or the graph to resolve correctly
- model.add_variable(name='newvar', units='mV')
- symbol = model.get_symbol_by_name('newvar')
- model.add_variable(name='newvar1', units='mV')
- symbol1 = model.get_symbol_by_name('newvar1')
- model.add_variable(name='newvar2', units='mV')
- symbol2 = model.get_symbol_by_name('newvar2')
- model.set_equation(symbol, 2.0)
- model.set_equation(symbol1, 2.0)
- model.set_equation(symbol2, sp.Add(symbol, symbol1))
- assert len(model.equations) == 4
- eqn = model.get_equations_for([symbol2])
- assert len(eqn) == 3
- assert eqn[0].lhs == symbol
- assert eqn[0].rhs == 2.0
- assert eqn[1].lhs == symbol1
- assert eqn[1].rhs == 2.0
- assert eqn[2].lhs == symbol2
- assert eqn[2].rhs == sp.Add(symbol, symbol1)
-
- def test_set_equation2(self, local_hh_model):
- """ Tests replacing an equation in a model. """
+ def test_remove_equation(self, local_hh_model):
+ """ Tests the Model.remove_equation method. """
model = local_hh_model
# Get model, assert that V is a state variable
@@ -395,8 +368,11 @@ class TestModelFunctions():
assert v.type == 'state'
# Now clamp it to -80mV
- rhs = model.add_number(-80, str(v.units))
- model.set_equation(v, rhs)
+ t = model.get_symbol_by_ontology_term(shared.OXMETA, 'time')
+ equation = model.graph.nodes[sp.Derivative(v, t)]['equation']
+ model.remove_equation(equation)
+ equation = sp.Eq(v, model.add_number(-80, str(v.units)))
+ model.add_equation(equation)
# Check that V is no longer a state
v = model.get_symbol_by_ontology_term(shared.OXMETA, 'membrane_voltage')
@@ -406,24 +382,23 @@ class TestModelFunctions():
# See: https://github.com/ModellingWebLab/cellmlmanip/issues/133
# Now make V a state again
- t = model.get_symbol_by_ontology_term(shared.OXMETA, 'time')
- lhs = sp.Derivative(v, t)
dvdt_units = 'unlikely_unit_name'
model.add_unit(dvdt_units, [
{'units': str(v.units)},
{'units': str(t.units), 'exponent': -1},
])
- rhs = model.add_number(0, dvdt_units)
- model.set_equation(lhs, rhs)
+ model.remove_equation(equation)
+ equation = sp.Eq(sp.Derivative(v, t), model.add_number(0, dvdt_units))
+ model.add_equation(equation)
# Check that V is a state again
v = model.get_symbol_by_ontology_term(shared.OXMETA, 'membrane_voltage')
assert v.type == 'state'
- # Set equation for a newly created variable
- lhs = model.add_variable(name='an_incredibly_unlikely_variable_name', units=str(v.units))
- rhs = model.add_number(12, str(v.units))
- model.set_equation(lhs, rhs)
+ # Test removing non-existing equation
+ equation = sp.Eq(sp.Derivative(v, t), model.add_number(5, dvdt_units))
+ with pytest.raises(KeyError, match='Equation not found'):
+ model.remove_equation(equation)
def test_add_number(self, local_model):
""" Tests the Model.add_number method. """
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"isort",
"codecov"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/ModellingWebLab/cellmlmanip.git@599758456d9040a944be0b0603e258d51d3d1c63#egg=cellmlmanip
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
decorator==4.4.0
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.6.0
isort==5.10.1
lxml==4.4.1
mccabe==0.7.0
mpmath==1.1.0
networkx==2.3
packaging==21.3
Pint==0.9
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==2.4.2
pytest==7.0.1
pytest-cov==4.0.0
rdflib==4.2.2
requests==2.27.1
six==1.12.0
sympy==1.4
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: cellmlmanip
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- decorator==4.4.0
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.6.0
- isort==5.10.1
- lxml==4.4.1
- mccabe==0.7.0
- mpmath==1.1.0
- networkx==2.3
- packaging==21.3
- pint==0.9
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==2.4.2
- pytest==7.0.1
- pytest-cov==4.0.0
- rdflib==4.2.2
- requests==2.27.1
- six==1.12.0
- sympy==1.4
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/cellmlmanip
| [
"tests/test_model_functions.py::TestModelFunctions::test_remove_equation"
] | [] | [
"tests/test_model_functions.py::TestModelFunctions::test_graph_property",
"tests/test_model_functions.py::TestModelFunctions::test_graph_for_dae",
"tests/test_model_functions.py::TestModelFunctions::test_get_state_symbols",
"tests/test_model_functions.py::TestModelFunctions::test_get_state_symbols2",
"tests/test_model_functions.py::TestModelFunctions::test_get_free_variable_symbol",
"tests/test_model_functions.py::TestModelFunctions::test_get_free_variable_symbol_1",
"tests/test_model_functions.py::TestModelFunctions::test_get_initial_value",
"tests/test_model_functions.py::TestModelFunctions::test_get_derivative_symbols",
"tests/test_model_functions.py::TestModelFunctions::test_get_derivative_symbols2",
"tests/test_model_functions.py::TestModelFunctions::test_get_equations_for",
"tests/test_model_functions.py::TestModelFunctions::test_get_value",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_cmeta_id",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_cmeta_id_2",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_name",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbol_by_ontology_term",
"tests/test_model_functions.py::TestModelFunctions::test_get_symbols_by_rdf",
"tests/test_model_functions.py::TestModelFunctions::test_add_equation",
"tests/test_model_functions.py::TestModelFunctions::test_add_number",
"tests/test_model_functions.py::TestModelFunctions::test_add_unit",
"tests/test_model_functions.py::TestModelFunctions::test_add_variable",
"tests/test_model_functions.py::TestModelFunctions::test_find_symbols_and_derivatives",
"tests/test_model_functions.py::TestModelFunctions::test_find_symbols_and_derivatives2",
"tests/test_model_functions.py::TestModelFunctions::test_connect_variables",
"tests/test_model_functions.py::TestModelFunctions::test_connect_variable2"
] | [] | BSD 3-Clause License | 5,940 | 1,560 | [
"cellmlmanip/model.py"
] |
|
lace__polliwog-127 | 3953122d5f309753bddd7c1df38afe5a877d3ba9 | 2019-12-02 19:24:03 | 9fee49c8c4e2693d7d0b82bc36205463d9b08094 | diff --git a/polliwog/plane/plane.py b/polliwog/plane/plane.py
index fc35b8b..c115070 100644
--- a/polliwog/plane/plane.py
+++ b/polliwog/plane/plane.py
@@ -141,7 +141,6 @@ class Plane(object):
the plane (away from the normal), and 0 for points on the plane.
"""
- vg.shape.check(locals(), "points", (-1, 3))
return np.sign(self.signed_distance(points))
def points_in_front(self, points, inverted=False, ret_indices=False):
@@ -180,7 +179,6 @@ class Plane(object):
return functions.signed_distance_to_plane(points, self.equation)
def distance(self, points):
- vg.shape.check(locals(), "points", (-1, 3))
return np.absolute(self.signed_distance(points))
def project_point(self, points):
diff --git a/polliwog/segment/segment.py b/polliwog/segment/segment.py
index 49ddb24..ef6600f 100644
--- a/polliwog/segment/segment.py
+++ b/polliwog/segment/segment.py
@@ -69,40 +69,6 @@ def partition_segment(p1, p2, n_samples, endpoint=True):
] + p1
-def partition_segment_old(p1, p2, partition_size=5):
- """
- Deprecated. Please use partition_segment.
-
- For two points in n-space, return an np.ndarray of partition points at equal widths
- determined by 'partition_size' on the interior of the segment determined by p1 & p2.
-
- Accomplished by partitioning the segment into 'partition_size' sub-intervals.
-
- Partition order is oriented from p1 to p2.
-
- Args:
- p1, p2:
- 1 x N vectors
-
- partition_size:
- size of partition. should be > 1.
- """
-
- if not isinstance(partition_size, int):
- raise TypeError("partition_size should be an int.")
- elif partition_size < 2:
- raise ValueError("partition_size should be bigger than 1.")
-
- dist = np.linalg.norm(p1 - p2)
-
- unit_direction = (p2 - p1) / dist
- partition_width = dist / partition_size
-
- domain = partition_width * np.arange(1, partition_size)
-
- return p1 + unit_direction * domain[:, np.newaxis]
-
-
def closest_point_of_line_segment(points, start_points, segment_vectors):
# Adapted from public domain algorithm
# https://gdbooks.gitbooks.io/3dcollisions/content/Chapter1/closest_point_on_line.html
| Remove partition_segment_old
Seems a safe bet that we'd want to remove a function with this name. | lace/polliwog | diff --git a/polliwog/plane/test_plane.py b/polliwog/plane/test_plane.py
index 79a8269..4c4ae95 100644
--- a/polliwog/plane/test_plane.py
+++ b/polliwog/plane/test_plane.py
@@ -51,6 +51,7 @@ def test_returns_unsigned_distances_for_xz_plane_at_origin():
expected = np.array([502.0, 501.0])
np.testing.assert_array_equal(expected, plane.distance(pts))
+ np.testing.assert_array_equal(expected[0], plane.distance(pts[0]))
def test_returns_signed_distances_for_diagonal_plane():
@@ -103,10 +104,9 @@ def test_returns_sign_for_diagonal_plane():
pts = np.array([[425.0, 425.0, 25.0], [-500.0, -500.0, 25.0]])
- sign = plane.sign(pts)
-
expected = np.array([1.0, -1.0])
- np.testing.assert_array_equal(sign, expected)
+ np.testing.assert_array_equal(plane.sign(pts), expected)
+ np.testing.assert_array_equal(plane.sign(pts[0]), expected[0])
def test_points_in_front():
diff --git a/polliwog/segment/test_segment.py b/polliwog/segment/test_segment.py
index efe231e..5a8e17f 100644
--- a/polliwog/segment/test_segment.py
+++ b/polliwog/segment/test_segment.py
@@ -4,99 +4,9 @@ from .segment import (
closest_point_of_line_segment,
partition,
partition_segment,
- partition_segment_old,
)
-def test_partition_segment_old_raises_exception_for_invalid_partition_size_type():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([1.0, 0.0, 0.0])
-
- with pytest.raises(TypeError):
- partition_segment_old(p1, p2, "foobar")
-
-
-def test_partition_segment_old_raises_exception_for_invalid_partition_size_value():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([1.0, 0.0, 0.0])
-
- with pytest.raises(ValueError):
- partition_segment_old(p1, p2, 1)
-
-
-def test_partition_segment_old_returns_partition_for_odd_partition_size():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([2.0, 0.0, 0.0])
-
- partition_size = 4
-
- expected_partition_points = np.array(
- [[0.5, 0.0, 0.0], [1.0, 0.0, 0.0], [1.5, 0.0, 0.0]]
- )
-
- np.testing.assert_array_almost_equal(
- partition_segment_old(p1, p2, partition_size),
- expected_partition_points,
- decimal=7,
- )
-
-
-def test_partition_segment_old_returns_partition_points_for_even_partition_size():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([1.0, 0.0, 0.0])
-
- partition_size = 5
-
- expected_partition_points = np.array(
- [[0.2, 0.0, 0.0], [0.4, 0.0, 0.0], [0.6, 0.0, 0.0], [0.8, 0.0, 0.0]]
- )
-
- np.testing.assert_array_almost_equal(
- partition_segment_old(p1, p2, partition_size),
- expected_partition_points,
- decimal=7,
- )
-
-
-def test_partition_segment_old_returns_partition_points_in_oriented_order():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([1.0, 0.0, 0.0])
-
- partition_size = 5
-
- expected_partition_points = np.array(
- [[0.8, 0.0, 0.0], [0.6, 0.0, 0.0], [0.4, 0.0, 0.0], [0.2, 0.0, 0.0]]
- )
-
- np.testing.assert_array_almost_equal(
- partition_segment_old(p2, p1, partition_size),
- expected_partition_points,
- decimal=7,
- )
-
-
-def test_partition_segment_old_returns_partition_points_for_diagonal_segment():
- p1 = np.array([0.0, 0.0, 0.0])
- p2 = np.array([1.0, 1.0, 0.0])
-
- partition_size = 3
-
- dist = np.linalg.norm(p2 - p1)
- domain = [(1 / 3.0) * dist, (2 / 3.0) * dist]
-
- unit_direction = (p2 - p1) / dist
-
- expected_partition_points = np.array(
- [p1 + scalar * unit_direction for scalar in domain]
- )
-
- np.testing.assert_array_almost_equal(
- partition_segment_old(p1, p2, partition_size),
- expected_partition_points,
- decimal=7,
- )
-
-
def test_partition_segment_raises_exception_for_invalid_partition_size_type():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([1.0, 0.0, 0.0])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
ounce==1.1.1
packaging==24.2
pluggy==1.5.0
-e git+https://github.com/lace/polliwog.git@3953122d5f309753bddd7c1df38afe5a877d3ba9#egg=polliwog
pytest==8.3.5
tomli==2.2.1
vg==2.0.0
| name: polliwog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- ounce==1.1.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
- vg==2.0.0
prefix: /opt/conda/envs/polliwog
| [
"polliwog/plane/test_plane.py::test_returns_unsigned_distances_for_xz_plane_at_origin",
"polliwog/plane/test_plane.py::test_returns_sign_for_diagonal_plane"
] | [] | [
"polliwog/plane/test_plane.py::test_validation",
"polliwog/plane/test_plane.py::test_repr",
"polliwog/plane/test_plane.py::test_flipped",
"polliwog/plane/test_plane.py::test_returns_signed_distances_for_xz_plane_at_origin",
"polliwog/plane/test_plane.py::test_returns_signed_distances_for_diagonal_plane",
"polliwog/plane/test_plane.py::test_returns_unsigned_distances_for_diagonal_plane_at_origin",
"polliwog/plane/test_plane.py::test_signed_distance_validation",
"polliwog/plane/test_plane.py::test_points_in_front",
"polliwog/plane/test_plane.py::test_canonical_point",
"polliwog/plane/test_plane.py::test_project_point",
"polliwog/plane/test_plane.py::test_project_point_vectorized",
"polliwog/plane/test_plane.py::test_plane_from_points",
"polliwog/plane/test_plane.py::test_plane_from_points_and_vector",
"polliwog/plane/test_plane.py::test_fit_from_points",
"polliwog/plane/test_plane.py::test_line_plane_intersection",
"polliwog/plane/test_plane.py::test_line_plane_intersections",
"polliwog/plane/test_plane.py::test_line_segment_plane_intersection",
"polliwog/plane/test_plane.py::test_line_segment_plane_intersections",
"polliwog/segment/test_segment.py::test_partition_segment_raises_exception_for_invalid_partition_size_type",
"polliwog/segment/test_segment.py::test_partition_segment_raises_exception_for_invalid_partition_size_value",
"polliwog/segment/test_segment.py::test_partition_segment_returns_partition_for_odd_partition_size",
"polliwog/segment/test_segment.py::test_partition_segment_returns_partition_points_for_even_partition_size",
"polliwog/segment/test_segment.py::test_partition_segment_returns_partition_omitting_endpoint",
"polliwog/segment/test_segment.py::test_partition_adds_points_for_equal_length_line_segments",
"polliwog/segment/test_segment.py::test_partition_adds_points_for_nonequal_arbitrarily_oriented_line",
"polliwog/segment/test_segment.py::test_closest_point_of_line_segment"
] | [] | BSD 2-Clause "Simplified" License | 5,941 | 649 | [
"polliwog/plane/plane.py",
"polliwog/segment/segment.py"
] |
|
lace__polliwog-128 | b4399acbe78b92924f98d395135a2e95fd30033a | 2019-12-02 20:03:33 | 9fee49c8c4e2693d7d0b82bc36205463d9b08094 | diff --git a/polliwog/transform/affine_transform.py b/polliwog/transform/affine_transform.py
index 4c25c0d..feab638 100644
--- a/polliwog/transform/affine_transform.py
+++ b/polliwog/transform/affine_transform.py
@@ -18,3 +18,116 @@ def apply_affine_transform(points, transform_matrix):
transformed_points = np.delete(transformed_padded_points, 3, axis=1)
return maybe_decolumnize(transformed_points)
+
+
+def _convert_33_to_44(matrix):
+ """
+ Transform from:
+ array([[1., 2., 3.],
+ [2., 3., 4.],
+ [5., 6., 7.]])
+ to:
+ array([[1., 2., 3., 0.],
+ [2., 3., 4., 0.],
+ [5., 6., 7., 0.],
+ [0., 0., 0., 1.]])
+
+ """
+ vg.shape.check(locals(), "matrix", (3, 3))
+ result = np.pad(matrix, ((0, 1), (0, 1)), mode="constant")
+ result[3][3] = 1
+ return result
+
+
+def transform_matrix_for_rotation(rotation, ret_inverse_matrix=False):
+ """
+ Create a transformation matrix from the given 3x3 rotation matrix or a
+ Rodrigues vector.
+
+ With `ret_inverse_matrix=True`, also returns a matrix which provides
+ the reverse transform.
+ """
+ from .rodrigues import as_rotation_matrix
+
+ if rotation.shape == (3, 3):
+ forward3 = rotation
+ else:
+ vg.shape.check(locals(), "rotation", (3,))
+ forward3 = as_rotation_matrix(rotation)
+
+ forward = _convert_33_to_44(forward3)
+
+ if not ret_inverse_matrix:
+ return forward
+
+ # The inverse of a rotation matrix is its transpose.
+ inverse = forward.T
+ return forward, inverse
+
+
+def transform_matrix_for_translation(translation, ret_inverse_matrix=False):
+ """
+ Create a transformation matrix which translates by the provided
+ displacement vector.
+
+ Forward:
+
+ [[ 1, 0, 0, v_0 ],
+ [ 0, 1, 0, v_1 ],
+ [ 0, 0, 1, v_2 ],
+ [ 0, 0, 0, 1 ]]
+
+ Reverse:
+
+ [[ 1, 0, 0, -v_0 ],
+ [ 0, 1, 0, -v_1 ],
+ [ 0, 0, 1, -v_2 ],
+ [ 0, 0, 0, 1 ]]
+
+ Args:
+ vector (np.arraylike): A 3x1 vector.
+ """
+ vg.shape.check(locals(), "translation", (3,))
+
+ forward = np.eye(4)
+ forward[:, -1][:-1] = translation
+
+ if not ret_inverse_matrix:
+ return forward
+
+ inverse = np.eye(4)
+ inverse[:, -1][:-1] = -translation
+ return forward, inverse
+
+
+def transform_matrix_for_scale(scale_factor, ret_inverse_matrix=False):
+ """
+ Create a transformation matrix that scales by the given factor.
+
+ Forward:
+ [[ s_0, 0, 0, 0 ],
+ [ 0, s_1, 0, 0 ],
+ [ 0, 0, s_2, 0 ],
+ [ 0, 0, 0, 1 ]]
+
+ Reverse:
+ [[ 1/s_0, 0, 0, 0 ],
+ [ 0, 1/s_1, 0, 0 ],
+ [ 0, 0, 1/s_2, 0 ],
+ [ 0, 0, 0, 1 ]]
+
+ Args:
+ factor (float): The scale factor.
+ ret_inverse_matrix (bool): When `True`, also returns a matrix which
+ provides the inverse transform.
+ """
+ if scale_factor <= 0:
+ raise ValueError("Scale factor should be greater than zero")
+
+ forward = _convert_33_to_44(np.diag(np.repeat(scale_factor, 3)))
+
+ if not ret_inverse_matrix:
+ return forward
+
+ inverse = _convert_33_to_44(np.diag(np.repeat(1.0 / scale_factor, 3)))
+ return forward, inverse
diff --git a/polliwog/transform/composite.py b/polliwog/transform/composite.py
index e0090d8..17a3c99 100644
--- a/polliwog/transform/composite.py
+++ b/polliwog/transform/composite.py
@@ -1,25 +1,5 @@
import numpy as np
import vg
-from .affine_transform import apply_affine_transform
-
-
-def _convert_33_to_44(matrix):
- """
- Transform from:
- array([[1., 2., 3.],
- [2., 3., 4.],
- [5., 6., 7.]])
- to:
- array([[1., 2., 3., 0.],
- [2., 3., 4., 0.],
- [5., 6., 7., 0.],
- [0., 0., 0., 1.]])
-
- """
- vg.shape.check(locals(), "matrix", (3, 3))
- result = np.pad(matrix, ((0, 1), (0, 1)), mode="constant")
- result[3][3] = 1
- return result
class CompositeTransform(object):
@@ -59,6 +39,8 @@ class CompositeTransform(object):
or reverse mode.
"""
+ from .affine_transform import apply_affine_transform
+
transform_matrix = self.transform_matrix_for(
from_range=from_range, reverse=reverse
)
@@ -97,7 +79,7 @@ class CompositeTransform(object):
matrix = reduce(np.dot, matrices)
return matrix if reverse else matrix.T
- def append_transform4(self, forward, reverse=None):
+ def append_transform(self, forward, reverse=None):
"""
Append an arbitrary transformation, defined by 4x4 forward and reverse
matrices.
@@ -105,56 +87,27 @@ class CompositeTransform(object):
The new transformation is added to the end. Return its index.
"""
+ vg.shape.check(locals(), "forward", (4, 4))
if reverse is None:
reverse = np.linalg.inv(forward)
+ else:
+ vg.shape.check(locals(), "reverse", (4, 4))
new_index = len(self.transforms)
self.transforms.append((forward, reverse))
return new_index
- def append_transform3(self, forward, reverse=None):
- """
- Append an arbitrary transformation, defined by 3x3 forward and reverse
- matrices.
-
- The new transformation is added to the end. Return its index.
-
- """
- vg.shape.check(locals(), "forward", (3, 3))
- forward4 = _convert_33_to_44(forward)
- if reverse is None:
- reverse4 = None
- else:
- vg.shape.check(locals(), "reverse", (3, 3))
- reverse4 = _convert_33_to_44(reverse)
- return self.append_transform4(forward4, reverse4)
-
def scale(self, factor):
"""
Scale by the given factor.
- Forward:
- [[ s_0, 0, 0, 0 ],
- [ 0, s_1, 0, 0 ],
- [ 0, 0, s_2, 0 ],
- [ 0, 0, 0, 1 ]]
-
- Reverse:
- [[ 1/s_0, 0, 0, 0 ],
- [ 0, 1/s_1, 0, 0 ],
- [ 0, 0, 1/s_2, 0 ],
- [ 0, 0, 0, 1 ]]
-
Args:
factor (float): The scale factor.
"""
- if factor <= 0:
- raise ValueError("Scale factor should be greater than zero")
-
- forward3 = np.diag(np.repeat(factor, 3))
- reverse3 = np.diag(np.repeat(1.0 / factor, 3))
+ from .affine_transform import transform_matrix_for_scale
- return self.append_transform3(forward3, reverse3)
+ forward, inverse = transform_matrix_for_scale(factor, ret_inverse_matrix=True)
+ return self.append_transform(forward, inverse)
def convert_units(self, from_units, to_units):
"""
@@ -171,61 +124,37 @@ class CompositeTransform(object):
import ounce
factor = ounce.factor(from_units, to_units)
- self.scale(factor)
+ return self.scale(factor)
def translate(self, translation):
"""
Translate by the vector provided.
- Forward:
-
- [[ 1, 0, 0, v_0 ],
- [ 0, 1, 0, v_1 ],
- [ 0, 0, 1, v_2 ],
- [ 0, 0, 0, 1 ]]
-
- Reverse:
-
- [[ 1, 0, 0, -v_0 ],
- [ 0, 1, 0, -v_1 ],
- [ 0, 0, 1, -v_2 ],
- [ 0, 0, 0, 1 ]]
-
Args:
vector (np.arraylike): A 3x1 vector.
"""
- vg.shape.check(locals(), "translation", (3,))
+ from .affine_transform import transform_matrix_for_translation
- forward = np.eye(4)
- forward[:, -1][:-1] = translation
-
- reverse = np.eye(4)
- reverse[:, -1][:-1] = -translation
-
- return self.append_transform4(forward, reverse)
+ forward, inverse = transform_matrix_for_translation(
+ translation, ret_inverse_matrix=True
+ )
+ return self.append_transform(forward, inverse)
def reorient(self, up, look):
"""
Reorient using up and look.
-
"""
from .rotation import rotation_from_up_and_look
- forward3 = rotation_from_up_and_look(up, look)
- # The inverse of a rotation matrix is its transpose.
- return self.append_transform3(forward3, forward3.T)
+ return self.rotate(rotation_from_up_and_look(up, look))
def rotate(self, rotation):
"""
- Rotate by either an explicit matrix or a rodrigues vector
+ Rotate by the given 3x3 rotation matrix or a Rodrigues vector.
"""
- from .rodrigues import as_rotation_matrix
+ from .affine_transform import transform_matrix_for_rotation
- if rotation.shape == (3, 3):
- forward3 = rotation
- else:
- vg.shape.check(locals(), "rotation", (3,))
- forward3 = as_rotation_matrix(rotation)
-
- # The inverse of a rotation matrix is its transpose.
- return self.append_transform3(forward3, forward3.T)
+ forward, inverse = transform_matrix_for_rotation(
+ rotation, ret_inverse_matrix=True
+ )
+ return self.append_transform(forward, inverse)
diff --git a/polliwog/transform/coordinate_manager.py b/polliwog/transform/coordinate_manager.py
index e301e8d..5761e56 100644
--- a/polliwog/transform/coordinate_manager.py
+++ b/polliwog/transform/coordinate_manager.py
@@ -33,11 +33,8 @@ class CoordinateManager(object):
}
)
- def append_transform4(self, *args, **kwargs):
- self._transform.append_transform4(*args, **kwargs)
-
- def append_transform3(self, *args, **kwargs):
- self._transform.append_transform3(*args, **kwargs)
+ def append_transform(self, *args, **kwargs):
+ self._transform.append_transform(*args, **kwargs)
def scale(self, *args, **kwargs):
self._transform.scale(*args, **kwargs)
| Clean up matrix functions
- [x] Stop using `matrix` functions from vg (opened lace/vg#95 about removing them)
- [ ] Add `affine_transform`
- [x] Add `apply_affine_transform` (from `vg.matrix.transform`) | lace/polliwog | diff --git a/polliwog/transform/test_affine_transform.py b/polliwog/transform/test_affine_transform.py
index f97d6ea..730089e 100644
--- a/polliwog/transform/test_affine_transform.py
+++ b/polliwog/transform/test_affine_transform.py
@@ -1,28 +1,105 @@
import numpy as np
-from .affine_transform import apply_affine_transform
-
-scale_factor = np.array([3.0, 0.5, 2.0])
-transform = np.array(
- [
- [scale_factor[0], 0, 0, 0],
- [0, scale_factor[1], 0, 0],
- [0, 0, scale_factor[2], 0],
- [0, 0, 0, 1],
- ]
+import pytest
+from .affine_transform import (
+ apply_affine_transform,
+ transform_matrix_for_rotation,
+ transform_matrix_for_scale,
+ transform_matrix_for_translation,
)
-def test_apply_homogeneous():
- point = np.array([5.0, 0.0, 1.0])
- expected_point = np.array([15.0, 0.0, 2.0])
- np.testing.assert_array_equal(
- apply_affine_transform(point, transform), expected_point
+def create_cube_verts(origin, size):
+ # Create a cube. Since CompositeTransform just works on verticies,
+ # we don't need a full lace.mesh object.
+ origin = np.array(origin)
+ size = np.repeat(size, 3)
+ lower_base_plane = np.array(
+ [
+ # Lower base plane
+ origin,
+ origin + np.array([size[0], 0, 0]),
+ origin + np.array([size[0], 0, size[2]]),
+ origin + np.array([0, 0, size[2]]),
+ ]
)
+ upper_base_plane = lower_base_plane + np.array([0, size[1], 0])
+ return np.vstack([lower_base_plane, upper_base_plane])
+
+
+def create_default_cube_verts():
+ return create_cube_verts([1.0, 0.0, 0.0], 4.0)
-def test_apply_homogeneous_stacked():
+def test_apply_affine_transform():
+ scale_factor = np.array([3.0, 0.5, 2.0])
+ transform = np.array(
+ [
+ [scale_factor[0], 0, 0, 0],
+ [0, scale_factor[1], 0, 0],
+ [0, 0, scale_factor[2], 0],
+ [0, 0, 0, 1],
+ ]
+ )
+
points = np.array([[1.0, 2.0, 3.0], [5.0, 0.0, 1.0]])
expected_points = np.array([[3.0, 1.0, 6.0], [15.0, 0.0, 2.0]])
np.testing.assert_array_equal(
apply_affine_transform(points, transform), expected_points
)
+ np.testing.assert_array_equal(
+ apply_affine_transform(points[1], transform), expected_points[1]
+ )
+
+
+def test_rotate():
+ cube_v = create_default_cube_verts()
+ ways_to_rotate_around_y_a_quarter_turn = [
+ np.array([[0, 0, 1], [0, 1, 0], [-1, 0, 0]]),
+ np.array([0, np.pi / 2, 0]),
+ ]
+ for rot in ways_to_rotate_around_y_a_quarter_turn:
+ # Confidence check.
+ np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
+ np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
+
+ transformed_cube_v = apply_affine_transform(
+ cube_v, transform_matrix_for_rotation(rot)
+ )
+
+ np.testing.assert_array_almost_equal(transformed_cube_v[0], [0.0, 0.0, -1.0])
+ np.testing.assert_array_almost_equal(transformed_cube_v[6], [4, 4.0, -5.0])
+
+
+def test_translate():
+ cube_v = create_default_cube_verts()
+
+ # Confidence check.
+ np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
+ np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
+
+ transformed_cube_v = apply_affine_transform(
+ cube_v, transform_matrix_for_translation(np.array([8.0, 6.0, 7.0]))
+ )
+
+ np.testing.assert_array_equal(transformed_cube_v[0], [9.0, 6.0, 7.0])
+ np.testing.assert_array_equal(transformed_cube_v[6], [13.0, 10.0, 11.0])
+
+
+def test_scale():
+ cube_v = create_default_cube_verts()
+
+ # Confidence check.
+ np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
+ np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
+
+ transformed_cube_v = apply_affine_transform(
+ cube_v, transform_matrix_for_scale(10.0)
+ )
+
+ np.testing.assert_array_equal(transformed_cube_v[0], [10.0, 0.0, 0.0])
+ np.testing.assert_array_equal(transformed_cube_v[6], [50.0, 40.0, 40.0])
+
+
+def test_scale_error():
+ with pytest.raises(ValueError, match="Scale factor should be greater than zero"):
+ transform_matrix_for_scale(-1)
diff --git a/polliwog/transform/test_composite.py b/polliwog/transform/test_composite.py
index 53569b5..a6fe883 100644
--- a/polliwog/transform/test_composite.py
+++ b/polliwog/transform/test_composite.py
@@ -1,67 +1,7 @@
import numpy as np
-import pytest
import vg
from .composite import CompositeTransform
-
-
-def create_cube_verts(origin, size):
- # Create a cube. Since CompositeTransform just works on verticies,
- # we don't need a full lace.mesh object.
- origin = np.asarray(origin)
- size = np.repeat(size, 3)
- lower_base_plane = np.array(
- [
- # Lower base plane
- origin,
- origin + np.array([size[0], 0, 0]),
- origin + np.array([size[0], 0, size[2]]),
- origin + np.array([0, 0, size[2]]),
- ]
- )
- upper_base_plane = lower_base_plane + np.array([0, size[1], 0])
- return np.vstack([lower_base_plane, upper_base_plane])
-
-
-def create_default_cube_verts():
- return create_cube_verts([1.0, 0.0, 0.0], 4.0)
-
-
-def test_translate():
- transform = CompositeTransform()
- transform.translate(np.array([8.0, 6.0, 7.0]))
-
- cube_v = create_default_cube_verts()
-
- # Confidence check.
- np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
- np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
-
- transformed_cube_v = transform(cube_v)
-
- np.testing.assert_array_equal(transformed_cube_v[0], [9.0, 6.0, 7.0])
- np.testing.assert_array_equal(transformed_cube_v[6], [13.0, 10.0, 11.0])
-
-
-def test_scale():
- transform = CompositeTransform()
- transform.scale(10.0)
-
- cube_v = create_default_cube_verts()
-
- # Confidence check.
- np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
- np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
-
- transformed_cube_v = transform(cube_v)
-
- np.testing.assert_array_equal(transformed_cube_v[0], [10.0, 0.0, 0.0])
- np.testing.assert_array_equal(transformed_cube_v[6], [50.0, 40.0, 40.0])
-
-
-def test_scale_error():
- transform = CompositeTransform()
- with pytest.raises(ValueError):
- transform.scale(-1)
+from .test_affine_transform import create_default_cube_verts
def test_convert_units():
@@ -150,26 +90,6 @@ def test_reorient():
np.testing.assert_array_equal(transformed_cube_v[6], [4, 4.0, -5.0])
-def test_rotate():
- ways_to_rotate_around_y_a_quarter_turn = [
- np.array([[0, 0, 1], [0, 1, 0], [-1, 0, 0]]),
- np.array([0, np.pi / 2, 0]),
- ]
- for rot in ways_to_rotate_around_y_a_quarter_turn:
- transform = CompositeTransform()
- transform.rotate(rot)
- cube_v = create_default_cube_verts()
-
- # Confidence check.
- np.testing.assert_array_equal(cube_v[0], [1.0, 0.0, 0.0])
- np.testing.assert_array_equal(cube_v[6], [5.0, 4.0, 4.0])
-
- transformed_cube_v = transform(cube_v)
-
- np.testing.assert_array_almost_equal(transformed_cube_v[0], [0.0, 0.0, -1.0])
- np.testing.assert_array_almost_equal(transformed_cube_v[6], [4, 4.0, -5.0])
-
-
def test_reverse_transforms():
transforms = [CompositeTransform() for _ in range(5)]
diff --git a/polliwog/transform/test_coordinate_manager.py b/polliwog/transform/test_coordinate_manager.py
index 3cecb81..414f2f0 100644
--- a/polliwog/transform/test_coordinate_manager.py
+++ b/polliwog/transform/test_coordinate_manager.py
@@ -2,7 +2,7 @@ import numpy as np
import pytest
import vg
from .coordinate_manager import CoordinateManager
-from .test_composite import create_cube_verts
+from .test_affine_transform import create_cube_verts
def perform_transform_test(apply_transform_fn, expected_v0, expected_v6):
@@ -132,19 +132,10 @@ def test_coordinate_manager_invalid_tag():
def test_coordinate_manager_custom_transform():
- scale4 = np.array([[3, 0, 0, 0], [0, 3, 0, 0], [0, 0, 3, 0], [0, 0, 0, 1]])
+ scale = np.array([[3, 0, 0, 0], [0, 3, 0, 0], [0, 0, 3, 0], [0, 0, 0, 1]])
perform_transform_test(
- apply_transform_fn=lambda coordinate_manager: coordinate_manager.append_transform4(
- scale4
- ),
- expected_v0=np.array([3.0, 0.0, 0.0]),
- expected_v6=np.array([15.0, 12.0, 12.0]),
- )
-
- scale3 = np.array([[3, 0, 0], [0, 3, 0], [0, 0, 3]])
- perform_transform_test(
- apply_transform_fn=lambda coordinate_manager: coordinate_manager.append_transform3(
- scale3
+ apply_transform_fn=lambda coordinate_manager: coordinate_manager.append_transform(
+ scale
),
expected_v0=np.array([3.0, 0.0, 0.0]),
expected_v6=np.array([15.0, 12.0, 12.0]),
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"numpy>=1.16.0",
"pandas>=1.0.0",
"vg",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
ounce==1.1.1
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
-e git+https://github.com/lace/polliwog.git@b4399acbe78b92924f98d395135a2e95fd30033a#egg=polliwog
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli==2.2.1
tzdata==2025.2
vg==2.0.0
| name: polliwog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- ounce==1.1.1
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
- vg==2.0.0
prefix: /opt/conda/envs/polliwog
| [
"polliwog/transform/test_affine_transform.py::test_apply_affine_transform",
"polliwog/transform/test_affine_transform.py::test_rotate",
"polliwog/transform/test_affine_transform.py::test_translate",
"polliwog/transform/test_affine_transform.py::test_scale",
"polliwog/transform/test_affine_transform.py::test_scale_error",
"polliwog/transform/test_composite.py::test_convert_units",
"polliwog/transform/test_composite.py::test_translate_then_scale",
"polliwog/transform/test_composite.py::test_scale_then_translate",
"polliwog/transform/test_composite.py::test_rotate_then_translate",
"polliwog/transform/test_composite.py::test_reorient",
"polliwog/transform/test_composite.py::test_reverse_transforms",
"polliwog/transform/test_composite.py::test_forward_reverse_equivalence",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_forward",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_forward_with_attrs",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_out_of_order",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_invalid_tag",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_custom_transform",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_convert_units",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_reorient",
"polliwog/transform/test_coordinate_manager.py::test_coordinate_manager_rotate"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 5,943 | 3,053 | [
"polliwog/transform/affine_transform.py",
"polliwog/transform/composite.py",
"polliwog/transform/coordinate_manager.py"
] |
|
lace__polliwog-146 | 9fee49c8c4e2693d7d0b82bc36205463d9b08094 | 2019-12-04 17:05:47 | 9fee49c8c4e2693d7d0b82bc36205463d9b08094 | diff --git a/polliwog/line/__init__.py b/polliwog/line/__init__.py
index fd46546..6c4309a 100644
--- a/polliwog/line/__init__.py
+++ b/polliwog/line/__init__.py
@@ -1,5 +1,5 @@
from . import functions as _functions
from .functions import * # noqa: F401,F403
-from .line_intersect import line_intersect2, line_intersect3
+from .line_intersect import intersect_2d_lines, intersect_lines # noqa: F401
-__all__ = _functions.__all__ + ["line_intersect2", "line_intersect3"]
+__all__ = ["intersect_lines", "intersect_2d_lines"] + _functions.__all__
diff --git a/polliwog/line/functions.py b/polliwog/line/functions.py
index 89ac4e4..9ea02a6 100644
--- a/polliwog/line/functions.py
+++ b/polliwog/line/functions.py
@@ -1,10 +1,10 @@
import vg
from .._common.shape import check_shape_any
-__all__ = ["project_to_line"]
+__all__ = ["project_point_to_line", "coplanar_points_are_on_same_side_of_line"]
-def project_to_line(points, reference_points_of_lines, vectors_along_lines):
+def project_point_to_line(points, reference_points_of_lines, vectors_along_lines):
"""
Project a point to a line, or pairwise project a stack of points to a
stack of lines.
@@ -21,3 +21,27 @@ def project_to_line(points, reference_points_of_lines, vectors_along_lines):
return reference_points_of_lines + vg.project(
points - reference_points_of_lines, onto=vectors_along_lines
)
+
+
+def coplanar_points_are_on_same_side_of_line(a, b, p1, p2):
+ """
+ Test if the given points are on the same side of the given line.
+
+ Args:
+ a (np.arraylike): The first 3D point of interest.
+ b (np.arraylike): The second 3D point of interest.
+ p1 (np.arraylike): A first point which lies on the line of interest.
+ p2 (np.arraylike): A second point which lies on the line of interest.
+
+ Returns:
+ bool: `True` when `a` and `b` are on the same side of the line defined
+ by `p1` and `p2`.
+ """
+ vg.shape.check(locals(), "a", (3,))
+ vg.shape.check(locals(), "b", (3,))
+ vg.shape.check(locals(), "p1", (3,))
+ vg.shape.check(locals(), "p2", (3,))
+
+ # Uses "same-side technique" from http://blackpawn.com/texts/pointinpoly/default.html
+ along_line = b - a
+ return vg.dot(vg.cross(along_line, p1 - a), vg.cross(along_line, p2 - a)) >= 0
diff --git a/polliwog/line/line.py b/polliwog/line/line.py
index 0e51a76..b860c10 100644
--- a/polliwog/line/line.py
+++ b/polliwog/line/line.py
@@ -1,6 +1,4 @@
-from __future__ import absolute_import, print_function
import vg
-from .functions import project_to_line
class Line:
@@ -32,15 +30,17 @@ class Line:
"""
Find the intersection with another line.
"""
- from .line_intersect import line_intersect3
+ from .line_intersect import intersect_lines
- return line_intersect3(*(self.reference_points + other.reference_points))
+ return intersect_lines(*(self.reference_points + other.reference_points))
def project(self, points):
"""
Project a given point (or stack of points) to the plane.
"""
- return project_to_line(
+ from .functions import project_point_to_line
+
+ return project_point_to_line(
points=points,
reference_points_of_lines=self.reference_point,
vectors_along_lines=self.along,
diff --git a/polliwog/line/line_intersect.py b/polliwog/line/line_intersect.py
index 206ff56..61ae357 100644
--- a/polliwog/line/line_intersect.py
+++ b/polliwog/line/line_intersect.py
@@ -1,32 +1,7 @@
import numpy as np
-def line_intersect2(p0, q0, p1, q1):
- """
- Intersect two lines: (p0, q0) and (p1, q1). Each should be a 2D
- point.
- """
- # Adapted from http://stackoverflow.com/a/26416320/893113
- dy = q0[1] - p0[1]
- dx = q0[0] - p0[0]
- lhs0 = [-dy, dx]
- rhs0 = p0[1] * dx - dy * p0[0]
-
- dy = q1[1] - p1[1]
- dx = q1[0] - p1[0]
- lhs1 = [-dy, dx]
- rhs1 = p1[1] * dx - dy * p1[0]
-
- a = np.array([lhs0, lhs1])
- b = np.array([rhs0, rhs1])
-
- try:
- return np.linalg.solve(a, b)
- except np.linalg.LinAlgError:
- return None
-
-
-def line_intersect3(p0, q0, p1, q1):
+def intersect_lines(p0, q0, p1, q1):
"""
Intersect two lines in 3d: (p0, q0) and (p1, q1). Each should be a 3D
point.
@@ -55,3 +30,28 @@ def line_intersect3(p0, q0, p1, q1):
l = h_ / k_ * e # noqa: E741 (FIXME)
sign = -1 if np.all(h / h_ == k / k_) else +1
return p0 + sign * l
+
+
+def intersect_2d_lines(p0, q0, p1, q1):
+ """
+ Intersect two lines: (p0, q0) and (p1, q1). Each should be a 2D
+ point.
+ """
+ # Adapted from http://stackoverflow.com/a/26416320/893113
+ dy = q0[1] - p0[1]
+ dx = q0[0] - p0[0]
+ lhs0 = [-dy, dx]
+ rhs0 = p0[1] * dx - dy * p0[0]
+
+ dy = q1[1] - p1[1]
+ dx = q1[0] - p1[0]
+ lhs1 = [-dy, dx]
+ rhs1 = p1[1] * dx - dy * p1[0]
+
+ a = np.array([lhs0, lhs1])
+ b = np.array([rhs0, rhs1])
+
+ try:
+ return np.linalg.solve(a, b)
+ except np.linalg.LinAlgError:
+ return None
diff --git a/polliwog/polyline/polyline.py b/polliwog/polyline/polyline.py
index 281ece7..d7b61c4 100644
--- a/polliwog/polyline/polyline.py
+++ b/polliwog/polyline/polyline.py
@@ -299,7 +299,7 @@ class Polyline(object):
"""
import itertools
- from ..segment.segment_functions import partition_segment
+ from ..segment.segment_functions import subdivide_segment
old_num_e = self.num_e
old_num_v = self.num_v
@@ -308,7 +308,7 @@ class Polyline(object):
)
(es_to_subdivide,) = (num_segments_needed > 1).nonzero()
vs_to_insert = [
- partition_segment(
+ subdivide_segment(
self.v[self.e[old_e_index][0]],
self.v[self.e[old_e_index][1]],
np.int(num_segments_needed[old_e_index]),
diff --git a/polliwog/segment/__init__.py b/polliwog/segment/__init__.py
index c31c526..4456e19 100644
--- a/polliwog/segment/__init__.py
+++ b/polliwog/segment/__init__.py
@@ -1,7 +1,7 @@
from .segment_functions import (
closest_point_of_line_segment,
- partition,
- partition_segment,
+ subdivide_segment,
+ subdivide_segments,
)
-__all__ = ["closest_point_of_line_segment", "partition", "partition_segment"]
+__all__ = ["closest_point_of_line_segment", "subdivide_segment", "subdivide_segments"]
diff --git a/polliwog/segment/segment_functions.py b/polliwog/segment/segment_functions.py
index ef6600f..a185231 100644
--- a/polliwog/segment/segment_functions.py
+++ b/polliwog/segment/segment_functions.py
@@ -2,7 +2,37 @@ import numpy as np
import vg
-def partition(v, partition_size=5):
+def subdivide_segment(p1, p2, num_points, endpoint=True):
+ """
+ For two points in n-space, return an np.ndarray of equidistant partition
+ points along the segment determined by p1 & p2.
+
+ The total number of points returned will be n_samples. When n_samples is
+ 2, returns the original points.
+
+ When endpoint is True, p2 is the last point. When false, p2 is excluded.
+
+ Partition order is oriented from p1 to p2.
+
+ Args:
+ p1, p2:
+ 1 x N vectors
+
+ partition_size:
+ size of partition. should be >= 2.
+
+ """
+ if not isinstance(num_points, int):
+ raise TypeError("partition_size should be an int.")
+ elif num_points < 2:
+ raise ValueError("partition_size should be bigger than 1.")
+
+ return (p2 - p1) * np.linspace(0, 1, num=num_points, endpoint=endpoint)[
+ :, np.newaxis
+ ] + p1
+
+
+def subdivide_segments(v, num_subdivisions=5):
"""
params:
@@ -26,50 +56,24 @@ def partition(v, partition_size=5):
dists = np.sqrt(np.sum(sqdis, axis=1))
unitds = diffs / dists[:, np.newaxis]
- widths = dists / partition_size
+ widths = dists / num_subdivisions
- domain = widths[:, np.newaxis] * np.arange(0, partition_size)
+ domain = widths[:, np.newaxis] * np.arange(0, num_subdivisions)
domain = domain.flatten()[:, np.newaxis]
- points = np.repeat(v[:-1], partition_size, axis=0)
- unitds = np.repeat(unitds, partition_size, axis=0)
+ points = np.repeat(v[:-1], num_subdivisions, axis=0)
+ unitds = np.repeat(unitds, num_subdivisions, axis=0)
filled = points + (unitds * domain)
return np.vstack((filled, v[-1]))
-def partition_segment(p1, p2, n_samples, endpoint=True):
+def closest_point_of_line_segment(points, start_points, segment_vectors):
"""
- For two points in n-space, return an np.ndarray of equidistant partition
- points along the segment determined by p1 & p2.
-
- The total number of points returned will be n_samples. When n_samples is
- 2, returns the original points.
-
- When endpoint is True, p2 is the last point. When false, p2 is excluded.
-
- Partition order is oriented from p1 to p2.
-
- Args:
- p1, p2:
- 1 x N vectors
-
- partition_size:
- size of partition. should be >= 2.
-
+ Compute pairwise the point on each line segment that is nearest to the
+ corresponding query point.
"""
- if not isinstance(n_samples, int):
- raise TypeError("partition_size should be an int.")
- elif n_samples < 2:
- raise ValueError("partition_size should be bigger than 1.")
-
- return (p2 - p1) * np.linspace(0, 1, num=n_samples, endpoint=endpoint)[
- :, np.newaxis
- ] + p1
-
-
-def closest_point_of_line_segment(points, start_points, segment_vectors):
# Adapted from public domain algorithm
# https://gdbooks.gitbooks.io/3dcollisions/content/Chapter1/closest_point_on_line.html
k = vg.shape.check(locals(), "points", (-1, 3))
diff --git a/polliwog/tri/functions.py b/polliwog/tri/functions.py
index 387221d..5040803 100644
--- a/polliwog/tri/functions.py
+++ b/polliwog/tri/functions.py
@@ -1,11 +1,11 @@
import numpy as np
import vg
from .._common.shape import columnize
+from ..line.functions import coplanar_points_are_on_same_side_of_line
__all__ = [
"surface_normals",
- "coplanar_points_are_on_same_side_of_line",
- "contains_coplanar_point",
+ "tri_contains_coplanar_point",
"barycentric_coordinates_of_points",
]
@@ -33,31 +33,7 @@ def surface_normals(points, normalize=True):
return transform_result(normals)
-def coplanar_points_are_on_same_side_of_line(a, b, p1, p2):
- """
- Test if the given points are on the same side of the given line.
-
- Args:
- a (np.arraylike): The first 3D point of interest.
- b (np.arraylike): The second 3D point of interest.
- p1 (np.arraylike): A first point which lies on the line of interest.
- p2 (np.arraylike): A second point which lies on the line of interest.
-
- Returns:
- bool: `True` when `a` and `b` are on the same side of the line defined
- by `p1` and `p2`.
- """
- vg.shape.check(locals(), "a", (3,))
- vg.shape.check(locals(), "b", (3,))
- vg.shape.check(locals(), "p1", (3,))
- vg.shape.check(locals(), "p2", (3,))
-
- # Uses "same-side technique" from http://blackpawn.com/texts/pointinpoly/default.html
- along_line = b - a
- return vg.dot(vg.cross(along_line, p1 - a), vg.cross(along_line, p2 - a)) >= 0
-
-
-def contains_coplanar_point(a, b, c, point):
+def tri_contains_coplanar_point(a, b, c, point):
"""
Assuming `point` is coplanar with the triangle `ABC`, check if it lies
inside it.
| Renames
- [ ] Move coplanar_points_are_on_same_side_of_line to line_functions.
- [ ] Rename contains_coplanar_point to tri_contains_coplanar_point
- [ ] ~Rename as_rotation_matrix?~
- [ ] Rename project_to_line to project_point_to_line
- [ ] Rename line_intersect3 to intersect_lines
- [ ] Rename line_intersect2 to intersect_2d_lines
- [ ] Rename partition to partition_segments and move after partition_segment | lace/polliwog | diff --git a/polliwog/line/test_functions.py b/polliwog/line/test_functions.py
index 5a440f5..2fe7356 100644
--- a/polliwog/line/test_functions.py
+++ b/polliwog/line/test_functions.py
@@ -1,9 +1,9 @@
import numpy as np
import vg
-from .functions import project_to_line
+from .functions import project_point_to_line
-def test_project_to_line():
+def test_project_point_to_line():
p1 = np.array([5.0, 5.0, 4.0])
p2 = np.array([10.0, 10.0, 6.0])
along_line = p2 - p1
@@ -11,15 +11,15 @@ def test_project_to_line():
common_kwargs = dict(reference_points_of_lines=p1, vectors_along_lines=along_line)
np.testing.assert_array_almost_equal(
- project_to_line(points=p1, **common_kwargs), p1
+ project_point_to_line(points=p1, **common_kwargs), p1
)
np.testing.assert_array_almost_equal(
- project_to_line(points=p2, **common_kwargs), p2
+ project_point_to_line(points=p2, **common_kwargs), p2
)
other_point_on_line = np.array([0.0, 0.0, 2.0])
np.testing.assert_array_almost_equal(
- project_to_line(points=other_point_on_line, **common_kwargs),
+ project_point_to_line(points=other_point_on_line, **common_kwargs),
other_point_on_line,
)
@@ -30,12 +30,14 @@ def test_project_to_line():
for point_on_line in [p1, p2, other_point_on_line]:
for displacement in example_perpendicular_displacement:
np.testing.assert_array_almost_equal(
- project_to_line(points=point_on_line + displacement, **common_kwargs),
+ project_point_to_line(
+ points=point_on_line + displacement, **common_kwargs
+ ),
point_on_line,
)
-def test_project_to_line_stacked_points():
+def test_project_point_to_line_stacked_points():
p1 = np.array([5.0, 5.0, 4.0])
p2 = np.array([10.0, 10.0, 6.0])
along_line = p2 - p1
@@ -53,18 +55,18 @@ def test_project_to_line_stacked_points():
expected_projected_points = np.vstack([p1, p2, other_point_on_line])
np.testing.assert_array_almost_equal(
- project_to_line(points=example_points, **common_kwargs),
+ project_point_to_line(points=example_points, **common_kwargs),
expected_projected_points,
)
np.testing.assert_array_almost_equal(
- project_to_line(
+ project_point_to_line(
points=example_points + example_perpendicular_displacement, **common_kwargs
),
expected_projected_points,
)
-def test_project_to_line_stacked_lines():
+def test_project_point_to_line_stacked_lines():
p1 = np.array([5.0, 5.0, 4.0])
p2 = np.array([10.0, 10.0, 6.0])
along_line = p2 - p1
@@ -76,7 +78,7 @@ def test_project_to_line_stacked_lines():
other_point_on_line = np.array([0.0, 0.0, 2.0])
np.testing.assert_array_almost_equal(
- project_to_line(points=other_point_on_line, **common_kwargs),
+ project_point_to_line(points=other_point_on_line, **common_kwargs),
np.array([other_point_on_line, other_point_on_line]),
)
@@ -87,12 +89,14 @@ def test_project_to_line_stacked_lines():
for point_on_line in [p1, p2, other_point_on_line]:
for displacement in example_perpendicular_displacement:
np.testing.assert_array_almost_equal(
- project_to_line(points=point_on_line + displacement, **common_kwargs),
+ project_point_to_line(
+ points=point_on_line + displacement, **common_kwargs
+ ),
np.array([point_on_line, point_on_line]),
)
-def test_project_to_line_stacked_both():
+def test_project_point_to_line_stacked_both():
p1 = np.array([5.0, 5.0, 4.0])
p2 = np.array([10.0, 10.0, 6.0])
along_line = p2 - p1
@@ -113,11 +117,11 @@ def test_project_to_line_stacked_both():
expected_projected_points = np.vstack([p1, p2, other_point_on_line])
np.testing.assert_array_almost_equal(
- project_to_line(points=example_points, **common_kwargs),
+ project_point_to_line(points=example_points, **common_kwargs),
expected_projected_points,
)
np.testing.assert_array_almost_equal(
- project_to_line(
+ project_point_to_line(
points=example_points + example_perpendicular_displacement, **common_kwargs
),
expected_projected_points,
diff --git a/polliwog/line/test_line_intersect.py b/polliwog/line/test_line_intersect.py
index a31af76..02cec73 100644
--- a/polliwog/line/test_line_intersect.py
+++ b/polliwog/line/test_line_intersect.py
@@ -1,82 +1,82 @@
import numpy as np
-from .line_intersect import line_intersect2, line_intersect3
+from .line_intersect import intersect_2d_lines, intersect_lines
-def test_line_intersect2():
+def test_intersect_2d_lines():
p0, q0 = np.array([[0.0, 3.0], [4.0, 11.0]])
p1, q1 = np.array([[-2.0, 8.0], [6.0, 4.0]])
- np.testing.assert_array_equal(line_intersect2(p0, q0, p1, q1), [1.6, 6.2])
+ np.testing.assert_array_equal(intersect_2d_lines(p0, q0, p1, q1), [1.6, 6.2])
-def test_line_intersect2_duplicate_point():
+def test_intersect_2d_lines_duplicate_point():
p0, q0 = np.array([[0.0, 3.0], [5.0, 5.0]])
p1, q1 = np.array([[5.0, 5.0], [6.0, 4.0]])
- np.testing.assert_array_equal(line_intersect2(p0, q0, p1, q1), [5.0, 5.0])
+ np.testing.assert_array_equal(intersect_2d_lines(p0, q0, p1, q1), [5.0, 5.0])
-def test_line_intersect2_with_collinear_lines():
+def test_intersect_2d_lines_with_collinear_lines():
p0, q0 = np.array([[0.0, 1.0], [0.0, 10.0]])
p1, q1 = np.array([[0.0, 2.0], [0.0, 4.0]])
- assert line_intersect2(p0, q0, p1, q1) is None
+ assert intersect_2d_lines(p0, q0, p1, q1) is None
-def test_line_intersect2_with_parallel_lines():
+def test_intersect_2d_lines_with_parallel_lines():
p0, q0 = np.array([[0.0, 1.0], [0.0, 10.0]])
p1, q1 = np.array([[1.0, 2.0], [1.0, 11.0]])
- assert line_intersect2(p0, q0, p1, q1) is None
+ assert intersect_2d_lines(p0, q0, p1, q1) is None
-def test_line_intersect3_with_collinear_lines():
+def test_intersect_lines_with_collinear_lines():
p0, q0 = np.array([[0.0, 1.0, 2.0], [0.0, 10.0, 20.0]])
p1, q1 = np.array([[0.0, 2.0, 4.0], [0.0, 4.0, 8.0]])
- assert line_intersect3(p0, q0, p1, q1) is None
+ assert intersect_lines(p0, q0, p1, q1) is None
-def test_line_intersect3_with_parallel_lines():
+def test_intersect_lines_with_parallel_lines():
p0, q0 = np.array([[0.0, 1.0, 2.0], [0.0, 10.0, 20.0]])
p1, q1 = np.array([[1.0, 2.0, 3.0], [1.0, 11.0, 21.0]])
- assert line_intersect3(p0, q0, p1, q1) is None
+ assert intersect_lines(p0, q0, p1, q1) is None
-def test_line_intersect3_with_degenerate_input_p():
+def test_intersect_lines_with_degenerate_input_p():
p0, q0 = np.array([[0.0, 1.0, 2.0], [0.0, 10.0, 20.0]])
p1, q1 = np.array([[0.0, 1.0, 2.0], [1.0, 11.0, 21.0]])
- np.testing.assert_array_equal(line_intersect3(p0, q0, p1, q1), [0.0, 1.0, 2.0])
+ np.testing.assert_array_equal(intersect_lines(p0, q0, p1, q1), [0.0, 1.0, 2.0])
-def test_line_intersect3_with_degenerate_input_q():
+def test_intersect_lines_with_degenerate_input_q():
p0, q0 = np.array([[0.0, 1.0, 2.0], [0.0, 10.0, 20.0]])
p1, q1 = np.array([[1.0, 2.0, 3.0], [0.0, 10.0, 20.0]])
- np.testing.assert_array_equal(line_intersect3(p0, q0, p1, q1), [0.0, 10.0, 20.0])
+ np.testing.assert_array_equal(intersect_lines(p0, q0, p1, q1), [0.0, 10.0, 20.0])
-def test_line_intersect3_with_degenerate_input_q_2():
+def test_intersect_lines_with_degenerate_input_q_2():
p0, q0 = np.array([[0.0, 1.0, 2.0], [0.0, 10.0, 20.0]])
p1, q1 = np.array([[0.0, 10.0, 20.0], [1.0, 2.0, 3.0]])
- np.testing.assert_array_equal(line_intersect3(p0, q0, p1, q1), [0.0, 10.0, 20.0])
+ np.testing.assert_array_equal(intersect_lines(p0, q0, p1, q1), [0.0, 10.0, 20.0])
-def test_line_intersect3_example_1():
+def test_intersect_lines_example_1():
"""
This example tests the codirectional cross product case.
"""
p0, q0 = np.array([[5.0, 5.0, 4.0], [10.0, 10.0, 6.0]])
p1, q1 = np.array([[5.0, 5.0, 5.0], [10.0, 10.0, 3.0]])
np.testing.assert_array_equal(
- line_intersect3(p0, q0, p1, q1), [25.0 / 4, 25.0 / 4, 9.0 / 2]
+ intersect_lines(p0, q0, p1, q1), [25.0 / 4, 25.0 / 4, 9.0 / 2]
)
-def test_line_intersect3_example_2():
+def test_intersect_lines_example_2():
"""
This example tests the opposite direction cross product case.
"""
p0, q0 = np.array([[5.0, 5.0, 4.0], [10.0, 10.0, -6.0]])
p1, q1 = np.array([[5.0, 5.0, 5.0], [10.0, 10.0, -3.0]])
- np.testing.assert_array_equal(line_intersect3(p0, q0, p1, q1), [2.5, 2.5, 9])
+ np.testing.assert_array_equal(intersect_lines(p0, q0, p1, q1), [2.5, 2.5, 9])
-def test_line_intersect3_example_3():
+def test_intersect_lines_example_3():
p0, q0 = np.array([[6.0, 8.0, 4.0], [12.0, 15.0, 4.0]])
p1, q1 = np.array([[6.0, 8.0, 2.0], [12.0, 15.0, 6.0]])
- np.testing.assert_array_equal(line_intersect3(p0, q0, p1, q1), [9.0, 23.0 / 2, 4.0])
+ np.testing.assert_array_equal(intersect_lines(p0, q0, p1, q1), [9.0, 23.0 / 2, 4.0])
diff --git a/polliwog/segment/test_segment_functions.py b/polliwog/segment/test_segment_functions.py
index aa61365..e324ad5 100644
--- a/polliwog/segment/test_segment_functions.py
+++ b/polliwog/segment/test_segment_functions.py
@@ -2,28 +2,28 @@ import numpy as np
import pytest
from .segment_functions import (
closest_point_of_line_segment,
- partition,
- partition_segment,
+ subdivide_segment,
+ subdivide_segments,
)
-def test_partition_segment_raises_exception_for_invalid_partition_size_type():
+def test_subdivide_segment_raises_exception_for_invalid_partition_size_type():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([1.0, 0.0, 0.0])
with pytest.raises(TypeError):
- partition_segment(p1, p2, "foobar")
+ subdivide_segment(p1, p2, "foobar")
-def test_partition_segment_raises_exception_for_invalid_partition_size_value():
+def test_subdivide_segment_raises_exception_for_invalid_partition_size_value():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([1.0, 0.0, 0.0])
with pytest.raises(ValueError):
- partition_segment(p1, p2, 1)
+ subdivide_segment(p1, p2, 1)
-def test_partition_segment_returns_partition_for_odd_partition_size():
+def test_subdivide_segment_returns_partition_for_odd_partition_size():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([2.0, 0.0, 0.0])
@@ -40,11 +40,11 @@ def test_partition_segment_returns_partition_for_odd_partition_size():
)
np.testing.assert_array_almost_equal(
- partition_segment(p1, p2, partition_size), expected_partition_points, decimal=7
+ subdivide_segment(p1, p2, partition_size), expected_partition_points, decimal=7
)
-def test_partition_segment_returns_partition_points_for_even_partition_size():
+def test_subdivide_segment_returns_partition_points_for_even_partition_size():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([1.0, 0.0, 0.0])
@@ -62,11 +62,11 @@ def test_partition_segment_returns_partition_points_for_even_partition_size():
)
np.testing.assert_array_almost_equal(
- partition_segment(p1, p2, partition_size), expected_partition_points, decimal=7
+ subdivide_segment(p1, p2, partition_size), expected_partition_points, decimal=7
)
-def test_partition_segment_returns_partition_omitting_endpoint():
+def test_subdivide_segment_returns_partition_omitting_endpoint():
p1 = np.array([0.0, 0.0, 0.0])
p2 = np.array([1.0, 0.0, 0.0])
@@ -83,13 +83,13 @@ def test_partition_segment_returns_partition_omitting_endpoint():
)
np.testing.assert_array_almost_equal(
- partition_segment(p1, p2, partition_size, endpoint=False),
+ subdivide_segment(p1, p2, partition_size, endpoint=False),
expected_partition_points,
decimal=7,
)
-def test_partition_adds_points_for_equal_length_line_segments():
+def test_subdivide_segments_adds_points_for_equal_length_line_segments():
v = np.array(
[
[0.0, 0.0, 0.0],
@@ -126,10 +126,10 @@ def test_partition_adds_points_for_equal_length_line_segments():
]
)
- np.testing.assert_array_almost_equal(partition(v), expected)
+ np.testing.assert_array_almost_equal(subdivide_segments(v), expected)
-def test_partition_adds_points_for_nonequal_arbitrarily_oriented_line():
+def test_subdivide_segments_adds_points_for_nonequal_arbitrarily_oriented_line():
v = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 1.0], [2.0, 0.0, 1.0], [2.0, 2.0, 1.0]])
expected = np.array(
@@ -153,7 +153,7 @@ def test_partition_adds_points_for_nonequal_arbitrarily_oriented_line():
]
)
- np.testing.assert_array_almost_equal(partition(v), expected)
+ np.testing.assert_array_almost_equal(subdivide_segments(v), expected)
def test_closest_point_of_line_segment():
diff --git a/polliwog/tri/test_functions.py b/polliwog/tri/test_functions.py
index 48c87da..74337ca 100644
--- a/polliwog/tri/test_functions.py
+++ b/polliwog/tri/test_functions.py
@@ -3,8 +3,8 @@ import numpy as np
import vg
from .functions import (
barycentric_coordinates_of_points,
- contains_coplanar_point,
surface_normals,
+ tri_contains_coplanar_point,
)
@@ -46,34 +46,34 @@ def test_surface_normals_from_points_vectorized():
np.testing.assert_allclose(surface_normals(vertices), expected_normals)
-def test_contains_coplanar_point():
+def test_tri_contains_coplanar_point():
a = np.array([0.0, 0.0, 0.0])
b = np.array([4.0, 0.1, 0.0])
c = np.array([3.0, 3.1, 0.0])
# Not sure why, but `is True` does not work.
- assert contains_coplanar_point(a, b, c, a) == True # noqa: E712
- assert contains_coplanar_point(a, b, c, b) == True # noqa: E712
- assert contains_coplanar_point(a, b, c, c) == True # noqa: E712
+ assert tri_contains_coplanar_point(a, b, c, a) == True # noqa: E712
+ assert tri_contains_coplanar_point(a, b, c, b) == True # noqa: E712
+ assert tri_contains_coplanar_point(a, b, c, c) == True # noqa: E712
assert (
- contains_coplanar_point(a, b, c, np.array([2.0, 1.0, 0.0])) == True
+ tri_contains_coplanar_point(a, b, c, np.array([2.0, 1.0, 0.0])) == True
) # noqa: E712
# Unexpected, as it's not in the plane, though if projected to the plane,
# it is in the triangle.
assert (
- contains_coplanar_point(a, b, c, np.array([0.0, 0.0, 1.0])) == True
+ tri_contains_coplanar_point(a, b, c, np.array([0.0, 0.0, 1.0])) == True
) # noqa: E712
assert (
- contains_coplanar_point(a, b, c, np.array([2.0, 0.0, 0.0])) == False
+ tri_contains_coplanar_point(a, b, c, np.array([2.0, 0.0, 0.0])) == False
) # noqa: E712
assert (
- contains_coplanar_point(a, b, c, np.array([2.0, 5.0, 0.0])) == False
+ tri_contains_coplanar_point(a, b, c, np.array([2.0, 5.0, 0.0])) == False
) # noqa: E712
assert (
- contains_coplanar_point(
+ tri_contains_coplanar_point(
np.array([0.06710189, 1.69908346, 0.06590126]),
np.array([0.05648619, 1.70207, 0.07402092]),
np.array([0.05969098, 1.69641423, 0.07268801]),
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 8
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
numpy==2.0.2
ounce==1.1.1
packaging==24.2
pluggy==1.5.0
-e git+https://github.com/lace/polliwog.git@9fee49c8c4e2693d7d0b82bc36205463d9b08094#egg=polliwog
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
tomli==2.2.1
typing_extensions==4.13.0
vg==2.0.0
| name: polliwog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- numpy==2.0.2
- ounce==1.1.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- tomli==2.2.1
- typing-extensions==4.13.0
- vg==2.0.0
prefix: /opt/conda/envs/polliwog
| [
"polliwog/line/test_functions.py::test_project_point_to_line",
"polliwog/line/test_functions.py::test_project_point_to_line_stacked_points",
"polliwog/line/test_functions.py::test_project_point_to_line_stacked_lines",
"polliwog/line/test_functions.py::test_project_point_to_line_stacked_both",
"polliwog/line/test_line_intersect.py::test_intersect_2d_lines",
"polliwog/line/test_line_intersect.py::test_intersect_2d_lines_duplicate_point",
"polliwog/line/test_line_intersect.py::test_intersect_2d_lines_with_collinear_lines",
"polliwog/line/test_line_intersect.py::test_intersect_2d_lines_with_parallel_lines",
"polliwog/line/test_line_intersect.py::test_intersect_lines_with_collinear_lines",
"polliwog/line/test_line_intersect.py::test_intersect_lines_with_parallel_lines",
"polliwog/line/test_line_intersect.py::test_intersect_lines_with_degenerate_input_p",
"polliwog/line/test_line_intersect.py::test_intersect_lines_with_degenerate_input_q",
"polliwog/line/test_line_intersect.py::test_intersect_lines_with_degenerate_input_q_2",
"polliwog/line/test_line_intersect.py::test_intersect_lines_example_1",
"polliwog/line/test_line_intersect.py::test_intersect_lines_example_2",
"polliwog/line/test_line_intersect.py::test_intersect_lines_example_3",
"polliwog/segment/test_segment_functions.py::test_subdivide_segment_raises_exception_for_invalid_partition_size_type",
"polliwog/segment/test_segment_functions.py::test_subdivide_segment_raises_exception_for_invalid_partition_size_value",
"polliwog/segment/test_segment_functions.py::test_subdivide_segment_returns_partition_for_odd_partition_size",
"polliwog/segment/test_segment_functions.py::test_subdivide_segment_returns_partition_points_for_even_partition_size",
"polliwog/segment/test_segment_functions.py::test_subdivide_segment_returns_partition_omitting_endpoint",
"polliwog/segment/test_segment_functions.py::test_subdivide_segments_adds_points_for_equal_length_line_segments",
"polliwog/segment/test_segment_functions.py::test_subdivide_segments_adds_points_for_nonequal_arbitrarily_oriented_line",
"polliwog/segment/test_segment_functions.py::test_closest_point_of_line_segment",
"polliwog/tri/test_functions.py::test_surface_normals_from_points_single",
"polliwog/tri/test_functions.py::test_surface_normals_from_points_vectorized",
"polliwog/tri/test_functions.py::test_tri_contains_coplanar_point",
"polliwog/tri/test_functions.py::test_barycentric"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 5,959 | 3,652 | [
"polliwog/line/__init__.py",
"polliwog/line/functions.py",
"polliwog/line/line.py",
"polliwog/line/line_intersect.py",
"polliwog/polyline/polyline.py",
"polliwog/segment/__init__.py",
"polliwog/segment/segment_functions.py",
"polliwog/tri/functions.py"
] |
|
juju__python-libjuju-369 | 79212d5bbcb3e68ba1aae68aa75fcf5b0a2d4305 | 2019-12-05 02:00:54 | 0c7ca041f918b012f4da2038afada4cc275ddf34 | diff --git a/juju/bundle.py b/juju/bundle.py
index 8e96c95..2c28811 100644
--- a/juju/bundle.py
+++ b/juju/bundle.py
@@ -8,7 +8,7 @@ import yaml
from toposort import toposort_flatten
from .client import client
-from .constraints import parse as parse_constraints
+from .constraints import parse as parse_constraints, parse_storage_constraint, parse_device_constraint
from .errors import JujuError
log = logging.getLogger(__name__)
@@ -277,17 +277,18 @@ class AddApplicationChange(ChangeInfo):
self.application = params[2]
self.options = params[3]
self.constraints = params[4]
- self.storage = params[5]
- self.endpoint_bindings = params[6]
+ self.storage = {k: parse_storage_constraint(v) for k, v in params[5].items()}
if len(params) == 8:
- # Juju 2.4 and below only sends the resources
+ # Juju 2.4 and below only sends the endpoint bindings and resources
+ self.endpoint_bindings = params[6]
self.resources = params[7]
self.devices = None
self.num_units = None
else:
- # Juju 2.5+ sends devices before resources, as well as num_units
+ # Juju 2.5+ sends devices before endpoint bindings, as well as num_units
# There might be placement but we need to ignore that.
- self.devices = params[7]
+ self.devices = {k: parse_device_constraint(v) for k, v in params[6].items()}
+ self.endpoint_bindings = params[7]
self.resources = params[8]
self.num_units = params[9]
diff --git a/juju/client/_definitions.py b/juju/client/_definitions.py
index 4576c40..fb95f40 100644
--- a/juju/client/_definitions.py
+++ b/juju/client/_definitions.py
@@ -1529,32 +1529,38 @@ class ApplicationConstraint(Type):
class ApplicationDeploy(Type):
- _toSchema = {'application': 'application', 'channel': 'channel', 'charm_url': 'charm-url', 'config': 'config', 'config_yaml': 'config-yaml', 'constraints': 'constraints', 'endpoint_bindings': 'endpoint-bindings', 'num_units': 'num-units', 'placement': 'placement', 'resources': 'resources', 'series': 'series', 'storage': 'storage'}
- _toPy = {'application': 'application', 'channel': 'channel', 'charm-url': 'charm_url', 'config': 'config', 'config-yaml': 'config_yaml', 'constraints': 'constraints', 'endpoint-bindings': 'endpoint_bindings', 'num-units': 'num_units', 'placement': 'placement', 'resources': 'resources', 'series': 'series', 'storage': 'storage'}
- def __init__(self, application=None, channel=None, charm_url=None, config=None, config_yaml=None, constraints=None, endpoint_bindings=None, num_units=None, placement=None, resources=None, series=None, storage=None, **unknown_fields):
+ _toSchema = {'application': 'application', 'attach_storage': 'attach-storage', 'channel': 'channel', 'charm_url': 'charm-url', 'config': 'config', 'config_yaml': 'config-yaml', 'constraints': 'constraints', 'devices': 'devices', 'endpoint_bindings': 'endpoint-bindings', 'num_units': 'num-units', 'placement': 'placement', 'policy': 'policy', 'resources': 'resources', 'series': 'series', 'storage': 'storage'}
+ _toPy = {'application': 'application', 'attach-storage': 'attach_storage', 'channel': 'channel', 'charm-url': 'charm_url', 'config': 'config', 'config-yaml': 'config_yaml', 'constraints': 'constraints', 'devices': 'devices', 'endpoint-bindings': 'endpoint_bindings', 'num-units': 'num_units', 'placement': 'placement', 'policy': 'policy', 'resources': 'resources', 'series': 'series', 'storage': 'storage'}
+ def __init__(self, application=None, attach_storage=None, channel=None, charm_url=None, config=None, config_yaml=None, constraints=None, devices=None, endpoint_bindings=None, num_units=None, placement=None, policy=None, resources=None, series=None, storage=None, **unknown_fields):
'''
application : str
+ attach_storage : typing.Sequence[str]
channel : str
charm_url : str
config : typing.Mapping[str, str]
config_yaml : str
constraints : Value
+ devices : typing.Mapping[str, ~Constraints]
endpoint_bindings : typing.Mapping[str, str]
num_units : int
placement : typing.Sequence[~Placement]
+ policy : str
resources : typing.Mapping[str, str]
series : str
storage : typing.Mapping[str, ~Constraints]
'''
application_ = application
+ attach_storage_ = attach_storage
channel_ = channel
charm_url_ = charm_url
config_ = config
config_yaml_ = config_yaml
constraints_ = Value.from_json(constraints) if constraints else None
+ devices_ = devices
endpoint_bindings_ = endpoint_bindings
num_units_ = num_units
placement_ = [Placement.from_json(o) for o in placement or []]
+ policy_ = policy
resources_ = resources
series_ = series
storage_ = storage
@@ -1563,6 +1569,9 @@ class ApplicationDeploy(Type):
if application_ is not None and not isinstance(application_, (bytes, str)):
raise Exception("Expected application_ to be a str, received: {}".format(type(application_)))
+ if attach_storage_ is not None and not isinstance(attach_storage_, (bytes, str, list)):
+ raise Exception("Expected attach_storage_ to be a Sequence, received: {}".format(type(attach_storage_)))
+
if channel_ is not None and not isinstance(channel_, (bytes, str)):
raise Exception("Expected channel_ to be a str, received: {}".format(type(channel_)))
@@ -1578,6 +1587,9 @@ class ApplicationDeploy(Type):
if constraints_ is not None and not isinstance(constraints_, (dict, Value)):
raise Exception("Expected constraints_ to be a Value, received: {}".format(type(constraints_)))
+ if devices_ is not None and not isinstance(devices_, dict):
+ raise Exception("Expected devices_ to be a Mapping, received: {}".format(type(devices_)))
+
if endpoint_bindings_ is not None and not isinstance(endpoint_bindings_, dict):
raise Exception("Expected endpoint_bindings_ to be a Mapping, received: {}".format(type(endpoint_bindings_)))
@@ -1587,6 +1599,9 @@ class ApplicationDeploy(Type):
if placement_ is not None and not isinstance(placement_, (bytes, str, list)):
raise Exception("Expected placement_ to be a Sequence, received: {}".format(type(placement_)))
+ if policy_ is not None and not isinstance(policy_, (bytes, str)):
+ raise Exception("Expected policy_ to be a str, received: {}".format(type(policy_)))
+
if resources_ is not None and not isinstance(resources_, dict):
raise Exception("Expected resources_ to be a Mapping, received: {}".format(type(resources_)))
@@ -1597,14 +1612,17 @@ class ApplicationDeploy(Type):
raise Exception("Expected storage_ to be a Mapping, received: {}".format(type(storage_)))
self.application = application_
+ self.attach_storage = attach_storage_
self.channel = channel_
self.charm_url = charm_url_
self.config = config_
self.config_yaml = config_yaml_
self.constraints = constraints_
+ self.devices = devices_
self.endpoint_bindings = endpoint_bindings_
self.num_units = num_units_
self.placement = placement_
+ self.policy = policy_
self.resources = resources_
self.series = series_
self.storage = storage_
diff --git a/juju/client/facade.py b/juju/client/facade.py
index bc67398..53e0cf7 100644
--- a/juju/client/facade.py
+++ b/juju/client/facade.py
@@ -18,8 +18,7 @@ _marker = object()
JUJU_VERSION = re.compile(r'[0-9]+\.[0-9-]+[\.\-][0-9a-z]+(\.[0-9]+)?')
# Workaround for https://bugs.launchpad.net/juju/+bug/1683906
-NAUGHTY_CLASSES = ['ClientFacade', 'Client', 'ModelStatusInfo',
- 'ApplicationDeploy']
+NAUGHTY_CLASSES = ['ClientFacade', 'Client', 'ModelStatusInfo']
# Map basic types to Python's typing with a callable
diff --git a/juju/constraints.py b/juju/constraints.py
index 0050673..43cbd00 100644
--- a/juju/constraints.py
+++ b/juju/constraints.py
@@ -23,10 +23,13 @@ MEM = re.compile('^[1-9][0-9]*[MGTP]$')
# Multiplication factors to get Megabytes
# https://github.com/juju/juju/blob/master/constraints/constraints.go#L666
FACTORS = {
- "M": 1,
- "G": 1024,
- "T": 1024 * 1024,
- "P": 1024 * 1024 * 1024
+ "M": 1024 ** 0,
+ "G": 1024 ** 1,
+ "T": 1024 ** 2,
+ "P": 1024 ** 3,
+ "E": 1024 ** 4,
+ "Z": 1024 ** 5,
+ "Y": 1024 ** 6
}
LIST_KEYS = {'tags', 'spaces'}
@@ -85,3 +88,49 @@ def normalize_value(value):
def normalize_list_value(value):
values = value.strip().split(',')
return [normalize_value(value) for value in values]
+
+
+STORAGE = re.compile(
+ '(?:(?:^|(?<=,))(?:|(?P<pool>[a-zA-Z]+[-?a-zA-Z0-9]*)|(?P<count>-?[0-9]+)|(?:(?P<size>-?[0-9]+(?:\\.[0-9]+)?)(?P<size_exp>[MGTPEZY])(?:i?B)?))(?:$|,))')
+
+
+def parse_storage_constraint(constraint):
+ storage = {'count': 1}
+ for m in STORAGE.finditer(constraint):
+ pool = m.group('pool')
+ if pool:
+ if 'pool' in storage:
+ raise Exception("pool already specified")
+ storage['pool'] = pool
+ count = m.group('count')
+ if count:
+ count = int(count)
+ storage['count'] = count if count > 0 else 1
+ size = m.group('size')
+ if size:
+ storage['size'] = int(float(size) * FACTORS[m.group('size_exp')])
+ return storage
+
+
+DEVICE = re.compile(
+ '^(?P<count>[0-9]+)?(?:^|,)(?P<type>[^,]+)(?:$|,(?!$))(?P<attrs>(?:[^=]+=[^;]+)+)*$')
+ATTR = re.compile(';?(?P<key>[^=]+)=(?P<value>[^;]+)')
+
+
+def parse_device_constraint(constraint):
+ m = DEVICE.match(constraint)
+ if m is None:
+ raise Exception("device constraint does not match")
+ device = {}
+ count = m.group('count')
+ if count:
+ count = int(count)
+ device['count'] = count if count > 0 else 1
+ else:
+ device['count'] = 1
+ device['type'] = m.group('type')
+ attrs = m.group('attrs')
+ if attrs:
+ device['attributes'] = {kv['key']: kv['value']
+ for kv in ATTR.finditer(attrs)}
+ return device
| Cannot deploy bundle with storage
Everytime I deploy an application with storage with libjuju, I'm getting this error:
```
Traceback (most recent call last):
File "examples/deploy_k8s.py", line 42, in <module>
loop.run(main())
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/loop.py", line 38, in run
raise task.exception()
File "examples/deploy_k8s.py", line 23, in main
application = await model.deploy( 'cs:osm' )
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/model.py", line 1286, in deploy
await handler.execute_plan()
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/model.py", line 1969, in execute_plan
result = await method(*step.args)
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/model.py", line 2140, in deploy
num_units=num_units,
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/model.py", line 1415, in _deploy
result = await app_facade.Deploy([app])
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/client/facade.py", line 420, in wrapper
reply = await f(*args, **kwargs)
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/client/_client8.py", line 745, in Deploy
reply = await self.rpc(msg)
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/client/facade.py", line 545, in rpc
result = await self.connection.rpc(msg, encoder=TypeEncoder)
File "/home/davigar15/.local/lib/python3.6/site-packages/juju/client/connection.py", line 326, in rpc
raise errors.JujuAPIError(result)
``` | juju/python-libjuju | diff --git a/tests/unit/test_bundle.py b/tests/unit/test_bundle.py
index 6cdfa40..990c70f 100644
--- a/tests/unit/test_bundle.py
+++ b/tests/unit/test_bundle.py
@@ -63,7 +63,7 @@ class TestAddApplicationChange(unittest.TestCase):
"application",
"options",
"constraints",
- "storage",
+ {"db": "pool,1,1GB"},
"endpoint_bindings",
"resources"])
self.assertEqual({"change_id": 1,
@@ -73,7 +73,7 @@ class TestAddApplicationChange(unittest.TestCase):
"application": "application",
"options": "options",
"constraints": "constraints",
- "storage": "storage",
+ "storage": {"db": {"pool": "pool", "count": 1, "size": 1024}},
"endpoint_bindings": "endpoint_bindings",
"resources": "resources",
"devices": None,
@@ -85,9 +85,9 @@ class TestAddApplicationChange(unittest.TestCase):
"application",
"options",
"constraints",
- "storage",
+ {"db": "pool,1,1GB"},
+ {"gpu": "1,gpu,attr1=a;attr2=b"},
"endpoint_bindings",
- "devices",
"resources",
"num_units"])
self.assertEqual({"change_id": 1,
@@ -97,10 +97,10 @@ class TestAddApplicationChange(unittest.TestCase):
"application": "application",
"options": "options",
"constraints": "constraints",
- "storage": "storage",
+ "storage": {"db": {"pool": "pool", "count": 1, "size": 1024}},
"endpoint_bindings": "endpoint_bindings",
"resources": "resources",
- "devices": "devices",
+ "devices": {"gpu": {"type": "gpu", "count": 1, "attributes": {"attr1": "a", "attr2": "b"}}},
"num_units": "num_units"}, change.__dict__)
def test_dict_params(self):
diff --git a/tests/unit/test_constraints.py b/tests/unit/test_constraints.py
index 3c52090..4d5e26c 100644
--- a/tests/unit/test_constraints.py
+++ b/tests/unit/test_constraints.py
@@ -55,3 +55,76 @@ class TestConstraints(unittest.TestCase):
"tags": ["tag1"],
"spaces": ["space1", "space2"]}
)
+
+ def test_parse_storage_constraint(self):
+ _ = constraints.parse_storage_constraint
+
+ self.assertEqual(
+ _("pool,1M"),
+ {"pool": "pool",
+ "count": 1,
+ "size": 1 * 1024 ** 0}
+ )
+ self.assertEqual(
+ _("pool,"),
+ {"pool": "pool",
+ "count": 1}
+ )
+ self.assertEqual(
+ _("1M"),
+ {"size": 1 * 1024 ** 0,
+ "count": 1}
+ )
+ self.assertEqual(
+ _("p,1G"),
+ {"pool": "p",
+ "count": 1,
+ "size": 1 * 1024 ** 1}
+ )
+ self.assertEqual(
+ _("p,0.5T"),
+ {"pool": "p",
+ "count": 1,
+ "size": 512 * 1024 ** 1}
+ )
+ self.assertEqual(
+ _("3,0.5T"),
+ {"count": 3,
+ "size": 512 * 1024 ** 1}
+ )
+ self.assertEqual(
+ _("0.5T,3"),
+ {"count": 3,
+ "size": 512 * 1024 ** 1}
+ )
+
+ def test_parse_device_constraint(self):
+ _ = constraints.parse_device_constraint
+
+ self.assertEqual(
+ _("nvidia.com/gpu"),
+ {"type": "nvidia.com/gpu",
+ "count": 1}
+ )
+ self.assertEqual(
+ _("2,nvidia.com/gpu"),
+ {"type": "nvidia.com/gpu",
+ "count": 2}
+ )
+ self.assertEqual(
+ _("3,nvidia.com/gpu,gpu=nvidia-tesla-p100"),
+ {"type": "nvidia.com/gpu",
+ "count": 3,
+ "attributes": {
+ "gpu": "nvidia-tesla-p100"
+ }}
+ )
+ self.assertEqual(
+ _("3,nvidia.com/gpu,gpu=nvidia-tesla-p100;2ndattr=another-attr"),
+ {"type": "nvidia.com/gpu",
+ "count": 3,
+ "attributes": {
+ "gpu": "nvidia-tesla-p100",
+ "2ndattr": "another-attr"
+ }}
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 4
} | 2.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"asynctest",
"ipdb",
"mock",
"pytest",
"pytest-asyncio",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-all debhelper sbuild schroot ubuntu-dev-tools snapd libsodium-dev"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asynctest==0.13.0
attrs @ file:///croot/attrs_1668696182826/work
backcall==0.2.0
bcrypt==4.2.1
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
cryptography==44.0.2
decorator==5.1.1
execnet==2.0.2
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipdb==0.13.13
ipython==7.34.0
jedi==0.19.2
-e git+https://github.com/juju/python-libjuju.git@79212d5bbcb3e68ba1aae68aa75fcf5b0a2d4305#egg=juju
jujubundlelib==0.5.7
macaroonbakery==1.3.4
matplotlib-inline==0.1.6
mock==5.2.0
packaging @ file:///croot/packaging_1671697413597/work
paramiko==2.12.0
parso==0.8.4
pexpect==4.9.0
pickleshare==0.7.5
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
prompt_toolkit==3.0.48
protobuf==4.24.4
ptyprocess==0.7.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pycparser==2.21
Pygments==2.17.2
pymacaroons==0.13.0
PyNaCl==1.5.0
pyRFC3339==1.1
pytest==7.1.2
pytest-asyncio==0.21.2
pytest-xdist==3.5.0
pytz==2025.2
PyYAML==6.0
requests==2.31.0
six==1.17.0
theblues==0.5.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
toposort==1.10
traitlets==5.9.0
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
urllib3==2.0.7
wcwidth==0.2.13
websockets==7.0
zipp @ file:///croot/zipp_1672387121353/work
| name: python-libjuju
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- asynctest==0.13.0
- backcall==0.2.0
- bcrypt==4.2.1
- cffi==1.15.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- decorator==5.1.1
- execnet==2.0.2
- idna==3.10
- ipdb==0.13.13
- ipython==7.34.0
- jedi==0.19.2
- jujubundlelib==0.5.7
- macaroonbakery==1.3.4
- matplotlib-inline==0.1.6
- mock==5.2.0
- paramiko==2.12.0
- parso==0.8.4
- pexpect==4.9.0
- pickleshare==0.7.5
- prompt-toolkit==3.0.48
- protobuf==4.24.4
- ptyprocess==0.7.0
- pyasn1==0.5.1
- pycparser==2.21
- pygments==2.17.2
- pymacaroons==0.13.0
- pynacl==1.5.0
- pyrfc3339==1.1
- pytest-asyncio==0.21.2
- pytest-xdist==3.5.0
- pytz==2025.2
- pyyaml==6.0
- requests==2.31.0
- six==1.17.0
- theblues==0.5.2
- toposort==1.10
- traitlets==5.9.0
- urllib3==2.0.7
- wcwidth==0.2.13
- websockets==7.0
prefix: /opt/conda/envs/python-libjuju
| [
"tests/unit/test_bundle.py::TestAddApplicationChange::test_list_params_juju_2_4",
"tests/unit/test_bundle.py::TestAddApplicationChange::test_list_params_juju_2_5",
"tests/unit/test_constraints.py::TestConstraints::test_parse_device_constraint",
"tests/unit/test_constraints.py::TestConstraints::test_parse_storage_constraint"
] | [] | [
"tests/unit/test_bundle.py::TestChangeSet::test_sort_causes_circular_error",
"tests/unit/test_bundle.py::TestChangeSet::test_sort_changes",
"tests/unit/test_bundle.py::TestChangeSet::test_sort_complex_changes",
"tests/unit/test_bundle.py::TestChangeSet::test_sort_empty_changes",
"tests/unit/test_bundle.py::TestAddApplicationChange::test_dict_params",
"tests/unit/test_bundle.py::TestAddApplicationChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestAddApplicationChange::test_method",
"tests/unit/test_bundle.py::TestAddApplicationChangeRun::test_run",
"tests/unit/test_bundle.py::TestAddCharmChange::test_dict_params",
"tests/unit/test_bundle.py::TestAddCharmChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestAddCharmChange::test_list_params_juju_2_6",
"tests/unit/test_bundle.py::TestAddCharmChange::test_list_params_juju_2_7",
"tests/unit/test_bundle.py::TestAddCharmChange::test_method",
"tests/unit/test_bundle.py::TestAddCharmChangeRun::test_run",
"tests/unit/test_bundle.py::TestAddMachineChange::test_dict_params",
"tests/unit/test_bundle.py::TestAddMachineChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestAddMachineChange::test_list_params",
"tests/unit/test_bundle.py::TestAddMachineChange::test_method",
"tests/unit/test_bundle.py::TestAddMachineChangeRun::test_run",
"tests/unit/test_bundle.py::TestAddRelationChange::test_dict_params",
"tests/unit/test_bundle.py::TestAddRelationChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestAddRelationChange::test_list_params",
"tests/unit/test_bundle.py::TestAddRelationChange::test_method",
"tests/unit/test_bundle.py::TestAddRelationChangeRun::test_run",
"tests/unit/test_bundle.py::TestAddUnitChange::test_dict_params",
"tests/unit/test_bundle.py::TestAddUnitChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestAddUnitChange::test_list_params",
"tests/unit/test_bundle.py::TestAddUnitChange::test_method",
"tests/unit/test_bundle.py::TestAddUnitChangeRun::test_run",
"tests/unit/test_bundle.py::TestCreateOfferChange::test_dict_params",
"tests/unit/test_bundle.py::TestCreateOfferChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestCreateOfferChange::test_list_params",
"tests/unit/test_bundle.py::TestCreateOfferChange::test_method",
"tests/unit/test_bundle.py::TestCreateOfferChangeRun::test_run",
"tests/unit/test_bundle.py::TestConsumeOfferChange::test_dict_params",
"tests/unit/test_bundle.py::TestConsumeOfferChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestConsumeOfferChange::test_list_params",
"tests/unit/test_bundle.py::TestConsumeOfferChange::test_method",
"tests/unit/test_bundle.py::TestConsumeOfferChangeRun::test_run",
"tests/unit/test_bundle.py::TestExposeChange::test_dict_params",
"tests/unit/test_bundle.py::TestExposeChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestExposeChange::test_list_params",
"tests/unit/test_bundle.py::TestExposeChange::test_method",
"tests/unit/test_bundle.py::TestExposeChangeRun::test_run",
"tests/unit/test_bundle.py::TestScaleChange::test_dict_params",
"tests/unit/test_bundle.py::TestScaleChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestScaleChange::test_list_params",
"tests/unit/test_bundle.py::TestScaleChange::test_method",
"tests/unit/test_bundle.py::TestScaleChangeRun::test_run",
"tests/unit/test_bundle.py::TestSetAnnotationsChange::test_dict_params",
"tests/unit/test_bundle.py::TestSetAnnotationsChange::test_dict_params_missing_data",
"tests/unit/test_bundle.py::TestSetAnnotationsChange::test_list_params",
"tests/unit/test_bundle.py::TestSetAnnotationsChange::test_method",
"tests/unit/test_bundle.py::TestSetAnnotationsChangeRun::test_run",
"tests/unit/test_constraints.py::TestConstraints::test_mem_regex",
"tests/unit/test_constraints.py::TestConstraints::test_normalize_key",
"tests/unit/test_constraints.py::TestConstraints::test_normalize_list_val",
"tests/unit/test_constraints.py::TestConstraints::test_normalize_val",
"tests/unit/test_constraints.py::TestConstraints::test_parse_constraints"
] | [] | Apache License 2.0 | 5,962 | 2,832 | [
"juju/bundle.py",
"juju/client/_definitions.py",
"juju/client/facade.py",
"juju/constraints.py"
] |
|
googleapis__google-auth-library-python-397 | ab3dc1e26f5240ea3456de364c7c5cb8f40f9583 | 2019-12-05 15:25:33 | ab3dc1e26f5240ea3456de364c7c5cb8f40f9583 | diff --git a/google/auth/transport/requests.py b/google/auth/transport/requests.py
index d1971cd..f21c524 100644
--- a/google/auth/transport/requests.py
+++ b/google/auth/transport/requests.py
@@ -18,6 +18,7 @@ from __future__ import absolute_import
import functools
import logging
+import time
try:
import requests
@@ -64,6 +65,33 @@ class _Response(transport.Response):
return self._response.content
+class TimeoutGuard(object):
+ """A context manager raising an error if the suite execution took too long.
+ """
+
+ def __init__(self, timeout, timeout_error_type=requests.exceptions.Timeout):
+ self._timeout = timeout
+ self.remaining_timeout = timeout
+ self._timeout_error_type = timeout_error_type
+
+ def __enter__(self):
+ self._start = time.time()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if exc_value:
+ return # let the error bubble up automatically
+
+ if self._timeout is None:
+ return # nothing to do, the timeout was not specified
+
+ elapsed = time.time() - self._start
+ self.remaining_timeout = self._timeout - elapsed
+
+ if self.remaining_timeout <= 0:
+ raise self._timeout_error_type()
+
+
class Request(transport.Request):
"""Requests request adapter.
@@ -193,8 +221,12 @@ class AuthorizedSession(requests.Session):
# credentials.refresh).
self._auth_request = auth_request
- def request(self, method, url, data=None, headers=None, **kwargs):
- """Implementation of Requests' request."""
+ def request(self, method, url, data=None, headers=None, timeout=None, **kwargs):
+ """Implementation of Requests' request.
+
+ The ``timeout`` argument is interpreted as the approximate total time
+ of **all** requests that are made under the hood.
+ """
# pylint: disable=arguments-differ
# Requests has a ton of arguments to request, but only two
# (method, url) are required. We pass through all of the other
@@ -208,13 +240,28 @@ class AuthorizedSession(requests.Session):
# and we want to pass the original headers if we recurse.
request_headers = headers.copy() if headers is not None else {}
- self.credentials.before_request(
- self._auth_request, method, url, request_headers
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
)
- response = super(AuthorizedSession, self).request(
- method, url, data=data, headers=request_headers, **kwargs
- )
+ with TimeoutGuard(timeout) as guard:
+ self.credentials.before_request(auth_request, method, url, request_headers)
+ timeout = guard.remaining_timeout
+
+ with TimeoutGuard(timeout) as guard:
+ response = super(AuthorizedSession, self).request(
+ method,
+ url,
+ data=data,
+ headers=request_headers,
+ timeout=timeout,
+ **kwargs
+ )
+ timeout = guard.remaining_timeout
# If the response indicated that the credentials needed to be
# refreshed, then refresh the credentials and re-attempt the
@@ -233,17 +280,33 @@ class AuthorizedSession(requests.Session):
self._max_refresh_attempts,
)
- auth_request_with_timeout = functools.partial(
- self._auth_request, timeout=self._refresh_timeout
+ if self._refresh_timeout is not None:
+ timeout = (
+ self._refresh_timeout
+ if timeout is None
+ else min(timeout, self._refresh_timeout)
+ )
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
)
- self.credentials.refresh(auth_request_with_timeout)
- # Recurse. Pass in the original headers, not our modified set.
+ with TimeoutGuard(timeout) as guard:
+ self.credentials.refresh(auth_request)
+ timeout = guard.remaining_timeout
+
+ # Recurse. Pass in the original headers, not our modified set, but
+ # do pass the adjusted timeout (i.e. the remaining time).
return self.request(
method,
url,
data=data,
headers=headers,
+ timeout=timeout,
_credential_refresh_attempt=_credential_refresh_attempt + 1,
**kwargs
)
diff --git a/noxfile.py b/noxfile.py
index aaf1bc5..e170ee5 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -16,6 +16,7 @@ import nox
TEST_DEPENDENCIES = [
"flask",
+ "freezegun",
"mock",
"oauth2client",
"pytest",
| Add timeout parameter to AuthorizedSession.request()
As a Google Cloud client library developer, I would like an easy way of specifying a timeout when making API calls using the `requests.AuthorizedSession` transport, for example:
```py
transport = AuthorizedSession(...)
transport.request(..., timeout=42)
```
**Motivation:**
Sometimes the BigQuery client gets stuck at the transport layer. Having a timeout parameter that can be passed to the transport is necessary for fixing several issue, for example: https://github.com/googleapis/google-cloud-python/pull/9875#discussion_r351504480
| googleapis/google-auth-library-python | diff --git a/tests/transport/test_requests.py b/tests/transport/test_requests.py
index 0e165ac..252e4a6 100644
--- a/tests/transport/test_requests.py
+++ b/tests/transport/test_requests.py
@@ -12,7 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import datetime
+import functools
+
+import freezegun
import mock
+import pytest
import requests
import requests.adapters
from six.moves import http_client
@@ -22,6 +27,12 @@ import google.auth.transport.requests
from tests.transport import compliance
[email protected]
+def frozen_time():
+ with freezegun.freeze_time("1970-01-01 00:00:00", tick=False) as frozen:
+ yield frozen
+
+
class TestRequestResponse(compliance.RequestResponseTests):
def make_request(self):
return google.auth.transport.requests.Request()
@@ -34,6 +45,41 @@ class TestRequestResponse(compliance.RequestResponseTests):
assert http.request.call_args[1]["timeout"] == 5
+class TestTimeoutGuard(object):
+ def make_guard(self, *args, **kwargs):
+ return google.auth.transport.requests.TimeoutGuard(*args, **kwargs)
+
+ def test_tracks_elapsed_time(self, frozen_time):
+ with self.make_guard(timeout=10) as guard:
+ frozen_time.tick(delta=3.8)
+ assert guard.remaining_timeout == 6.2
+
+ def test_noop_if_no_timeout(self, frozen_time):
+ with self.make_guard(timeout=None) as guard:
+ frozen_time.tick(delta=datetime.timedelta(days=3650))
+ # NOTE: no timeout error raised, despite years have passed
+ assert guard.remaining_timeout is None
+
+ def test_error_on_timeout(self, frozen_time):
+ with pytest.raises(requests.exceptions.Timeout):
+ with self.make_guard(timeout=10) as guard:
+ frozen_time.tick(delta=10.001)
+ assert guard.remaining_timeout == pytest.approx(-0.001)
+
+ def test_custom_timeout_error_type(self, frozen_time):
+ class FooError(Exception):
+ pass
+
+ with pytest.raises(FooError):
+ with self.make_guard(timeout=1, timeout_error_type=FooError):
+ frozen_time.tick(2)
+
+ def test_lets_errors_bubble_up(self, frozen_time):
+ with pytest.raises(IndexError):
+ with self.make_guard(timeout=1):
+ [1, 2, 3][3]
+
+
class CredentialsStub(google.auth.credentials.Credentials):
def __init__(self, token="token"):
super(CredentialsStub, self).__init__()
@@ -49,6 +95,18 @@ class CredentialsStub(google.auth.credentials.Credentials):
self.token += "1"
+class TimeTickCredentialsStub(CredentialsStub):
+ """Credentials that spend some (mocked) time when refreshing a token."""
+
+ def __init__(self, time_tick, token="token"):
+ self._time_tick = time_tick
+ super(TimeTickCredentialsStub, self).__init__(token=token)
+
+ def refresh(self, request):
+ self._time_tick()
+ super(TimeTickCredentialsStub, self).refresh(requests)
+
+
class AdapterStub(requests.adapters.BaseAdapter):
def __init__(self, responses, headers=None):
super(AdapterStub, self).__init__()
@@ -69,6 +127,18 @@ class AdapterStub(requests.adapters.BaseAdapter):
return
+class TimeTickAdapterStub(AdapterStub):
+ """Adapter that spends some (mocked) time when making a request."""
+
+ def __init__(self, time_tick, responses, headers=None):
+ self._time_tick = time_tick
+ super(TimeTickAdapterStub, self).__init__(responses, headers=headers)
+
+ def send(self, request, **kwargs):
+ self._time_tick()
+ return super(TimeTickAdapterStub, self).send(request, **kwargs)
+
+
def make_response(status=http_client.OK, data=None):
response = requests.Response()
response.status_code = status
@@ -121,7 +191,9 @@ class TestAuthorizedHttp(object):
[make_response(status=http_client.UNAUTHORIZED), final_response]
)
- authed_session = google.auth.transport.requests.AuthorizedSession(credentials)
+ authed_session = google.auth.transport.requests.AuthorizedSession(
+ credentials, refresh_timeout=60
+ )
authed_session.mount(self.TEST_URL, adapter)
result = authed_session.request("GET", self.TEST_URL)
@@ -136,3 +208,44 @@ class TestAuthorizedHttp(object):
assert adapter.requests[1].url == self.TEST_URL
assert adapter.requests[1].headers["authorization"] == "token1"
+
+ def test_request_timout(self, frozen_time):
+ tick_one_second = functools.partial(frozen_time.tick, delta=1.0)
+
+ credentials = mock.Mock(
+ wraps=TimeTickCredentialsStub(time_tick=tick_one_second)
+ )
+ adapter = TimeTickAdapterStub(
+ time_tick=tick_one_second,
+ responses=[
+ make_response(status=http_client.UNAUTHORIZED),
+ make_response(status=http_client.OK),
+ ],
+ )
+
+ authed_session = google.auth.transport.requests.AuthorizedSession(credentials)
+ authed_session.mount(self.TEST_URL, adapter)
+
+ # Because at least two requests have to be made, and each takes one
+ # second, the total timeout specified will be exceeded.
+ with pytest.raises(requests.exceptions.Timeout):
+ authed_session.request("GET", self.TEST_URL, timeout=1.9)
+
+ def test_request_timeout_w_refresh_timeout(self, frozen_time):
+ credentials = mock.Mock(wraps=CredentialsStub())
+ adapter = TimeTickAdapterStub(
+ time_tick=functools.partial(frozen_time.tick, delta=1.0), # one second
+ responses=[
+ make_response(status=http_client.UNAUTHORIZED),
+ make_response(status=http_client.OK),
+ ],
+ )
+
+ authed_session = google.auth.transport.requests.AuthorizedSession(
+ credentials, refresh_timeout=0.9
+ )
+ authed_session.mount(self.TEST_URL, adapter)
+
+ # The timeout is long, but the short refresh timeout will prevail.
+ with pytest.raises(requests.exceptions.Timeout):
+ authed_session.request("GET", self.TEST_URL, timeout=60)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flask",
"freezegun",
"mock",
"oauth2client",
"pytest",
"pytest-cov",
"pytest-localserver",
"requests",
"urllib3",
"cryptography",
"grpcio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | blinker==1.9.0
cachetools==3.1.1
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
Flask==3.1.0
freezegun==1.5.1
-e git+https://github.com/googleapis/google-auth-library-python.git@ab3dc1e26f5240ea3456de364c7c5cb8f40f9583#egg=google_auth
grpcio==1.71.0
httplib2==0.22.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
itsdangerous==2.2.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
oauth2client==4.1.3
packaging==24.2
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
pyparsing==3.2.3
pytest==8.3.5
pytest-cov==6.0.0
pytest-localserver==0.9.0.post0
python-dateutil==2.9.0.post0
requests==2.32.3
rsa==4.0
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
zipp==3.21.0
| name: google-auth-library-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- blinker==1.9.0
- cachetools==3.1.1
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- flask==3.1.0
- freezegun==1.5.1
- grpcio==1.71.0
- httplib2==0.22.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- itsdangerous==2.2.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.2
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-localserver==0.9.0.post0
- python-dateutil==2.9.0.post0
- requests==2.32.3
- rsa==4.0
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/google-auth-library-python
| [
"tests/transport/test_requests.py::TestTimeoutGuard::test_tracks_elapsed_time",
"tests/transport/test_requests.py::TestTimeoutGuard::test_noop_if_no_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_error_on_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_custom_timeout_error_type",
"tests/transport/test_requests.py::TestTimeoutGuard::test_lets_errors_bubble_up",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_timout",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_timeout_w_refresh_timeout"
] | [] | [
"tests/transport/test_requests.py::TestRequestResponse::test_request_basic",
"tests/transport/test_requests.py::TestRequestResponse::test_request_with_timeout_success",
"tests/transport/test_requests.py::TestRequestResponse::test_request_with_timeout_failure",
"tests/transport/test_requests.py::TestRequestResponse::test_request_headers",
"tests/transport/test_requests.py::TestRequestResponse::test_request_error",
"tests/transport/test_requests.py::TestRequestResponse::test_connection_error",
"tests/transport/test_requests.py::TestRequestResponse::test_timeout",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_constructor",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_constructor_with_auth_request",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_no_refresh",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_refresh"
] | [] | Apache License 2.0 | 5,968 | 1,206 | [
"google/auth/transport/requests.py",
"noxfile.py"
] |
|
dask__dask-5681 | c3367b47c0d5ce4b68cab7f2f0ad33b03f592693 | 2019-12-05 16:44:13 | c3367b47c0d5ce4b68cab7f2f0ad33b03f592693 | jrbourbeau: cc @TAdeJong
jcrist: Thanks @jrbourbeau. Closing. | diff --git a/dask/array/reductions.py b/dask/array/reductions.py
index 9bbdc11c5..5dfda7af7 100644
--- a/dask/array/reductions.py
+++ b/dask/array/reductions.py
@@ -813,7 +813,8 @@ with ignoring(AttributeError):
@wraps(chunk.std)
def std(a, axis=None, dtype=None, keepdims=False, ddof=0, split_every=None, out=None):
result = sqrt(
- a.var(
+ var(
+ a,
axis=axis,
dtype=dtype,
keepdims=keepdims,
| Applying da.std to numpy array yields keyword error
When applying `da.std` to a numpy array it yields a Keyword error, while all other combinations of numpy and dask arrays and functions do work.
Admittedly this is a bit of pathetic use case / anti-pattern, but should be easy to fix.
Would there be some more general way to test for this kind of errors?
```python
np.random.seed(10)
npdat = np.random.random(5)
dadat = da.from_array(npdat)
da.std(dadat).compute(), np.std(npdat), np.std(dadat).compute() # all work
da.std(npdat)
```
> TypeError Traceback (most recent call last)
> <ipython-input-130-a1acaa3fc2bb> in <module>
> 1 np.random.seed(10)
> 2 npdat = np.random.random(5) #+ 1j*np.random.random(5)
> ----> 3 da.std(npdat)
>
> C:\ProgramData\Anaconda3\lib\site-packages\dask\array\reductions.py in std(a, axis, dtype, keepdims, ddof, split_every, out)
> 820 ddof=ddof,
> 821 split_every=split_every,
> --> 822 out=out,
> 823 )
> 824 )
>
> TypeError: _var() got an unexpected keyword argument 'split_every' | dask/dask | diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py
index b0254f859..8c241c178 100644
--- a/dask/array/tests/test_reductions.py
+++ b/dask/array/tests/test_reductions.py
@@ -49,6 +49,9 @@ def test_numel(dtype, keepdims):
def reduction_1d_test(da_func, darr, np_func, narr, use_dtype=True, split_every=True):
assert_eq(da_func(darr), np_func(narr))
+ assert_eq(
+ da_func(narr), np_func(narr)
+ ) # Ensure Dask reductions work with NumPy arrays
assert_eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
assert_eq(da_func(darr, axis=()), np_func(narr, axis=()))
assert same_keys(da_func(darr), da_func(darr))
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 2.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"moto"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///croot/attrs_1668696182826/work
bokeh==2.4.3
boto3==1.33.13
botocore==1.33.13
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==2.2.1
cryptography==44.0.2
-e git+https://github.com/dask/dask.git@c3367b47c0d5ce4b68cab7f2f0ad33b03f592693#egg=dask
distributed==2.9.0
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
fsspec==2023.1.0
HeapDict==1.0.1
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
jmespath==1.0.1
locket==1.0.0
MarkupSafe==2.1.5
moto==4.2.14
msgpack==1.0.5
numpy==1.21.6
packaging @ file:///croot/packaging_1671697413597/work
pandas==1.3.5
partd==1.4.1
Pillow==9.5.0
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycparser==2.21
pytest==7.1.2
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
requests==2.31.0
responses==0.23.3
s3transfer==0.8.2
six==1.17.0
sortedcontainers==2.4.0
tblib==2.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
toolz==0.12.1
tornado==6.2
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
urllib3==1.26.20
Werkzeug==2.2.3
xmltodict==0.14.2
zict==2.2.0
zipp @ file:///croot/zipp_1672387121353/work
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- bokeh==2.4.3
- boto3==1.33.13
- botocore==1.33.13
- cffi==1.15.1
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==2.2.1
- cryptography==44.0.2
- distributed==2.9.0
- fsspec==2023.1.0
- heapdict==1.0.1
- idna==3.10
- jinja2==3.1.6
- jmespath==1.0.1
- locket==1.0.0
- markupsafe==2.1.5
- moto==4.2.14
- msgpack==1.0.5
- numpy==1.21.6
- pandas==1.3.5
- partd==1.4.1
- pillow==9.5.0
- psutil==7.0.0
- pycparser==2.21
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.31.0
- responses==0.23.3
- s3transfer==0.8.2
- six==1.17.0
- sortedcontainers==2.4.0
- tblib==2.0.0
- toolz==0.12.1
- tornado==6.2
- types-pyyaml==6.0.12.12
- urllib3==1.26.20
- werkzeug==2.2.3
- xmltodict==0.14.2
- zict==2.2.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_reductions.py::test_reductions_1D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_1D[i4]"
] | [
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-True-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-True-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-True-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-True-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-True-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-True-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-True-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-True-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-True-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-True-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-True-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-True-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-True-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-True-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-True-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-True-nancumprod]"
] | [
"dask/array/tests/test_reductions.py::test_numel[True-f4]",
"dask/array/tests/test_reductions.py::test_numel[True-i4]",
"dask/array/tests/test_reductions.py::test_numel[False-f4]",
"dask/array/tests/test_reductions.py::test_numel[False-i4]",
"dask/array/tests/test_reductions.py::test_reduction_errors",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_chunksize[argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_chunksize[nanargmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_chunksize_2d[argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_chunksize_2d[nanargmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_single_chunksize[argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions_unknown_single_chunksize[nanargmax]",
"dask/array/tests/test_reductions.py::test_reductions_2D_nans",
"dask/array/tests/test_reductions.py::test_moment",
"dask/array/tests/test_reductions.py::test_reductions_with_negative_axes",
"dask/array/tests/test_reductions.py::test_nan",
"dask/array/tests/test_reductions.py::test_nan_object[nansum]",
"dask/array/tests/test_reductions.py::test_nan_object[sum]",
"dask/array/tests/test_reductions.py::test_nan_object[nanmin]",
"dask/array/tests/test_reductions.py::test_nan_object[min]",
"dask/array/tests/test_reductions.py::test_nan_object[nanmax]",
"dask/array/tests/test_reductions.py::test_nan_object[max]",
"dask/array/tests/test_reductions.py::test_0d_array",
"dask/array/tests/test_reductions.py::test_reduction_on_scalar",
"dask/array/tests/test_reductions.py::test_reductions_with_empty_array",
"dask/array/tests/test_reductions.py::test_tree_reduce_depth",
"dask/array/tests/test_reductions.py::test_tree_reduce_set_options",
"dask/array/tests/test_reductions.py::test_reduction_names",
"dask/array/tests/test_reductions.py::test_array_reduction_out[sum]",
"dask/array/tests/test_reductions.py::test_array_reduction_out[argmax]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-False-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-False-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-False-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[None-False-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-False-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-False-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-False-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[0-False-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-False-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-False-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-False-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[1-False-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-False-cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-False-cumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-False-nancumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_axis[-1-False-nancumprod]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_out[cumsum]",
"dask/array/tests/test_reductions.py::test_array_cumreduction_out[cumprod]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[8-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk1[8-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[1-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[2-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[3-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[4-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[5-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-None-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-None-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-2-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-2-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-3-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-3-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-4-sort-topk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk2[10-4-argsort-argtopk]",
"dask/array/tests/test_reductions.py::test_topk_argtopk3",
"dask/array/tests/test_reductions.py::test_regres_3940[cumsum]",
"dask/array/tests/test_reductions.py::test_regres_3940[cumprod]",
"dask/array/tests/test_reductions.py::test_regres_3940[argmin]",
"dask/array/tests/test_reductions.py::test_regres_3940[argmax]",
"dask/array/tests/test_reductions.py::test_regres_3940[amin]",
"dask/array/tests/test_reductions.py::test_regres_3940[amax]",
"dask/array/tests/test_reductions.py::test_regres_3940[nansum]",
"dask/array/tests/test_reductions.py::test_regres_3940[nanmax]",
"dask/array/tests/test_reductions.py::test_trace",
"dask/array/tests/test_reductions.py::test_median[True-0]",
"dask/array/tests/test_reductions.py::test_median[True-axis1]",
"dask/array/tests/test_reductions.py::test_median[True-1]",
"dask/array/tests/test_reductions.py::test_median[True--1]",
"dask/array/tests/test_reductions.py::test_median[False-0]",
"dask/array/tests/test_reductions.py::test_median[False-axis1]",
"dask/array/tests/test_reductions.py::test_median[False-1]",
"dask/array/tests/test_reductions.py::test_median[False--1]"
] | [] | BSD 3-Clause "New" or "Revised" License | 5,969 | 150 | [
"dask/array/reductions.py"
] |
d0c-s4vage__lookatme-61 | 7665549bf3977aa65b88dae0c24ed658a4857c6a | 2019-12-07 04:18:58 | 523799580e41c4f9b93ed6fa25bfe8e41df7edd0 | diff --git a/lookatme/render/markdown_block.py b/lookatme/render/markdown_block.py
index 77a7433..703478f 100644
--- a/lookatme/render/markdown_block.py
+++ b/lookatme/render/markdown_block.py
@@ -46,6 +46,16 @@ def _list_level(item):
return _meta(item).get("list_level", 1)
+@contrib_first
+def render_newline(token, body, stack, loop):
+ """Render a newline
+
+ See :any:`lookatme.tui.SlideRenderer.do_render` for argument and return
+ value descriptions.
+ """
+ return urwid.Divider()
+
+
@contrib_first
def render_heading(token, body, stack, loop):
"""Render markdown headings, using the defined styles for the styling and
| Add newline token support
Apparently I never add double newlines in a markdown document | d0c-s4vage/lookatme | diff --git a/tests/test_markdown.py b/tests/test_markdown.py
index aa3c2e3..5fa53f9 100644
--- a/tests/test_markdown.py
+++ b/tests/test_markdown.py
@@ -162,6 +162,42 @@ def test_lists(mocker):
assert stripped_row_text == stripped_rows[idx]
+def test_lists_with_newline(mocker):
+ """Test list rendering with a newline between a new nested list and the
+ previous list item
+ """
+ import lookatme.widgets.table
+
+ mocker.patch.object(lookatme.config, "LOG")
+ fake_config = mocker.patch.object(lookatme.render.markdown_block, "config")
+ mocker.patch.object(lookatme.widgets.table, "config", fake_config)
+ fake_config.STYLE = {
+ "bullets": {
+ "default": "*",
+ "1": "-",
+ "2": "=",
+ "3": "^",
+ },
+ }
+
+ rendered = render_markdown("""
+* list 1
+
+ * list 2
+""")
+
+ stripped_rows = [
+ b'',
+ b' - list 1',
+ b'',
+ b' = list 2',
+ b'',
+ ]
+ for idx, row in enumerate(rendered):
+ stripped_row_text = row_text(row).rstrip()
+ assert stripped_row_text == stripped_rows[idx]
+
+
def test_block_quote(mocker):
"""Test block quote rendering
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"pytest-mock",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
click==7.1.2
coverage==7.2.7
exceptiongroup==1.2.2
execnet==2.0.2
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/d0c-s4vage/lookatme.git@7665549bf3977aa65b88dae0c24ed658a4857c6a#egg=lookatme
marshmallow==3.19.0
mistune==0.8.4
packaging==24.0
pluggy==1.2.0
Pygments==2.17.2
pytest==7.4.4
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
PyYAML==5.4.1
tomli==2.0.1
typing_extensions==4.7.1
urwid==2.6.16
wcwidth==0.2.13
zipp==3.15.0
| name: lookatme
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==7.1.2
- coverage==7.2.7
- exceptiongroup==1.2.2
- execnet==2.0.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- marshmallow==3.19.0
- mistune==0.8.4
- packaging==24.0
- pluggy==1.2.0
- pygments==2.17.2
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- pyyaml==5.4.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urwid==2.6.16
- wcwidth==0.2.13
- zipp==3.15.0
prefix: /opt/conda/envs/lookatme
| [
"tests/test_markdown.py::test_lists_with_newline"
] | [
"tests/test_markdown.py::test_code",
"tests/test_markdown.py::test_empty_codeblock",
"tests/test_markdown.py::test_code_yaml",
"tests/test_markdown.py::test_inline"
] | [
"tests/test_markdown.py::test_headings",
"tests/test_markdown.py::test_table",
"tests/test_markdown.py::test_lists",
"tests/test_markdown.py::test_block_quote"
] | [] | MIT License | 5,975 | 203 | [
"lookatme/render/markdown_block.py"
] |
|
d0c-s4vage__lookatme-68 | de10fe43475e72ba3ee497feb8670c1e89c9113f | 2019-12-08 05:54:16 | de2bcb73023ecb90ed98db49c20fff7521ebbf4f | diff --git a/lookatme/__main__.py b/lookatme/__main__.py
index a42b1d7..78d5101 100644
--- a/lookatme/__main__.py
+++ b/lookatme/__main__.py
@@ -57,6 +57,7 @@ from lookatme.schemas import StyleSchema
is_flag=True,
default=False,
)
[email protected]_option(lookatme.__version__)
@click.argument(
"input_files",
type=click.File("r"),
| add --version flag to CLI
```
lookatme --version
0.4.0
``` | d0c-s4vage/lookatme | diff --git a/tests/test_cli.py b/tests/test_cli.py
index e480bd0..b3be2f1 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -6,6 +6,7 @@ Test the main CLI
from click.testing import CliRunner
+import lookatme
from lookatme.__main__ import main
@@ -22,3 +23,11 @@ def test_dump_styles_unicode():
res = run_cmd("--dump-styles")
assert res.exit_code == 0
assert "█" in res.output
+
+
+def test_version():
+ """Test the version option
+ """
+ res = run_cmd("--version")
+ assert res.exit_code == 0
+ assert lookatme.__version__ in res.output
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"pytest-mock",
"pytest-cov"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
click==7.1.2
coverage==7.2.7
exceptiongroup==1.2.2
execnet==2.0.2
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/d0c-s4vage/lookatme.git@de10fe43475e72ba3ee497feb8670c1e89c9113f#egg=lookatme
marshmallow==3.19.0
mistune==0.8.4
packaging==24.0
pluggy==1.2.0
Pygments==2.17.2
pytest==7.4.4
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
PyYAML==5.4.1
tomli==2.0.1
typing_extensions==4.7.1
urwid==2.6.16
wcwidth==0.2.13
zipp==3.15.0
| name: lookatme
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==7.1.2
- coverage==7.2.7
- exceptiongroup==1.2.2
- execnet==2.0.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- marshmallow==3.19.0
- mistune==0.8.4
- packaging==24.0
- pluggy==1.2.0
- pygments==2.17.2
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- pyyaml==5.4.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urwid==2.6.16
- wcwidth==0.2.13
- zipp==3.15.0
prefix: /opt/conda/envs/lookatme
| [
"tests/test_cli.py::test_version"
] | [] | [
"tests/test_cli.py::test_dump_styles_unicode"
] | [] | MIT License | 5,980 | 129 | [
"lookatme/__main__.py"
] |
|
jparise__flake8-assertive-8 | 36d4999f2b345c8f9e812cf7c8c297db94d874bd | 2019-12-08 16:10:46 | 36d4999f2b345c8f9e812cf7c8c297db94d874bd | diff --git a/flake8_assertive.py b/flake8_assertive.py
index e2dff4c..841356c 100644
--- a/flake8_assertive.py
+++ b/flake8_assertive.py
@@ -49,6 +49,13 @@ def is_assert_method_call(node):
node.func.attr.startswith('assert'))
+def args(node):
+ for arg in node.args:
+ yield arg
+ for arg in node.keywords:
+ yield arg.value
+
+
def wrap_deprecated(func, name):
"""Return a check function for a deprecated assert method call.
@@ -121,56 +128,56 @@ class Checker(object):
yield error
def check_assertequal(self, node):
- if any(arg for arg in node.args if is_constant(arg, None)):
+ if any(arg for arg in args(node) if is_constant(arg, None)):
yield self.error(node, 'A502', 'assertIsNone', obj=None)
- elif any(arg for arg in node.args if is_constant(arg, True)):
+ elif any(arg for arg in args(node) if is_constant(arg, True)):
yield self.error(node, 'A502', 'assertTrue', obj=True)
- elif any(arg for arg in node.args if is_constant(arg, False)):
+ elif any(arg for arg in args(node) if is_constant(arg, False)):
yield self.error(node, 'A502', 'assertFalse', obj=False)
- elif any(arg for arg in node.args if is_function_call(arg, 'round')):
+ elif any(arg for arg in args(node) if is_function_call(arg, 'round')):
yield self.error(node, 'A501',
'built-in rounding of assertAlmostEqual',
op='round')
def check_assertalmostequal(self, node):
- if any(arg for arg in node.args if is_function_call(arg, 'round')):
+ if any(arg for arg in args(node) if is_function_call(arg, 'round')):
yield self.error(node, 'A501',
'built-in rounding of assertAlmostEqual',
op='round')
def check_assertnotequal(self, node):
- if any(arg for arg in node.args if is_constant(arg, None)):
+ if any(arg for arg in args(node) if is_constant(arg, None)):
yield self.error(node, 'A502', 'assertIsNotNone', obj=None)
- elif any(arg for arg in node.args if is_constant(arg, True)):
+ elif any(arg for arg in args(node) if is_constant(arg, True)):
yield self.error(node, 'A502', 'assertFalse', obj=True)
- elif any(arg for arg in node.args if is_constant(arg, False)):
+ elif any(arg for arg in args(node) if is_constant(arg, False)):
yield self.error(node, 'A502', 'assertTrue', obj=False)
- elif any(arg for arg in node.args if is_function_call(arg, 'round')):
+ elif any(arg for arg in args(node) if is_function_call(arg, 'round')):
yield self.error(node, 'A501',
'built-in rounding of assertNotAlmostEqual',
op='round')
def check_assertnotalmostequal(self, node):
- if any(arg for arg in node.args if is_function_call(arg, 'round')):
+ if any(arg for arg in args(node) if is_function_call(arg, 'round')):
yield self.error(node, 'A501',
'built-in rounding of assertNotAlmostEqual',
op='round')
def check_asserttrue(self, node):
- if (isinstance(node.args[0], ast.Compare) and
- len(node.args[0].ops) == 1):
- op = node.args[0].ops[0]
+ arg = next(args(node), None)
+ if arg and isinstance(arg, ast.Compare) and len(arg.ops) == 1:
+ op = arg.ops[0]
if isinstance(op, ast.In):
yield self.error(node, 'A501', 'assertIn', op='in')
elif isinstance(op, ast.NotIn):
yield self.error(node, 'A501', 'assertNotIn', op='in')
elif isinstance(op, ast.Is):
- if is_constant(node.args[0].comparators[0], None):
+ if is_constant(arg.comparators[0], None):
yield self.error(node, 'A502', 'assertIsNone', obj=None)
else:
yield self.error(node, 'A501', 'assertIs', op='is')
elif isinstance(op, ast.IsNot):
- if is_constant(node.args[0].comparators[0], None):
+ if is_constant(arg.comparators[0], None):
yield self.error(node, 'A502', 'assertIsNotNone', obj=None)
else:
yield self.error(node, 'A501', 'assertIsNot', op='is')
@@ -186,25 +193,25 @@ class Checker(object):
yield self.error(node, 'A500', 'assertGreater', op='>')
elif isinstance(op, ast.GtE):
yield self.error(node, 'A500', 'assertGreaterEqual', op='>=')
- elif is_function_call(node.args[0], 'isinstance'):
+ elif is_function_call(arg, 'isinstance'):
yield self.error(
node, 'A501', 'assertIsInstance', op='isinstance()')
def check_assertfalse(self, node):
- if (isinstance(node.args[0], ast.Compare) and
- len(node.args[0].ops) == 1):
- op = node.args[0].ops[0]
+ arg = next(args(node), None)
+ if arg and isinstance(arg, ast.Compare) and len(arg.ops) == 1:
+ op = arg.ops[0]
if isinstance(op, ast.In):
yield self.error(node, 'A501', 'assertNotIn', op='in')
elif isinstance(op, ast.NotIn):
yield self.error(node, 'A501', 'assertIn', op='in')
elif isinstance(op, ast.Is):
- if is_constant(node.args[0].comparators[0], None):
+ if is_constant(arg.comparators[0], None):
yield self.error(node, 'A502', 'assertIsNotNone', obj=None)
else:
yield self.error(node, 'A501', 'assertIsNot', op='is')
elif isinstance(op, ast.IsNot):
- if is_constant(node.args[0].comparators[0], None):
+ if is_constant(arg.comparators[0], None):
yield self.error(node, 'A502', 'assertIsNone', obj=None)
else:
yield self.error(node, 'A501', 'assertIs', op='is')
@@ -212,7 +219,7 @@ class Checker(object):
yield self.error(node, 'A500', 'assertNotEqual', op='==')
elif isinstance(op, ast.NotEq):
yield self.error(node, 'A500', 'assertEqual', op='!=')
- elif is_function_call(node.args[0], 'isinstance'):
+ elif is_function_call(arg, 'isinstance'):
yield self.error(
node, 'A501', 'assertNotIsInstance', op='isinstance()')
| can not handle keyword arguments
For the following test case
```python
class TestCase:
def test_one(
self,
):
self.assertTrue(
expr=some_value,
)
```
flake8 raises the following exception.
```python
Traceback (most recent call last):
File "/home/wavenator/.local/bin/flake8", line 8, in <module>
sys.exit(main())
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/main/cli.py", line 18, in main
app.run(argv)
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/main/application.py", line 393, in run
self._run(argv)
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/main/application.py", line 381, in _run
self.run_checks()
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/main/application.py", line 300, in run_checks
self.file_checker_manager.run()
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/checker.py", line 331, in run
self.run_serial()
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/checker.py", line 315, in run_serial
checker.run_checks()
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/checker.py", line 598, in run_checks
self.run_ast_checks()
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8/checker.py", line 502, in run_ast_checks
for (line_number, offset, text, check) in runner:
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8_assertive.py", line 120, in run
for error in func(node):
File "/home/wavenator/.local/lib/python3.7/site-packages/flake8_assertive.py", line 160, in check_asserttrue
if (isinstance(node.args[0], ast.Compare) and
IndexError: list index out of range
```
It happens because in `check_asserttrue` you assume `.args` exists although it does not in functions with keyword arguments.
I can produce a fix if you want. I also assume that this behavior might happen in other tests in your code. I can fix them if you want me to.
Thanks and I really appreciate your work!
EDIT:
Yep it happens in `check_assertfalse` too. | jparise/flake8-assertive | diff --git a/tests/test_checker.py b/tests/test_checker.py
index 4118ad3..0752a24 100644
--- a/tests/test_checker.py
+++ b/tests/test_checker.py
@@ -148,6 +148,15 @@ class TestChecks(unittest.TestCase):
self.check(
"self.assertFalse(1 != 0)", "A500", "assertEqual() for '!='")
+ def test_keyword_args(self):
+ self.check("self.assertTrue(expr=1)", expected=None)
+ self.check("self.assertTrue(expr=(True is True))", expected="A501")
+ self.check("self.assertEqual(first=1, second=1)", expected=None)
+ self.check("self.assertEqual(first=1, second=None)", "A502")
+ self.check("self.assertEqual(first=None, second=1)", "A502")
+ self.check("self.assertEqual(1, second=1)", expected=None)
+ self.check("self.assertEqual(None, second=1)", "A502")
+
def test_multiple_comparison_ops(self):
self.check("self.assertTrue(1 == 1 == 1)", expected=None)
self.check("self.assertFalse(1 == 1 == 1)", expected=None)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
entrypoints==0.3
exceptiongroup==1.2.2
flake8==3.7.9
-e git+https://github.com/jparise/flake8-assertive.git@36d4999f2b345c8f9e812cf7c8c297db94d874bd#egg=flake8_assertive
importlib-metadata==6.7.0
iniconfig==2.0.0
mccabe==0.6.1
packaging==24.0
pluggy==1.2.0
pycodestyle==2.5.0
pyflakes==2.1.1
pytest==7.4.4
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: flake8-assertive
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- entrypoints==0.3
- exceptiongroup==1.2.2
- flake8==3.7.9
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- mccabe==0.6.1
- packaging==24.0
- pluggy==1.2.0
- pycodestyle==2.5.0
- pyflakes==2.1.1
- pytest==7.4.4
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/flake8-assertive
| [
"tests/test_checker.py::TestChecks::test_keyword_args"
] | [] | [
"tests/test_checker.py::TestChecks::test_assertalmostequal_round",
"tests/test_checker.py::TestChecks::test_assertalmostequals",
"tests/test_checker.py::TestChecks::test_assertequal_false",
"tests/test_checker.py::TestChecks::test_assertequal_none",
"tests/test_checker.py::TestChecks::test_assertequal_round",
"tests/test_checker.py::TestChecks::test_assertequal_true",
"tests/test_checker.py::TestChecks::test_assertequals",
"tests/test_checker.py::TestChecks::test_assertfalse_equal",
"tests/test_checker.py::TestChecks::test_assertfalse_in",
"tests/test_checker.py::TestChecks::test_assertfalse_is",
"tests/test_checker.py::TestChecks::test_assertfalse_is_none",
"tests/test_checker.py::TestChecks::test_assertfalse_isinstance",
"tests/test_checker.py::TestChecks::test_assertnotalmostequal_round",
"tests/test_checker.py::TestChecks::test_assertnotalmostequals",
"tests/test_checker.py::TestChecks::test_assertnotequal_false",
"tests/test_checker.py::TestChecks::test_assertnotequal_none",
"tests/test_checker.py::TestChecks::test_assertnotequal_round",
"tests/test_checker.py::TestChecks::test_assertnotequal_true",
"tests/test_checker.py::TestChecks::test_assertnotequals",
"tests/test_checker.py::TestChecks::test_asserttrue_equal",
"tests/test_checker.py::TestChecks::test_asserttrue_greater",
"tests/test_checker.py::TestChecks::test_asserttrue_in",
"tests/test_checker.py::TestChecks::test_asserttrue_is",
"tests/test_checker.py::TestChecks::test_asserttrue_is_none",
"tests/test_checker.py::TestChecks::test_asserttrue_isinstance",
"tests/test_checker.py::TestChecks::test_asserttrue_less",
"tests/test_checker.py::TestChecks::test_deprecated",
"tests/test_checker.py::TestChecks::test_multiple_comparison_ops",
"tests/test_checker.py::TestChecks::test_pattern",
"tests/test_checker.py::TestChecks::test_snakecase"
] | [] | MIT License | 5,981 | 1,678 | [
"flake8_assertive.py"
] |
|
genadijrazdorov__algograph-36 | 967e516a99fba43b6ce4cd4991e547739b86eef9 | 2019-12-09 08:05:46 | 967e516a99fba43b6ce4cd4991e547739b86eef9 | diff --git a/algograph/lexer.py b/algograph/lexer.py
index 9d0e50f..39fa603 100644
--- a/algograph/lexer.py
+++ b/algograph/lexer.py
@@ -64,10 +64,10 @@ class Lexer:
old, self.indent = self.indent, indent
if indent > old:
- yield INDENT(indent)
+ yield INDENT(indent - old)
elif indent < old:
- yield DEDENT(indent)
+ yield DEDENT(old - indent)
def ignore(self, value):
return []
diff --git a/algograph/parser.py b/algograph/parser.py
index c1b0840..cb4f890 100644
--- a/algograph/parser.py
+++ b/algograph/parser.py
@@ -85,9 +85,12 @@ def reduce_by_rule(rule):
class Parser:
- def __init__(self, algorithm):
+ def __init__(self, algorithm=None, tokens=None):
self.algorithm = algorithm
- self.tokens = Lexer(algorithm).tokenize()
+ if algorithm is None:
+ self.tokens = tokens
+ else:
+ self.tokens = Lexer(algorithm).tokenize()
def reduce(self):
'''
@@ -164,7 +167,12 @@ class Parser:
raise SyntaxError
token = SUITE(*stack[-i + 1: -1])
+ dedent = stack[-1].value
+ indent = stack[-i].value
stack[-i:] = [token]
+ if dedent > indent:
+ stack.append(DEDENT(dedent - indent))
+ self._SUITE()
@reduce_by_rule
def _IFSWITCH(self):
@@ -213,7 +221,8 @@ class Parser:
last = node
not_ = stack[2].EXPR.NOT
- yes = N(stack[2].SUITE.tokens[0].ID.value)
+ ## yes = N(stack[2].SUITE.tokens[0].ID.value)
+ yes = Parser(None, stack[2].SUITE.tokens).parse().root
node[yes] = not not_
elif_ = stack[2].ELIF
@@ -222,13 +231,15 @@ class Parser:
not_ = o.NOT
o = N(o.ID.value)
node[o] = not_
- s = N(s.tokens[0].ID.value)
+ ## s = N(s.tokens[0].ID.value)
+ s = Parser(None, s.tokens).parse().root
o[s] = not not_
node = o
no = stack[2].ELSE
if no:
- no = N(no.tokens[0].ID.value)
+ ## no = N(no.tokens[0].ID.value)
+ no = Parser(None, no.tokens).parse().root
node[no] = not_
del stack[2]
| Complex branches (more then one statement) not working | genadijrazdorov/algograph | diff --git a/test/test_lexer.py b/test/test_lexer.py
index 99b4552..e04c4e9 100644
--- a/test/test_lexer.py
+++ b/test/test_lexer.py
@@ -47,7 +47,7 @@ class TestLexer:
INDENT(2),
ID('y'),
NEWLINE(),
- DEDENT(0),
+ DEDENT(2),
ID('end'),
NEWLINE()
]
@@ -71,7 +71,7 @@ class TestLexer:
INDENT(2),
ID('one'),
NEWLINE(),
- DEDENT(0),
+ DEDENT(2),
ELIF(),
ID('q'),
IS(),
@@ -82,14 +82,14 @@ class TestLexer:
INDENT(2),
ID('two'),
NEWLINE(),
- DEDENT(0),
+ DEDENT(2),
ELSE(),
LITERAL(':'),
NEWLINE(),
INDENT(2),
ID('three'),
NEWLINE(),
- DEDENT(0),
+ DEDENT(2),
ID('end'),
NEWLINE()
]
diff --git a/test/test_parser.py b/test/test_parser.py
index 8f62515..b9a72fa 100644
--- a/test/test_parser.py
+++ b/test/test_parser.py
@@ -29,6 +29,21 @@ class TestParser:
| y
''') == G(N('q', {N('y'): True}))
+ def test_multistatemnt_suite(self):
+ assert parse('''
+ | if q:
+ | first
+ | second
+ ''') == G(N('q', {N('first', {N('second'): None}): True}))
+
+ def test_multilevel_suite(self):
+ assert parse('''
+ | if q:
+ | if q2:
+ | first
+ | second
+ ''') == G(N('q', {N('q2', {N('first', {N('second'): None}): True}): True}))
+
def test_if_not(self):
assert parse('''
| if not q:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"pytest-cov",
"codecov"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/genadijrazdorov/algograph.git@967e516a99fba43b6ce4cd4991e547739b86eef9#egg=algograph
build==1.2.2.post1
certifi==2025.1.31
charset-normalizer==3.4.1
check-manifest==0.50
codecov==2.1.13
coverage==7.8.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
zipp==3.21.0
| name: algograph
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- build==1.2.2.post1
- certifi==2025.1.31
- charset-normalizer==3.4.1
- check-manifest==0.50
- codecov==2.1.13
- coverage==7.8.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/algograph
| [
"test/test_lexer.py::TestLexer::test_indent_dedent",
"test/test_lexer.py::TestLexer::test_keywords",
"test/test_parser.py::TestParser::test_multistatemnt_suite",
"test/test_parser.py::TestParser::test_multilevel_suite"
] | [] | [
"test/test_lexer.py::TestLexer::test_start_end",
"test/test_lexer.py::TestLexer::test_unknown_token",
"test/test_lexer.py::TestLexer::test_newline",
"test/test_parser.py::TestParser::test_start_end",
"test/test_parser.py::TestParser::test_if",
"test/test_parser.py::TestParser::test_if_not",
"test/test_parser.py::TestParser::test_suite_error",
"test/test_parser.py::TestParser::test_if_error",
"test/test_parser.py::TestParser::test_if_else",
"test/test_parser.py::TestParser::test_if_elif_elif_else",
"test/test_parser.py::TestParser::test_if_is"
] | [] | MIT License | 5,983 | 690 | [
"algograph/lexer.py",
"algograph/parser.py"
] |
|
tarohi24__typedflow-68 | 2127e74314d2b97d596cfc12ed8fb257bb688d6f | 2019-12-10 15:26:34 | 2127e74314d2b97d596cfc12ed8fb257bb688d6f | diff --git a/typedflow/nodes/base.py b/typedflow/nodes/base.py
index ece0895..b9853f9 100644
--- a/typedflow/nodes/base.py
+++ b/typedflow/nodes/base.py
@@ -113,7 +113,8 @@ class ConsumerNode:
None
"""
assert len(self.precs) == 0, 'Some arguments have been already set'
- self.precs: Dict[str, ProviderNode] = args
+ for name, prec in args.items():
+ self.set_upstream_node(name, prec)
return self
| The new syntax doesn't work
It doesn't accept args in the correct way. For instance, life of cache tables are never incremented. | tarohi24/typedflow | diff --git a/typedflow/tests/flow/test_flow.py b/typedflow/tests/flow/test_flow.py
index aa31917..7682475 100644
--- a/typedflow/tests/flow/test_flow.py
+++ b/typedflow/tests/flow/test_flow.py
@@ -209,3 +209,4 @@ def test_declare_inputs_when_definition_with_multiple_args():
node_dump = DumpNode(dump)({'a': node_task})
flow = Flow([node_dump, ])
flow.typecheck()
+ assert node_task.cache_table.life == 1
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-runner",
"autoflake",
"bump2version",
"flake8",
"mypy",
"pip",
"python-language-server[pyls-mypy]"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc build-essential libomp-dev libopenblas-dev"
],
"python": "3.8",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | autoflake==2.3.1
bump2version==1.0.1
dataclasses-json==0.6.7
exceptiongroup==1.2.2
flake8==7.1.2
iniconfig==2.1.0
jedi==0.17.2
marshmallow==3.22.0
mccabe==0.7.0
mypy==1.14.1
mypy-extensions==1.0.0
packaging==24.2
parso==0.7.1
pluggy==1.5.0
pycodestyle==2.12.1
pyflakes==3.2.0
pytest==8.3.5
pytest-runner==6.0.1
python-jsonrpc-server==0.4.0
python-language-server==0.36.2
tomli==2.2.1
-e git+https://github.com/tarohi24/typedflow.git@2127e74314d2b97d596cfc12ed8fb257bb688d6f#egg=typedflow
typing-inspect==0.9.0
typing_extensions==4.13.0
ujson==5.10.0
| name: typedflow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=24.2=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- autoflake==2.3.1
- bump2version==1.0.1
- dataclasses-json==0.6.7
- exceptiongroup==1.2.2
- flake8==7.1.2
- iniconfig==2.1.0
- jedi==0.17.2
- marshmallow==3.22.0
- mccabe==0.7.0
- mypy==1.14.1
- mypy-extensions==1.0.0
- packaging==24.2
- parso==0.7.1
- pluggy==1.5.0
- pycodestyle==2.12.1
- pyflakes==3.2.0
- pytest==8.3.5
- pytest-runner==6.0.1
- python-jsonrpc-server==0.4.0
- python-language-server==0.36.2
- tomli==2.2.1
- typing-extensions==4.13.0
- typing-inspect==0.9.0
- ujson==5.10.0
prefix: /opt/conda/envs/typedflow
| [
"typedflow/tests/flow/test_flow.py::test_declare_inputs_when_definition_with_multiple_args"
] | [] | [
"typedflow/tests/flow/test_flow.py::test_flow_run",
"typedflow/tests/flow/test_flow.py::test_typecheck_success",
"typedflow/tests/flow/test_flow.py::test_typecheck_failure",
"typedflow/tests/flow/test_flow.py::test_incoming_multiple_node",
"typedflow/tests/flow/test_flow.py::test_arg_inheritance",
"typedflow/tests/flow/test_flow.py::test_declare_inputs_when_definition"
] | [] | MIT License | 5,993 | 148 | [
"typedflow/nodes/base.py"
] |
|
NREL__hescore-hpxml-117 | 0b79d17c5fff0876a18ccd2efca3d058323d7810 | 2019-12-10 23:16:18 | 1bba1766baf897c421196e584597a1140fea3ea4 | diff --git a/hescorehpxml/__init__.py b/hescorehpxml/__init__.py
index 55a58b86..e7bfb5c8 100644
--- a/hescorehpxml/__init__.py
+++ b/hescorehpxml/__init__.py
@@ -1345,7 +1345,7 @@ class HPXMLtoHEScoreTranslator(object):
elif zone_floor['foundation_type'] == 'slab_on_grade':
del fw_eff_rvalues[11] # remove unused values
del fw_eff_rvalues[19]
- slabs = foundation.xpath('h:Slab', namespaces=ns)
+ slabs = xpath(foundation, 'h:Slab', raise_err=True, aslist=True)
slabua = 0
slabtotalperimeter = 0
for slab in slabs:
| Throw descriptive error message on SlabOnGrade foundations when a Slab element is missing
Currently, when foundations are defined without attaching a slab element, translator will throw an error of "integer division or modulo by zero". A more descriptive error is expected to reflect the missing input.
Might be a sub-issue of #106 . | NREL/hescore-hpxml | diff --git a/tests/tests.py b/tests/tests.py
index 1f6fd6cb..60924139 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -320,6 +320,15 @@ class TestOtherHouses(unittest.TestCase, ComparatorBase):
r'The house is a slab on grade foundation, but has foundation walls\.',
tr.hpxml_to_hescore_dict)
+ def test_slab_missing(self):
+ tr = self._load_xmlfile('house3')
+ el = self.xpath('//h:Slab')
+ el.getparent().remove(el)
+ self.assertRaises(
+ ElementNotFoundError,
+ tr.hpxml_to_hescore_dict
+ )
+
def test_missing_window_area(self):
tr = self._load_xmlfile('hescore_min')
el = self.xpath('//h:Window[1]/h:Area')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 5.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
Babel==2.14.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.2.7
docutils==0.17.1
exceptiongroup==1.2.2
execnet==2.0.2
flake8==5.0.4
future==1.0.0
-e git+https://github.com/NREL/hescore-hpxml.git@0b79d17c5fff0876a18ccd2efca3d058323d7810#egg=hescore_hpxml
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
iniconfig==2.0.0
Jinja2==3.1.6
livereload==2.7.1
lxml==5.3.1
MarkupSafe==2.1.5
mccabe==0.7.0
packaging==24.0
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.17.2
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
pytz==2025.2
requests==2.31.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinx-autobuild==2021.3.14
sphinx-rtd-theme==1.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==2.0.1
tornado==6.2
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: hescore-hpxml
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.14.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.2.7
- docutils==0.17.1
- exceptiongroup==1.2.2
- execnet==2.0.2
- flake8==5.0.4
- future==1.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- jinja2==3.1.6
- livereload==2.7.1
- lxml==5.3.1
- markupsafe==2.1.5
- mccabe==0.7.0
- packaging==24.0
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.17.2
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- pytz==2025.2
- requests==2.31.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinx-autobuild==2021.3.14
- sphinx-rtd-theme==1.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==2.0.1
- tornado==6.2
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/hescore-hpxml
| [
"tests/tests.py::TestOtherHouses::test_slab_missing"
] | [] | [
"tests/tests.py::TestAPIHouses::test_house1",
"tests/tests.py::TestAPIHouses::test_house1_v1_1",
"tests/tests.py::TestAPIHouses::test_house1_v2",
"tests/tests.py::TestAPIHouses::test_house1_v2_1",
"tests/tests.py::TestAPIHouses::test_house2",
"tests/tests.py::TestAPIHouses::test_house3",
"tests/tests.py::TestAPIHouses::test_house4",
"tests/tests.py::TestAPIHouses::test_house5",
"tests/tests.py::TestAPIHouses::test_house6",
"tests/tests.py::TestAPIHouses::test_house7",
"tests/tests.py::TestAPIHouses::test_house8",
"tests/tests.py::TestOtherHouses::test_air_source_heat_pump_has_no_ducts",
"tests/tests.py::TestOtherHouses::test_attic_knee_wall",
"tests/tests.py::TestOtherHouses::test_attic_knee_wall_zero_rvalue",
"tests/tests.py::TestOtherHouses::test_attic_roof_assoc",
"tests/tests.py::TestOtherHouses::test_bad_duct_location",
"tests/tests.py::TestOtherHouses::test_bldgid_not_found",
"tests/tests.py::TestOtherHouses::test_clg_sys_has_air_dist",
"tests/tests.py::TestOtherHouses::test_cooling_system_wrong_efficiency_type",
"tests/tests.py::TestOtherHouses::test_dist_sys_idref",
"tests/tests.py::TestOtherHouses::test_evap_cooling_system_type",
"tests/tests.py::TestOtherHouses::test_external_id_extension_passthru",
"tests/tests.py::TestOtherHouses::test_external_id_passthru",
"tests/tests.py::TestOtherHouses::test_extra_roof_sheathing_insulation",
"tests/tests.py::TestOtherHouses::test_extra_wall_sheathing_insulation",
"tests/tests.py::TestOtherHouses::test_floor_no_area",
"tests/tests.py::TestOtherHouses::test_foundation_walls_on_slab",
"tests/tests.py::TestOtherHouses::test_heating_system_no_efficiency",
"tests/tests.py::TestOtherHouses::test_heating_system_wrong_efficiency_type",
"tests/tests.py::TestOtherHouses::test_heatpump_no_cooling",
"tests/tests.py::TestOtherHouses::test_heatpump_no_heating",
"tests/tests.py::TestOtherHouses::test_hescore_min",
"tests/tests.py::TestOtherHouses::test_htg_sys_has_air_dist",
"tests/tests.py::TestOtherHouses::test_hvac_fractions_sum_to_one",
"tests/tests.py::TestOtherHouses::test_impossible_cooling_system_type",
"tests/tests.py::TestOtherHouses::test_impossible_heating_system_type",
"tests/tests.py::TestOtherHouses::test_impossible_triple_pane_window",
"tests/tests.py::TestOtherHouses::test_impossible_window",
"tests/tests.py::TestOtherHouses::test_indirect_dhw_error",
"tests/tests.py::TestOtherHouses::test_invalid_attic_type",
"tests/tests.py::TestOtherHouses::test_invalid_residential_faciliy_type",
"tests/tests.py::TestOtherHouses::test_invalid_roof_type",
"tests/tests.py::TestOtherHouses::test_invalid_surroundings",
"tests/tests.py::TestOtherHouses::test_log_wall_fail",
"tests/tests.py::TestOtherHouses::test_mentor_extension",
"tests/tests.py::TestOtherHouses::test_missing_attached_to_roof",
"tests/tests.py::TestOtherHouses::test_missing_cooling_system",
"tests/tests.py::TestOtherHouses::test_missing_cooling_weighting_factor",
"tests/tests.py::TestOtherHouses::test_missing_heating_system",
"tests/tests.py::TestOtherHouses::test_missing_heating_weighting_factor",
"tests/tests.py::TestOtherHouses::test_missing_residential_facility_type",
"tests/tests.py::TestOtherHouses::test_missing_roof_color",
"tests/tests.py::TestOtherHouses::test_missing_roof_type",
"tests/tests.py::TestOtherHouses::test_missing_siding",
"tests/tests.py::TestOtherHouses::test_missing_skylight_area",
"tests/tests.py::TestOtherHouses::test_missing_surroundings",
"tests/tests.py::TestOtherHouses::test_missing_water_heater",
"tests/tests.py::TestOtherHouses::test_missing_window_area",
"tests/tests.py::TestOtherHouses::test_missing_window_orientation",
"tests/tests.py::TestOtherHouses::test_only_duct_system_per_heating_sys",
"tests/tests.py::TestOtherHouses::test_ove_low_r",
"tests/tests.py::TestOtherHouses::test_preconstruction_event_type",
"tests/tests.py::TestOtherHouses::test_siding_cmu_fail",
"tests/tests.py::TestOtherHouses::test_siding_fail2",
"tests/tests.py::TestOtherHouses::test_tankless_coil_dhw_error",
"tests/tests.py::TestOtherHouses::test_too_many_duct_systems",
"tests/tests.py::TestOtherHouses::test_townhouse_walls",
"tests/tests.py::TestOtherHouses::test_townhouse_walls_all_same",
"tests/tests.py::TestOtherHouses::test_townhouse_walls_conflict",
"tests/tests.py::TestOtherHouses::test_townhouse_window_fail",
"tests/tests.py::TestOtherHouses::test_townhouse_window_wall_all_same_fail",
"tests/tests.py::TestOtherHouses::test_townhouse_windows_area_wrong",
"tests/tests.py::TestOtherHouses::test_wall_construction_ps_low_r",
"tests/tests.py::TestOtherHouses::test_wall_insulation_layer_missing_rvalue",
"tests/tests.py::TestOtherHouses::test_wall_same_area_same_side_different_construction",
"tests/tests.py::TestOtherHouses::test_window_area_sum_on_angled_front_door",
"tests/tests.py::TestOtherHouses::test_window_attached_to_wall",
"tests/tests.py::TestOtherHouses::test_wood_stove",
"tests/tests.py::TestOtherHouses::test_wood_stove_invalid_fuel_type",
"tests/tests.py::TestOtherHouses::test_zipcode_missing",
"tests/tests.py::TestInputOutOfBounds::test_assessment_date1",
"tests/tests.py::TestInputOutOfBounds::test_assessment_date2",
"tests/tests.py::TestInputOutOfBounds::test_conditioned_floor_area1",
"tests/tests.py::TestInputOutOfBounds::test_conditioned_floor_area2",
"tests/tests.py::TestInputOutOfBounds::test_cooling_efficiency",
"tests/tests.py::TestInputOutOfBounds::test_cooling_year",
"tests/tests.py::TestInputOutOfBounds::test_dhw_heat_pump_efficiency",
"tests/tests.py::TestInputOutOfBounds::test_dhw_storage_efficiency",
"tests/tests.py::TestInputOutOfBounds::test_dhw_year",
"tests/tests.py::TestInputOutOfBounds::test_envelope_leakage",
"tests/tests.py::TestInputOutOfBounds::test_evap_cooler_missing_efficiency",
"tests/tests.py::TestInputOutOfBounds::test_floor_to_ceiling_height1",
"tests/tests.py::TestInputOutOfBounds::test_floor_to_ceiling_height2",
"tests/tests.py::TestInputOutOfBounds::test_heating_efficiency_furnace",
"tests/tests.py::TestInputOutOfBounds::test_heating_efficiency_gchp",
"tests/tests.py::TestInputOutOfBounds::test_heating_efficiency_heat_pump",
"tests/tests.py::TestInputOutOfBounds::test_heating_year",
"tests/tests.py::TestInputOutOfBounds::test_num_floor_above_grade",
"tests/tests.py::TestInputOutOfBounds::test_skylight_area",
"tests/tests.py::TestInputOutOfBounds::test_skylight_u_value",
"tests/tests.py::TestInputOutOfBounds::test_window_area",
"tests/tests.py::TestInputOutOfBounds::test_window_u_value",
"tests/tests.py::TestInputOutOfBounds::test_year_built1",
"tests/tests.py::TestInputOutOfBounds::test_year_built2",
"tests/tests.py::TestHVACFractions::test_allow_5pct_diff",
"tests/tests.py::TestHVACFractions::test_boiler_roomac",
"tests/tests.py::TestHVACFractions::test_furnace_baseboard_centralac",
"tests/tests.py::TestHVACFractions::test_furnace_heat_pump",
"tests/tests.py::TestHVACFractions::test_wall_furnace_baseboard_centralac",
"tests/tests.py::TestPhotovoltaics::test_azimuth_orientation_missing",
"tests/tests.py::TestPhotovoltaics::test_capacity_missing",
"tests/tests.py::TestPhotovoltaics::test_collector_area",
"tests/tests.py::TestPhotovoltaics::test_orientation",
"tests/tests.py::TestPhotovoltaics::test_pv",
"tests/tests.py::TestPhotovoltaics::test_two_sys_avg",
"tests/tests.py::TestPhotovoltaics::test_two_sys_different_capacity_error",
"tests/tests.py::TestPhotovoltaics::test_years_missing",
"tests/tests.py::TesHPXMLVersion2Point3::test_floor_furnace",
"tests/tests.py::TesHPXMLVersion2Point3::test_medium_dark_roof_color",
"tests/tests.py::TesHPXMLVersion2Point3::test_roof_absorptance",
"tests/tests.py::TestHEScore2019Updates::test_bldg_about_comment",
"tests/tests.py::TestHEScore2019Updates::test_conditioned_attic",
"tests/tests.py::TestHEScore2019Updates::test_duct_location_validation",
"tests/tests.py::TestHEScore2019Updates::test_hpwes",
"tests/tests.py::TestHEScore2019Updates::test_hpwes_fail",
"tests/tests.py::TestHEScore2019Updates::test_hvac_combinations",
"tests/tests.py::TestHEScore2019Updates::test_mini_split_cooling_only",
"tests/tests.py::TestHEScore2019Updates::test_skylight_solar_screens_exteriorshading",
"tests/tests.py::TestHEScore2019Updates::test_skylight_solar_screens_treatments",
"tests/tests.py::TestHEScore2019Updates::test_tankless",
"tests/tests.py::TestHEScore2019Updates::test_tankless_energyfactorerror",
"tests/tests.py::TestHEScore2019Updates::test_uef_over_ef",
"tests/tests.py::TestHEScore2019Updates::test_uef_with_tankless",
"tests/tests.py::TestHEScore2019Updates::test_window_code_mappings_aluminum",
"tests/tests.py::TestHEScore2019Updates::test_window_solar_screens"
] | [] | BSD 2-Clause "Simplified" License | 5,997 | 208 | [
"hescorehpxml/__init__.py"
] |
|
lace__polliwog-151 | 1c2291f417ea6a563b59c479703779d146f546a8 | 2019-12-11 20:46:19 | c04d3a1b074a6795ae5bdea1495ff8fc6e54eb0a | diff --git a/polliwog/polyline/polyline.py b/polliwog/polyline/polyline.py
index 2f79230..a9fe270 100644
--- a/polliwog/polyline/polyline.py
+++ b/polliwog/polyline/polyline.py
@@ -285,15 +285,16 @@ class Polyline(object):
else:
return result
- def partition_by_length(self, max_length, ret_indices=False):
+ def subdivided_by_length(self, max_length, ret_indices=False):
"""
- Subdivide each line segment longer than max_length with
- equal-length segments, such that none of the new segments
- are longer than max_length.
-
- ret_indices: If True, return the indices of the original vertices.
- Otherwise return self for chaining.
+ Subdivide each line segment longer than `max_length` with
+ equal-length segments, such that none of the new segments are longer
+ than `max_length`. Returns a new Polyline.
+ Args:
+ max_length (float): The maximum lenth of a segment.
+ ret_indices (bool): When `True`, also returns the indices of the
+ original vertices.
"""
import itertools
from ..segment.segment_functions import subdivide_segment
@@ -319,45 +320,45 @@ class Polyline(object):
]
splits_of_original_vs = np.vsplit(self.v, es_to_subdivide + 1)
- self.v = np.concatenate(
- list(
- itertools.chain(
- *zip(
- splits_of_original_vs,
- vs_to_insert + [np.empty((0, 3), dtype=np.float64)],
+ new_polyline = Polyline(
+ v=np.concatenate(
+ list(
+ itertools.chain(
+ *zip(
+ splits_of_original_vs,
+ vs_to_insert + [np.empty((0, 3), dtype=np.float64)],
+ )
)
)
- )
+ ),
+ is_closed=self.is_closed,
)
- if ret_indices:
- # In a degenerate case, `partition_segment()` may return fewer than
- # the requested number of indices. So, recompute the actual number of
- # segments inserted.
- num_segments_inserted = np.zeros(old_num_e, dtype=np.int64)
- num_segments_inserted[es_to_subdivide] = [len(vs) for vs in vs_to_insert]
- stepwise_index_offsets = np.concatenate(
- [
- # The first vertex is never moved.
- np.zeros(1, dtype=np.int64),
- # In a closed polyline, the last edge goes back to vertex
- # 0. Subdivisions of that segment do not affect indexing of
- # any of the vertices (since the original end vertex is
- # still at index 0).
- num_segments_inserted[:-1]
- if self.is_closed
- else num_segments_inserted,
- ]
- )
- cumulative_index_offsets = np.sum(
- np.tril(
- np.broadcast_to(stepwise_index_offsets, (old_num_v, old_num_v))
- ),
- axis=1,
- )
- return np.arange(old_num_v) + cumulative_index_offsets
- else:
- return self
+ if not ret_indices:
+ return new_polyline
+
+ # In a degenerate case, `partition_segment()` may return fewer than
+ # the requested number of indices. So, recompute the actual number of
+ # segments inserted.
+ num_segments_inserted = np.zeros(old_num_e, dtype=np.int64)
+ num_segments_inserted[es_to_subdivide] = [len(vs) for vs in vs_to_insert]
+ stepwise_index_offsets = np.concatenate(
+ [
+ # The first vertex is never moved.
+ np.zeros(1, dtype=np.int64),
+ # In a closed polyline, the last edge goes back to vertex
+ # 0. Subdivisions of that segment do not affect indexing of
+ # any of the vertices (since the original end vertex is
+ # still at index 0).
+ num_segments_inserted[:-1] if self.is_closed else num_segments_inserted,
+ ]
+ )
+ cumulative_index_offsets = np.sum(
+ np.tril(np.broadcast_to(stepwise_index_offsets, (old_num_v, old_num_v))),
+ axis=1,
+ )
+ indices_of_original_vertices = np.arange(old_num_v) + cumulative_index_offsets
+ return new_polyline, indices_of_original_vertices
def with_segments_bisected(self, segment_indices, ret_new_indices=False):
"""
| Convert `Polyline.partition_by_length()` to work on a copy
This follows the pattern of most of the other mutating functions. | lace/polliwog | diff --git a/polliwog/polyline/test_polyline.py b/polliwog/polyline/test_polyline.py
index 0fa38c3..9952359 100644
--- a/polliwog/polyline/test_polyline.py
+++ b/polliwog/polyline/test_polyline.py
@@ -67,6 +67,19 @@ def test_to_dict():
np.testing.assert_array_equal(expected_dict["edges"], actual_dict["edges"])
+def test_copy():
+ original_vs = np.array(
+ [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [1.0, 2.0, 0.0]]
+ )
+ polyline = Polyline(v=original_vs.copy(), is_closed=False)
+ copy_of_polyline = polyline.copy()
+ assert polyline is not copy_of_polyline
+ assert polyline.is_closed == copy_of_polyline.is_closed
+ np.testing.assert_array_equal(polyline.v, copy_of_polyline.v)
+ polyline.v[0] = np.array([2.0, 3.0, 4.0])
+ np.testing.assert_array_equal(copy_of_polyline.v, original_vs)
+
+
def test_bounding_box():
bounding_box = Polyline(
np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [1.0, 2.0, 0.0]])
@@ -233,7 +246,7 @@ def test_length_of_empty_polyline():
assert polyline.total_length == 0
-def test_partition_by_length_noop():
+def test_subdivided_by_length_noop():
original = Polyline(
np.array(
[
@@ -246,8 +259,7 @@ def test_partition_by_length_noop():
)
)
- result = original.copy()
- indices = result.partition_by_length(1.0, ret_indices=True)
+ result, indices = original.subdivided_by_length(1.0, ret_indices=True)
expected_indices = np.array([0, 1, 2, 3, 4])
@@ -257,15 +269,14 @@ def test_partition_by_length_noop():
np.testing.assert_array_equal(result.v[indices], original.v)
-def test_partition_by_length_degenerate():
+def test_subdivided_by_length_degenerate():
"""
This covers a bug that arose from a numerical stability issue in
measurement on EC2 / MKL.
"""
original = Polyline(np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]]))
- result = original.copy()
- indices = result.partition_by_length(1.0, ret_indices=True)
+ result, indices = original.subdivided_by_length(1.0, ret_indices=True)
expected_indices = np.array([0, 1, 2])
@@ -275,7 +286,7 @@ def test_partition_by_length_degenerate():
np.testing.assert_array_equal(result.v[indices], original.v)
-def test_partition_by_length_divide_by_two():
+def test_subdivided_by_length_divide_by_two():
original = Polyline(
np.array(
[
@@ -307,17 +318,15 @@ def test_partition_by_length_divide_by_two():
expected_indices = np.array([0, 2, 4, 6, 8])
for max_length in (0.99, 0.75, 0.5):
- result = original.copy()
- indices = result.partition_by_length(max_length, ret_indices=True)
+ result, indices = original.subdivided_by_length(max_length, ret_indices=True)
np.testing.assert_array_almost_equal(result.v, expected.v)
np.testing.assert_array_equal(result.e, expected.e)
np.testing.assert_array_equal(indices, expected_indices)
- result_2 = original.copy()
- ret = result_2.partition_by_length(max_length, ret_indices=False)
- np.testing.assert_array_almost_equal(result.v, expected.v)
- assert ret is result_2
+ result_2 = result.subdivided_by_length(max_length, ret_indices=False)
+ np.testing.assert_array_almost_equal(result_2.v, expected.v)
+ assert result_2 is not result
np.testing.assert_array_equal(result.v[indices], original.v)
@@ -365,8 +374,7 @@ def test_partition_length_divide_by_five():
expected_indices = np.array([0, 5, 10, 15, 20])
for max_length in (0.2, 0.24):
- result = original.copy()
- indices = result.partition_by_length(max_length, ret_indices=True)
+ result, indices = original.subdivided_by_length(max_length, ret_indices=True)
np.testing.assert_array_almost_equal(result.v, expected.v)
np.testing.assert_array_equal(result.e, expected.e)
@@ -374,7 +382,7 @@ def test_partition_length_divide_by_five():
np.testing.assert_array_equal(result.v[indices], original.v)
-def test_partition_by_length_divide_some_leave_some():
+def test_subdivided_by_length_divide_some_leave_some():
original = Polyline(
np.array(
[
@@ -404,8 +412,7 @@ def test_partition_by_length_divide_some_leave_some():
expected_indices = np.array([0, 1, 2, 5, 6])
for max_length in (2.0, 2.99):
- result = original.copy()
- indices = result.partition_by_length(max_length, ret_indices=True)
+ result, indices = original.subdivided_by_length(max_length, ret_indices=True)
np.testing.assert_array_almost_equal(result.v, expected.v)
np.testing.assert_array_equal(result.e, expected.e)
@@ -413,7 +420,7 @@ def test_partition_by_length_divide_some_leave_some():
np.testing.assert_array_equal(result.v[indices], original.v)
-def test_partition_by_length_closed():
+def test_subdivided_by_length_closed():
original = Polyline(
np.array(
[
@@ -450,8 +457,7 @@ def test_partition_by_length_closed():
expected_indices = np.array([0, 1, 2, 5, 6, 7])
for max_length in (2.0, 2.5, 2.6):
- result = original.copy()
- indices = result.partition_by_length(max_length, ret_indices=True)
+ result, indices = original.subdivided_by_length(max_length, ret_indices=True)
np.testing.assert_array_almost_equal(result.v, expected.v)
np.testing.assert_array_equal(result.e, expected.e)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
ounce==1.1.1
packaging==24.2
pluggy==1.5.0
-e git+https://github.com/lace/polliwog.git@1c2291f417ea6a563b59c479703779d146f546a8#egg=polliwog
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
vg==2.0.0
| name: polliwog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- ounce==1.1.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
- vg==2.0.0
prefix: /opt/conda/envs/polliwog
| [
"polliwog/polyline/test_polyline.py::test_subdivided_by_length_noop",
"polliwog/polyline/test_polyline.py::test_subdivided_by_length_degenerate"
] | [
"polliwog/polyline/test_polyline.py::test_subdivided_by_length_divide_by_two",
"polliwog/polyline/test_polyline.py::test_partition_length_divide_by_five",
"polliwog/polyline/test_polyline.py::test_subdivided_by_length_divide_some_leave_some",
"polliwog/polyline/test_polyline.py::test_subdivided_by_length_closed"
] | [
"polliwog/polyline/test_polyline.py::test_join",
"polliwog/polyline/test_polyline.py::test_repr",
"polliwog/polyline/test_polyline.py::test_to_dict",
"polliwog/polyline/test_polyline.py::test_copy",
"polliwog/polyline/test_polyline.py::test_bounding_box",
"polliwog/polyline/test_polyline.py::test_bounding_box_degnerate",
"polliwog/polyline/test_polyline.py::test_index_of_vertex",
"polliwog/polyline/test_polyline.py::test_with_insertions",
"polliwog/polyline/test_polyline.py::test_update_is_closed",
"polliwog/polyline/test_polyline.py::test_num_v_num_e",
"polliwog/polyline/test_polyline.py::test_edges",
"polliwog/polyline/test_polyline.py::test_segments",
"polliwog/polyline/test_polyline.py::test_segment_vectors",
"polliwog/polyline/test_polyline.py::test_length_of_empty_polyline",
"polliwog/polyline/test_polyline.py::test_with_segments_bisected",
"polliwog/polyline/test_polyline.py::test_flipped",
"polliwog/polyline/test_polyline.py::test_aligned_with",
"polliwog/polyline/test_polyline.py::test_aligned_with_closed",
"polliwog/polyline/test_polyline.py::test_aligned_with_degenerate",
"polliwog/polyline/test_polyline.py::test_reindexed",
"polliwog/polyline/test_polyline.py::test_intersect_plane",
"polliwog/polyline/test_polyline.py::test_sliced_by_plane_closed",
"polliwog/polyline/test_polyline.py::test_sliced_by_plane_closed_on_vertex",
"polliwog/polyline/test_polyline.py::test_sliced_by_plane_closed_one_vertex",
"polliwog/polyline/test_polyline.py::test_sliced_by_plane_open",
"polliwog/polyline/test_polyline.py::test_apex",
"polliwog/polyline/test_polyline.py::test_sliced_at_indices",
"polliwog/polyline/test_polyline.py::test_polyline_nearest",
"polliwog/polyline/test_polyline.py::test_slice_at_points"
] | [] | BSD 2-Clause "Simplified" License | 6,004 | 1,072 | [
"polliwog/polyline/polyline.py"
] |
|
stigok__ruterstop-37 | 01651a140d0639197dec3233ddaeb22ac0e2859a | 2019-12-11 22:07:15 | 21572158bf6ed855b3ec1403d3af3451cc9fdee8 | diff --git a/ruterstop/__init__.py b/ruterstop/__init__.py
index 0c1986c..d1d7f24 100644
--- a/ruterstop/__init__.py
+++ b/ruterstop/__init__.py
@@ -134,6 +134,23 @@ def parse_departures(raw_dict, *, date_fmt="%Y-%m-%dT%H:%M:%S%z"):
@webapp.route("/<stop_id:int>")
+def get_departures_proxy(*args, **kwargs):
+ """
+ A proxy function for get_departures to make get_departures mock-/patchable
+ even after decorated by bottle.
+ Calls get_departures with whitelisted kwargs defined in the querystring of
+ the current web request.
+ """
+ q = bottle.request.query
+
+ if q.direction:
+ kwargs["directions"] = q.direction
+ if q.min_eta:
+ kwargs["min_eta"] = int(q.min_eta)
+
+ return get_departures(*args, **kwargs)
+
+
def get_departures(*, stop_id=None, directions=None, min_eta=0, text=True):
"""
Returns a filtered list of departures. If `text` is True, return stringified
| Send queryparams til API-funksjonskall
Så man f.eks kan filtrere på retning med `GET /1234?direction=inbound` | stigok/ruterstop | diff --git a/ruterstop/tests/test_webapp.py b/ruterstop/tests/test_webapp.py
index c79d47a..7f05385 100644
--- a/ruterstop/tests/test_webapp.py
+++ b/ruterstop/tests/test_webapp.py
@@ -1,14 +1,5 @@
-"""
-Note for webtest and bottle:
-Even though I'd ideally want to mock ruterstop.get_departures, that function
-is decorated with @bottle.get() on import-time, consequently wrapping the
-underlying function before it is patched/mocked.
-A work-around for this is to mock get_realtime_stop instead, as it is the
-first function called in get_departures, and use that to e.g. raise Exceptions
-as a side-effect.
-"""
from unittest import TestCase
-from unittest.mock import patch
+from unittest.mock import Mock, patch
from webtest import TestApp
@@ -24,28 +15,30 @@ class WebAppTestCase(TestCase):
self.app.reset()
pass
- # The patchability of this module isn't great for this kind of test
- @patch('ruterstop.get_realtime_stop', return_value={"data": "foobar"})
- @patch('ruterstop.parse_departures', returl_value=[])
- def test_calls_api_on_proper_path(self, parse_departures, get_realtime_stop):
- res = self.app.get('/1234')
- get_realtime_stop.assert_called_once_with(stop_id=1234)
+ @patch("ruterstop.get_departures", return_value=None)
+ def test_calls_api_on_proper_path(self, mock):
+ res = self.app.get("/1234")
+ mock.assert_called_once_with(stop_id=1234)
- def test_simple_404_error(self):
- res = self.app.get('/', expect_errors=True)
- self.assertEqual(res.content_type, 'text/plain')
+ @patch("ruterstop.get_departures", return_value=None)
+ def test_simple_404_error(self, mock):
+ res = self.app.get("/", expect_errors=True)
+ self.assertEqual(res.content_type, "text/plain")
self.assertEqual(res.status_code, 404)
- self.assertTrue(str(res.body).count('\n') <= 1) # a single line of text
self.assertEqual(res.body, "Ugyldig stoppested".encode())
-
- def test_simple_500_error(self):
- with patch('ruterstop.get_realtime_stop') as mock:
- mock.side_effect = Exception("Trigger a 500")
-
- res = self.app.get('/1234', expect_errors=True)
- mock.assert_called_once()
-
- self.assertEqual(res.content_type, 'text/plain')
- self.assertEqual(res.status_code, 500)
- self.assertTrue(str(res.body).count('\n') <= 1) # a single line of text
- self.assertEqual(res.body, "Feil på serveren".encode())
+ self.assertEqual(mock.call_count, 0)
+
+ @patch("ruterstop.get_departures", return_value=None)
+ def test_simple_500_error(self, mock):
+ mock.side_effect = Exception("barf")
+ res = self.app.get("/1234", expect_errors=True)
+ self.assertEqual(res.content_type, "text/plain")
+ self.assertEqual(res.status_code, 500)
+ self.assertEqual(res.body, "Feil på serveren".encode())
+ self.assertEqual(mock.call_count, 1)
+
+ @patch("ruterstop.get_departures", return_value=None)
+ def test_calls_api_with_querystring_params(self, mock):
+ self.app.get("/1234?direction=inbound&min_eta=5&bogusargs=1337")
+ mock.assert_called_once_with(stop_id=1234, directions="inbound",
+ min_eta=5)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | beautifulsoup4==4.13.3
bottle==0.13.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==7.2.7
exceptiongroup==1.2.2
freezegun==1.5.1
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
python-dateutil==2.9.0.post0
requests==2.31.0
-e git+https://github.com/stigok/ruterstop.git@01651a140d0639197dec3233ddaeb22ac0e2859a#egg=ruterstop
six==1.17.0
soupsieve==2.4.1
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
waitress==2.1.2
WebOb==1.8.9
WebTest==3.0.1
zipp==3.15.0
| name: ruterstop
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.13.3
- bottle==0.13.2
- charset-normalizer==3.4.1
- coverage==7.2.7
- exceptiongroup==1.2.2
- freezegun==1.5.1
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- requests==2.31.0
- six==1.17.0
- soupsieve==2.4.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- waitress==2.1.2
- webob==1.8.9
- webtest==3.0.1
- zipp==3.15.0
prefix: /opt/conda/envs/ruterstop
| [
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_calls_api_on_proper_path",
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_calls_api_with_querystring_params",
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_simple_500_error"
] | [] | [
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_simple_404_error"
] | [] | MIT License | 6,005 | 289 | [
"ruterstop/__init__.py"
] |
|
zamzterz__Flask-pyoidc-70 | cf3f5f8ed0507d310c70b40d13b49dd2a7b708b4 | 2019-12-12 19:16:30 | cf3f5f8ed0507d310c70b40d13b49dd2a7b708b4 | codecov-io: # [Codecov](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=h1) Report
> Merging [#70](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=desc) into [master](https://codecov.io/gh/zamzterz/Flask-pyoidc/commit/cf3f5f8ed0507d310c70b40d13b49dd2a7b708b4?src=pr&el=desc) will **increase** coverage by `0.01%`.
> The diff coverage is `100%`.
[](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #70 +/- ##
==========================================
+ Coverage 97.01% 97.02% +0.01%
==========================================
Files 8 8
Lines 502 504 +2
Branches 55 55
==========================================
+ Hits 487 489 +2
Misses 8 8
Partials 7 7
```
| [Impacted Files](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/flask\_pyoidc/user\_session.py](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70/diff?src=pr&el=tree#diff-c3JjL2ZsYXNrX3B5b2lkYy91c2VyX3Nlc3Npb24ucHk=) | `100% <100%> (ø)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=footer). Last update [cf3f5f8...b9a6354](https://codecov.io/gh/zamzterz/Flask-pyoidc/pull/70?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/src/flask_pyoidc/user_session.py b/src/flask_pyoidc/user_session.py
index 962f4db..4578db0 100644
--- a/src/flask_pyoidc/user_session.py
+++ b/src/flask_pyoidc/user_session.py
@@ -11,7 +11,15 @@ class UserSession:
Wraps comparison of times necessary for session handling.
"""
- KEYS = ['access_token', 'current_provider', 'id_token', 'id_token_jwt', 'last_authenticated', 'userinfo']
+ KEYS = [
+ 'access_token',
+ 'current_provider',
+ 'id_token',
+ 'id_token_jwt',
+ 'last_authenticated',
+ 'last_session_refresh',
+ 'userinfo'
+ ]
def __init__(self, session_storage, provider_name=None):
self._session_storage = session_storage
@@ -36,10 +44,11 @@ class UserSession:
def should_refresh(self, refresh_interval_seconds=None):
return refresh_interval_seconds is not None and \
+ self._session_storage.get('last_session_refresh') is not None and \
self._refresh_time(refresh_interval_seconds) < time.time()
def _refresh_time(self, refresh_interval_seconds):
- last = self._session_storage.get('last_authenticated', 0)
+ last = self._session_storage.get('last_session_refresh', 0)
return last + refresh_interval_seconds
def update(self, access_token=None, id_token=None, id_token_jwt=None, userinfo=None):
@@ -55,11 +64,13 @@ class UserSession:
if value:
self._session_storage[session_key] = value
- auth_time = int(time.time())
+ now = int(time.time())
+ auth_time = now
if id_token:
auth_time = id_token.get('auth_time', auth_time)
self._session_storage['last_authenticated'] = auth_time
+ self._session_storage['last_session_refresh'] = now
set_if_defined('access_token', access_token)
set_if_defined('id_token', id_token)
set_if_defined('id_token_jwt', id_token_jwt)
| Issues refreshing tokens when 'session_refresh_interval_seconds' is set
With the current state of flask_pyoidc we're running into the issue of not silently refreshing the tokens when it's desired.
Our current setup is as follows: API endpoints are protected using flask_pyoidc, with the session_refresh_interval_seconds providing `prompt=none` refreshing after 5 minutes.
The first issue we encountered was running into the silent refreshing when the initial authentication has not been successfully established yet.
The second issue was the refreshing being triggered based on the previous auth_time, which didn't change with the request. Instead, what worked was taking the expiry time provided by the OIDC instance to trigger the silent refresh that way.
To illustrate a proposed way of fixing it we've created this change (this is not PR-quality code by any means):
https://github.com/fredldotme/Flask-pyoidc/commit/8a062a1d9d281a80421c1e3adadd84c50ae12c7a
This forces the refresh after 5 minutes. Note that we've added 20 seconds of headroom before the expiry time runs out.
I'd like to get your suggestions on this topic and maybe a way forward in the form of code being merged into upstream. | zamzterz/Flask-pyoidc | diff --git a/tests/test_user_session.py b/tests/test_user_session.py
index 6507603..1689161 100644
--- a/tests/test_user_session.py
+++ b/tests/test_user_session.py
@@ -45,17 +45,17 @@ class TestUserSession(object):
def test_should_not_refresh_if_authenticated_within_refresh_interval(self):
refresh_interval = 10
- session = self.initialised_session({'last_authenticated': time.time() + (refresh_interval - 1)})
+ session = self.initialised_session({'last_session_refresh': time.time() + (refresh_interval - 1)})
assert session.should_refresh(refresh_interval) is False
def test_should_refresh_if_supported_and_necessary(self):
refresh_interval = 10
# authenticated too far in the past
- session_storage = {'last_authenticated': time.time() - (refresh_interval + 1)}
+ session_storage = {'last_session_refresh': time.time() - (refresh_interval + 1)}
assert self.initialised_session(session_storage).should_refresh(refresh_interval) is True
- def test_should_refresh_if_supported_and_not_previously_authenticated(self):
- assert self.initialised_session({}).should_refresh(10) is True
+ def test_should_not_refresh_if_not_previously_authenticated(self):
+ assert self.initialised_session({}).should_refresh(10) is False
@pytest.mark.parametrize('data', [
{'access_token': 'test_access_token'},
@@ -71,7 +71,11 @@ class TestUserSession(object):
self.initialised_session(storage).update(**data)
- expected_session_data = {'last_authenticated': auth_time, 'current_provider': self.PROVIDER_NAME}
+ expected_session_data = {
+ 'last_authenticated': auth_time,
+ 'last_session_refresh': auth_time,
+ 'current_provider': self.PROVIDER_NAME
+ }
expected_session_data.update(**data)
assert storage == expected_session_data
@@ -81,6 +85,15 @@ class TestUserSession(object):
session.update(id_token={'auth_time': auth_time})
assert session.last_authenticated == auth_time
+ @patch('time.time')
+ def test_update_should_update_last_session_refresh_timestamp(self, time_mock):
+ now_timestamp = 1234
+ time_mock.return_value = now_timestamp
+ data = {}
+ session = self.initialised_session(data)
+ session.update()
+ assert data['last_session_refresh'] == now_timestamp
+
def test_trying_to_update_uninitialised_session_should_throw_exception(self):
with pytest.raises(UninitialisedSession):
UserSession(session_storage={}).update()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 3.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"responses"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"tests/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | Beaker==1.13.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
click==8.1.8
cryptography==44.0.2
defusedxml==0.7.1
exceptiongroup==1.2.2
Flask==2.2.5
-e git+https://github.com/zamzterz/Flask-pyoidc.git@cf3f5f8ed0507d310c70b40d13b49dd2a7b708b4#egg=Flask_pyoidc
future==1.0.0
idna==3.10
importlib-metadata==6.7.0
importlib-resources==5.12.0
iniconfig==2.0.0
itsdangerous==2.1.2
Jinja2==3.1.6
Mako==1.2.4
MarkupSafe==2.1.5
oic==1.1.2
packaging==24.0
pluggy==1.2.0
pycparser==2.21
pycryptodomex==3.22.0
pyjwkest==1.4.2
pytest==7.4.4
PyYAML==6.0.1
requests==2.31.0
responses==0.23.3
six==1.17.0
tomli==2.0.1
types-PyYAML==6.0.12.12
typing_extensions==4.7.1
urllib3==2.0.7
Werkzeug==2.2.3
zipp==3.15.0
| name: Flask-pyoidc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beaker==1.13.0
- cffi==1.15.1
- charset-normalizer==3.4.1
- click==8.1.8
- cryptography==44.0.2
- defusedxml==0.7.1
- exceptiongroup==1.2.2
- flask==2.2.5
- future==1.0.0
- idna==3.10
- importlib-metadata==6.7.0
- importlib-resources==5.12.0
- iniconfig==2.0.0
- itsdangerous==2.1.2
- jinja2==3.1.6
- mako==1.2.4
- markupsafe==2.1.5
- oic==1.1.2
- packaging==24.0
- pluggy==1.2.0
- pycparser==2.21
- pycryptodomex==3.22.0
- pyjwkest==1.4.2
- pytest==7.4.4
- pyyaml==6.0.1
- requests==2.31.0
- responses==0.23.3
- six==1.17.0
- tomli==2.0.1
- types-pyyaml==6.0.12.12
- typing-extensions==4.7.1
- urllib3==2.0.7
- werkzeug==2.2.3
- zipp==3.15.0
prefix: /opt/conda/envs/Flask-pyoidc
| [
"tests/test_user_session.py::TestUserSession::test_should_not_refresh_if_authenticated_within_refresh_interval",
"tests/test_user_session.py::TestUserSession::test_should_not_refresh_if_not_previously_authenticated",
"tests/test_user_session.py::TestUserSession::test_update[data0]",
"tests/test_user_session.py::TestUserSession::test_update[data1]",
"tests/test_user_session.py::TestUserSession::test_update[data2]",
"tests/test_user_session.py::TestUserSession::test_update[data3]",
"tests/test_user_session.py::TestUserSession::test_update_should_update_last_session_refresh_timestamp"
] | [] | [
"tests/test_user_session.py::TestUserSession::test_initialising_session_with_existing_user_session_should_preserve_state",
"tests/test_user_session.py::TestUserSession::test_initialising_session_with_new_provider_name_should_reset_session",
"tests/test_user_session.py::TestUserSession::test_unauthenticated_session",
"tests/test_user_session.py::TestUserSession::test_authenticated_session",
"tests/test_user_session.py::TestUserSession::test_should_not_refresh_if_not_supported",
"tests/test_user_session.py::TestUserSession::test_should_refresh_if_supported_and_necessary",
"tests/test_user_session.py::TestUserSession::test_update_should_use_auth_time_from_id_token_if_it_exists",
"tests/test_user_session.py::TestUserSession::test_trying_to_update_uninitialised_session_should_throw_exception",
"tests/test_user_session.py::TestUserSession::test_clear"
] | [] | Apache License 2.0 | 6,013 | 492 | [
"src/flask_pyoidc/user_session.py"
] |
awslabs__aws-embedded-metrics-python-12 | 6cf02e6a5b1849be7aaa5300f2ec9425c5fbc398 | 2019-12-12 22:57:09 | 6cf02e6a5b1849be7aaa5300f2ec9425c5fbc398 | diff --git a/aws_embedded_metrics/logger/metrics_context.py b/aws_embedded_metrics/logger/metrics_context.py
index 5d762e3..283f369 100644
--- a/aws_embedded_metrics/logger/metrics_context.py
+++ b/aws_embedded_metrics/logger/metrics_context.py
@@ -125,8 +125,17 @@ class MetricsContext(object):
new_properties: Dict = {}
new_properties.update(self.properties)
+ # dimensions added with put_dimension will not be copied.
+ # the reason for this is so that you can flush the same scope multiple
+ # times without stacking new dimensions. Example:
+ #
+ # @metric_scope
+ # def my_func(metrics):
+ # metrics.put_dimensions(...)
+ #
+ # my_func()
+ # my_func()
new_dimensions: List[Dict] = []
- new_dimensions = new_dimensions + self.dimensions
new_default_dimensions: Dict = {}
new_default_dimensions.update(self.default_dimensions)
diff --git a/aws_embedded_metrics/logger/metrics_logger.py b/aws_embedded_metrics/logger/metrics_logger.py
index 60843bc..69e1b62 100644
--- a/aws_embedded_metrics/logger/metrics_logger.py
+++ b/aws_embedded_metrics/logger/metrics_logger.py
@@ -41,7 +41,7 @@ class MetricsLogger:
# accept and reset the context
sink.accept(self.context)
- self.context = MetricsContext.empty()
+ self.context = self.context.create_copy_with_context()
def __configureContextForEnvironment(self, env: Environment) -> None:
default_dimensions = {
| Preserve logger context across flushes
See https://github.com/awslabs/aws-embedded-metrics-node/issues/15
https://github.com/awslabs/aws-embedded-metrics-python/blob/6cf02e6a5b1849be7aaa5300f2ec9425c5fbc398/aws_embedded_metrics/logger/metrics_logger.py#L44 | awslabs/aws-embedded-metrics-python | diff --git a/tests/logger/test_metrics_context.py b/tests/logger/test_metrics_context.py
index e188314..550440d 100644
--- a/tests/logger/test_metrics_context.py
+++ b/tests/logger/test_metrics_context.py
@@ -197,7 +197,7 @@ def test_create_copy_with_context_copies_properties():
assert context.properties is not new_context.properties
-def test_create_copy_with_context_copies_dimensions():
+def test_create_copy_with_context_does_not_copy_dimensions():
# arrange
context = MetricsContext()
dimension_key = fake.word()
@@ -208,8 +208,7 @@ def test_create_copy_with_context_copies_dimensions():
new_context = context.create_copy_with_context()
# assert
- assert context.dimensions == new_context.dimensions
- assert context.dimensions is not new_context.dimensions
+ assert len(new_context.dimensions) == 0
def test_create_copy_with_context_copies_default_dimensions():
@@ -225,6 +224,20 @@ def test_create_copy_with_context_copies_default_dimensions():
assert context.default_dimensions is not new_context.default_dimensions
+def test_create_copy_with_context_does_not_copy_metrics():
+ # arrange
+ context = MetricsContext()
+ prop_key = fake.word()
+ prop_value = fake.word()
+ context.set_property(prop_key, prop_value)
+
+ # act
+ new_context = context.create_copy_with_context()
+
+ # assert
+ assert len(new_context.metrics) == 0
+
+
def test_set_dimensions_overwrites_all_dimensions():
# arrange
context = MetricsContext()
@@ -238,3 +251,20 @@ def test_set_dimensions_overwrites_all_dimensions():
# assert
assert context.dimensions == expected_dimensions
+
+
+def test_create_copy_with_context_does_not_repeat_dimensions():
+ # arrange
+ context = MetricsContext()
+ expected_dimensions = {fake.word(): fake.word()}
+
+ custom = {fake.word(): fake.word()}
+ context.set_default_dimensions(expected_dimensions)
+ context.put_dimensions(custom)
+
+ new_context = context.create_copy_with_context()
+ new_context.set_default_dimensions(expected_dimensions)
+ new_context.put_dimensions(custom)
+
+ # assert
+ assert len(new_context.get_dimensions()) == 1
diff --git a/tests/logger/test_metrics_logger.py b/tests/logger/test_metrics_logger.py
index 23a33a5..895b02b 100644
--- a/tests/logger/test_metrics_logger.py
+++ b/tests/logger/test_metrics_logger.py
@@ -180,6 +180,39 @@ async def test_can_set_namespace(mocker):
assert context.namespace == expected_value
[email protected]
+async def test_context_is_preserved_across_flushes(mocker):
+ # arrange
+ expected_namespace = "Namespace"
+ metric_key = "Metric"
+ expected_dimension_key = "Dim"
+ expected_property_key = "Prop"
+ expected_value = "Value"
+
+ logger, sink, env = get_logger_and_sink(mocker)
+
+ logger.set_namespace(expected_namespace)
+ logger.set_property(expected_property_key, expected_value)
+ logger.set_dimensions({expected_dimension_key: expected_value})
+
+ # act
+ logger.put_metric(metric_key, 0)
+ await logger.flush()
+
+ context = sink.accept.call_args[0][0]
+ assert context.namespace == expected_namespace
+ assert context.properties[expected_property_key] == expected_value
+ assert context.metrics[metric_key].values == [0]
+
+ logger.put_metric(metric_key, 1)
+ await logger.flush()
+
+ context = sink.accept.call_args[0][0]
+ assert context.namespace == expected_namespace
+ assert context.properties[expected_property_key] == expected_value
+ assert context.metrics[metric_key].values == [1]
+
+
# Test helper methods
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-asyncio",
"pytest-mock",
"Faker",
"aresponses",
"boto3",
"pytest-timeout"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==3.8.6
aiosignal==1.3.1
aresponses==3.0.0
async-timeout==4.0.3
asynctest==0.13.0
attrs==24.2.0
-e git+https://github.com/awslabs/aws-embedded-metrics-python.git@6cf02e6a5b1849be7aaa5300f2ec9425c5fbc398#egg=aws_embedded_metrics
boto3==1.33.13
botocore==1.33.13
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
exceptiongroup==1.2.2
Faker==18.13.0
frozenlist==1.3.3
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
jmespath==1.0.1
multidict==6.0.5
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-mock==3.11.1
pytest-timeout==2.3.1
python-dateutil==2.9.0.post0
s3transfer==0.8.2
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
urllib3==1.26.20
yarl==1.9.4
zipp==3.15.0
| name: aws-embedded-metrics-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==3.8.6
- aiosignal==1.3.1
- aresponses==3.0.0
- async-timeout==4.0.3
- asynctest==0.13.0
- attrs==24.2.0
- boto3==1.33.13
- botocore==1.33.13
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- faker==18.13.0
- frozenlist==1.3.3
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jmespath==1.0.1
- multidict==6.0.5
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- python-dateutil==2.9.0.post0
- s3transfer==0.8.2
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==1.26.20
- yarl==1.9.4
- zipp==3.15.0
prefix: /opt/conda/envs/aws-embedded-metrics-python
| [
"tests/logger/test_metrics_context.py::test_create_copy_with_context_does_not_copy_dimensions",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_does_not_repeat_dimensions",
"tests/logger/test_metrics_logger.py::test_context_is_preserved_across_flushes"
] | [] | [
"tests/logger/test_metrics_context.py::test_can_create_context_with_no_arguments",
"tests/logger/test_metrics_context.py::test_can_set_property",
"tests/logger/test_metrics_context.py::test_put_dimension_adds_to_dimensions",
"tests/logger/test_metrics_context.py::test_get_dimensions_returns_only_custom_dimensions_if_no_default_dimensions_not_set",
"tests/logger/test_metrics_context.py::test_get_dimensions_returns_only_custom_dimensions_if_default_dimensions_are_empty",
"tests/logger/test_metrics_context.py::test_get_dimensions_returns_default_dimensions_if_custom_dimensions_not_set",
"tests/logger/test_metrics_context.py::test_get_dimensions_returns_merged_custom_and_default_dimensions",
"tests/logger/test_metrics_context.py::test_put_metric_adds_metrics",
"tests/logger/test_metrics_context.py::test_put_metric_uses_None_unit_if_not_provided",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_creates_new_instance",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_copies_namespace",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_copies_properties",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_copies_default_dimensions",
"tests/logger/test_metrics_context.py::test_create_copy_with_context_does_not_copy_metrics",
"tests/logger/test_metrics_context.py::test_set_dimensions_overwrites_all_dimensions",
"tests/logger/test_metrics_logger.py::test_can_set_property",
"tests/logger/test_metrics_logger.py::test_can_put_metric",
"tests/logger/test_metrics_logger.py::test_put_metric_appends_values_to_array",
"tests/logger/test_metrics_logger.py::test_put_dimension",
"tests/logger/test_metrics_logger.py::test_logger_configures_default_dimensions_on_flush",
"tests/logger/test_metrics_logger.py::test_logger_configures_uses_config_overrides_for_default_dimensions",
"tests/logger/test_metrics_logger.py::test_set_dimensions_overrides_all_dimensions",
"tests/logger/test_metrics_logger.py::test_can_set_namespace"
] | [] | Apache License 2.0 | 6,015 | 375 | [
"aws_embedded_metrics/logger/metrics_context.py",
"aws_embedded_metrics/logger/metrics_logger.py"
] |
|
awdeorio__mailmerge-60 | 8f6f9468a511d942b220ec1a660aa8c2f394fadb | 2019-12-14 05:27:07 | 17a65fd889ef181ce02a625ebbdd640d1d7a7c40 | diff --git a/mailmerge/template_message.py b/mailmerge/template_message.py
index 0366d6e..a442cfc 100644
--- a/mailmerge/template_message.py
+++ b/mailmerge/template_message.py
@@ -93,38 +93,67 @@ class TemplateMessage(object):
def _make_message_multipart(self):
"""Convert a message into a multipart message."""
- if not self._message.is_multipart():
- multipart_message = email.mime.multipart.MIMEMultipart(
- 'alternative')
- for header_key in set(self._message.keys()):
- # Preserve duplicate headers
- values = self._message.get_all(header_key, failobj=[])
- for value in values:
- multipart_message[header_key] = value
- original_text = self._message.get_payload()
- multipart_message.attach(email.mime.text.MIMEText(original_text))
- self._message = multipart_message
+ # Do nothing if message already multipart
+ if self._message.is_multipart():
+ return
+
+ # Create empty multipart message
+ multipart_message = email.mime.multipart.MIMEMultipart('alternative')
+
+ # Copy headers, preserving duplicate headers
+ for header_key in set(self._message.keys()):
+ values = self._message.get_all(header_key, failobj=[])
+ for value in values:
+ multipart_message[header_key] = value
+
+ # Copy text, preserving original encoding
+ original_text = self._message.get_payload(decode=True)
+ original_encoding = str(self._message.get_charset())
+ multipart_message.attach(email.mime.text.MIMEText(
+ original_text,
+ _charset=original_encoding,
+ ))
+
+ # Replace original message with multipart message
+ self._message = multipart_message
def _transform_markdown(self):
"""Convert markdown in message text to HTML."""
+ # Do nothing if Content-Type is not text/markdown
if not self._message['Content-Type'].startswith("text/markdown"):
return
+ # Remove the markdown Content-Type header, it's non-standard for email
del self._message['Content-Type']
- # Convert the text from markdown and then make the message multipart
+
+ # Make sure the message is multipart. We need a multipart message so
+ # that we can add an HTML part containing rendered Markdown.
self._make_message_multipart()
- for payload_item in set(self._message.get_payload()):
- # Assume the plaintext item is formatted with markdown.
- # Add corresponding HTML version of the item as the last part of
- # the multipart message (as per RFC 2046)
- if payload_item['Content-Type'].startswith('text/plain'):
- original_text = payload_item.get_payload()
- html_text = markdown.markdown(original_text)
- html_payload = future.backports.email.mime.text.MIMEText(
- "<html><body>{}</body></html>".format(html_text),
- "html",
- )
- self._message.attach(html_payload)
+
+ # Extract unrendered text and encoding. We assume that the first
+ # plaintext payload is formatted with Markdown.
+ for mimetext in self._message.get_payload():
+ if mimetext['Content-Type'].startswith('text/plain'):
+ encoding = str(mimetext.get_charset())
+ text = mimetext.get_payload(decode=True).decode(encoding)
+ break
+ assert encoding
+ assert text
+
+ # Render Markdown to HTML and add the HTML as the last part of the
+ # multipart message as per RFC 2046.
+ #
+ # Note: We need to use u"..." to ensure that unicode string
+ # substitution works properly in Python 2.
+ #
+ # https://docs.python.org/3/library/email.mime.html#email.mime.text.MIMEText
+ html = markdown.markdown(text)
+ payload = future.backports.email.mime.text.MIMEText(
+ u"<html><body>{}</body></html>".format(html),
+ _subtype="html",
+ _charset=encoding,
+ )
+ self._message.attach(payload)
def _transform_attachments(self):
"""Parse Attachment headers and add attachments."""
| Doesn't use charset=utf-8 when using markdown
Sending a message with special characters gives good results.
```
TO: {{email}}
SUBJECT: Testing mailmerge
FROM: [email protected]
Hi, {{name}},
æøå
```
outputs
```
>>> encoding utf-8
>>> message 0
TO: [email protected]
SUBJECT: Testing mailmerge
FROM: [email protected]
MIME-Version: 1.0
Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: base64
Date: Fri, 13 Dec 2019 20:41:08 -0000
SGksIE15c2VsZiwKw6bDuMOl
```
Notice that `charset` here is set ut `utf-8` and message renders well in email client.
But when specifying markdown:
```
TO: {{email}}
SUBJECT: Testing mailmerge
FROM: [email protected]
CONTENT-TYPE: text/markdown
Hi, {{name}},
æøå
```
It outputs
```
>>> encoding utf-8
>>> message 0
MIME-Version: 1.0
SUBJECT: Testing mailmerge
Date: Fri, 13 Dec 2019 20:42:22 -0000
TO: [email protected]
FROM: [email protected]
MIME-Version: 1.0
Content-Transfer-Encoding: base64
Content-Type: multipart/alternative; boundary="===============3629053266709230733=="
--===============3629053266709230733==
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
SGksIE15c2VsZiwKw6bDuMOl
--===============3629053266709230733==
Content-Type: text/html; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
<html><body><p>SGksIE15c2VsZiwKw6bDuMOl</p></body></html>
--===============3629053266709230733==--
```
Notice that `charset` here is set ut `us-ascii`, and message shows up as SGksIE15c2VsZiwKw6bDuMOl in the client. | awdeorio/mailmerge | diff --git a/tests/test_template_message.py b/tests/test_template_message.py
index 1dc59f1..0baa86b 100644
--- a/tests/test_template_message.py
+++ b/tests/test_template_message.py
@@ -74,19 +74,59 @@ def test_markdown():
# Ensure that the first part is plaintext and the last part
# is HTML (as per RFC 2046)
- plaintext_contenttype = payload[0]['Content-Type']
- assert plaintext_contenttype.startswith("text/plain")
- plaintext = payload[0].get_payload()
- html_contenttype = payload[1]['Content-Type']
- assert html_contenttype.startswith("text/html")
+ plaintext_part = payload[0]
+ assert plaintext_part['Content-Type'].startswith("text/plain")
+ plaintext_encoding = str(plaintext_part.get_charset())
+ plaintext = plaintext_part.get_payload(decode=True) \
+ .decode(plaintext_encoding)
+
+ html_part = payload[1]
+ assert html_part['Content-Type'].startswith("text/html")
+ html_encoding = str(html_part.get_charset())
+ htmltext = html_part.get_payload(decode=True) \
+ .decode(html_encoding)
# Verify rendered Markdown
- htmltext = payload[1].get_payload()
rendered = markdown.markdown(plaintext)
htmltext_correct = "<html><body>{}</body></html>".format(rendered)
assert htmltext.strip() == htmltext_correct.strip()
+def test_markdown_encoding():
+ """Verify encoding is preserved when rendering a Markdown template.
+
+ See Issue #59 for a detailed explanation
+ https://github.com/awdeorio/mailmerge/issues/59
+ """
+ template_message = mailmerge.template_message.TemplateMessage(
+ utils.TESTDATA/"markdown_template_utf8.txt"
+ )
+ _, _, message = template_message.render({
+ "email": "[email protected]",
+ "name": "Myself",
+ })
+
+ # Message should contain an unrendered Markdown plaintext part and a
+ # rendered Markdown HTML part
+ plaintext_part, html_part = message.get_payload()
+
+ # Verify encodings
+ assert str(plaintext_part.get_charset()) == "utf-8"
+ assert str(html_part.get_charset()) == "utf-8"
+ assert plaintext_part["Content-Transfer-Encoding"] == "base64"
+ assert html_part["Content-Transfer-Encoding"] == "base64"
+
+ # Verify content, which is base64 encoded
+ plaintext = plaintext_part.get_payload().strip()
+ htmltext = html_part.get_payload().strip()
+ assert plaintext == "SGksIE15c2VsZiwKw6bDuMOl"
+ assert htmltext == (
+ "PGh0bWw+PGJvZHk+PHA+"
+ "SGksIE15c2VsZiwKw6bDuMOl"
+ "PC9wPjwvYm9keT48L2h0bWw+"
+ )
+
+
def test_attachment():
"""Attachments should be sent as part of the email."""
template_message = mailmerge.template_message.TemplateMessage(
@@ -165,7 +205,17 @@ def test_utf8_template():
# NOTE: to decode a base46-encoded string:
# print((str(base64.b64decode(payload), "utf-8")))
payload = message.get_payload().replace("\n", "")
- assert payload == 'RnJvbSB0aGUgVGFnZWxpZWQgb2YgV29sZnJhbSB2b24gRXNjaGVuYmFjaCAoTWlkZGxlIEhpZ2ggR2VybWFuKToKClPDrm5lIGtsw6J3ZW4gZHVyaCBkaWUgd29sa2VuIHNpbnQgZ2VzbGFnZW4sCmVyIHN0w65nZXQgw7tmIG1pdCBncsO0emVyIGtyYWZ0LAppY2ggc2loIGluIGdyw6J3ZW4gdMOkZ2Vsw65jaCBhbHMgZXIgd2lsIHRhZ2VuLApkZW4gdGFjLCBkZXIgaW0gZ2VzZWxsZXNjaGFmdAplcndlbmRlbiB3aWwsIGRlbSB3ZXJkZW4gbWFuLApkZW4gaWNoIG1pdCBzb3JnZW4gw65uIHZlcmxpZXouCmljaCBicmluZ2UgaW4gaGlubmVuLCBvYiBpY2gga2FuLgpzw65uIHZpbCBtYW5lZ2l1IHR1Z2VudCBtaWNoeiBsZWlzdGVuIGhpZXouCgpodHRwOi8vd3d3LmNvbHVtYmlhLmVkdS9+ZmRjL3V0Zjgv' # noqa: E501 pylint: disable=line-too-long
+ assert payload == (
+ "RnJvbSB0aGUgVGFnZWxpZWQgb2YgV29sZnJhbSB2b24gRXNjaGVuYmFjaCAo"
+ "TWlkZGxlIEhpZ2ggR2VybWFuKToKClPDrm5lIGtsw6J3ZW4gZHVyaCBkaWUg"
+ "d29sa2VuIHNpbnQgZ2VzbGFnZW4sCmVyIHN0w65nZXQgw7tmIG1pdCBncsO0"
+ "emVyIGtyYWZ0LAppY2ggc2loIGluIGdyw6J3ZW4gdMOkZ2Vsw65jaCBhbHMg"
+ "ZXIgd2lsIHRhZ2VuLApkZW4gdGFjLCBkZXIgaW0gZ2VzZWxsZXNjaGFmdApl"
+ "cndlbmRlbiB3aWwsIGRlbSB3ZXJkZW4gbWFuLApkZW4gaWNoIG1pdCBzb3Jn"
+ "ZW4gw65uIHZlcmxpZXouCmljaCBicmluZ2UgaW4gaGlubmVuLCBvYiBpY2gg"
+ "a2FuLgpzw65uIHZpbCBtYW5lZ2l1IHR1Z2VudCBtaWNoeiBsZWlzdGVuIGhp"
+ "ZXouCgpodHRwOi8vd3d3LmNvbHVtYmlhLmVkdS9+ZmRjL3V0Zjgv"
+ )
def test_utf8_database():
diff --git a/tests/testdata/markdown_template_utf8.txt b/tests/testdata/markdown_template_utf8.txt
new file mode 100644
index 0000000..aa8b14d
--- /dev/null
+++ b/tests/testdata/markdown_template_utf8.txt
@@ -0,0 +1,7 @@
+TO: {{email}}
+SUBJECT: Testing mailmerge
+FROM: [email protected]
+CONTENT-TYPE: text/markdown
+
+Hi, {{name}},
+æøå
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
build==0.9.0
certifi==2021.5.30
chardet==5.0.0
charset-normalizer==2.0.12
check-manifest==0.48
click==8.0.4
codecov==2.1.13
configparser==5.2.0
coverage==6.2
dill==0.3.4
distlib==0.3.9
fancycompleter==0.9.1
filelock==3.4.1
future==1.0.0
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
isort==5.10.1
Jinja2==3.0.3
lazy-object-proxy==1.7.1
-e git+https://github.com/awdeorio/mailmerge.git@8f6f9468a511d942b220ec1a660aa8c2f394fadb#egg=mailmerge
Markdown==3.3.7
MarkupSafe==2.0.1
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pdbpp==0.10.3
pep517==0.13.1
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pydocstyle==6.3.0
Pygments==2.14.0
pylint==2.13.9
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pyrepl==0.9.0
pytest==6.2.4
pytest-cov==4.0.0
requests==2.27.1
sh==1.14.3
six==1.17.0
snowballstemmer==2.2.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
tox==3.28.0
typed-ast==1.5.5
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
virtualenv==20.17.1
wmctrl==0.5
wrapt==1.16.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: mailmerge
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- build==0.9.0
- chardet==5.0.0
- charset-normalizer==2.0.12
- check-manifest==0.48
- click==8.0.4
- codecov==2.1.13
- configparser==5.2.0
- coverage==6.2
- dill==0.3.4
- distlib==0.3.9
- fancycompleter==0.9.1
- filelock==3.4.1
- future==1.0.0
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- isort==5.10.1
- jinja2==3.0.3
- lazy-object-proxy==1.7.1
- markdown==3.3.7
- markupsafe==2.0.1
- mccabe==0.7.0
- pdbpp==0.10.3
- pep517==0.13.1
- platformdirs==2.4.0
- pycodestyle==2.10.0
- pydocstyle==6.3.0
- pygments==2.14.0
- pylint==2.13.9
- pyrepl==0.9.0
- pytest-cov==4.0.0
- requests==2.27.1
- sh==1.14.3
- six==1.17.0
- snowballstemmer==2.2.0
- tomli==1.2.3
- tox==3.28.0
- typed-ast==1.5.5
- urllib3==1.26.20
- virtualenv==20.17.1
- wmctrl==0.5
- wrapt==1.16.0
prefix: /opt/conda/envs/mailmerge
| [
"tests/test_template_message.py::test_markdown_encoding"
] | [
"tests/test_template_message.py::test_utf8_database"
] | [
"tests/test_template_message.py::test_bad_jinja",
"tests/test_template_message.py::test_cc_bcc",
"tests/test_template_message.py::test_markdown",
"tests/test_template_message.py::test_attachment",
"tests/test_template_message.py::test_attachment_empty",
"tests/test_template_message.py::test_utf8_template"
] | [] | MIT License | 6,020 | 943 | [
"mailmerge/template_message.py"
] |
|
facebookincubator__ptr-85 | 8c8ee7ab353bd294a7de41eddd9a65ba7e28015c | 2019-12-16 03:20:39 | ecd6c48781bb016c5495e24d6c68687f377ba729 | cooperlees: Guess we could just set both environment variables .. | diff --git a/ptr.py b/ptr.py
index 7b24422..4bce8ed 100755
--- a/ptr.py
+++ b/ptr.py
@@ -702,7 +702,9 @@ async def _test_runner(
else None
)
env = _set_build_env(extra_build_env_path)
- env["COVERAGE_FILE"] = str(cov_data_path)
+ need_cov_env_var = await _using_coverage_5(venv_path)
+ if not need_cov_env_var:
+ env["COVERAGE_FILE"] = str(cov_data_path)
while True:
try:
@@ -743,6 +745,18 @@ async def _test_runner(
queue.task_done()
+async def _using_coverage_5(venv_path: Path, timeout: float = 5) -> bool:
+ """ Check coverage version and set the correct environment var """
+ version_sub_str = "version 5."
+ if WINDOWS:
+ cov_exe = venv_path / "Scripts" / "coverage.exe"
+ else:
+ cov_exe = venv_path / "bin" / "coverage"
+
+ stdout, _stderr = await _gen_check_output((str(cov_exe), "--help"), timeout=timeout)
+ return version_sub_str in stdout.decode("utf-8")
+
+
async def create_venv(
mirror: str,
py_exe: str = sys.executable,
| Coverage >= 5.0 Breaks Coverage Analysis
## How is `ptr` not doing what you expect?
Analyzing coverage is failing on Mac OS X with coverage unable to find the `COVERAGE_FILE`
- May need to open an Issue / do a PR to coverage itself maybe here
## What is your suggestion to make this better?
Workout why the file can not be found. I think it will be something to do with the `COVERAGE_FILE` environment variable we set.
https://github.com/facebookincubator/ptr/blob/master/ptr.py#L705
## Code/Bug example?
Output: https://pastebin.com/2kdQQw8g
## How can a developer reproduce this?
Install coverage >= 5.0 and run ptr. | facebookincubator/ptr | diff --git a/ptr_tests.py b/ptr_tests.py
index 77d5c59..702a057 100644
--- a/ptr_tests.py
+++ b/ptr_tests.py
@@ -455,6 +455,7 @@ class TestPtr(unittest.TestCase):
self.assertTrue("[global]" in conf_file)
self.assertTrue("/simple" in conf_file)
+ @patch("ptr._using_coverage_5", async_none)
@patch("ptr._test_steps_runner", fake_test_steps_runner)
def test_test_runner(self) -> None:
queue = asyncio.Queue() # type: asyncio.Queue
@@ -538,6 +539,14 @@ class TestPtr(unittest.TestCase):
# Ensure we've "printed coverage"
self.assertEqual(mock_print.call_count, 1)
+ @patch("ptr._gen_check_output", return_bytes_output)
+ def test_using_coverage_5(self) -> None:
+ self.assertFalse(
+ self.loop.run_until_complete(
+ ptr._using_coverage_5(Path("venvs/test"), timeout=1)
+ )
+ )
+
def test_validate_base_dir(self) -> None:
path_str = gettempdir()
expected_path = Path(path_str)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 2019.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.15.8
black==23.3.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
click==8.1.8
coverage==7.2.7
dataclasses-json==0.6.7
dill==0.3.7
exceptiongroup==1.2.2
execnet==2.0.2
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==2.0.0
intervaltree==3.1.0
isort==5.11.5
lazy-object-proxy==1.9.0
libcst==1.0.1
marshmallow==3.19.0
mccabe==0.7.0
mypy==1.4.1
mypy-extensions==1.0.0
packaging==24.0
pathspec==0.11.2
platformdirs==4.0.0
pluggy==1.2.0
psutil==7.0.0
-e git+https://github.com/facebookincubator/ptr.git@8c8ee7ab353bd294a7de41eddd9a65ba7e28015c#egg=ptr
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.17.2
pylint==2.17.7
pyre-check==0.9.18
pyre-extensions==0.0.32
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
PyYAML==6.0.1
sortedcontainers==2.4.0
tabulate==0.9.0
TestSlide==2.7.1
tomli==2.0.1
tomlkit==0.12.5
typed-ast==1.5.5
typeguard==2.13.3
typing-inspect==0.9.0
typing_extensions==4.7.1
wrapt==1.16.0
zipp==3.15.0
| name: ptr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.15.8
- black==23.3.0
- click==8.1.8
- coverage==7.2.7
- dataclasses-json==0.6.7
- dill==0.3.7
- exceptiongroup==1.2.2
- execnet==2.0.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- intervaltree==3.1.0
- isort==5.11.5
- lazy-object-proxy==1.9.0
- libcst==1.0.1
- marshmallow==3.19.0
- mccabe==0.7.0
- mypy==1.4.1
- mypy-extensions==1.0.0
- packaging==24.0
- pathspec==0.11.2
- platformdirs==4.0.0
- pluggy==1.2.0
- psutil==7.0.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.17.2
- pylint==2.17.7
- pyre-check==0.9.18
- pyre-extensions==0.0.32
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- pyyaml==6.0.1
- sortedcontainers==2.4.0
- tabulate==0.9.0
- testslide==2.7.1
- tomli==2.0.1
- tomlkit==0.12.5
- typed-ast==1.5.5
- typeguard==2.13.3
- typing-extensions==4.7.1
- typing-inspect==0.9.0
- wrapt==1.16.0
- zipp==3.15.0
prefix: /opt/conda/envs/ptr
| [
"ptr_tests.py::TestPtr::test_test_runner",
"ptr_tests.py::TestPtr::test_using_coverage_5"
] | [
"ptr_tests.py::TestPtr::test_write_stats_file_raise"
] | [
"ptr_tests.py::TestPtr::test_analyze_coverage",
"ptr_tests.py::TestPtr::test_analyze_coverage_errors",
"ptr_tests.py::TestPtr::test_async_main",
"ptr_tests.py::TestPtr::test_config",
"ptr_tests.py::TestPtr::test_create_venv",
"ptr_tests.py::TestPtr::test_create_venv_site_packages",
"ptr_tests.py::TestPtr::test_find_setup_py",
"ptr_tests.py::TestPtr::test_find_setup_py_exclude_default",
"ptr_tests.py::TestPtr::test_gen_output",
"ptr_tests.py::TestPtr::test_generate_black_command",
"ptr_tests.py::TestPtr::test_generate_flake8_command",
"ptr_tests.py::TestPtr::test_generate_install_cmd",
"ptr_tests.py::TestPtr::test_generate_mypy_cmd",
"ptr_tests.py::TestPtr::test_generate_pylint_command",
"ptr_tests.py::TestPtr::test_generate_pyre_cmd",
"ptr_tests.py::TestPtr::test_generate_test_suite_cmd",
"ptr_tests.py::TestPtr::test_get_site_packages_path_error",
"ptr_tests.py::TestPtr::test_get_test_modules",
"ptr_tests.py::TestPtr::test_handle_debug",
"ptr_tests.py::TestPtr::test_mac_osx_slash_private",
"ptr_tests.py::TestPtr::test_main",
"ptr_tests.py::TestPtr::test_parse_setup_cfg",
"ptr_tests.py::TestPtr::test_print_non_configured_modules",
"ptr_tests.py::TestPtr::test_print_test_results",
"ptr_tests.py::TestPtr::test_process_reporter",
"ptr_tests.py::TestPtr::test_set_build_env",
"ptr_tests.py::TestPtr::test_set_pip_mirror",
"ptr_tests.py::TestPtr::test_test_steps_runner",
"ptr_tests.py::TestPtr::test_validate_base_dir",
"ptr_tests.py::TestPtr::test_validate_base_dir_fail",
"ptr_tests.py::TestPtr::test_write_stats_file"
] | [] | MIT License | 6,027 | 332 | [
"ptr.py"
] |
gboeing__osmnx-359 | 499a8e685342f9e040504411bbfbc20c9ee6cdeb | 2019-12-18 01:11:19 | eeb1e76d49a14a4ed9ce481514e1e0f394e9fae0 | diff --git a/osmnx/core.py b/osmnx/core.py
index d9588335..9c844d4a 100644
--- a/osmnx/core.py
+++ b/osmnx/core.py
@@ -807,8 +807,11 @@ def add_path(G, data, one_way):
del data['nodes']
# set the oneway attribute to the passed-in value, to make it consistent
- # True/False values
- data['oneway'] = one_way
+ # True/False values, but only do this if you aren't forcing all edges to
+ # oneway with the all_oneway setting. With the all_oneway setting, you
+ # likely still want to preserve the original OSM oneway attribute.
+ if not settings.all_oneway:
+ data['oneway'] = one_way
# zip together the path nodes so you get tuples like (0,1), (1,2), (2,3)
# and so on
@@ -846,9 +849,11 @@ def add_paths(G, paths, bidirectional=False):
for data in paths.values():
+ if settings.all_oneway is True:
+ add_path(G, data, one_way=True)
# if this path is tagged as one-way and if it is not a walking network,
# then we'll add the path in one direction only
- if ('oneway' in data and data['oneway'] in osm_oneway_values) and not bidirectional:
+ elif ('oneway' in data and data['oneway'] in osm_oneway_values) and not bidirectional:
if data['oneway'] == '-1':
# paths with a one-way value of -1 are one-way, but in the
# reverse direction of the nodes' order, see osm documentation
diff --git a/osmnx/save_load.py b/osmnx/save_load.py
index c12b3373..ac09de0d 100644
--- a/osmnx/save_load.py
+++ b/osmnx/save_load.py
@@ -19,7 +19,7 @@ from shapely import wkt
from xml.etree import ElementTree as etree
from . import settings
-from .utils import make_str, log
+from .utils import make_str, log, get_unique_nodes_ordered_from_way
def save_gdf_shapefile(gdf, filename=None, folder=None):
@@ -147,7 +147,7 @@ def save_graph_osm(G, node_tags=settings.osm_xml_node_tags,
node_attrs=settings.osm_xml_node_attrs,
edge_tags=settings.osm_xml_way_tags,
edge_attrs=settings.osm_xml_way_attrs,
- oneway=True, filename='graph.osm',
+ oneway=False, filename='graph.osm',
folder=None):
"""
Save a graph as an OSM XML formatted file. NOTE: for very large
@@ -169,11 +169,12 @@ def save_graph_osm(G, node_tags=settings.osm_xml_node_tags,
if folder is None:
folder = settings.data_folder
+ # get undirected graph so we don't generate duplicate nodes and
# create a copy to convert all the node/edge attribute values to string
- G_save = G.copy()
+ H = get_undirected(G).copy()
gdf_nodes, gdf_edges = graph_to_gdfs(
- G_save, node_geometry=False, fill_edge_geometry=False)
+ H, node_geometry=False, fill_edge_geometry=False)
# rename columns per osm specification
gdf_nodes.rename(
@@ -197,13 +198,15 @@ def save_graph_osm(G, node_tags=settings.osm_xml_node_tags,
# misc. string replacements to meet OSM XML spec
if 'oneway' in edges.columns:
- edges.loc[:, 'oneway'] = oneway
+
+ # fill blank oneway tags with default (False)
+ edges.loc[pd.isnull(edges['oneway']), 'oneway'] = oneway
edges.loc[:, 'oneway'] = edges['oneway'].astype(str)
edges.loc[:, 'oneway'] = edges['oneway'].str.replace(
'False', 'no').replace('True', 'yes')
# initialize XML tree with an OSM root element
- root = etree.Element('osm')
+ root = etree.Element('osm', attrib={'version': '1', 'generator': 'OSMnx'})
# append nodes to the XML tree
for i, row in nodes.iterrows():
@@ -214,14 +217,23 @@ def save_graph_osm(G, node_tags=settings.osm_xml_node_tags,
node, 'tag', attrib={'k': tag, 'v': row[tag]})
# append edges to the XML tree
- for i, row in edges.iterrows():
+ for e in edges.id.unique():
+ all_way_edges = edges[edges['id'] == e]
+ first = all_way_edges.iloc[0]
edge = etree.SubElement(
- root, 'way', attrib=row[edge_attrs].dropna().to_dict())
- etree.SubElement(edge, 'nd', attrib={'ref': row['u']})
- etree.SubElement(edge, 'nd', attrib={'ref': row['v']})
+ root, 'way', attrib=first[edge_attrs].dropna().to_dict())
+
+ if len(all_way_edges) == 1:
+ etree.SubElement(edge, 'nd', attrib={'ref': first['u']})
+ etree.SubElement(edge, 'nd', attrib={'ref': first['v']})
+ else:
+ ordered_nodes = get_unique_nodes_ordered_from_way(all_way_edges)
+ for node in ordered_nodes:
+ etree.SubElement(edge, 'nd', attrib={'ref': node})
+
for tag in edge_tags:
etree.SubElement(
- edge, 'tag', attrib={'k': tag, 'v': row[tag]})
+ edge, 'tag', attrib={'k': tag, 'v': first[tag]})
et = etree.ElementTree(root)
@@ -230,8 +242,8 @@ def save_graph_osm(G, node_tags=settings.osm_xml_node_tags,
et.write(os.path.join(folder, filename))
- log('Saved graph "{}" to disk as OSM at "{}" in {:,.2f} seconds'.format(
- G_save.name, os.path.join(folder, filename), time.time() - start_time))
+ log('Saved graph to disk as OSM at "{}" in {:,.2f} seconds'.format(
+ os.path.join(folder, filename), time.time() - start_time))
def save_graphml(G, filename='graph.graphml', folder=None, gephi=False):
diff --git a/osmnx/settings.py b/osmnx/settings.py
index 9f1d7d6c..d4aeb01c 100644
--- a/osmnx/settings.py
+++ b/osmnx/settings.py
@@ -52,6 +52,10 @@ default_access = '["access"!~"private"]'
# The network types for which a bidirectional graph will be created
bidirectional_network_types = ['walk']
+# all one-way mode to maintain original OSM node order
+# when constructing graphs specifically to save to .osm xml file
+all_oneway = False
+
# default CRS to set when creating graphs
default_crs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
diff --git a/osmnx/utils.py b/osmnx/utils.py
index 27b7e16d..fbe8dd15 100644
--- a/osmnx/utils.py
+++ b/osmnx/utils.py
@@ -80,7 +80,8 @@ def config(data_folder=settings.data_folder,
default_accept_language=settings.default_accept_language,
nominatim_endpoint=settings.nominatim_endpoint,
nominatim_key=settings.nominatim_key,
- overpass_endpoint=settings.overpass_endpoint):
+ overpass_endpoint=settings.overpass_endpoint,
+ all_oneway=settings.all_oneway):
"""
Configure osmnx by setting the default global vars to desired values.
@@ -125,6 +126,9 @@ def config(data_folder=settings.data_folder,
your API key, if you are using an endpoint that requires one
overpass_endpoint : string
which API endpoint to use for overpass queries
+ all_oneway : boolean
+ if True, forces all paths to be loaded as oneway ways, preserving
+ the original order of nodes stored in the OSM way XML.
Returns
-------
@@ -158,6 +162,7 @@ def config(data_folder=settings.data_folder,
settings.nominatim_endpoint = nominatim_endpoint
settings.nominatim_key = nominatim_key
settings.overpass_endpoint = overpass_endpoint
+ settings.all_oneway = all_oneway
# if logging is turned on, log that we are configured
if settings.log_file or settings.log_console:
@@ -331,3 +336,89 @@ def get_logger(level=None, name=None, filename=None):
logger.handler_set = True
return logger
+
+
+def get_unique_nodes_ordered_from_way(way_edges_df):
+ """
+ Function to recover the original order of nodes from a dataframe
+ of edges associated with a single OSM way.
+
+ Parameters
+ ----------
+ way_edges_df : pandas.DataFrame()
+ Dataframe containing columns 'u' and 'v' corresponding to
+ origin/desitination nodes.
+
+ Returns
+ -------
+ unique_ordered_nodes : list
+ An ordered list of unique node IDs
+
+ NOTE: If the edges do not all connect (e.g. [(1, 2), (2,3),
+ (10, 11), (11, 12), (12, 13)]), then this method will return
+ only those nodes associated with the FIRST chunk of connected
+ edges, even if subsequent connected chunks are contain more
+ total nodes. I don't believe that we would ever encounter this
+ kind of disconnected structure of nodes within a given way,
+ but as best I could tell it is not explicitly forbidden in the
+ OSM XML design schema. As such, I had to safeguard against it
+ to ensure this method wouldn't get stuck in the while loop if
+ encountered a disconnected structure. I'm using a print
+ statement right now to tell the user whether or not any nodes
+ have been dropped and how many.
+ """
+ all_nodes = list(way_edges_df['u'].values) + \
+ list(way_edges_df['v'].values)
+ num_unique_nodes = len(np.unique(all_nodes))
+ node_pairs = list(way_edges_df[['u', 'v']].values)
+ unique_ordered_nodes = []
+ recycled = []
+
+ while len(node_pairs) > 0:
+
+ pair = node_pairs.pop(0)
+ start = pair[0]
+ end = pair[1]
+ the_rest = [element for p in node_pairs for element in p]
+
+ # first pair
+ if len(unique_ordered_nodes) == 0:
+
+ # if there are subsequent pairs to match on
+ if start in the_rest or end in the_rest:
+ unique_ordered_nodes = list(pair)
+ continue
+
+ # if both nodes are already in the list, we don't need them
+ if (start in unique_ordered_nodes) and (end in unique_ordered_nodes):
+ continue
+
+ # if start node is in the list, add the end node to the right of it
+ if start in unique_ordered_nodes:
+ start_idx = unique_ordered_nodes.index(start)
+ end_idx = start_idx + 1
+ unique_ordered_nodes[end_idx:end_idx] = [end]
+
+ # if end node is in the list, add the start node to the left of it
+ elif end in unique_ordered_nodes:
+ end_idx = unique_ordered_nodes.index(end)
+ start_idx = end_idx
+ unique_ordered_nodes[start_idx:start_idx] = [start]
+
+ else:
+ # if we've already processed this pair and there is still no way
+ # to match it in the list, then we're done
+ if list(pair) in recycled:
+ break
+
+ # if there's no match in the list but there's a match in the
+ # remaining pairs to be processed, recycle it to process again
+ elif start in the_rest or end in the_rest:
+ node_pairs.append(pair)
+ recycled.append(list(pair))
+
+ if len(unique_ordered_nodes) < num_unique_nodes:
+ print('Recovered order for {0} of {1} nodes'.format(
+ len(unique_ordered_nodes), num_unique_nodes))
+
+ return unique_ordered_nodes
| Fix duplicate edges in save_graph_osm()
When I originally wrote this function, I didn't realize that the edges dataframe generated by `ox.graph_to_gdfs()` will contain multiple rows with the same `osmid` for any way that is associate with more than two nodes. As such, save_graph_osm() will generate XML like
```
<way id="123456789"/>
<nd ref="1"/>
<nd ref="2"/>
<tag k="highway" v="residential"/>
</way>
<way id="123456789"/>
<nd ref="2"/>
<nd ref="3"/>
<tag k="highway" v="residential"/>
</way>
<way id="123456789"/>
<nd ref="3"/>
<nd ref="4"/>
<tag k="highway" v="residential"/>
</way>
```
instead of
```
<way id="123456789"/>
<nd ref="1"/>
<nd ref="2"/>
<nd ref="3"/>
<nd ref="4"/>
<tag k="highway" v="residential"/>
</way>
```
The XML will still validate, as was documented in the original PR, but it doesn't technically conform to the OSM XML design schema, nor is it the most efficient way of storing the data. Furthermore I'm just not sure what this might do for any downstream processes (e.g. routers) that make use of this re-formatted file.
In practice, this should be a straightforward fix, edit [this](https://github.com/gboeing/osmnx/blob/master/osmnx/save_load.py#L217-L224) chunk of code to check whether or not the tree already contains an edge with that id and if so just add the new nodes to it rather than creating a whole new way. However, according to the OSM wiki, OSM XML requires that the `<nd/>` tags be listed "in the correct order" (see the last sentence [here](https://wiki.openstreetmap.org/wiki/Way#Street_as_a_vector), and I'm unsure if its possible to infer that order from the order in which the duplicate `osmid` ways appear in the edges table..
So, my questions are two-fold:
1. Is this a big enough problem to worry about fixing?
2. If so, is it possible to recover the correct node order from either the edges table or the original NetworkX graph object? | gboeing/osmnx | diff --git a/tests/test_osmnx.py b/tests/test_osmnx.py
index 271f91fb..030b24d9 100644
--- a/tests/test_osmnx.py
+++ b/tests/test_osmnx.py
@@ -5,6 +5,7 @@
################################################################################
import matplotlib as mpl
+import pandas as pd
import warnings
mpl.use('Agg') # use agg backend so you don't need a display on travis-ci
@@ -438,6 +439,14 @@ def test_osm_xml_output():
ox.save_graph_osm(G)
+def test_ordered_nodes_from_way():
+ df = pd.DataFrame({
+ 'u': [54, 2, 5, 3, 10, 19, 20],
+ 'v': [76, 3, 8, 10, 5, 20, 15]})
+ ordered_nodes = ox.get_unique_nodes_ordered_from_way(df)
+ assert ordered_nodes == [2, 3, 10, 5, 8]
+
+
def test_overpass():
import pytest
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==24.2.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
click==8.1.8
click-plugins==1.1.1
cligj==0.7.2
coverage==7.2.7
cycler==0.11.0
descartes==1.1.0
exceptiongroup==1.2.2
fiona==1.9.6
fonttools==4.38.0
geopandas==0.10.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
kiwisolver==1.4.5
matplotlib==3.5.3
networkx==2.6.3
numpy==1.21.6
-e git+https://github.com/gboeing/osmnx.git@499a8e685342f9e040504411bbfbc20c9ee6cdeb#egg=osmnx
packaging==24.0
pandas==1.3.5
Pillow==9.5.0
pluggy==1.2.0
pyparsing==3.1.4
pyproj==3.2.1
pytest==7.4.4
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.31.0
Rtree==1.0.1
shapely==2.0.7
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: osmnx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==24.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- click-plugins==1.1.1
- cligj==0.7.2
- coverage==7.2.7
- cycler==0.11.0
- descartes==1.1.0
- exceptiongroup==1.2.2
- fiona==1.9.6
- fonttools==4.38.0
- geopandas==0.10.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- kiwisolver==1.4.5
- matplotlib==3.5.3
- networkx==2.6.3
- numpy==1.21.6
- packaging==24.0
- pandas==1.3.5
- pillow==9.5.0
- pluggy==1.2.0
- pyparsing==3.1.4
- pyproj==3.2.1
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.31.0
- rtree==1.0.1
- shapely==2.0.7
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/osmnx
| [
"tests/test_osmnx.py::test_ordered_nodes_from_way"
] | [
"tests/test_osmnx.py::test_geometry_coords_rounding",
"tests/test_osmnx.py::test_gdf_shapefiles",
"tests/test_osmnx.py::test_network_saving_loading",
"tests/test_osmnx.py::test_get_network_methods",
"tests/test_osmnx.py::test_stats",
"tests/test_osmnx.py::test_plots",
"tests/test_osmnx.py::test_routing_folium",
"tests/test_osmnx.py::test_nearest_edge",
"tests/test_osmnx.py::test_nearest_edges",
"tests/test_osmnx.py::test_footprints",
"tests/test_osmnx.py::test_pois",
"tests/test_osmnx.py::test_osm_xml_output"
] | [
"tests/test_osmnx.py::test_imports",
"tests/test_osmnx.py::test_logging",
"tests/test_osmnx.py::test_graph_from_file",
"tests/test_osmnx.py::test_overpass"
] | [
"tests/test_osmnx.py::test_nominatim"
] | MIT License | 6,042 | 2,936 | [
"osmnx/core.py",
"osmnx/save_load.py",
"osmnx/settings.py",
"osmnx/utils.py"
] |
|
iterative__dvc-2975 | e7b3297c2d6ae1ad633cd0435cca81093cac86ff | 2019-12-18 15:36:40 | e7b3297c2d6ae1ad633cd0435cca81093cac86ff | casperdcl: also not sure whether this is a bug/feature request/enhancement :D
efiop: `dvc run` comes to mind as well. Same with `dvc import -o` and `dvc import-url`. You might want to just do that convertion in the OutputBASE/LOCAL class, when we mutating the `def_path`.
casperdcl: will that also affect paths written in `*.dvc` files?
efiop: @casperdcl Yes, but that is the point of this PR, right?
efiop: @casperdcl ?
casperdcl: > `dvc run` comes to mind as well. Same with `dvc import -o` and `dvc import-url`. You might want to just do that convertion in the OutputBASE/LOCAL class, when we mutating the `def_path`.
I think `dvc.output.local.OutputLOCAL` is already fine (there's a lot of complicated logic in it).
Just pushed an update for `dvc.output.base.OutputBASE`...
efiop: Thanks @casperdcl ! Looks good! Please see some comments above 🙂
casperdcl: @efiop ok the style inconsistencies are now annoying. I feel like opening a new PR to fix things like relative imports and linting bash scripts...
Would you prefer I revert imports here?
efiop: @casperdcl We have issues for both of those already IIRC.
> Would you prefer I revert imports here?
Let them be this time, but please don't include things like that next time.
efiop: @Suor Using abs paths to in-repo files will cause adding them as external local outputs, which is not right. This change will break cases where there was a misuse, which is ok. There is no reason to drop this PR.
Suor: @efiop but is within and out of repo dir is not constant.
efiop: @Suor Git doesn't have "external" stuff, but same as it, anything inside of our repo is an in-repo data, even if user has specified an absolute path. We've received reports from confused users, hence why we are fixing it here this way.
casperdcl: I think maybe we could print a warning about converting abs => relative?
> Git doesn't have "external" stuff
Well actually I regularly do things like mess with `--local core.worktree` but in general I think it's safe to assume *"external" suff* isn't intended.
casperdcl: I think there are more people who e.g. drag-drop files into terminals (autofilling abs paths) and then expect them to be treated as relative to the repo than people who deliberately want in-repo paths to be tracked as external/absolute paths.
efiop: > I think maybe we could print a warning about converting abs => relative?
No need, when users add files this way they _expect_ them to become in-repo ones. Our current behavior is a bug, no need to put a warning on top. 🙂
> Well actually I regularly do things like mess with --local core.worktree but in general I think it's safe to assume "external" suff isn't intended.
That is not the same "external" stuff that I'm talking about. `git add /absolute/out/of/repo/path` won't work, but `dvc add /absolute/out/of/repo/path` does.
So let's add a test here and we can merge it 🙂
efiop: @casperdcl Also https://github.com/iterative/dvc/pull/2975#discussion_r368566499
Suor: @casperdcl if you are going to add it to `OutputLocal` then it should be `OutputLocal._parse_path()`.
Suor: If we are going to fix this, we should not do that at `Output*` level, we should handle that somewhere closer to UI. `OutputBase/Local` doesn't know whether it got path from user or from `.dvc` file.
efiop: @Suor There is `is_in_repo` already in Output*, handling abs path is suitable there as well.
casperdcl: Hmm. Maybe @efiop/@Suor you want to open an alternative PR and close this one? It looks like it's going to be just a couple of lines to change but requires a lot of discussion.
Suor: @efiop It could be easier to implement it in `Output*`, but it doesn't make it right. This will ignore whatever user wrote in `.dvc` file. Will rewrite it to rel path on `.dvc` file rewrite, but otherwise it will be silent.
So if this is user input bug then it should be handled where a user input is handled.
efiop: > It could be easier to implement it in Output*, but it doesn't make it right. This will ignore whatever user wrote in .dvc file. Will rewrite it to rel path on .dvc file rewrite, but otherwise it will be silent.
I think that it is alright. Can see someone writing a dvc file with `$(pwd)/output`, still expecting it to be handled properly as an in-repo file. So it is a user input bug and solving it in Output* will make it right once and for all.
efiop: @casperdcl Let's finish this one as proposed in this PR.
casperdcl: > Can see someone writing a dvc file with `$(pwd)/output`, still expecting it to be handled properly as an in-repo file
Not sure about that. If I manually edit a dvc-file by hand, I wouldn't want it to be silently changed.
casperdcl: @efiop the failing tests are the ones that need to be changed for the new behaviour. I'll fix them but first wanted some feedback on the implementation.
efiop: > Not sure about that. If I manually edit a dvc-file by hand, I wouldn't want it to be silently changed.
We do that in other fields, it is only changed when re-written.
> not sure about the distinction between def_path and path_info
`def_path` is what is recorded into the dvc-file, while `path_info` is a resolved path (e.g. we support stuff like `remote://mylocalremote/path`.
casperdcl: > We do that in other fields, it is only changed when re-written (e.g. on `dvc repro`).
Right sounds like discussion for a different issue.
> `def_path` is what is recorded into the dvc-file, while `path_info` is a resolved path (e.g. we support stuff like `remote://mylocalremote/path`.
So I think that means `path_info` should not be modified? i.e.
```python
# path_info before == after
assert self._parse_path(self.remote, self.def_path) == \
self._parse_path(self.remote, relpath(self.def_path, self.repo.root_dir))
```
efiop: > So I think that means path_info should not be modified? i.e.
I'm pretty sure it doesn't matter, as long as def_path is the same, as path_infos will be used by abspath anyway.
Suor: Stumbled upon this while doing some updates to `dvcx` publishing, looks like we need to make `.is_in_repo` say `True` for absolute paths when that path is in repo root. This will trigger automatic in repo handling for `Stage.create(..., add=True)`, which will do the job naturally.
casperdcl: > Stumbled upon this while doing some updates to `dvcx` publishing, looks like we need to make `.is_in_repo` say `True` for absolute paths when that path is in repo root. This will trigger automatic in repo handling for `Stage.create(..., add=True)`, which will do the job naturally.
If that fixes this issue I'd say it's a bug because of
> def_path is resolved relative to `stage.wdir` when it is loaded, so it has to be relative to `stage.wdir` not the repo root
See https://github.com/iterative/dvc/pull/2975#discussion_r375414004
Suor: > If that fixes this issue I'd say it's a bug because of
If `def_path` or more precisely `dvc add` argument is absolute it is not resolved as relative to anything. So I can't see any bug.
efiop: > Stumbled upon this while doing some updates to dvcx publishing, looks like we need to make .is_in_repo say True for absolute paths when that path is in repo root. This will trigger automatic in repo handling for Stage.create(..., add=True), which will do the job naturally.
Good point, that might work, but I'm worried about `def_path` being not quire right until we actually write the dvc-file. Might be still suitable though.
casperdcl: I decided not to touch `is_in_repo` for now; not sure if changing its behaviour will break anything else.
efiop: @casperdcl `is_in_repo` now works automatically because you've adjusted def_path. :+1:
@Suor Adjusting `is_in_repo` will affect def_path only in `dumpd`, so effectively only after reloading, which is not the best way of handling it, as it will break the symmetry between "before" and "after" reloading. | diff --git a/dvc/analytics.py b/dvc/analytics.py
index 7c0a69310..76dbc335c 100644
--- a/dvc/analytics.py
+++ b/dvc/analytics.py
@@ -76,7 +76,10 @@ def send(report):
headers = {"content-type": "application/json"}
with open(report, "rb") as fobj:
- requests.post(url, data=fobj, headers=headers, timeout=5)
+ try:
+ requests.post(url, data=fobj, headers=headers, timeout=5)
+ except requests.exceptions.RequestException:
+ logger.debug("failed to send analytics report", exc_info=True)
os.remove(report)
diff --git a/dvc/logger.py b/dvc/logger.py
index 9385a6aa2..d1cf08e83 100644
--- a/dvc/logger.py
+++ b/dvc/logger.py
@@ -51,7 +51,6 @@ class ColorFormatter(logging.Formatter):
color_code = {
"DEBUG": colorama.Fore.BLUE,
- "INFO": "",
"WARNING": colorama.Fore.YELLOW,
"ERROR": colorama.Fore.RED,
"CRITICAL": colorama.Fore.RED,
@@ -59,29 +58,21 @@ class ColorFormatter(logging.Formatter):
def format(self, record):
msg = record.msg.format(*record.args) if record.args else record.msg
+ exception, stack_trace = self._parse_exc(record)
+ return ("{prefix}{description}{stack_trace}").format(
+ prefix=self._prefix(record),
+ description=self._description(msg, exception),
+ stack_trace=stack_trace,
+ )
+ def _prefix(self, record):
if record.levelname == "INFO":
- return msg
-
- if record.levelname == "ERROR" or record.levelname == "CRITICAL":
- exception, stack_trace = self._parse_exc(record)
-
- return (
- "{color}{levelname}{nc}: {description}" "{stack_trace}\n"
- ).format(
- color=self.color_code.get(record.levelname, ""),
- nc=colorama.Fore.RESET,
- levelname=record.levelname,
- description=self._description(msg, exception),
- msg=msg,
- stack_trace=stack_trace,
- )
+ return ""
- return "{color}{levelname}{nc}: {msg}".format(
+ return "{color}{levelname}{nc}: ".format(
color=self.color_code.get(record.levelname, ""),
- nc=colorama.Fore.RESET,
levelname=record.levelname,
- msg=msg,
+ nc=colorama.Fore.RESET,
)
def _current_level(self):
diff --git a/dvc/output/local.py b/dvc/output/local.py
index 0e1b72dc9..6e4123d1e 100644
--- a/dvc/output/local.py
+++ b/dvc/output/local.py
@@ -18,6 +18,12 @@ class OutputLOCAL(OutputBase):
REMOTE = RemoteLOCAL
sep = os.sep
+ def __init__(self, stage, path, *args, **kwargs):
+ if stage and path_isin(path, stage.repo.root_dir):
+ path = relpath(path, stage.wdir)
+
+ super().__init__(stage, path, *args, **kwargs)
+
def _parse_path(self, remote, path):
parsed = urlparse(path)
if parsed.scheme == "remote":
diff --git a/dvc/remote/gdrive.py b/dvc/remote/gdrive.py
index 9cbca729e..4bffb60b4 100644
--- a/dvc/remote/gdrive.py
+++ b/dvc/remote/gdrive.py
@@ -275,6 +275,13 @@ class RemoteGDrive(RemoteBASE):
item.Upload()
return item
+ @gdrive_retry
+ def delete_remote_file(self, remote_id):
+ param = {"id": remote_id}
+ # it does not create a file on the remote
+ item = self.drive.CreateFile(param)
+ item.Delete()
+
@gdrive_retry
def get_remote_item(self, name, parents_ids):
if not parents_ids:
@@ -387,3 +394,7 @@ class RemoteGDrive(RemoteBASE):
except ValueError:
# We ignore all the non-cache looking files
logger.debug('Ignoring path as "non-cache looking"')
+
+ def remove(self, path_info):
+ remote_id = self.get_remote_id(path_info)
+ self.delete_remote_file(remote_id)
diff --git a/dvc/version.py b/dvc/version.py
index 586e45286..089533383 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -7,7 +7,7 @@ import os
import subprocess
-_BASE_VERSION = "0.82.9"
+_BASE_VERSION = "0.83.0"
def _generate_version(base_version):
diff --git a/setup.py b/setup.py
index 1eac416a2..40b3b16c5 100644
--- a/setup.py
+++ b/setup.py
@@ -85,7 +85,7 @@ install_requires = [
# Extra dependencies for remote integrations
gs = ["google-cloud-storage==1.19.0"]
-gdrive = ["pydrive2>=1.4.2"]
+gdrive = ["pydrive2>=1.4.5"]
s3 = ["boto3>=1.9.201"]
azure = ["azure-storage-blob==2.1.0"]
oss = ["oss2==2.6.1"]
| dvc add absolute_path
`~/repo$ dvc add $HOME/repo/file` should realise that the file is not "external" and add it as a relative path rather than absolute. | iterative/dvc | diff --git a/tests/func/test_add.py b/tests/func/test_add.py
index 1ce44a0b0..002722f52 100644
--- a/tests/func/test_add.py
+++ b/tests/func/test_add.py
@@ -216,7 +216,7 @@ class TestAddLocalRemoteFile(TestDvc):
self.assertEqual(ret, 0)
d = load_stage_file("bar.dvc")
- self.assertEqual(d["outs"][0]["path"], bar)
+ self.assertEqual(d["outs"][0]["path"], self.BAR)
class TestCmdAdd(TestDvc):
diff --git a/tests/unit/output/test_local.py b/tests/unit/output/test_local.py
index 50059e6ea..ef029d294 100644
--- a/tests/unit/output/test_local.py
+++ b/tests/unit/output/test_local.py
@@ -42,13 +42,28 @@ def test_str_workdir_inside_repo(dvc):
assert os.path.join("some_folder", "path") == str(output)
-def test_str_on_absolute_path(dvc):
+def test_str_on_local_absolute_path(dvc):
stage = Stage(dvc)
- path = os.path.abspath(os.path.join("path", "to", "file"))
- output = OutputLOCAL(stage, path, cache=False)
+ rel_path = os.path.join("path", "to", "file")
+ abs_path = os.path.abspath(rel_path)
+ output = OutputLOCAL(stage, abs_path, cache=False)
- assert path == str(output)
+ assert output.def_path == rel_path
+ assert output.path_info.fspath == abs_path
+ assert str(output) == rel_path
+
+
+def test_str_on_external_absolute_path(dvc):
+ stage = Stage(dvc)
+
+ rel_path = os.path.join("..", "path", "to", "file")
+ abs_path = os.path.abspath(rel_path)
+ output = OutputLOCAL(stage, abs_path, cache=False)
+
+ assert output.def_path == abs_path
+ assert output.path_info.fspath == abs_path
+ assert str(output) == abs_path
class TestGetFilesNumber(TestDvc):
diff --git a/tests/unit/test_analytics.py b/tests/unit/test_analytics.py
index 3179aeff0..9413e781b 100644
--- a/tests/unit/test_analytics.py
+++ b/tests/unit/test_analytics.py
@@ -61,16 +61,19 @@ def test_runtime_info(tmp_global_config):
@mock.patch("requests.post")
def test_send(mock_post, tmp_path):
+ import requests
+
url = "https://analytics.dvc.org"
report = {"name": "dummy report"}
- fname = str(tmp_path / "report")
+ report_file = tmp_path / "report"
- with open(fname, "w") as fobj:
- json.dump(report, fobj)
+ report_file.write_text(json.dumps(report))
+ mock_post.side_effect = requests.exceptions.RequestException
- analytics.send(fname)
+ analytics.send(str(report_file))
assert mock_post.called
assert mock_post.call_args.args[0] == url
+ assert not report_file.exists()
@pytest.mark.parametrize(
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 0847a5e45..1b7d906e1 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -44,7 +44,7 @@ class TestColorFormatter:
with caplog.at_level(logging.INFO, logger="dvc"):
logger.error("message")
- expected = "{red}ERROR{nc}: message\n".format(**colors)
+ expected = "{red}ERROR{nc}: message".format(**colors)
assert expected == formatter.format(caplog.records[0])
@@ -55,7 +55,7 @@ class TestColorFormatter:
except Exception:
logger.exception("message")
- expected = "{red}ERROR{nc}: message\n".format(**colors)
+ expected = "{red}ERROR{nc}: message".format(**colors)
assert expected == formatter.format(caplog.records[0])
@@ -66,7 +66,7 @@ class TestColorFormatter:
except Exception:
logger.exception("")
- expected = "{red}ERROR{nc}: description\n".format(**colors)
+ expected = "{red}ERROR{nc}: description".format(**colors)
assert expected == formatter.format(caplog.records[0])
@@ -77,9 +77,7 @@ class TestColorFormatter:
except Exception:
logger.exception("message")
- expected = "{red}ERROR{nc}: message - description\n".format(
- **colors
- )
+ expected = "{red}ERROR{nc}: message - description".format(**colors)
assert expected == formatter.format(caplog.records[0])
@@ -95,7 +93,7 @@ class TestColorFormatter:
"{red}ERROR{nc}: description\n"
"{red}{line}{nc}\n"
"{stack_trace}"
- "{red}{line}{nc}\n".format(
+ "{red}{line}{nc}".format(
line="-" * 60, stack_trace=stack_trace, **colors
)
)
@@ -114,7 +112,7 @@ class TestColorFormatter:
"{red}ERROR{nc}: something\n"
"{red}{line}{nc}\n"
"{stack_trace}"
- "{red}{line}{nc}\n".format(
+ "{red}{line}{nc}".format(
line="-" * 60, stack_trace=stack_trace, **colors
)
)
@@ -136,7 +134,7 @@ class TestColorFormatter:
"{red}ERROR{nc}: message - second: first\n"
"{red}{line}{nc}\n"
"{stack_trace}"
- "{red}{line}{nc}\n".format(
+ "{red}{line}{nc}".format(
line="-" * 60, stack_trace=stack_trace, **colors
)
)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 6
} | 0.82 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==4.2.4
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@e7b3297c2d6ae1ad633cd0435cca81093cac86ff#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flatten-json==0.1.14
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==2.10.2
google-api-python-client==2.166.0
google-auth==1.35.0
google-auth-httplib2==0.2.0
google-cloud-core==1.7.3
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==4.24.4
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
pydot==2.0.0
PyDrive2==1.15.4
pyfastcopy==1.0.3
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyOpenSSL==25.0.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
speedcopy==2.1.5
texttable==1.7.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.82.9+e7b329
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flatten-json==0.1.14
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==2.10.2
- google-api-python-client==2.166.0
- google-auth==1.35.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.7.3
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==4.24.4
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydot==2.0.0
- pydrive2==1.15.4
- pyfastcopy==1.0.3
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyopenssl==25.0.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- speedcopy==2.1.5
- texttable==1.7.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_add.py::TestAddLocalRemoteFile::test",
"tests/unit/output/test_local.py::test_str_on_local_absolute_path",
"tests/unit/test_analytics.py::test_send",
"tests/unit/test_logger.py::TestColorFormatter::test_error",
"tests/unit/test_logger.py::TestColorFormatter::test_exception",
"tests/unit/test_logger.py::TestColorFormatter::test_exception_with_description_and_without_message",
"tests/unit/test_logger.py::TestColorFormatter::test_exception_with_description_and_message",
"tests/unit/test_logger.py::TestColorFormatter::test_exception_under_verbose",
"tests/unit/test_logger.py::TestColorFormatter::test_tb_only",
"tests/unit/test_logger.py::TestColorFormatter::test_nested_exceptions"
] | [
"tests/func/test_add.py::TestAddUnprotected::test",
"tests/func/test_add.py::test_should_protect_on_repeated_add[hardlink]",
"tests/func/test_add.py::test_should_protect_on_repeated_add[symlink]",
"tests/func/test_add.py::test_should_protect_on_repeated_add[copy]"
] | [
"tests/func/test_add.py::test_add",
"tests/func/test_add.py::test_add_unicode",
"tests/func/test_add.py::test_add_unsupported_file",
"tests/func/test_add.py::test_add_directory",
"tests/func/test_add.py::TestAddDirectoryRecursive::test",
"tests/func/test_add.py::TestAddCmdDirectoryRecursive::test",
"tests/func/test_add.py::TestAddCmdDirectoryRecursive::test_warn_about_large_directories",
"tests/func/test_add.py::TestAddDirectoryWithForwardSlash::test",
"tests/func/test_add.py::test_add_tracked_file",
"tests/func/test_add.py::TestAddDirWithExistingCache::test",
"tests/func/test_add.py::TestAddModifiedDir::test",
"tests/func/test_add.py::test_add_file_in_dir",
"tests/func/test_add.py::TestAddExternalLocalFile::test",
"tests/func/test_add.py::TestCmdAdd::test",
"tests/func/test_add.py::TestDoubleAddUnchanged::test_dir",
"tests/func/test_add.py::TestDoubleAddUnchanged::test_file",
"tests/func/test_add.py::test_should_update_state_entry_for_file_after_add",
"tests/func/test_add.py::test_should_update_state_entry_for_directory_after_add",
"tests/func/test_add.py::TestAddCommit::test",
"tests/func/test_add.py::test_should_collect_dir_cache_only_once",
"tests/func/test_add.py::TestShouldAddDataFromExternalSymlink::test",
"tests/func/test_add.py::TestShouldAddDataFromInternalSymlink::test",
"tests/func/test_add.py::TestShouldPlaceStageInDataDirIfRepositoryBelowSymlink::test",
"tests/func/test_add.py::TestShouldThrowProperExceptionOnCorruptedStageFile::test",
"tests/func/test_add.py::TestAddFilename::test",
"tests/func/test_add.py::test_failed_add_cleanup",
"tests/func/test_add.py::test_should_not_track_git_internal_files",
"tests/func/test_add.py::test_readding_dir_should_not_unprotect_all",
"tests/func/test_add.py::test_should_not_checkout_when_adding_cached_copy",
"tests/func/test_add.py::test_should_relink_on_repeated_add[hardlink-copy-<lambda>]",
"tests/func/test_add.py::test_should_relink_on_repeated_add[symlink-copy-<lambda>]",
"tests/func/test_add.py::test_should_relink_on_repeated_add[copy-hardlink-is_hardlink]",
"tests/func/test_add.py::test_should_relink_on_repeated_add[copy-symlink-is_symlink]",
"tests/func/test_add.py::test_escape_gitignore_entries",
"tests/unit/output/test_local.py::TestOutputLOCAL::test_save_missing",
"tests/unit/output/test_local.py::test_str_workdir_outside_repo",
"tests/unit/output/test_local.py::test_str_workdir_inside_repo",
"tests/unit/output/test_local.py::test_str_on_external_absolute_path",
"tests/unit/output/test_local.py::TestGetFilesNumber::test_return_0_on_no_cache",
"tests/unit/output/test_local.py::TestGetFilesNumber::test_return_1_on_single_file_cache",
"tests/unit/output/test_local.py::TestGetFilesNumber::test_return_multiple_for_dir",
"tests/unit/test_analytics.py::test_collect_and_send_report",
"tests/unit/test_analytics.py::test_runtime_info",
"tests/unit/test_analytics.py::test_is_enabled[config0-True]",
"tests/unit/test_analytics.py::test_is_enabled[config1-False]",
"tests/unit/test_analytics.py::test_is_enabled[config2-True]",
"tests/unit/test_analytics.py::test_is_enabled[config3-True]",
"tests/unit/test_analytics.py::test_is_enabled[config4-False]",
"tests/unit/test_analytics.py::test_system_info",
"tests/unit/test_analytics.py::test_find_or_create_user_id",
"tests/unit/test_logger.py::TestColorFormatter::test_debug",
"tests/unit/test_logger.py::TestColorFormatter::test_info",
"tests/unit/test_logger.py::TestColorFormatter::test_warning",
"tests/unit/test_logger.py::TestColorFormatter::test_progress_awareness",
"tests/unit/test_logger.py::test_handlers"
] | [] | Apache License 2.0 | 6,045 | 1,318 | [
"dvc/analytics.py",
"dvc/logger.py",
"dvc/output/local.py",
"dvc/remote/gdrive.py",
"dvc/version.py",
"setup.py"
] |
mikedh__trimesh-674 | 85ec8303d49fe12ecd1360d4027c378615105ea3 | 2019-12-19 03:16:50 | 0ff7eadc10b85363c8811725defad9a6cad23892 | diff --git a/trimesh/base.py b/trimesh/base.py
index 29b1a6bc..d309124d 100644
--- a/trimesh/base.py
+++ b/trimesh/base.py
@@ -285,7 +285,7 @@ class Trimesh(Geometry):
Returns
----------
- faces : (n,3) int
+ faces : (n, 3) int
Representing triangles which reference self.vertices
"""
return self._data.get('faces', np.empty(shape=(0, 3), dtype=int))
@@ -439,9 +439,8 @@ class Trimesh(Geometry):
values : (n, 3) float
Points in space
"""
- self._data['vertices'] = np.asanyarray(values,
- order='C',
- dtype=np.float64)
+ self._data['vertices'] = np.asanyarray(
+ values, order='C', dtype=np.float64)
@caching.cache_decorator
def vertex_normals(self):
@@ -455,7 +454,7 @@ class Trimesh(Geometry):
Returns
----------
- vertex_normals : (n,3) float
+ vertex_normals : (n, 3) float
Represents the surface normal at each vertex.
Where n == len(self.vertices)
"""
@@ -1288,7 +1287,7 @@ class Trimesh(Geometry):
Returns
----------
- adjacency : (n,2) int
+ adjacency : (n, 2) int
Pairs of faces which share an edge
Examples
@@ -1460,7 +1459,7 @@ class Trimesh(Geometry):
mesh = trimesh.primitives.Box()
graph = mesh.vertex_adjacency_graph
graph.neighbors(0)
- > [1,2,3,4]
+ > [1, 2, 3, 4]
"""
adjacency_g = graph.vertex_adjacency_graph(mesh=self)
@@ -1485,7 +1484,7 @@ class Trimesh(Geometry):
>>> mesh = trimesh.primitives.Box()
>>> mesh.vertex_neighbors[0]
- [1,2,3,4]
+ [1, 2, 3, 4]
"""
graph = self.vertex_adjacency_graph
neighbors = [list(graph.neighbors(i)) for
@@ -1674,9 +1673,9 @@ class Trimesh(Geometry):
# the face index of the largest face in each facet
index = np.array([i[area_faces[i].argmax()]
for i in self.facets])
- # (n,3) float, unit normal vectors of facet plane
+ # (n, 3) float, unit normal vectors of facet plane
normals = self.face_normals[index]
- # (n,3) float, points on facet plane
+ # (n, 3) float, points on facet plane
origins = self.vertices[self.faces[:, 0][index]]
# save origins in cache
self._cache['facets_origin'] = origins
@@ -1732,7 +1731,7 @@ class Trimesh(Geometry):
normals = self.facets_normal
origins = self.facets_origin
- # (n,3) convex hull vertices
+ # (n, 3) convex hull vertices
convex = self.convex_hull.vertices.view(np.ndarray).copy()
# boolean mask for which facets are on convex hull
@@ -2155,24 +2154,6 @@ class Trimesh(Geometry):
# keep face normals as the haven't changed
self._cache.clear(exclude=['face_normals'])
- def apply_obb(self):
- """
- Apply the oriented bounding box transform to the current mesh.
-
- This will result in a mesh with an AABB centered at the
- origin and the same dimensions as the OBB.
-
- Returns
- ----------
- matrix : (4, 4) float
- Transformation matrix that was applied
- to mesh to move it into OBB frame
- """
- matrix = self.bounding_box_oriented.primitive.transform
- matrix = np.linalg.inv(matrix)
- self.apply_transform(matrix)
- return matrix
-
def apply_transform(self, matrix):
"""
Transform mesh by a homogeneous transformation matrix.
@@ -2187,13 +2168,12 @@ class Trimesh(Geometry):
Homogeneous transformation matrix
"""
# get c-order float64 matrix
- matrix = np.asanyarray(matrix,
- order='C',
- dtype=np.float64)
+ matrix = np.asanyarray(
+ matrix, order='C', dtype=np.float64)
# only support homogeneous transformations
if matrix.shape != (4, 4):
- raise ValueError('Transformation matrix must be (4,4)!')
+ raise ValueError('Transformation matrix must be (4, 4)!')
# exit early if we've been passed an identity matrix
# np.allclose is surprisingly slow so do this test
@@ -2804,7 +2784,7 @@ class Trimesh(Geometry):
Examples
-----------
- r = mesh.eval_cached('np.dot(self.vertices, args[0])', [0,0,1])
+ r = mesh.eval_cached('np.dot(self.vertices, args[0])', [0, 0, 1])
"""
statement = str(statement)
diff --git a/trimesh/parent.py b/trimesh/parent.py
index 86a75d3f..39d7b941 100644
--- a/trimesh/parent.py
+++ b/trimesh/parent.py
@@ -95,6 +95,24 @@ class Geometry(ABC):
# apply_transform will work nicely even on negative scales
return self.apply_transform(matrix)
+ def apply_obb(self):
+ """
+ Apply the oriented bounding box transform to the current mesh.
+
+ This will result in a mesh with an AABB centered at the
+ origin and the same dimensions as the OBB.
+
+ Returns
+ ----------
+ matrix : (4, 4) float
+ Transformation matrix that was applied
+ to mesh to move it into OBB frame
+ """
+ matrix = self.bounding_box_oriented.primitive.transform
+ matrix = np.linalg.inv(matrix)
+ self.apply_transform(matrix)
+ return matrix
+
@abc.abstractmethod
def copy(self):
pass
diff --git a/trimesh/path/segments.py b/trimesh/path/segments.py
index 8b510d68..b1ef6109 100644
--- a/trimesh/path/segments.py
+++ b/trimesh/path/segments.py
@@ -437,6 +437,10 @@ def resample(segments,
# save index of original segment
index = []
+ tile = np.tile
+ # generate the line indexes ahead of time
+ stacks = util.stack_lines(np.arange(splits.max() + 1))
+
# loop through each count of unique splits needed
for split in np.unique(splits):
# get a mask of which segments need to be split
@@ -444,20 +448,17 @@ def resample(segments,
# the vector for each incremental length
increment = vec[mask] / split
# stack the increment vector into the shape needed
- v = np.tile(increment, split + 1).reshape((-1, 3))
- # apply integer multiples of the increment
- v *= np.tile(np.arange(split + 1),
- len(increment)).reshape((-1, 1))
+ v = (tile(increment, split + 1).reshape((-1, 3)) *
+ tile(np.arange(split + 1),
+ len(increment)).reshape((-1, 1)))
# stack the origin points correctly
- o = np.tile(pt1[mask], split + 1).reshape((-1, 3))
+ o = tile(pt1[mask], split + 1).reshape((-1, 3))
# now get each segment as an (split, 3) polyline
poly = (o + v).reshape((-1, split + 1, 3))
- # get indexes to stack polyline into segments
- stack = util.stack_lines(np.arange(split + 1))
# save the resulting segments
# magical slicing is equivalent to:
# > [p[stack] for p in poly]
- result.extend(poly[:, stack])
+ result.extend(poly[:, stacks[:split]])
if return_index:
# get the original index from the mask
@@ -466,13 +467,16 @@ def resample(segments,
index.append((np.ones((len(poly), split),
dtype=np.int64) *
index_original).ravel())
-
if tol.strict:
# check to make sure every start and end point
# from the reconstructed result corresponds
for original, recon in zip(segments[mask], poly):
assert np.allclose(original[0], recon[0])
assert np.allclose(original[-1], recon[-1])
+ # make sure stack slicing was OK
+ assert np.allclose(
+ util.stack_lines(np.arange(split + 1)),
+ stacks[:split])
# stack into (n, 2, 3) segments
result = [np.concatenate(result)]
diff --git a/trimesh/version.py b/trimesh/version.py
index b363ca01..f59ba749 100644
--- a/trimesh/version.py
+++ b/trimesh/version.py
@@ -1,1 +1,1 @@
-__version__ = '3.5.5'
+__version__ = '3.5.6'
| AttributeError: 'PointCloud' object has no attribute 'apply_obb'
When i use
“mesh = trimesh.load(folder + "registeredScene.ply")
Tform = mesh.apply_obb()”
Statement,system hint AttributeError: 'PointCloud' object has no attribute 'apply_obb',How can i modify it?Thanks。 | mikedh/trimesh | diff --git a/tests/test_points.py b/tests/test_points.py
index 3c407a1f..fd8975e6 100644
--- a/tests/test_points.py
+++ b/tests/test_points.py
@@ -190,6 +190,13 @@ class PointsTest(g.unittest.TestCase):
assert p.vertices.shape == (100, 3)
assert p.colors.shape == (100, 4)
+ def test_obb(self):
+ p = g.get_mesh('points_agisoft.xyz')
+ original = p.bounds.copy()
+ matrix = p.apply_obb()
+ assert matrix.shape == (4, 4)
+ assert not g.np.allclose(p.bounds, original)
+
def test_remove_close(self):
# create 100 unique points
p = g.np.arange(300).reshape((100, 3))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | 3.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y openscad blender"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
autoprop==4.1.0
backports.cached-property==1.0.2
certifi==2021.5.30
chardet==5.0.0
charset-normalizer==2.0.12
colorlog==6.9.0
coverage==6.2
cycler==0.11.0
Cython==3.0.12
decorator==4.4.2
glooey==0.3.6
idna==3.10
imageio==2.15.0
importlib-metadata==4.8.3
iniconfig==1.1.1
jsonschema==3.2.0
kiwisolver==1.3.1
lxml==5.3.1
matplotlib==3.3.4
more-itertools==8.14.0
mpmath==1.3.0
msgpack==1.0.5
networkx==2.5.1
numpy==1.19.5
packaging==21.3
Pillow==8.4.0
pluggy==1.0.0
psutil==7.0.0
py==1.11.0
pycollada==0.8
pyglet==2.0.10
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
python-fcl==0.7.0.5
PyWavelets==1.1.1
PyYAML==6.0.1
requests==2.27.1
Rtree==0.9.7
scikit-image==0.17.2
scipy==1.5.4
Shapely==1.8.5.post1
signature_dispatch==1.0.0
six==1.17.0
svg.path==6.2
sympy==1.9
tifffile==2020.9.3
tomli==1.2.3
triangle==20220202
-e git+https://github.com/mikedh/trimesh.git@85ec8303d49fe12ecd1360d4027c378615105ea3#egg=trimesh
typeguard==2.13.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
vecrec==0.3.1
xxhash==3.2.0
zipp==3.6.0
| name: trimesh
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- autoprop==4.1.0
- backports-cached-property==1.0.2
- chardet==5.0.0
- charset-normalizer==2.0.12
- colorlog==6.9.0
- coverage==6.2
- cycler==0.11.0
- cython==3.0.12
- decorator==4.4.2
- glooey==0.3.6
- idna==3.10
- imageio==2.15.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jsonschema==3.2.0
- kiwisolver==1.3.1
- lxml==5.3.1
- matplotlib==3.3.4
- more-itertools==8.14.0
- mpmath==1.3.0
- msgpack==1.0.5
- networkx==2.5.1
- numpy==1.19.5
- packaging==21.3
- pillow==8.4.0
- pluggy==1.0.0
- psutil==7.0.0
- py==1.11.0
- pycollada==0.8
- pyglet==2.0.10
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- python-fcl==0.7.0.5
- pywavelets==1.1.1
- pyyaml==6.0.1
- requests==2.27.1
- rtree==0.9.7
- scikit-image==0.17.2
- scipy==1.5.4
- shapely==1.8.5.post1
- signature-dispatch==1.0.0
- six==1.17.0
- svg-path==6.2
- sympy==1.9
- tifffile==2020.9.3
- tomli==1.2.3
- triangle==20220202
- typeguard==2.13.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- vecrec==0.3.1
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/trimesh
| [
"tests/test_points.py::PointsTest::test_obb"
] | [] | [
"tests/test_points.py::PointsTest::test_empty",
"tests/test_points.py::PointsTest::test_init_arguments",
"tests/test_points.py::PointsTest::test_kmeans",
"tests/test_points.py::PointsTest::test_plane",
"tests/test_points.py::PointsTest::test_pointcloud",
"tests/test_points.py::PointsTest::test_remove_close",
"tests/test_points.py::PointsTest::test_tsp",
"tests/test_points.py::PointsTest::test_vertex_only",
"tests/test_points.py::PointsTest::test_xyz"
] | [] | MIT License | 6,050 | 2,294 | [
"trimesh/base.py",
"trimesh/parent.py",
"trimesh/path/segments.py",
"trimesh/version.py"
] |
|
getsentry__responses-291 | 567c69e11332e1e2bcabe347f6cd9376765944b9 | 2019-12-19 03:31:11 | 72a60d9be44a46ac2cda442a8105c3c4f9e86458 | diff --git a/responses.py b/responses.py
index 6148f1d..47c81cf 100644
--- a/responses.py
+++ b/responses.py
@@ -375,21 +375,12 @@ class Response(BaseResponse):
headers = self.get_headers()
status = self.status
body = _handle_body(self.body)
-
- # The requests library's cookie handling depends on the response object
- # having an original response object with the headers as the `msg`, so
- # we give it what it needs.
- orig_response = HTTPResponse(
- body=body, # required to avoid "ValueError: Unable to determine whether fp is closed."
- msg=headers,
- preload_content=False,
- )
return HTTPResponse(
status=status,
reason=six.moves.http_client.responses.get(status),
body=body,
headers=headers,
- original_response=orig_response,
+ original_response=OriginalResponseShim(headers),
preload_content=False,
)
@@ -414,27 +405,45 @@ class CallbackResponse(BaseResponse):
if isinstance(body, Exception):
raise body
+ # If the callback set a content-type remove the one
+ # set in add_callback() so that we don't have multiple
+ # content type values.
+ if "Content-Type" in r_headers:
+ headers.pop("Content-Type", None)
+
body = _handle_body(body)
headers.extend(r_headers)
- # The requests library's cookie handling depends on the response object
- # having an original response object with the headers as the `msg`, so
- # we give it what it needs.
- orig_response = HTTPResponse(
- body=body, # required to avoid "ValueError: Unable to determine whether fp is closed."
- msg=headers,
- preload_content=False,
- )
return HTTPResponse(
status=status,
reason=six.moves.http_client.responses.get(status),
body=body,
headers=headers,
- original_response=orig_response,
+ original_response=OriginalResponseShim(headers),
preload_content=False,
)
+class OriginalResponseShim(object):
+ """
+ Shim for compatibility with older versions of urllib3
+
+ requests cookie handling depends on responses having a property chain of
+ `response._original_response.msg` which contains the response headers [1]
+
+ Using HTTPResponse() for this purpose causes compatibility errors with
+ urllib3<1.23.0. To avoid adding more dependencies we can use this shim.
+
+ [1]: https://github.com/psf/requests/blob/75bdc998e2d/requests/cookies.py#L125
+ """
+
+ def __init__(self, headers):
+ self.msg = headers
+
+ def isclosed(self):
+ return True
+
+
class RequestsMock(object):
DELETE = "DELETE"
GET = "GET"
| responses 0.10.8 introduces an implicit dependency on urllib3>=1.23
responses 0.10.8 introduces these blocks:
https://github.com/getsentry/responses/blob/2d5d9f5ec33f168f097bb92f99243f18f264184c/responses.py#L382-L386
The `msg` param was not introduced until urllib3 1.23.
`requests` itself permits any `urllib3>=1.21.1,<1.26,!=1.25.0,!=1.25.1`. If `responses` is run in an env with `urllib3<1.23`, this error gets thrown:
```
def get_response(self, request):
if self.body and isinstance(self.body, Exception):
raise self.body
headers = self.get_headers()
status = self.status
body = _handle_body(self.body)
# The requests library's cookie handling depends on the response object
# having an original response object with the headers as the `msg`, so
# we give it what it needs.
orig_response = HTTPResponse(
body=body, # required to avoid "ValueError: Unable to determine whether fp is closed."
msg=headers,
> preload_content=False,
)
E TypeError: __init__() got an unexpected keyword argument 'msg'
```
Perhaps these new blocks can be modified to avoid `msg`? ~~Or a hard requirement for urllib3>=1.23 could be added to the responses setup.py?~~ | getsentry/responses | diff --git a/test_responses.py b/test_responses.py
index 65904de..223c0a7 100644
--- a/test_responses.py
+++ b/test_responses.py
@@ -410,7 +410,11 @@ def test_callback():
body = b"test callback"
status = 400
reason = "Bad Request"
- headers = {"foo": "bar"}
+ headers = {
+ "foo": "bar",
+ "Content-Type": "application/json",
+ "Content-Length": "13",
+ }
url = "http://example.com/"
def request_callback(request):
@@ -423,8 +427,9 @@ def test_callback():
assert resp.text == "test callback"
assert resp.status_code == status
assert resp.reason == reason
- assert "foo" in resp.headers
- assert resp.headers["foo"] == "bar"
+ assert "bar" == resp.headers.get("foo")
+ assert "application/json" == resp.headers.get("Content-Type")
+ assert "13" == resp.headers.get("Content-Length")
run()
assert_reset()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==4.5.4
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
MarkupSafe==3.0.2
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==2.10.1
pytest-localserver==0.9.0.post0
requests==2.32.3
-e git+https://github.com/getsentry/responses.git@567c69e11332e1e2bcabe347f6cd9376765944b9#egg=responses
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
| name: responses
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==4.5.4
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- markupsafe==3.0.2
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==2.10.1
- pytest-localserver==0.9.0.post0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
prefix: /opt/conda/envs/responses
| [
"test_responses.py::test_callback"
] | [
"test_responses.py::test_arbitrary_status_code"
] | [
"test_responses.py::test_response",
"test_responses.py::test_response_encoded",
"test_responses.py::test_response_with_instance",
"test_responses.py::test_replace[http://example.com/two-http://example.com/two]",
"test_responses.py::test_replace[original1-replacement1]",
"test_responses.py::test_replace[http://example\\\\.com/two-http://example\\\\.com/two]",
"test_responses.py::test_replace_error[http://example.com/one-http://example\\\\.com/one]",
"test_responses.py::test_replace_error[http://example\\\\.com/one-http://example.com/one]",
"test_responses.py::test_remove",
"test_responses.py::test_response_equality[args10-kwargs10-args20-kwargs20-True]",
"test_responses.py::test_response_equality[args11-kwargs11-args21-kwargs21-False]",
"test_responses.py::test_response_equality[args12-kwargs12-args22-kwargs22-False]",
"test_responses.py::test_response_equality[args13-kwargs13-args23-kwargs23-True]",
"test_responses.py::test_response_equality_different_objects",
"test_responses.py::test_connection_error",
"test_responses.py::test_match_querystring",
"test_responses.py::test_match_empty_querystring",
"test_responses.py::test_match_querystring_error",
"test_responses.py::test_match_querystring_regex",
"test_responses.py::test_match_querystring_error_regex",
"test_responses.py::test_match_querystring_auto_activates",
"test_responses.py::test_accept_string_body",
"test_responses.py::test_accept_json_body",
"test_responses.py::test_no_content_type",
"test_responses.py::test_throw_connection_error_explicit",
"test_responses.py::test_callback_exception_result",
"test_responses.py::test_callback_exception_body",
"test_responses.py::test_callback_no_content_type",
"test_responses.py::test_regular_expression_url",
"test_responses.py::test_custom_adapter",
"test_responses.py::test_responses_as_context_manager",
"test_responses.py::test_activate_doesnt_change_signature",
"test_responses.py::test_activate_mock_interaction",
"test_responses.py::test_activate_doesnt_change_signature_with_return_type",
"test_responses.py::test_activate_doesnt_change_signature_for_method",
"test_responses.py::test_response_cookies",
"test_responses.py::test_response_secure_cookies",
"test_responses.py::test_response_cookies_multiple",
"test_responses.py::test_response_callback",
"test_responses.py::test_response_filebody",
"test_responses.py::test_assert_all_requests_are_fired",
"test_responses.py::test_allow_redirects_samehost",
"test_responses.py::test_handles_unicode_querystring",
"test_responses.py::test_handles_unicode_url",
"test_responses.py::test_headers",
"test_responses.py::test_legacy_adding_headers",
"test_responses.py::test_multiple_responses",
"test_responses.py::test_multiple_urls",
"test_responses.py::test_passthru",
"test_responses.py::test_method_named_param",
"test_responses.py::test_passthru_unicode",
"test_responses.py::test_custom_target",
"test_responses.py::test_cookies_from_headers"
] | [] | Apache License 2.0 | 6,051 | 671 | [
"responses.py"
] |
|
encode__httpx-653 | 5ee512d803d1d6b49dc171e1114f0075618de78e | 2019-12-20 10:31:47 | e284b84bf9365c8c10a681140a72066980e0da9d | tomchristie: Running the example in #652 after this change...
```python
$ python ./example.py
requests {'a': '1', 'b': '2'}
httpx {'a': '1', 'b': '2'}
``` | diff --git a/httpx/models.py b/httpx/models.py
index ad3599b..fc938c3 100644
--- a/httpx/models.py
+++ b/httpx/models.py
@@ -90,9 +90,14 @@ class URL:
if self.is_absolute_url:
self._uri_reference = self._uri_reference.normalize()
- # Add any query parameters.
+ # Add any query parameters, merging with any in the URL if needed.
if params:
- query_string = str(QueryParams(params))
+ if self._uri_reference.query:
+ url_params = QueryParams(self._uri_reference.query)
+ url_params.update(params)
+ query_string = str(url_params)
+ else:
+ query_string = str(QueryParams(params))
self._uri_reference = self._uri_reference.copy_with(query=query_string)
# Enforce absolute URLs by default.
| `params` overrides query string in url
Hi,
I noticed a difference between `httpx` and `requests` when processing query parameters. I provided the `params` for the url with query string, `requests` would **merge** `params` into query string, but `httpx` replaces the whole query string.
Here is a simple script:
```py
import requests
import httpx
import asyncio
URL = 'https://httpbin.org/get?a=1'
r = requests.get(URL, params={'b': 2})
print('requests', r.json()['args'])
async def f():
r = await httpx.get(URL, params={'b': 2})
print('httpx', r.json()['args'])
asyncio.run(f())
```
Results:
```
requests {'a': '1', 'b': '2'}
httpx {'b': '2'}
```
Is this an intended behavior? | encode/httpx | diff --git a/tests/models/test_url.py b/tests/models/test_url.py
index 1415ee9..491d0a4 100644
--- a/tests/models/test_url.py
+++ b/tests/models/test_url.py
@@ -89,7 +89,7 @@ def test_url_params():
assert str(url) == "https://example.org:123/path/to/somewhere?a=123"
url = URL("https://example.org:123/path/to/somewhere?b=456", params={"a": "123"})
- assert str(url) == "https://example.org:123/path/to/somewhere?a=123"
+ assert str(url) == "https://example.org:123/path/to/somewhere?b=456&a=123"
def test_url_join():
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aspy.refactor-imports==3.0.2
attrs==25.3.0
autoflake==2.3.1
babel==2.17.0
backrefs==5.8
black==25.1.0
brotlipy==0.7.0
certifi==2025.1.31
cffi==1.17.1
chardet==3.0.4
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
flake8==7.2.0
flake8-bugbear==24.12.12
flake8-pie==0.16.0
ghp-import==2.1.0
h11==0.8.1
h2==3.2.0
hpack==3.0.0
hstspreload==2025.1.1
-e git+https://github.com/encode/httpx.git@5ee512d803d1d6b49dc171e1114f0075618de78e#egg=httpx
hyperframe==5.2.0
idna==2.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isort==6.0.1
Jinja2==3.1.6
Markdown==3.7
MarkupSafe==3.0.2
mccabe==0.7.0
mergedeep==1.3.4
mkautodoc==0.2.0
mkdocs==1.6.1
mkdocs-get-deps==0.2.0
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mypy==1.15.0
mypy-extensions==1.0.0
outcome==1.3.0.post0
packaging==24.2
paginate==0.5.7
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pycparser==2.22
pyflakes==3.3.1
Pygments==2.19.1
pymdown-extensions==10.14.3
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-trio==0.8.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
pyyaml_env_tag==0.1
requests==2.32.3
rfc3986==1.5.0
seed-isort-config==2.2.0
six==1.17.0
sniffio==1.3.1
sortedcontainers==2.4.0
tomli==2.2.1
trio==0.29.0
trustme==1.2.1
typing_extensions==4.13.0
urllib3==2.3.0
uvicorn==0.34.0
watchdog==6.0.0
zipp==3.21.0
| name: httpx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aspy-refactor-imports==3.0.2
- attrs==25.3.0
- autoflake==2.3.1
- babel==2.17.0
- backrefs==5.8
- black==25.1.0
- brotlipy==0.7.0
- certifi==2025.1.31
- cffi==1.17.1
- chardet==3.0.4
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- flake8==7.2.0
- flake8-bugbear==24.12.12
- flake8-pie==0.16.0
- ghp-import==2.1.0
- h11==0.8.1
- h2==3.2.0
- hpack==3.0.0
- hstspreload==2025.1.1
- hyperframe==5.2.0
- idna==2.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isort==6.0.1
- jinja2==3.1.6
- markdown==3.7
- markupsafe==3.0.2
- mccabe==0.7.0
- mergedeep==1.3.4
- mkautodoc==0.2.0
- mkdocs==1.6.1
- mkdocs-get-deps==0.2.0
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mypy==1.15.0
- mypy-extensions==1.0.0
- outcome==1.3.0.post0
- packaging==24.2
- paginate==0.5.7
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pycparser==2.22
- pyflakes==3.3.1
- pygments==2.19.1
- pymdown-extensions==10.14.3
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-trio==0.8.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- requests==2.32.3
- rfc3986==1.5.0
- seed-isort-config==2.2.0
- six==1.17.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- tomli==2.2.1
- trio==0.29.0
- trustme==1.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- uvicorn==0.34.0
- watchdog==6.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/httpx
| [
"tests/models/test_url.py::test_url_params"
] | [] | [
"tests/models/test_url.py::test_idna_url[http_with_port]",
"tests/models/test_url.py::test_idna_url[unicode_tr46_compat]",
"tests/models/test_url.py::test_idna_url[https_without_port]",
"tests/models/test_url.py::test_idna_url[https_with_port]",
"tests/models/test_url.py::test_idna_url[http_with_custom_port]",
"tests/models/test_url.py::test_idna_url[https_with_custom_port]",
"tests/models/test_url.py::test_url",
"tests/models/test_url.py::test_url_eq_str",
"tests/models/test_url.py::test_url_join",
"tests/models/test_url.py::test_url_join_rfc3986",
"tests/models/test_url.py::test_url_set",
"tests/models/test_url.py::test_url_full_path_setter",
"tests/models/test_url.py::test_origin_from_url_string",
"tests/models/test_url.py::test_url_copywith_for_authority"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,059 | 203 | [
"httpx/models.py"
] |
jsbronder__asyncio-dgram-2 | 40cabedc88195bf85a3912752c02fed09385dc94 | 2019-12-23 22:38:54 | 40cabedc88195bf85a3912752c02fed09385dc94 | diff --git a/asyncio_dgram/aio.py b/asyncio_dgram/aio.py
index 8117282..7487a98 100644
--- a/asyncio_dgram/aio.py
+++ b/asyncio_dgram/aio.py
@@ -22,17 +22,20 @@ class DatagramStream:
raised.
"""
- def __init__(self, transport, recvq, excq):
+ def __init__(self, transport, recvq, excq, drained):
"""
@param transport - asyncio transport
@param recvq - asyncio queue that gets populated by the
DatagramProtocol with received datagrams.
@param excq - asyncio queue that gets populated with any errors
detected by the DatagramProtocol.
+ @param drained - asyncio event that is unset when writing is
+ paused and set otherwise.
"""
self._transport = transport
self._recvq = recvq
self._excq = excq
+ self._drained = drained
def __del__(self):
self._transport.close()
@@ -87,6 +90,7 @@ class DatagramStream:
"""
_ = self.exception
self._transport.sendto(data, addr)
+ await self._drained.wait()
async def recv(self):
"""
@@ -132,13 +136,18 @@ class Protocol(asyncio.DatagramProtocol):
based asyncio into higher level coroutines.
"""
- def __init__(self, recvq, excq):
+ def __init__(self, recvq, excq, drained):
"""
@param recvq - asyncio.Queue for new datagrams
@param excq - asyncio.Queue for exceptions
+ @param drained - asyncio.Event set when the write buffer is below the
+ high watermark.
"""
self._recvq = recvq
self._excq = excq
+ self._drained = drained
+
+ self._drained.set()
# Transports are connected at the time a connection is made.
self._transport = None
@@ -167,12 +176,19 @@ class Protocol(asyncio.DatagramProtocol):
def error_received(self, exc):
self._excq.put_nowait(exc)
+ def pause_writing(self):
+ self._drained.clear()
+ super().pause_writing()
+
+ def resume_writing(self):
+ self._drained.set()
+ super().resume_writing()
+
async def bind(addr):
"""
Bind a socket to a local address for datagrams. The socket will be either
- AF_INET or AF_INET6 depending upon the type of address specified. The
- socket will be reusable (SO_REUSEADDR) once it enters TIME_WAIT.
+ AF_INET or AF_INET6 depending upon the type of address specified.
@param addr - For AF_INET or AF_INET6, a tuple with the the host and port to
to bind; port may be set to 0 to get any free port.
@@ -181,12 +197,13 @@ async def bind(addr):
loop = asyncio.get_event_loop()
recvq = asyncio.Queue()
excq = asyncio.Queue()
+ drained = asyncio.Event()
transport, protocol = await loop.create_datagram_endpoint(
- lambda: Protocol(recvq, excq), local_addr=addr, reuse_address=True
+ lambda: Protocol(recvq, excq, drained), local_addr=addr, reuse_address=False
)
- return DatagramServer(transport, recvq, excq)
+ return DatagramServer(transport, recvq, excq, drained)
async def connect(addr):
@@ -201,12 +218,13 @@ async def connect(addr):
loop = asyncio.get_event_loop()
recvq = asyncio.Queue()
excq = asyncio.Queue()
+ drained = asyncio.Event()
transport, protocol = await loop.create_datagram_endpoint(
- lambda: Protocol(recvq, excq), remote_addr=addr
+ lambda: Protocol(recvq, excq, drained), remote_addr=addr
)
- return DatagramClient(transport, recvq, excq)
+ return DatagramClient(transport, recvq, excq, drained)
async def from_socket(sock):
@@ -224,6 +242,7 @@ async def from_socket(sock):
loop = asyncio.get_event_loop()
recvq = asyncio.Queue()
excq = asyncio.Queue()
+ drained = asyncio.Event()
if sock.family not in (socket.AF_INET, socket.AF_INET6):
raise TypeError(
@@ -234,12 +253,12 @@ async def from_socket(sock):
raise TypeError("socket must be %s" % (socket.SOCK_DGRAM,))
transport, protocol = await loop.create_datagram_endpoint(
- lambda: Protocol(recvq, excq), sock=sock
+ lambda: Protocol(recvq, excq, drained), sock=sock
)
if transport.get_extra_info("peername") is not None:
# Workaround transport ignoring the peer address of the socket.
transport._address = transport.get_extra_info("peername")
- return DatagramClient(transport, recvq, excq)
+ return DatagramClient(transport, recvq, excq, drained)
else:
- return DatagramServer(transport, recvq, excq)
+ return DatagramServer(transport, recvq, excq, drained)
| DatagramStream.send should respect flow control
currently send is a coroutine that `awaits` nothing. it should be a `send/drain` pair of methods instead | jsbronder/asyncio-dgram | diff --git a/test/test_aio.py b/test/test_aio.py
index 7d52a99..7b71ffc 100644
--- a/test/test_aio.py
+++ b/test/test_aio.py
@@ -1,12 +1,23 @@
import asyncio
import contextlib
+import os
import socket
+import unittest.mock
import pytest
import asyncio_dgram
[email protected]
+def mock_socket():
+ s = unittest.mock.create_autospec(socket.socket)
+ s.family = socket.AF_INET
+ s.type = socket.SOCK_DGRAM
+
+ return s
+
+
@contextlib.contextmanager
def loop_exception_handler():
"""
@@ -260,3 +271,81 @@ async def test_unconnected_sender(addr):
with pytest.raises(asyncio.TimeoutError):
await asyncio.wait_for(connected.recv(), 0.05)
+
+
[email protected]
+async def test_protocol_pause_resume(monkeypatch, mock_socket, tmp_path):
+ # This is a little involved, but necessary to make sure that the Protocol
+ # is correctly noticing when writing as been paused and resumed. In
+ # summary:
+ #
+ # - Mock the Protocol with one that sets the write buffer limits to 0 and
+ # records when pause and recume writing are called.
+ #
+ # - Use a mock socket so that we can inject a BlockingIOError on send.
+ # Ideally we'd mock method itself, but it's read-only the entire object
+ # needs to be mocked. Due to this, we need to use a temporary file that we
+ # can write to in order to kick the event loop to consider it ready for
+ # writing.
+
+ class TestableProtocol(asyncio_dgram.aio.Protocol):
+ pause_writing_called = 0
+ resume_writing_called = 0
+ instance = None
+
+ def __init__(self, *args, **kwds):
+ TestableProtocol.instance = self
+ super().__init__(*args, **kwds)
+
+ def connection_made(self, transport):
+ transport.set_write_buffer_limits(low=0, high=0)
+ super().connection_made(transport)
+
+ def pause_writing(self):
+ self.pause_writing_called += 1
+ super().pause_writing()
+
+ def resume_writing(self):
+ self.resume_writing_called += 1
+ super().resume_writing()
+
+ async def passthrough():
+ """
+ Used to mock the wait method on the asyncio.Event tracking if the write
+ buffer is past the high water mark or not. Given we're testing how
+ that case is handled, we know it's safe locally to mock it.
+ """
+ pass
+
+ with monkeypatch.context() as ctx:
+ ctx.setattr(asyncio_dgram.aio, "Protocol", TestableProtocol)
+
+ client = await asyncio_dgram.from_socket(mock_socket)
+ mock_socket.send.side_effect = BlockingIOError
+ mock_socket.fileno.return_value = os.open(
+ tmp_path / "socket", os.O_RDONLY | os.O_CREAT
+ )
+
+ with monkeypatch.context() as ctx2:
+ ctx2.setattr(client._drained, "wait", passthrough)
+ await client.send(b"foo")
+
+ assert TestableProtocol.instance.pause_writing_called == 1
+ assert TestableProtocol.instance.resume_writing_called == 0
+ assert not TestableProtocol.instance._drained.is_set()
+
+ mock_socket.send.side_effect = None
+ fd = os.open(tmp_path / "socket", os.O_WRONLY)
+ os.write(fd, b"\n")
+ os.close(fd)
+
+ with monkeypatch.context() as ctx2:
+ ctx2.setattr(client._drained, "wait", passthrough)
+ await client.send(b"foo")
+ await asyncio.sleep(0.1)
+
+ assert TestableProtocol.instance.pause_writing_called == 1
+ assert TestableProtocol.instance.resume_writing_called == 1
+ assert TestableProtocol.instance._drained.is_set()
+
+ os.close(mock_socket.fileno.return_value)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements-test.txt"
],
"test_cmd": "pytest --log-level=DEBUG -W default -v -s"
} | -e git+https://github.com/jsbronder/asyncio-dgram.git@40cabedc88195bf85a3912752c02fed09385dc94#egg=asyncio_dgram
black==23.3.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
click==8.1.8
exceptiongroup==1.2.2
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==2.0.0
mccabe==0.7.0
mypy-extensions==1.0.0
packaging==24.0
pathspec==0.11.2
platformdirs==4.0.0
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
pytest==7.4.4
pytest-asyncio==0.21.2
tomli==2.0.1
typed-ast==1.5.5
typing_extensions==4.7.1
zipp==3.15.0
| name: asyncio-dgram
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- black==23.3.0
- click==8.1.8
- exceptiongroup==1.2.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- mccabe==0.7.0
- mypy-extensions==1.0.0
- packaging==24.0
- pathspec==0.11.2
- platformdirs==4.0.0
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest==7.4.4
- pytest-asyncio==0.21.2
- tomli==2.0.1
- typed-ast==1.5.5
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/asyncio-dgram
| [
"test/test_aio.py::test_bind_sync[INET]",
"test/test_aio.py::test_bind_sync[INET6]",
"test/test_aio.py::test_echo[INET]",
"test/test_aio.py::test_echo[INET6]",
"test/test_aio.py::test_echo_bind[INET]",
"test/test_aio.py::test_echo_bind[INET6]",
"test/test_aio.py::test_unconnected_sender[INET]",
"test/test_aio.py::test_unconnected_sender[INET6]",
"test/test_aio.py::test_protocol_pause_resume"
] | [] | [
"test/test_aio.py::test_connect_sync[INET]",
"test/test_aio.py::test_connect_sync[INET6]",
"test/test_aio.py::test_from_socket_streamtype[INET]",
"test/test_aio.py::test_from_socket_streamtype[INET6]",
"test/test_aio.py::test_from_socket_bad_socket",
"test/test_aio.py::test_no_server[INET]",
"test/test_aio.py::test_no_server[INET6]"
] | [] | MIT License | 6,081 | 1,240 | [
"asyncio_dgram/aio.py"
] |
|
pre-commit__pre-commit-1251 | 83f08025789c31de9168a69bd3b6f465305ef296 | 2019-12-24 01:47:18 | 2500767f1bbd133d2be3b1c6f29ad91c49f39b7c | diff --git a/pre_commit/util.py b/pre_commit/util.py
index 0f54e9e..8072042 100644
--- a/pre_commit/util.py
+++ b/pre_commit/util.py
@@ -74,36 +74,31 @@ def make_executable(filename):
class CalledProcessError(RuntimeError):
- def __init__(self, returncode, cmd, expected_returncode, output=None):
+ def __init__(self, returncode, cmd, expected_returncode, stdout, stderr):
super(CalledProcessError, self).__init__(
- returncode, cmd, expected_returncode, output,
+ returncode, cmd, expected_returncode, stdout, stderr,
)
self.returncode = returncode
self.cmd = cmd
self.expected_returncode = expected_returncode
- self.output = output
+ self.stdout = stdout
+ self.stderr = stderr
def to_bytes(self):
- output = []
- for maybe_text in self.output:
- if maybe_text:
- output.append(
- b'\n ' +
- five.to_bytes(maybe_text).replace(b'\n', b'\n '),
- )
+ def _indent_or_none(part):
+ if part:
+ return b'\n ' + part.replace(b'\n', b'\n ')
else:
- output.append(b'(none)')
+ return b' (none)'
return b''.join((
- five.to_bytes(
- 'Command: {!r}\n'
- 'Return code: {}\n'
- 'Expected return code: {}\n'.format(
- self.cmd, self.returncode, self.expected_returncode,
- ),
- ),
- b'Output: ', output[0], b'\n',
- b'Errors: ', output[1],
+ 'command: {!r}\n'
+ 'return code: {}\n'
+ 'expected return code: {}\n'.format(
+ self.cmd, self.returncode, self.expected_returncode,
+ ).encode('UTF-8'),
+ b'stdout:', _indent_or_none(self.stdout), b'\n',
+ b'stderr:', _indent_or_none(self.stderr),
))
def to_text(self):
@@ -143,9 +138,7 @@ def cmd_output_b(*cmd, **kwargs):
returncode = proc.returncode
if retcode is not None and retcode != returncode:
- raise CalledProcessError(
- returncode, cmd, retcode, output=(stdout_b, stderr_b),
- )
+ raise CalledProcessError(returncode, cmd, retcode, stdout_b, stderr_b)
return returncode, stdout_b, stderr_b
| pip install crashes can easily confuse newbies
those that are not familiar with the usual annoying messaging that pip presents can get pretty easily confused by the output that happens when `pip` fails to install
here's an example:
```console
$ pre-commit run flake8 --all-files
[INFO] Initializing environment for https://gitlab.com/pycqa/flake8:flake8-walrus.
[INFO] Installing environment for https://gitlab.com/pycqa/flake8.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: Command: ('/home/asottile/.cache/pre-commit/repoi6ij0tyu/py_env-python3/bin/python', '/home/asottile/.cache/pre-commit/repoi6ij0tyu/py_env-python3/bin/pip', 'install', '.', 'flake8-walrus')
Return code: 1
Expected return code: 0
Output:
Processing /home/asottile/.cache/pre-commit/repoi6ij0tyu
Collecting flake8-walrus
Errors:
ERROR: Could not find a version that satisfies the requirement flake8-walrus (from versions: none)
ERROR: No matching distribution found for flake8-walrus
WARNING: You are using pip version 19.2.3, however version 19.3.1 is available.
You should consider upgrading via the 'pip install --upgrade pip' command.
Check the log at /home/asottile/.cache/pre-commit/pre-commit.log
```
this ~admittedly is a bit garbled for a number of reasons:
- pip's error message here isn't great (it _could_ say something about `python_requires` or that there are versions available for other versions) **(the actual error is that the python is python3.6 and the plugin requires python3.8)**
- pip is out of date (when is it not? but admittedly who cares) -- **this is what a lot of people try and fix** -- unfortunately there's not really anything to fix here, the version of `pip` is from inside the virtualenv and doesn't really matter all that much
- `pre-commit` is currently splitting the output from stdout and stderr making it harder to read what's going on
I can't really fix the first one, and the second one I could silence but it doesn't quite feel like the right thing to do (and admittedly knowing the pip version is sometimes useful when debugging). The third however I can pretty easily fix! | pre-commit/pre-commit | diff --git a/tests/languages/docker_test.py b/tests/languages/docker_test.py
index 42616cd..4ea7679 100644
--- a/tests/languages/docker_test.py
+++ b/tests/languages/docker_test.py
@@ -10,7 +10,7 @@ from pre_commit.util import CalledProcessError
def test_docker_is_running_process_error():
with mock.patch(
'pre_commit.languages.docker.cmd_output_b',
- side_effect=CalledProcessError(*(None,) * 4),
+ side_effect=CalledProcessError(None, None, None, None, None),
):
assert docker.docker_is_running() is False
diff --git a/tests/store_test.py b/tests/store_test.py
index 1833dee..c71c350 100644
--- a/tests/store_test.py
+++ b/tests/store_test.py
@@ -125,7 +125,7 @@ def test_clone_shallow_failure_fallback_to_complete(
# Force shallow clone failure
def fake_shallow_clone(self, *args, **kwargs):
- raise CalledProcessError(None, None, None)
+ raise CalledProcessError(None, None, None, None, None)
store._shallow_clone = fake_shallow_clone
ret = store.clone(path, rev)
diff --git a/tests/util_test.py b/tests/util_test.py
index dd1ad37..647fd18 100644
--- a/tests/util_test.py
+++ b/tests/util_test.py
@@ -9,6 +9,7 @@ import pytest
from pre_commit.util import CalledProcessError
from pre_commit.util import clean_path_on_failure
from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
from pre_commit.util import cmd_output_p
from pre_commit.util import parse_version
from pre_commit.util import rmtree
@@ -16,30 +17,26 @@ from pre_commit.util import tmpdir
def test_CalledProcessError_str():
- error = CalledProcessError(
- 1, [str('git'), str('status')], 0, (str('stdout'), str('stderr')),
- )
+ error = CalledProcessError(1, [str('exe')], 0, b'output', b'errors')
assert str(error) == (
- "Command: ['git', 'status']\n"
- 'Return code: 1\n'
- 'Expected return code: 0\n'
- 'Output: \n'
- ' stdout\n'
- 'Errors: \n'
- ' stderr'
+ "command: ['exe']\n"
+ 'return code: 1\n'
+ 'expected return code: 0\n'
+ 'stdout:\n'
+ ' output\n'
+ 'stderr:\n'
+ ' errors'
)
def test_CalledProcessError_str_nooutput():
- error = CalledProcessError(
- 1, [str('git'), str('status')], 0, (str(''), str('')),
- )
+ error = CalledProcessError(1, [str('exe')], 0, b'', b'')
assert str(error) == (
- "Command: ['git', 'status']\n"
- 'Return code: 1\n'
- 'Expected return code: 0\n'
- 'Output: (none)\n'
- 'Errors: (none)'
+ "command: ['exe']\n"
+ 'return code: 1\n'
+ 'expected return code: 0\n'
+ 'stdout: (none)\n'
+ 'stderr: (none)'
)
@@ -90,8 +87,9 @@ def test_cmd_output_exe_not_found():
assert out == 'Executable `dne` not found'
-def test_cmd_output_p_exe_not_found():
- ret, out, _ = cmd_output_p('dne', retcode=None, stderr=subprocess.STDOUT)
[email protected]('fn', (cmd_output_b, cmd_output_p))
+def test_cmd_output_exe_not_found_bytes(fn):
+ ret, out, _ = fn('dne', retcode=None, stderr=subprocess.STDOUT)
assert ret == 1
assert out == b'Executable `dne` not found'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 1.20 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aspy.yaml==1.3.0
cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
mock==5.2.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
-e git+https://github.com/pre-commit/pre-commit.git@83f08025789c31de9168a69bd3b6f465305ef296#egg=pre_commit
pytest==8.3.5
pytest-env==1.1.5
PyYAML==6.0.2
six==1.17.0
toml==0.10.2
tomli==2.2.1
virtualenv==20.29.3
| name: pre-commit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aspy-yaml==1.3.0
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- mock==5.2.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pytest==8.3.5
- pytest-env==1.1.5
- pyyaml==6.0.2
- six==1.17.0
- toml==0.10.2
- tomli==2.2.1
- virtualenv==20.29.3
prefix: /opt/conda/envs/pre-commit
| [
"tests/languages/docker_test.py::test_docker_is_running_process_error",
"tests/store_test.py::test_clone_shallow_failure_fallback_to_complete",
"tests/util_test.py::test_CalledProcessError_str",
"tests/util_test.py::test_CalledProcessError_str_nooutput"
] | [] | [
"tests/languages/docker_test.py::test_docker_fallback_user",
"tests/store_test.py::test_our_session_fixture_works",
"tests/store_test.py::test_get_default_directory_defaults_to_home",
"tests/store_test.py::test_adheres_to_xdg_specification",
"tests/store_test.py::test_uses_environment_variable_when_present",
"tests/store_test.py::test_store_init",
"tests/store_test.py::test_clone",
"tests/store_test.py::test_clone_cleans_up_on_checkout_failure",
"tests/store_test.py::test_clone_when_repo_already_exists",
"tests/store_test.py::test_clone_tag_not_on_mainline",
"tests/store_test.py::test_create_when_directory_exists_but_not_db",
"tests/store_test.py::test_create_when_store_already_exists",
"tests/store_test.py::test_db_repo_name",
"tests/store_test.py::test_local_resources_reflects_reality",
"tests/store_test.py::test_mark_config_as_used",
"tests/store_test.py::test_mark_config_as_used_idempotent",
"tests/store_test.py::test_mark_config_as_used_does_not_exist",
"tests/store_test.py::test_select_all_configs_roll_forward",
"tests/store_test.py::test_mark_config_as_used_roll_forward",
"tests/util_test.py::test_clean_on_failure_noop",
"tests/util_test.py::test_clean_path_on_failure_does_nothing_when_not_raising",
"tests/util_test.py::test_clean_path_on_failure_cleans_for_normal_exception",
"tests/util_test.py::test_clean_path_on_failure_cleans_for_system_exit",
"tests/util_test.py::test_tmpdir",
"tests/util_test.py::test_cmd_output_exe_not_found",
"tests/util_test.py::test_cmd_output_exe_not_found_bytes[cmd_output_b]",
"tests/util_test.py::test_cmd_output_exe_not_found_bytes[cmd_output_p]",
"tests/util_test.py::test_parse_version",
"tests/util_test.py::test_rmtree_read_only_directories"
] | [] | MIT License | 6,084 | 614 | [
"pre_commit/util.py"
] |
|
python-pillow__Pillow-4302 | bbaebe0d20d29328fb20d2e30fa612dcd6b1b87b | 2019-12-26 02:10:58 | 94ec95c571777cf5188c5b791957220d723194a1 | kdschlosser: question.... am I able to use this when calling image.open
```python
image.open(b'\x65\x32\x86\x54\xD2\xA1')
```
the byte data is nothing just for example.
or does it have to explicitly be.
```python
image.open(io.BytesIO(b'\x65\x32\x86\x54\xD2\xA1'))
```
radarhere: @kdschlosser the second one
```python
Image.open(io.BytesIO(b'\x65\x32\x86\x54\xD2\xA1'))
```
The first code sample can be used for a path -
```python
Image.open(b"Tests/images/hopper.jpg")
``` | diff --git a/src/PIL/Image.py b/src/PIL/Image.py
index 41e9c9fe8..a636a2bbe 100644
--- a/src/PIL/Image.py
+++ b/src/PIL/Image.py
@@ -2690,10 +2690,17 @@ def open(fp, mode="r"):
:exception FileNotFoundError: If the file cannot be found.
:exception PIL.UnidentifiedImageError: If the image cannot be opened and
identified.
+ :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO``
+ instance is used for ``fp``.
"""
if mode != "r":
raise ValueError("bad mode %r" % mode)
+ elif isinstance(fp, io.StringIO):
+ raise ValueError(
+ "StringIO cannot be used to open an image. "
+ "Binary data must be used instead."
+ )
exclusive_fp = False
filename = ""
| Loading image from bytes
### What did you do?
Convert string or bytes to PIL.Image
### What did you expect to happen?
have the PIL.Image instance returned
### What actually happened?
Got a Traceback.
### What are your OS, Python and Pillow versions?
* OS: Windows 7 x64
* Python: 2.7 and also 3.7
* Pillow: 6.1.0
OK so this is the skinny. when running python 2.7 everything works as expected.
when I run the same code using Python 3.7 I get the following Traceback
```python
File "C:\Program Files\Python37\lib\site-packages\PIL\Image.py", line 2822, in open
raise IOError("cannot identify image file %r" % (filename if filename else fp))
OSError: cannot identify image file <_io.BytesIO object at 0x0000000003465F68>
```
Here is the test code to use.
```python
import sys
from PIL import Image
from io import BytesIO
# PNG data
LEFT_THUMB = (
'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A\x00\x00\x00\x0D\x49\x48\x44\x52\x00\x00'
'\x00\x13\x00\x00\x00\x0B\x08\x06\x00\x00\x00\x9D\xD5\xB6\x3A\x00\x00\x01'
'\x2E\x49\x44\x41\x54\x78\x9C\x95\xD2\x31\x6B\xC2\x40\x00\x05\xE0\x77\x10'
'\x42\x09\x34\xD0\x29\x21\x82\xC9\x9C\x2E\x72\x4B\x87\x40\x50\xB9\xBF\x5B'
'\x28\x35\xA1\xA4\x94\x76\x68\x1C\x1C\x74\xCD\x9A\xE8\x20\x0A\x12\xA5\x5A'
'\xE4\x72\xC9\x75\x10\x6D\xDC\xCE\xF7\x03\x3E\xDE\x83\x47\xA4\x94\x68\x67'
'\xB5\xD9\x4E\xBF\xBF\x3E\xE8\x78\x3C\x86\x6A\x3C\xCF\x43\x10\x04\x20\x6D'
'\x6C\xB5\xD9\x4E\x93\xF8\x95\x5A\x96\x05\xC6\x98\x32\x56\x14\x05\x46\xA3'
'\x11\xB4\x36\x14\xBD\x3C\xD3\x4E\xA7\x03\xC6\x18\x8E\xC7\x23\x9A\xA6\x51'
'\xC2\x5C\xD7\x45\x9E\xE7\x27\xEC\x0C\x39\x8E\x03\xC6\x18\x0E\x87\x83\x32'
'\x04\x00\xE7\x75\x1A\xE7\x7C\xF2\xF9\xFE\x46\x6D\xDB\x06\x63\x0C\xFB\xFD'
'\x1E\x75\x5D\x2B\x43\x57\x58\xF9\xF3\xAB\xAD\xD7\x6B\x98\xA6\x09\x21\x04'
'\x76\xBB\x1D\x84\x10\x37\x61\x86\x61\x9C\x30\x00\x70\x1C\x07\x49\x92\x80'
'\x10\x82\x7E\xBF\x8F\xE5\x72\x79\x13\x78\x69\xF6\x70\x6F\x88\x5E\xAF\x37'
'\x2B\xCB\x92\xC6\x71\x0C\x42\x08\xC2\x30\xC4\x7C\x3E\x57\x06\x2F\x98\xAE'
'\xEB\x4F\xAE\xEB\x4E\x06\x83\xC1\x4C\x4A\x49\xA3\x28\x82\x94\x12\x61\x18'
'\x2A\x37\x5B\x2C\x16\xE8\x76\xBB\xFF\x3F\xE3\x9C\x4F\x8A\xA2\xD0\xD2\x34'
'\xA5\x59\x96\xA1\xAA\x2A\x65\xCC\xB2\x2C\x0C\x87\xC3\xEB\xD3\x9E\xC1\xAA'
'\xAA\xEE\x38\xE7\x4A\x90\xAE\xEB\x00\x00\xDF\xF7\x1F\xFF\x00\x09\x7C\xA7'
'\x93\xB1\xFB\xFA\x11\x00\x00\x00\x00\x49\x45\x4E\x44\xAE\x42\x60\x82'
)
PY3 = sys.version_info[0] > 2
if PY3:
stream = BytesIO(LEFT_THUMB.encode())
else:
stream = BytesIO(LEFT_THUMB)
image = Image.open(stream).convert("RGBA")
stream.close()
image.show()
```
Now This is where it goes a little bit sideways. when running Python 2.7 if I use cStringIO.StringIO everything works as expected.. But if I use io.StringIO I get the traceback listed above. I think it has got something to do with the first 16 bytes but I am not 100% sure.
any help is appreciated.
| python-pillow/Pillow | diff --git a/Tests/test_image.py b/Tests/test_image.py
index 83da76b96..47e7420ef 100644
--- a/Tests/test_image.py
+++ b/Tests/test_image.py
@@ -1,3 +1,4 @@
+import io
import os
import shutil
import tempfile
@@ -91,6 +92,9 @@ class TestImage(PillowTestCase):
def test_bad_mode(self):
self.assertRaises(ValueError, Image.open, "filename", "bad mode")
+ def test_stringio(self):
+ self.assertRaises(ValueError, Image.open, io.StringIO())
+
def test_pathlib(self):
from PIL.Image import Path
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 6.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc libjpeg-dev zlib1g-dev libtiff5-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev libharfbuzz-dev libfribidi-dev libxcb1-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
black==25.1.0
blessed==1.20.0
build==1.2.2.post1
certifi==2025.1.31
charset-normalizer==3.4.1
check-manifest==0.50
click==8.1.8
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
jarn.viewdoc==2.7
Jinja2==3.1.6
MarkupSafe==3.0.2
mypy-extensions==1.0.0
olefile==0.47
packaging==24.2
pathspec==0.12.1
-e git+https://github.com/python-pillow/Pillow.git@bbaebe0d20d29328fb20d2e30fa612dcd6b1b87b#egg=Pillow
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
Pygments==2.19.1
pyproject_hooks==1.2.0
pyroma==4.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
trove-classifiers==2025.3.19.19
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
zipp==3.21.0
| name: Pillow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- black==25.1.0
- blessed==1.20.0
- build==1.2.2.post1
- certifi==2025.1.31
- charset-normalizer==3.4.1
- check-manifest==0.50
- click==8.1.8
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jarn-viewdoc==2.7
- jinja2==3.1.6
- markupsafe==3.0.2
- mypy-extensions==1.0.0
- olefile==0.47
- packaging==24.2
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pygments==2.19.1
- pyproject-hooks==1.2.0
- pyroma==4.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- trove-classifiers==2025.3.19.19
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- zipp==3.21.0
prefix: /opt/conda/envs/Pillow
| [
"Tests/test_image.py::TestImage::test_stringio"
] | [] | [
"Tests/test_image.py::TestImage::test__new",
"Tests/test_image.py::TestImage::test_alpha_composite",
"Tests/test_image.py::TestImage::test_alpha_inplace",
"Tests/test_image.py::TestImage::test_bad_mode",
"Tests/test_image.py::TestImage::test_check_size",
"Tests/test_image.py::TestImage::test_comparison_with_other_type",
"Tests/test_image.py::TestImage::test_dump",
"Tests/test_image.py::TestImage::test_effect_mandelbrot",
"Tests/test_image.py::TestImage::test_effect_mandelbrot_bad_arguments",
"Tests/test_image.py::TestImage::test_effect_noise",
"Tests/test_image.py::TestImage::test_effect_spread",
"Tests/test_image.py::TestImage::test_exception_inheritance",
"Tests/test_image.py::TestImage::test_expand_x",
"Tests/test_image.py::TestImage::test_expand_xy",
"Tests/test_image.py::TestImage::test_fp_name",
"Tests/test_image.py::TestImage::test_fromstring",
"Tests/test_image.py::TestImage::test_getbands",
"Tests/test_image.py::TestImage::test_getbbox",
"Tests/test_image.py::TestImage::test_getchannel",
"Tests/test_image.py::TestImage::test_getchannel_wrong_params",
"Tests/test_image.py::TestImage::test_image_modes_fail",
"Tests/test_image.py::TestImage::test_image_modes_success",
"Tests/test_image.py::TestImage::test_internals",
"Tests/test_image.py::TestImage::test_invalid_image",
"Tests/test_image.py::TestImage::test_linear_gradient",
"Tests/test_image.py::TestImage::test_linear_gradient_wrong_mode",
"Tests/test_image.py::TestImage::test_load_on_nonexclusive_multiframe",
"Tests/test_image.py::TestImage::test_ne",
"Tests/test_image.py::TestImage::test_no_resource_warning_on_save",
"Tests/test_image.py::TestImage::test_offset_not_implemented",
"Tests/test_image.py::TestImage::test_overrun",
"Tests/test_image.py::TestImage::test_p_from_rgb_rgba",
"Tests/test_image.py::TestImage::test_pathlib",
"Tests/test_image.py::TestImage::test_radial_gradient",
"Tests/test_image.py::TestImage::test_radial_gradient_wrong_mode",
"Tests/test_image.py::TestImage::test_readonly_save",
"Tests/test_image.py::TestImage::test_register_extensions",
"Tests/test_image.py::TestImage::test_registered_extensions",
"Tests/test_image.py::TestImage::test_registered_extensions_uninitialized",
"Tests/test_image.py::TestImage::test_remap_palette",
"Tests/test_image.py::TestImage::test_sanity",
"Tests/test_image.py::TestImage::test_storage_neg",
"Tests/test_image.py::TestImage::test_tempfile",
"Tests/test_image.py::TestImage::test_unknown_extension",
"Tests/test_image.py::TestImage::test_width_height",
"Tests/test_image.py::TestRegistry::test_encode_registry",
"Tests/test_image.py::TestRegistry::test_encode_registry_fail"
] | [] | MIT-CMU License | 6,087 | 224 | [
"src/PIL/Image.py"
] |
bids-standard__pybids-560 | 35e1296202959d375e570d08078282c26ad02bc0 | 2019-12-27 05:09:55 | c2b753c7ecc3dd830005cfaea06b5b24eb5f05be | diff --git a/bids/layout/index.py b/bids/layout/index.py
index 94fd675b..7dd92018 100644
--- a/bids/layout/index.py
+++ b/bids/layout/index.py
@@ -164,10 +164,32 @@ class BIDSLayoutIndexer(object):
"""Index all files in the BIDS dataset. """
self._index_dir(self.root, self.config)
- def index_metadata(self):
- """Index metadata for all files in the BIDS dataset. """
+ def index_metadata(self, **filters):
+ """Index metadata for all files in the BIDS dataset.
+
+ Parameters
+ ----------
+
+ **filters
+ keyword arguments passed to the .get() method of a
+ :obj:`bids.layout.BIDSLayout` object.
+ These keyword arguments define what files get selected
+ for metadata indexing.
+ """
+
+ if filters:
+ # ensure we are returning objects
+ filters['return_type'] = 'object'
+ # until 0.11.0, user can specify extension or extensions
+ ext_key = 'extensions' if 'extensions' in filters else 'extension'
+ if filters.get(ext_key):
+ filters[ext_key] = listify(filters[ext_key])
+ # ensure json files are being indexed
+ if 'json' not in filters[ext_key]:
+ filters[ext_key].append('json')
+
# Process JSON files first if we're indexing metadata
- all_files = self.layout.get(absolute_paths=True)
+ all_files = self.layout.get(absolute_paths=True, **filters)
# Track ALL entities we've seen in file names or metadatas
all_entities = {}
| partial metadata indexing?
I'm curious if anyone has thoughts on partial metadata indexing if metadata indexing takes so long (I do not know if partial metadata indexing would even be helpful, depending on what's taking up the most time).
I patched [BIDSLayoutIndexer's index_metadata function](https://github.com/jdkent/NiBetaSeries/blob/e92f952cdd6136fb85dc0707f6b683efb9cd674c/src/nibetaseries/workflows/utils.py#L19) to allow filters on the get data query.
## Use Case
I'm indexing a large(ish) dataset and only use the metadata associated with the bold files, so I only want to index the metadata for the bold files.
## Question
Would it beneficial (i.e., quicker indexing) to have the option to index metadata for a specific subset of files for a tool that only cares about that subset of files? | bids-standard/pybids | diff --git a/bids/layout/tests/test_layout.py b/bids/layout/tests/test_layout.py
index 7c3a55a3..9275e292 100644
--- a/bids/layout/tests/test_layout.py
+++ b/bids/layout/tests/test_layout.py
@@ -12,6 +12,7 @@ import pytest
import bids
from bids.layout import (BIDSLayout, parse_file_entities, add_config_paths,
Query)
+from bids.layout.index import BIDSLayoutIndexer
from bids.layout.models import Entity, Config
from bids.tests import get_test_data_path
from bids.utils import natural_sort
@@ -21,6 +22,28 @@ def test_layout_init(layout_7t_trt):
assert isinstance(layout_7t_trt.files, dict)
[email protected](
+ 'index_metadata,query,result',
+ [
+ (True, None, 3.0),
+ (False, None, None),
+ (False, {}, 3.0),
+ (False, {'task': 'rest'}, 3.0),
+ (False, {'task': 'rest', 'extension': ['nii.gz']}, 3.0),
+ (False, {'task': 'rest', 'extension': 'nii.gz'}, 3.0),
+ (False, {'task': 'rest', 'extension': ['nii.gz', 'json'], 'return_type': 'file'}, 3.0),
+ ])
+def test_index_metadata(index_metadata, query, result):
+ data_dir = join(get_test_data_path(), '7t_trt')
+ layout = BIDSLayout(data_dir, index_metadata=index_metadata)
+ if not index_metadata and query is not None:
+ indexer = BIDSLayoutIndexer(layout)
+ indexer.index_metadata(**query)
+ sample_file = layout.get(task='rest', extension='nii.gz', acq='fullbrain')[0]
+ metadata = sample_file.get_metadata()
+ assert metadata.get('RepetitionTime') == result
+
+
def test_layout_repr(layout_7t_trt):
assert "Subjects: 10 | Sessions: 20 | Runs: 20" in str(layout_7t_trt)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-xdist"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
bids-validator==1.14.0
certifi==2021.5.30
charset-normalizer==2.0.12
docopt==0.6.2
docutils==0.18.1
execnet==1.9.0
greenlet==2.0.2
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
m2r==0.3.1
MarkupSafe==2.0.1
mistune==0.8.4
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nibabel==3.2.2
num2words==0.5.14
numpy==1.19.5
numpydoc==1.1.0
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
patsy==1.0.1
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
-e git+https://github.com/bids-standard/pybids.git@35e1296202959d375e570d08078282c26ad02bc0#egg=pybids
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
scipy==1.5.4
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: pybids
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- bids-validator==1.14.0
- charset-normalizer==2.0.12
- docopt==0.6.2
- docutils==0.18.1
- execnet==1.9.0
- greenlet==2.0.2
- idna==3.10
- imagesize==1.4.1
- jinja2==3.0.3
- m2r==0.3.1
- markupsafe==2.0.1
- mistune==0.8.4
- mock==5.2.0
- nibabel==3.2.2
- num2words==0.5.14
- numpy==1.19.5
- numpydoc==1.1.0
- pandas==1.1.5
- patsy==1.0.1
- pygments==2.14.0
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- scipy==1.5.4
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- urllib3==1.26.20
prefix: /opt/conda/envs/pybids
| [
"bids/layout/tests/test_layout.py::test_index_metadata[False-query3-3.0]",
"bids/layout/tests/test_layout.py::test_index_metadata[False-query4-3.0]",
"bids/layout/tests/test_layout.py::test_index_metadata[False-query5-3.0]",
"bids/layout/tests/test_layout.py::test_index_metadata[False-query6-3.0]"
] | [] | [
"bids/layout/tests/test_layout.py::test_layout_init",
"bids/layout/tests/test_layout.py::test_index_metadata[True-None-3.0]",
"bids/layout/tests/test_layout.py::test_index_metadata[False-None-None]",
"bids/layout/tests/test_layout.py::test_index_metadata[False-query2-3.0]",
"bids/layout/tests/test_layout.py::test_layout_repr",
"bids/layout/tests/test_layout.py::test_load_description",
"bids/layout/tests/test_layout.py::test_get_file",
"bids/layout/tests/test_layout.py::test_get_metadata",
"bids/layout/tests/test_layout.py::test_get_metadata2",
"bids/layout/tests/test_layout.py::test_get_metadata3",
"bids/layout/tests/test_layout.py::test_get_metadata4",
"bids/layout/tests/test_layout.py::test_get_metadata_meg",
"bids/layout/tests/test_layout.py::test_get_metadata5",
"bids/layout/tests/test_layout.py::test_get_metadata_via_bidsfile",
"bids/layout/tests/test_layout.py::test_get_with_bad_target",
"bids/layout/tests/test_layout.py::test_get_bvals_bvecs",
"bids/layout/tests/test_layout.py::test_get_subjects",
"bids/layout/tests/test_layout.py::test_get_fieldmap",
"bids/layout/tests/test_layout.py::test_get_fieldmap2",
"bids/layout/tests/test_layout.py::test_bids_json",
"bids/layout/tests/test_layout.py::test_get_return_type_dir",
"bids/layout/tests/test_layout.py::test_get_val_none[None]",
"bids/layout/tests/test_layout.py::test_get_val_none[Query.NONE]",
"bids/layout/tests/test_layout.py::test_get_val_enum_any",
"bids/layout/tests/test_layout.py::test_get_return_sorted",
"bids/layout/tests/test_layout.py::test_ignore_files",
"bids/layout/tests/test_layout.py::test_force_index",
"bids/layout/tests/test_layout.py::test_nested_include_exclude",
"bids/layout/tests/test_layout.py::test_nested_include_exclude_with_regex",
"bids/layout/tests/test_layout.py::test_layout_with_derivs",
"bids/layout/tests/test_layout.py::test_layout_with_multi_derivs[None]",
"bids/layout/tests/test_layout.py::test_get_layouts_in_scope[None]",
"bids/layout/tests/test_layout.py::test_get_dataset_description[None]",
"bids/layout/tests/test_layout.py::test_layout_with_multi_derivs[bidsdb0]",
"bids/layout/tests/test_layout.py::test_get_layouts_in_scope[bidsdb0]",
"bids/layout/tests/test_layout.py::test_get_dataset_description[bidsdb0]",
"bids/layout/tests/test_layout.py::test_layout_with_multi_derivs[bidsdb1]",
"bids/layout/tests/test_layout.py::test_get_layouts_in_scope[bidsdb1]",
"bids/layout/tests/test_layout.py::test_get_dataset_description[bidsdb1]",
"bids/layout/tests/test_layout.py::test_query_derivatives",
"bids/layout/tests/test_layout.py::test_restricted_words_in_path",
"bids/layout/tests/test_layout.py::test_derivative_getters",
"bids/layout/tests/test_layout.py::test_get_tr",
"bids/layout/tests/test_layout.py::test_to_df",
"bids/layout/tests/test_layout.py::test_parse_file_entities",
"bids/layout/tests/test_layout.py::test_parse_file_entities_from_layout[None]",
"bids/layout/tests/test_layout.py::test_parse_file_entities_from_layout[bidsdb-synth0]",
"bids/layout/tests/test_layout.py::test_parse_file_entities_from_layout[bidsdb-synth1]",
"bids/layout/tests/test_layout.py::test_deriv_indexing",
"bids/layout/tests/test_layout.py::test_add_config_paths",
"bids/layout/tests/test_layout.py::test_layout_in_scope",
"bids/layout/tests/test_layout.py::test_indexed_file_associations",
"bids/layout/tests/test_layout.py::test_layout_save",
"bids/layout/tests/test_layout.py::test_indexing_tag_conflict",
"bids/layout/tests/test_layout.py::test_get_with_wrong_dtypes",
"bids/layout/tests/test_layout.py::test_get_with_regex_search",
"bids/layout/tests/test_layout.py::test_get_with_regex_search_bad_dtype",
"bids/layout/tests/test_layout.py::test_load_layout"
] | [] | MIT License | 6,093 | 398 | [
"bids/layout/index.py"
] |
|
adamboche__python-marshmallow-union-29 | 49f82a94403b686e56c4509cf75bca40a9dfe23a | 2019-12-27 12:40:46 | 58bfc9fb069e00478afba87da3e003464cbdaebe | diff --git a/src/marshmallow_union/__init__.py b/src/marshmallow_union/__init__.py
index 69e3099..b3b86e1 100644
--- a/src/marshmallow_union/__init__.py
+++ b/src/marshmallow_union/__init__.py
@@ -59,9 +59,22 @@ class Union(marshmallow.fields.Field):
for candidate_field in fields:
try:
- return candidate_field.serialize(
- attr, obj, error_store=error_store, **kwargs
- )
+ try:
+ return candidate_field.serialize(
+ attr, obj, error_store=error_store, **kwargs
+ )
+ except TypeError:
+ # When serialising a mapping (eg dict) value item, 'attr' and 'obj'
+ # is none (as a dict value is not an attribute of anything). This
+ # causes issues with the attribute-get methods within
+ # 'marshmallow', but can be bypassed by passing the known 'value'
+ # directly to '_serialize'
+ if attr is obj is None:
+ # pylint: disable=protected-access
+ return candidate_field._serialize(
+ value, attr, obj, **kwargs
+ )
+ raise
# pylint: disable=broad-except
except Exception as exc:
pass
| Support union for mapping value
When using a union for a mapping value-type, marshmallow-union raises `ExceptionGroup` during serialisation.
All candidate fields raise as marshmallow-union calls `schema.serialise(attr, obj, ...)`. However, inside a mapping value-type, marshmallow calls `schema._serialise(value, None, None)`, as the mapping values are not part of the serialised object.
### Reproduction
```python
import typing as t
import dataclasses
import marshmallow
import marshmallow_dataclass
@marshmallow_dataclass.dataclass
class Foo:
bar: t.Dict[str, t.Union[int, str]] = dataclasses.field(default_factory=dict)
obj = Foo(bar={"spam": "eggs", "ham": 42})
schema = Foo.Schema()
schema.dump(obj)
```
### Expected
```python
{'bar': {'spam': 'eggs', 'ham': 42}}
```
### Actual
```python
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/schema.py", line 553, in dump
result = self._serialize(processed_obj, many=many)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/schema.py", line 517, in _serialize
value = field_obj.serialize(attr_name, obj, accessor=self.get_attribute)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/fields.py", line 325, in serialize
return self._serialize(value, attr, obj, **kwargs)
File "`/env-tmp/lib/python3.7/site-packages/marshmallow/fields.py", line 1510, in _serialize
result[keys[k]] = self.value_field._serialize(v, None, None, **kwargs)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow_union/__init__.py", line 69, in _serialize
raise ExceptionGroup("All serializers raised exceptions.\n", error_store.errors)
marshmallow_union.ExceptionGroup: ('All serializers raised exceptions.\n', {})
```
### Environment
* OS: Ubuntu 19:10
* Python: 3.7.5
* marshmallow: 3.2.2
* marshmallow-union: 0.1.12
* marshmallow-dataclass: 7.1 | adamboche/python-marshmallow-union | diff --git a/tests/test_union.py b/tests/test_union.py
index 10e6066..c4ffd4f 100644
--- a/tests/test_union.py
+++ b/tests/test_union.py
@@ -32,6 +32,19 @@ class OtherSchema(marshmallow.Schema):
)
+class MappingSchema(marshmallow.Schema):
+ """Schema with union inside mapping."""
+ items = marshmallow.fields.Dict(
+ marshmallow.fields.String(),
+ marshmallow_union.Union(
+ [
+ marshmallow.fields.Integer(),
+ marshmallow.fields.List(marshmallow.fields.Integer()),
+ ],
+ ),
+ )
+
+
class StrIntSchema(marshmallow.Schema):
"""Schema with str and int candidates."""
@@ -46,6 +59,7 @@ class StrIntSchema(marshmallow.Schema):
({"name": "Alice", "number_or_numbers": [25, 50]}, OtherSchema()),
({"x": 5}, StrIntSchema()),
({"x": "hello"}, StrIntSchema()),
+ ({"items": {"a": 42, "b": [17]}}, MappingSchema()),
],
)
def test_round_trip(data, schema):
@@ -60,6 +74,7 @@ def test_round_trip(data, schema):
[
({"name": "Alice", "number_or_numbers": "twenty-five"}, PersonSchema()),
({"name": "Alice", "number_or_numbers": {"x": 14}}, PersonSchema()),
+ ({"items": {"a": 42, "b": "spam"}}, MappingSchema()),
],
)
def test_raises(data, schema):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest -xvs"
} | ansicolors==1.0.2
appdirs==1.4.3
argh==0.26.2
astroid==2.2.5
atomicwrites==1.3.0
attrs==19.1.0
black==19.3b0
bleach==3.1.0
bump2version==0.5.10
certifi @ file:///croot/certifi_1671487769961/work/certifi
check-manifest==0.39
Click==7.0
coverage==4.5.3
cuvner==18.0.1
docutils==0.14
filelock==3.0.12
importlib-metadata==0.17
incremental==17.5.0
isort==4.3.20
Jinja2==2.10.1
lazy-object-proxy==1.4.1
MarkupSafe==1.1.1
marshmallow==3.2.1
-e git+https://github.com/adamboche/python-marshmallow-union.git@49f82a94403b686e56c4509cf75bca40a9dfe23a#egg=marshmallow_union
mccabe==0.6.1
more-itertools==7.0.0
mypy==0.701
mypy-extensions==0.4.1
packaging==19.0
pathtools==0.1.2
pex==1.6.7
pluggy==0.12.0
py==1.8.0
Pygments==2.4.2
pylint==2.3.1
pyparsing==2.4.0
pytest==4.6.2
pytest-sphinx==0.2.2
PyYAML==5.1.1
readme-renderer==24.0
six==1.12.0
toml==0.10.0
towncrier==19.2.0
tox==3.12.1
typed-ast==1.3.5
unidiff==0.5.5
versioneer==0.18
virtualenv==16.6.0
watchdog==0.9.0
wcwidth==0.1.7
webencodings==0.5.1
wrapt==1.11.1
zipp==0.5.1
| name: python-marshmallow-union
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- ansicolors==1.0.2
- appdirs==1.4.3
- argh==0.26.2
- astroid==2.2.5
- atomicwrites==1.3.0
- attrs==19.1.0
- black==19.3b0
- bleach==3.1.0
- bump2version==0.5.10
- check-manifest==0.39
- click==7.0
- coverage==4.5.3
- cuvner==18.0.1
- docutils==0.14
- filelock==3.0.12
- importlib-metadata==0.17
- incremental==17.5.0
- isort==4.3.20
- jinja2==2.10.1
- lazy-object-proxy==1.4.1
- markupsafe==1.1.1
- marshmallow==3.2.1
- marshmallow-union==0.1.12
- mccabe==0.6.1
- more-itertools==7.0.0
- mypy==0.701
- mypy-extensions==0.4.1
- packaging==19.0
- pathtools==0.1.2
- pex==1.6.7
- pluggy==0.12.0
- py==1.8.0
- pygments==2.4.2
- pylint==2.3.1
- pyparsing==2.4.0
- pytest==4.6.2
- pytest-sphinx==0.2.2
- pyyaml==5.1.1
- readme-renderer==24.0
- six==1.12.0
- toml==0.10.0
- towncrier==19.2.0
- tox==3.12.1
- typed-ast==1.3.5
- unidiff==0.5.5
- versioneer==0.18
- virtualenv==16.6.0
- watchdog==0.9.0
- wcwidth==0.1.7
- webencodings==0.5.1
- wrapt==1.11.1
- zipp==0.5.1
prefix: /opt/conda/envs/python-marshmallow-union
| [
"tests/test_union.py::test_round_trip[data5-schema5]",
"tests/test_union.py::test_raises[data0-schema0]",
"tests/test_union.py::test_raises[data1-schema1]",
"tests/test_union.py::test_raises[data2-schema2]"
] | [] | [
"tests/test_union.py::test_round_trip[data0-schema0]",
"tests/test_union.py::test_round_trip[data1-schema1]",
"tests/test_union.py::test_round_trip[data2-schema2]",
"tests/test_union.py::test_round_trip[data3-schema3]",
"tests/test_union.py::test_round_trip[data4-schema4]"
] | [] | MIT License | 6,095 | 313 | [
"src/marshmallow_union/__init__.py"
] |
|
benjamincorcoran__sasdocs-5 | e89745837421fd33469de903650aaf881110d891 | 2019-12-28 13:42:09 | e89745837421fd33469de903650aaf881110d891 | diff --git a/sasdocs/objects.py b/sasdocs/objects.py
index b625e6e..5208195 100644
--- a/sasdocs/objects.py
+++ b/sasdocs/objects.py
@@ -507,6 +507,19 @@ class macro:
def __attrs_post_init__(self):
self.contents = [obj for obj in self.contents if obj != '\n']
+ about = []
+ for obj in self.contents:
+ if type(obj).__name__ == 'comment':
+ about.append(obj)
+ else:
+ break
+ if len(about) == 0:
+ self.about = 'No docstring found.'
+ self.documented = False
+ else:
+ self.about = '\n'.join([comment.text for comment in about])
+ self.documented = True
+
# Parsy Objects
| Give Macro object an about attribute
# Issue
Macro object needs an `about` attribute capturing the documentation of the macro variable. This should be the first set of comments *inside* the macro. The comments need to be collapsed into a single string object. | benjamincorcoran/sasdocs | diff --git a/tests/test_objects.py b/tests/test_objects.py
index 7249a1f..515899d 100644
--- a/tests/test_objects.py
+++ b/tests/test_objects.py
@@ -149,10 +149,20 @@ testcases = [
]
@pytest.mark.parametrize("case,expected", testcases)
-def test_macro_parse(case, expected):
-
+def test_macro_about_parse(case, expected):
assert force_partial_parse(fullprogram, case) == [expected]
+testcases = [
+ ('%macro test; /*This is the test macro*/ %mend;', 'This is the test macro'),
+ ('%macro test; /*This is the test macro*/\n/*This is the second line*/%mend;', 'This is the test macro\nThis is the second line'),
+ ('%macro test; data a; set b; run; /*This is the test macro*/ %mend;', 'No docstring found.'),
+]
+
[email protected]("case,expected", testcases)
+def test_macro_parse(case, expected):
+ macro = force_partial_parse(fullprogram,case)[0]
+ assert macro.about == expected
+
testcases = [
('%macro test; data a; set b; run; %mend;', [dataStep(outputs=[dataObject(library=None, dataset=['a'], options=None)], header=' ', inputs=[dataObject(library=None, dataset=['b'], options=None)], body=' ')]),
('%macro test(a=1/*Doc A*/,b/*Doc B*/); data a; set b; run; %mend;', [dataStep(outputs=[dataObject(library=None, dataset=['a'], options=None)], header=' ', inputs=[dataObject(library='work', dataset=['b'])], body=' ')]),
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==24.2.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
parsy==2.1
pluggy==1.2.0
pytest==7.4.4
sasdocs @ file:///sasdocs
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: sasdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==24.2.0
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- parsy==2.1
- pluggy==1.2.0
- pytest==7.4.4
- sasdocs==1.0.dev0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/sasdocs
| [
"tests/test_objects.py::test_macro_parse[%macro"
] | [] | [
"tests/test_objects.py::test_sasname_parse[test-expected0]",
"tests/test_objects.py::test_sasname_parse[&test-expected1]",
"tests/test_objects.py::test_sasname_parse[&test.-expected2]",
"tests/test_objects.py::test_sasname_parse[&&test&test.-expected3]",
"tests/test_objects.py::test_sasname_parse[ab&test-expected4]",
"tests/test_objects.py::test_sasname_parse[ab&test.-expected5]",
"tests/test_objects.py::test_sasname_parse[ab&test.ab-expected6]",
"tests/test_objects.py::test_sasname_parse[ab&test.ab&test-expected7]",
"tests/test_objects.py::test_sasname_parse[ab&test.ab&test.-expected8]",
"tests/test_objects.py::test_sasname_parse[ab&test.abab&test.ab-expected9]",
"tests/test_objects.py::test_dataObject_parse[lib.test-expected0]",
"tests/test_objects.py::test_dataObject_parse[&test.test-expected1]",
"tests/test_objects.py::test_dataObject_parse[lib.&test.-expected2]",
"tests/test_objects.py::test_dataObject_parse[lib.ab&test.-expected3]",
"tests/test_objects.py::test_dataObject_parse[lib.ab&test-expected4]",
"tests/test_objects.py::test_dataObject_parse[lib.ab&test.ab-expected5]",
"tests/test_objects.py::test_dataObject_parse[lib.ab&test.ab&test-expected6]",
"tests/test_objects.py::test_dataObject_parse[li&lib.b.ab&test.ab&test.-expected7]",
"tests/test_objects.py::test_dataObject_parse[ab&lib.&lib.aa.ab&test.abab&test.ab-expected8]",
"tests/test_objects.py::test_dataLineOption_parse[(where=(1=1))-expected0]",
"tests/test_objects.py::test_dataLine_parse[test",
"tests/test_objects.py::test_dataStep_parse[data",
"tests/test_objects.py::test_procedure_parse[proc",
"tests/test_objects.py::test_libname_parse[libname",
"tests/test_objects.py::test_include_parse[*Comment;-expected0]",
"tests/test_objects.py::test_include_parse[/*Comment*/-expected1]",
"tests/test_objects.py::test_macroVariableDefinition_parse[%let",
"tests/test_objects.py::test_macroargument_parse[a-expected0]",
"tests/test_objects.py::test_macroargument_parse[a=1-expected1]",
"tests/test_objects.py::test_macroargument_parse[a",
"tests/test_objects.py::test_macroargument_parse[a/*Docs*/-expected5]",
"tests/test_objects.py::test_macroargument_parse[a=1/*Docs*/-expected6]",
"tests/test_objects.py::test_macroargumentLine_parse[(a,",
"tests/test_objects.py::test_macroargumentLine_parse[(a=1,",
"tests/test_objects.py::test_macroargumentLine_parse[(a=1/*Doc",
"tests/test_objects.py::test_macro_about_parse[%macro",
"tests/test_objects.py::test_macro_children_parse[%macro",
"tests/test_objects.py::test_force_partial_parse[\\nlibname",
"tests/test_objects.py::test_force_partial_marco_parse[\\nlibname"
] | [] | MIT License | 6,102 | 212 | [
"sasdocs/objects.py"
] |
|
iterative__dvc-3020 | cbc7e21593b30402ff53c28a62ddcaa3dd5bf277 | 2019-12-30 22:12:38 | dd5e0589dbdbf58077774e4a086bd4f28c1b11b7 | efiop: Also, no test? Your PR broke even existing tests. Is this change ready for review?
chatcannon: No, it's not ready for review yet, sorry | diff --git a/dvc/dependency/repo.py b/dvc/dependency/repo.py
index 0963e1061..595b836dc 100644
--- a/dvc/dependency/repo.py
+++ b/dvc/dependency/repo.py
@@ -6,7 +6,9 @@ from dvc.utils.compat import FileNotFoundError
from funcy import merge
from .local import DependencyLOCAL
+from dvc.external_repo import cached_clone
from dvc.external_repo import external_repo
+from dvc.exceptions import NotDvcRepoError
from dvc.exceptions import OutputNotFoundError
from dvc.exceptions import PathMissingError
from dvc.utils.fs import fs_copy
@@ -75,27 +77,35 @@ class DependencyREPO(DependencyLOCAL):
return out
@staticmethod
- def _is_git_file(repo, path):
- if not os.path.isabs(path):
- try:
- output = repo.find_out_by_relpath(path)
- if not output.use_cache:
- return True
- except OutputNotFoundError:
- return True
- return False
+ def _is_git_file(repo_dir, path):
+ from dvc.repo import Repo
+
+ if os.path.isabs(path):
+ return False
+
+ try:
+ repo = Repo(repo_dir)
+ except NotDvcRepoError:
+ return True
+
+ try:
+ output = repo.find_out_by_relpath(path)
+ return not output.use_cache
+ except OutputNotFoundError:
+ return True
+ finally:
+ repo.close()
def _copy_if_git_file(self, to_path):
src_path = self.def_path
- with self._make_repo(
- cache_dir=self.repo.cache.local.cache_dir
- ) as repo:
- if not self._is_git_file(repo, src_path):
- return False
+ repo_dir = cached_clone(**self.def_repo)
+
+ if not self._is_git_file(repo_dir, src_path):
+ return False
- src_full_path = os.path.join(repo.root_dir, src_path)
- dst_full_path = os.path.abspath(to_path)
- fs_copy(src_full_path, dst_full_path)
+ src_full_path = os.path.join(repo_dir, src_path)
+ dst_full_path = os.path.abspath(to_path)
+ fs_copy(src_full_path, dst_full_path)
return True
def download(self, to):
diff --git a/dvc/external_repo.py b/dvc/external_repo.py
index cf5ecacdd..9ff2f2a41 100644
--- a/dvc/external_repo.py
+++ b/dvc/external_repo.py
@@ -33,18 +33,20 @@ def external_repo(url=None, rev=None, rev_lock=None, cache_dir=None):
repo.close()
-def _external_repo(url=None, rev=None, cache_dir=None):
- from dvc.config import Config
- from dvc.cache import CacheConfig
- from dvc.repo import Repo
+def cached_clone(url, rev=None, **_ignored_kwargs):
+ """Clone an external git repo to a temporary directory.
- key = (url, rev, cache_dir)
- if key in REPO_CACHE:
- return REPO_CACHE[key]
+ Returns the path to a local temporary directory with the specified
+ revision checked out.
+
+ Uses the REPO_CACHE to avoid accessing the remote server again if
+ cloning from the same URL twice in the same session.
+
+ """
new_path = tempfile.mkdtemp("dvc-erepo")
- # Copy and adjust existing clone
+ # Copy and adjust existing clean clone
if (url, None, None) in REPO_CACHE:
old_path = REPO_CACHE[url, None, None]
@@ -59,13 +61,24 @@ def _external_repo(url=None, rev=None, cache_dir=None):
copy_tree(new_path, clean_clone_path)
REPO_CACHE[url, None, None] = clean_clone_path
- # Adjust new clone/copy to fit rev and cache_dir
-
- # Checkout needs to be done first because current branch might not be
- # DVC repository
+ # Check out the specified revision
if rev is not None:
_git_checkout(new_path, rev)
+ return new_path
+
+
+def _external_repo(url=None, rev=None, cache_dir=None):
+ from dvc.config import Config
+ from dvc.cache import CacheConfig
+ from dvc.repo import Repo
+
+ key = (url, rev, cache_dir)
+ if key in REPO_CACHE:
+ return REPO_CACHE[key]
+
+ new_path = cached_clone(url, rev=rev)
+
repo = Repo(new_path)
try:
# check if the URL is local and no default remote is present
| import: Handle non-DVC Git repositories
After https://github.com/iterative/dvc/pull/2889, `dvc import` can also import files that are tracked by Git but not DVC. DVC still requires that they come from a DVC repository rather than any Git repository, although there is no longer need for that. | iterative/dvc | diff --git a/tests/func/test_import.py b/tests/func/test_import.py
index 99da2df89..19ef1edd1 100644
--- a/tests/func/test_import.py
+++ b/tests/func/test_import.py
@@ -29,7 +29,12 @@ def test_import(tmp_dir, scm, dvc, erepo_dir, monkeypatch):
assert scm.repo.git.check_ignore("foo_imported")
-def test_import_git_file(erepo_dir, tmp_dir, dvc, scm):
[email protected]("src_is_dvc", [True, False])
+def test_import_git_file(erepo_dir, tmp_dir, dvc, scm, src_is_dvc):
+ if not src_is_dvc:
+ erepo_dir.dvc.scm.repo.index.remove([".dvc"], r=True)
+ erepo_dir.dvc.scm.commit("remove .dvc")
+
src = "some_file"
dst = "some_file_imported"
@@ -44,7 +49,12 @@ def test_import_git_file(erepo_dir, tmp_dir, dvc, scm):
assert tmp_dir.scm.repo.git.check_ignore(fspath(tmp_dir / dst))
-def test_import_git_dir(erepo_dir, tmp_dir, dvc, scm):
[email protected]("src_is_dvc", [True, False])
+def test_import_git_dir(erepo_dir, tmp_dir, dvc, scm, src_is_dvc):
+ if not src_is_dvc:
+ erepo_dir.dvc.scm.repo.index.remove([".dvc"], r=True)
+ erepo_dir.dvc.scm.commit("remove .dvc")
+
src = "some_directory"
dst = "some_directory_imported"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.78 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@cbc7e21593b30402ff53c28a62ddcaa3dd5bf277#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive==1.3.1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.78.1+cbc7e2
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive==1.3.1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_import.py::test_import_git_file[False]",
"tests/func/test_import.py::test_import_git_dir[False]"
] | [] | [
"tests/func/test_import.py::test_import",
"tests/func/test_import.py::test_import_git_file[True]",
"tests/func/test_import.py::test_import_git_dir[True]",
"tests/func/test_import.py::test_import_dir",
"tests/func/test_import.py::test_import_non_cached",
"tests/func/test_import.py::test_import_rev",
"tests/func/test_import.py::test_pull_imported_stage",
"tests/func/test_import.py::test_cache_type_is_properly_overridden",
"tests/func/test_import.py::test_pull_imported_directory_stage",
"tests/func/test_import.py::test_download_error_pulling_imported_stage",
"tests/func/test_import.py::test_import_to_dir[.]",
"tests/func/test_import.py::test_import_to_dir[dir]",
"tests/func/test_import.py::test_import_to_dir[dir/subdir]",
"tests/func/test_import.py::test_pull_non_workspace",
"tests/func/test_import.py::test_import_non_existing"
] | [] | Apache License 2.0 | 6,115 | 1,099 | [
"dvc/dependency/repo.py",
"dvc/external_repo.py"
] |
python-pillow__Pillow-4326 | 94ec95c571777cf5188c5b791957220d723194a1 | 2020-01-01 02:37:03 | 94ec95c571777cf5188c5b791957220d723194a1 | diff --git a/src/PIL/GifImagePlugin.py b/src/PIL/GifImagePlugin.py
index 5f9ba59c1..63a0f662b 100644
--- a/src/PIL/GifImagePlugin.py
+++ b/src/PIL/GifImagePlugin.py
@@ -569,8 +569,11 @@ def _write_local_header(fp, im, offset, flags):
if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]):
fp.write(b"!" + o8(254)) # extension intro
- for i in range(0, len(im.encoderinfo["comment"]), 255):
- subblock = im.encoderinfo["comment"][i : i + 255]
+ comment = im.encoderinfo["comment"]
+ if isinstance(comment, str):
+ comment = comment.encode()
+ for i in range(0, len(comment), 255):
+ subblock = comment[i : i + 255]
fp.write(o8(len(subblock)) + subblock)
fp.write(o8(0))
if "loop" in im.encoderinfo:
| Saving as GIF throws exception when source image has "comment" metadata
### What did you do?
Attempted to open a png image file, and save it as a gif file.
### What did you expect to happen?
The file should have been successfully saved/converted
### What actually happened?
This exception was thrown:
```
Traceback (most recent call last):
File "test.py", line 8, in <module>
image.save("out.gif", "gif")
File "/usr/local/lib/python3.6/site-packages/PIL/Image.py", line 2084, in save
save_handler(self, fp, filename)
File "/usr/local/lib/python3.6/site-packages/PIL/GifImagePlugin.py", line 513, in _save
_write_single_frame(im, fp, palette)
File "/usr/local/lib/python3.6/site-packages/PIL/GifImagePlugin.py", line 415, in _write_single_frame
_write_local_header(fp, im, (0, 0), flags)
File "/usr/local/lib/python3.6/site-packages/PIL/GifImagePlugin.py", line 577, in _write_local_header
fp.write(o8(len(subblock)) + subblock)
TypeError: can't concat str to bytes
```
### What are your OS, Python and Pillow versions?
* OS: Ubuntu 14.04.5
* Python: 3.6.6
* Pillow: 6.2.1
## Code to Reproduce
```python
from PIL import Image
image = Image.open("example.png")
image.save("out.gif", "gif")
```
Have this image in the directory while running the code:

## My diagnosis
It appears that it is erroring because when it tries to write the "comment" field from the encoderinfo of the image, the "comment" field is a string instead of bytes. I'm guessing either that should be checked here, or it should be read in differently when `Image.open` is called. I checked this by checking the metadata of the image via `identify -verbose example.png`. When I stripped the metadata from the image using `mogrify -strip example.png`, the issue went away and the conversion was successful.
Heres the stripped version of the image that has no issues when converted using the code above:

I'd create a pull request for this myself, except I'm not sure if the fix needs to be on reading it in, or attempting to write it when writing the gif.
| python-pillow/Pillow | diff --git a/Tests/test_file_gif.py b/Tests/test_file_gif.py
index bbd589ada..61c3d8f78 100644
--- a/Tests/test_file_gif.py
+++ b/Tests/test_file_gif.py
@@ -565,14 +565,18 @@ class TestFileGif(PillowTestCase):
im.info["comment"], b"File written by Adobe Photoshop\xa8 4.0"
)
- out = self.tempfile("temp.gif")
- im = Image.new("L", (100, 100), "#000")
- im.info["comment"] = b"Test comment text"
- im.save(out)
+ out = self.tempfile("temp.gif")
+ im = Image.new("L", (100, 100), "#000")
+ im.info["comment"] = b"Test comment text"
+ im.save(out)
with Image.open(out) as reread:
-
self.assertEqual(reread.info["comment"], im.info["comment"])
+ im.info["comment"] = "Test comment text"
+ im.save(out)
+ with Image.open(out) as reread:
+ self.assertEqual(reread.info["comment"], im.info["comment"].encode())
+
def test_comment_over_255(self):
out = self.tempfile("temp.gif")
im = Image.new("L", (100, 100), "#000")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 6.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc libjpeg-dev zlib1g-dev libtiff5-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev libharfbuzz-dev libfribidi-dev libxcb1-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
black==25.1.0
blessed==1.20.0
build==1.2.2.post1
certifi==2025.1.31
charset-normalizer==3.4.1
check-manifest==0.50
click==8.1.8
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
jarn.viewdoc==2.7
Jinja2==3.1.6
MarkupSafe==3.0.2
mypy-extensions==1.0.0
olefile==0.47
packaging==24.2
pathspec==0.12.1
-e git+https://github.com/python-pillow/Pillow.git@94ec95c571777cf5188c5b791957220d723194a1#egg=Pillow
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
Pygments==2.19.1
pyproject_hooks==1.2.0
pyroma==4.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
trove-classifiers==2025.3.19.19
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
zipp==3.21.0
| name: Pillow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- black==25.1.0
- blessed==1.20.0
- build==1.2.2.post1
- certifi==2025.1.31
- charset-normalizer==3.4.1
- check-manifest==0.50
- click==8.1.8
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jarn-viewdoc==2.7
- jinja2==3.1.6
- markupsafe==3.0.2
- mypy-extensions==1.0.0
- olefile==0.47
- packaging==24.2
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pygments==2.19.1
- pyproject-hooks==1.2.0
- pyroma==4.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- trove-classifiers==2025.3.19.19
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- zipp==3.21.0
prefix: /opt/conda/envs/Pillow
| [
"Tests/test_file_gif.py::TestFileGif::test_comment"
] | [] | [
"Tests/test_file_gif.py::TestFileGif::test_append_images",
"Tests/test_file_gif.py::TestFileGif::test_background",
"Tests/test_file_gif.py::TestFileGif::test_bbox",
"Tests/test_file_gif.py::TestFileGif::test_closed_file",
"Tests/test_file_gif.py::TestFileGif::test_comment_over_255",
"Tests/test_file_gif.py::TestFileGif::test_context_manager",
"Tests/test_file_gif.py::TestFileGif::test_dispose2_background",
"Tests/test_file_gif.py::TestFileGif::test_dispose2_diff",
"Tests/test_file_gif.py::TestFileGif::test_dispose2_palette",
"Tests/test_file_gif.py::TestFileGif::test_dispose_background",
"Tests/test_file_gif.py::TestFileGif::test_dispose_none",
"Tests/test_file_gif.py::TestFileGif::test_dispose_previous",
"Tests/test_file_gif.py::TestFileGif::test_duration",
"Tests/test_file_gif.py::TestFileGif::test_eoferror",
"Tests/test_file_gif.py::TestFileGif::test_extents",
"Tests/test_file_gif.py::TestFileGif::test_getdata",
"Tests/test_file_gif.py::TestFileGif::test_headers_saving_for_animated_gifs",
"Tests/test_file_gif.py::TestFileGif::test_identical_frames",
"Tests/test_file_gif.py::TestFileGif::test_identical_frames_to_single_frame",
"Tests/test_file_gif.py::TestFileGif::test_invalid_file",
"Tests/test_file_gif.py::TestFileGif::test_iss634",
"Tests/test_file_gif.py::TestFileGif::test_lzw_bits",
"Tests/test_file_gif.py::TestFileGif::test_multiple_duration",
"Tests/test_file_gif.py::TestFileGif::test_n_frames",
"Tests/test_file_gif.py::TestFileGif::test_number_of_loops",
"Tests/test_file_gif.py::TestFileGif::test_optimize",
"Tests/test_file_gif.py::TestFileGif::test_optimize_correctness",
"Tests/test_file_gif.py::TestFileGif::test_optimize_full_l",
"Tests/test_file_gif.py::TestFileGif::test_palette_434",
"Tests/test_file_gif.py::TestFileGif::test_palette_handling",
"Tests/test_file_gif.py::TestFileGif::test_palette_save_ImagePalette",
"Tests/test_file_gif.py::TestFileGif::test_palette_save_L",
"Tests/test_file_gif.py::TestFileGif::test_palette_save_P",
"Tests/test_file_gif.py::TestFileGif::test_rgb_transparency",
"Tests/test_file_gif.py::TestFileGif::test_roundtrip",
"Tests/test_file_gif.py::TestFileGif::test_roundtrip2",
"Tests/test_file_gif.py::TestFileGif::test_roundtrip_save_all",
"Tests/test_file_gif.py::TestFileGif::test_sanity",
"Tests/test_file_gif.py::TestFileGif::test_save_I",
"Tests/test_file_gif.py::TestFileGif::test_save_dispose",
"Tests/test_file_gif.py::TestFileGif::test_seek",
"Tests/test_file_gif.py::TestFileGif::test_seek_info",
"Tests/test_file_gif.py::TestFileGif::test_seek_rewind",
"Tests/test_file_gif.py::TestFileGif::test_transparent_optimize",
"Tests/test_file_gif.py::TestFileGif::test_unclosed_file",
"Tests/test_file_gif.py::TestFileGif::test_version",
"Tests/test_file_gif.py::TestFileGif::test_zero_comment_subblocks"
] | [] | MIT-CMU License | 6,118 | 270 | [
"src/PIL/GifImagePlugin.py"
] |
|
Azure__pykusto-42 | 8783b166ce0ee3236ad6fcda81d9355a80438cd2 | 2020-01-01 13:44:42 | 68121cdd79cc9c1d6b8f71e5e80df566ac6842c7 | diff --git a/pykusto/expressions.py b/pykusto/expressions.py
index a02183b..ec73163 100644
--- a/pykusto/expressions.py
+++ b/pykusto/expressions.py
@@ -21,14 +21,6 @@ OrderType = Union[DatetimeType, TimespanType, NumberType, StringType]
# All classes in the same file to prevent circular dependencies
def _subexpr_to_kql(obj: ExpressionType) -> KQL:
- """
- Convert the given expression to KQL, enclosing it in parentheses if it is a compound expression. This guarantees
- correct evaluation order. When parentheses are not needed, for example when the expressions is used as an argument
- to a function, use `to_kql` instead.
-
- :param obj: Expression to convert to KQL
- :return: KQL that represents the given expression
- """
if isinstance(obj, BaseExpression):
return obj.as_subexpression()
return to_kql(obj)
@@ -42,11 +34,8 @@ class BaseExpression:
raise TypeError("BaseExpression is abstract")
return object.__new__(cls)
- def __init__(self, kql: Union[KQL, 'BaseExpression']) -> None:
- if isinstance(kql, BaseExpression):
- self.kql = kql.kql
- elif not isinstance(kql, str):
- raise ValueError("Either expression or KQL required")
+ def __init__(self, kql: KQL) -> None:
+ assert isinstance(kql, str)
self.kql = kql
def __repr__(self) -> str:
@@ -270,9 +259,9 @@ class StringExpression(BaseExpression):
def split(self, delimiter: StringType, requested_index: NumberType = None) -> 'ArrayExpression':
if requested_index is None:
- return ArrayExpression(KQL('split({}, {})'.format(to_kql(self.kql), to_kql(delimiter))))
- return ArrayExpression(KQL('split({}, {}, {})'.format(
- to_kql(self.kql), to_kql(delimiter), to_kql(requested_index)
+ return ArrayExpression(KQL('split({}, {}'.format(self.kql, to_kql(delimiter))))
+ return ArrayExpression(KQL('split({}, {}, {}'.format(
+ self.kql, _subexpr_to_kql(delimiter), to_kql(requested_index)
)))
def equals(self, other: StringType, case_sensitive: bool = False) -> BooleanExpression:
@@ -654,15 +643,6 @@ column_generator = ColumnGenerator()
def to_kql(obj: ExpressionType) -> KQL:
- """
- Convert the given expression to KQL. If this is a subexpression of a greater expression, neighboring operators might
- take precedence over operators included in this expression, causing an incorrect evaluation order.
- If this is a concern, use `_subexpr_to_kql` instead, which will enclose this expression in parentheses if it is
- a compound expression.
-
- :param obj: Expression to convert to KQL
- :return: KQL that represents the given expression
- """
if isinstance(obj, BaseExpression):
return obj.kql
return kql_converter.for_obj(obj)
diff --git a/pykusto/functions.py b/pykusto/functions.py
index 6269870..fcfc6f4 100644
--- a/pykusto/functions.py
+++ b/pykusto/functions.py
@@ -1,10 +1,10 @@
+import json
from typing import Union
from pykusto.expressions import Column, NumberType, NumberExpression, TimespanType, \
DatetimeExpression, TimespanExpression, ArrayType, DynamicType, DatetimeType, BaseExpression, BooleanType, \
ExpressionType, AggregationExpression, StringType, StringExpression, BooleanExpression, \
- NumberAggregationExpression, MappingAggregationExpression, ArrayAggregationExpression, to_kql, DynamicExpression, \
- ArrayExpression
+ NumberAggregationExpression, MappingAggregationExpression, ArrayAggregationExpression, to_kql, DynamicExpression
from pykusto.kql_converters import KQL
from pykusto.type_utils import plain_expression
@@ -559,8 +559,7 @@ def sign(expr: NumberType) -> NumberExpression:
# def sin(self): return
#
#
-def split(string: StringType, delimiter: StringType, requested_index: NumberType = None) -> 'ArrayExpression':
- return StringExpression(KQL(string)).split(delimiter, requested_index)
+# def split(self): return
def sqrt(expr: NumberType) -> NumberExpression:
@@ -583,16 +582,20 @@ def startofyear(expr: DatetimeType, offset: NumberType = None) -> DatetimeExpres
return expr.startofyear(offset)
-def strcat(expr1: StringType, expr2: StringType, *exprs: StringType) -> StringExpression:
- res = 'strcat({}, {}'.format(to_kql(expr1),
- to_kql(expr2))
- if len(exprs) > 0:
- res = res + ', ' + ', '.join([to_kql(expr) for expr in exprs])
- return StringExpression(KQL(res + ')'))
+def strcat(*strings: StringType) -> StringExpression:
+ if len(strings) < 2:
+ raise ValueError("strcat requires at least two arguments")
+ return StringExpression(KQL('strcat({})'.format(', '.join(to_kql(s) for s in strings))))
+
+
+def to_literal_dynamic(d: DynamicType) -> KQL:
+ if isinstance(d, BaseExpression):
+ return d.kql
+ return KQL('dynamic({})'.format(json.dumps(d)))
def strcat_array(expr: ArrayType, delimiter: StringType) -> StringExpression:
- return StringExpression(KQL('strcat_array({}, {})'.format(to_kql(expr), to_kql(delimiter))))
+ return StringExpression(KQL('strcat_array({}, {})'.format(to_literal_dynamic(expr), to_kql(delimiter))))
def strcat_delim(delimiter: StringType, expr1: StringType, expr2: StringType, *exprs: StringType) -> StringExpression:
diff --git a/pykusto/kql_converters.py b/pykusto/kql_converters.py
index 63079f4..a1eaeae 100644
--- a/pykusto/kql_converters.py
+++ b/pykusto/kql_converters.py
@@ -29,7 +29,7 @@ def timedelta_to_kql(td: timedelta) -> KQL:
@kql_converter(Mapping, List, Tuple)
def dynamic_to_kql(d: Union[Mapping, List, Tuple]) -> KQL:
query = list(json.dumps(d))
- # Issue #11
+ # Convert square brackets to round brackets (Issue #11)
counter = 0
prev = ""
for i, c in enumerate(query):
diff --git a/pykusto/logger.py b/pykusto/logger.py
deleted file mode 100644
index 6b2cc66..0000000
--- a/pykusto/logger.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import logging
-
-logger = logging.getLogger("pykusto")
diff --git a/pykusto/query.py b/pykusto/query.py
index 845517c..8207f13 100644
--- a/pykusto/query.py
+++ b/pykusto/query.py
@@ -1,18 +1,18 @@
-from abc import abstractmethod
-from copy import copy, deepcopy
-from enum import Enum
from itertools import chain
-from types import FunctionType
from typing import Tuple, List, Union, Optional
+from abc import abstractmethod
from azure.kusto.data.helpers import dataframe_from_result_table
+from copy import copy, deepcopy
+from enum import Enum
+from types import FunctionType
from pykusto.client import Table
from pykusto.expressions import BooleanType, ExpressionType, AggregationExpression, OrderType, \
StringType, AssignmentBase, AssignmentFromAggregationToColumn, AssignmentToSingleColumn, Column, BaseExpression, \
AssignmentFromColumnToColumn, AnyExpression, to_kql
from pykusto.kql_converters import KQL
-from pykusto.logger import logger
+from pykusto.type_utils import logger
from pykusto.udf import stringify_python_func
diff --git a/pykusto/type_utils.py b/pykusto/type_utils.py
index ca407cd..bb5cb4d 100644
--- a/pykusto/type_utils.py
+++ b/pykusto/type_utils.py
@@ -1,65 +1,41 @@
+import logging
from datetime import datetime, timedelta
from numbers import Number
-from typing import Union, Mapping, Type, Dict, Callable, Tuple, List
+from typing import Union, Mapping, Type, Dict, Callable, Any, Tuple, List
+
+logger = logging.getLogger("pykusto")
KustoTypes = Union[str, Number, bool, datetime, Mapping, List, Tuple, timedelta]
# TODO: Unhandled data types: guid, decimal
class TypeRegistrar:
- """
- A factory for annotations that are used to create a mapping between Kusto types and python types and functions.
- Each annotation must be called with a Kusto type as a parameter. The `for_obj` and `for_type` methods
- can then be used to retrieve the python type or function corresponding to a given Kusto type.
- """
registry: Dict[Type[KustoTypes], Callable]
- def __init__(self, name: str) -> None:
- """
- :param name: Name is used for better logging and clearer errors
- """
- self.name = name
+ def __init__(self) -> None:
self.registry = {}
- def __repr__(self) -> str:
- return self.name
-
def __call__(self, *types: Type[KustoTypes]) -> Callable:
def inner(wrapped):
for t in types:
- previous = self.registry.setdefault(t, wrapped)
- if previous is not wrapped:
- raise TypeError("{}: type already registered: {}".format(self, t.__name__))
+ self.registry[t] = wrapped
return wrapped
return inner
- def for_obj(self, obj: KustoTypes) -> Callable:
- """
- Given an object of Kusto type, retrieve the python type or function associated with the object's type, and call
- it with the given object as a parameter
-
- :param obj: An object of Kusto type
- :return: Associated python object
- """
+ def for_obj(self, obj: Any) -> Any:
for registered_type, registered_callable in self.registry.items():
if isinstance(obj, registered_type):
return registered_callable(obj)
- raise ValueError("{}: no registered callable for object {} of type {}".format(self, obj, type(obj).__name__))
+ raise ValueError("No registered callable for object {} of type {}".format(obj, type(obj).__name__))
def for_type(self, t: Type[KustoTypes]) -> Callable:
- """
- Given a Kusto type, retrieve the associated python type or function
-
- :param t: A Kusto type
- :return: Associated python object
- """
for registered_type, registered_callable in self.registry.items():
if issubclass(t, registered_type):
return registered_callable
- raise ValueError("{}: no registered callable for type {}".format(self, t.__name__))
+ raise ValueError("No registered callable for type {}".format(t.__name__))
-kql_converter = TypeRegistrar("KQL Converter")
-plain_expression = TypeRegistrar("Plain expression")
-aggregation_expression = TypeRegistrar("Aggregation expression")
+kql_converter = TypeRegistrar()
+plain_expression = TypeRegistrar()
+aggregation_expression = TypeRegistrar()
diff --git a/setup.py b/setup.py
index ca8f191..543081f 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='pykusto',
- version='0.0.9',
+ version='0.0.7',
packages=find_packages(exclude=['test']),
url='https://github.com/Azure/pykusto',
license='MIT License',
| In some contexts a dynamic literal needs to be created with the "dynamic" keyword
For example, currently `strcat_array` fails because it requires:
`strcat_array(dynamic(["A", "B", "C"]), ",")`
while pykusto produces:
`strcat_array(("A", "B", "C"), ",")` | Azure/pykusto | diff --git a/test/test_base.py b/test/test_base.py
index dde3185..6321ec8 100644
--- a/test/test_base.py
+++ b/test/test_base.py
@@ -3,7 +3,7 @@ import sys
from typing import Callable
from unittest import TestCase
-from pykusto.logger import logger
+from pykusto.type_utils import logger
class TestBase(TestCase):
diff --git a/test/test_functions.py b/test/test_functions.py
index 3ce0a5c..65d0d07 100644
--- a/test/test_functions.py
+++ b/test/test_functions.py
@@ -1,5 +1,4 @@
import datetime
-import unittest
from pykusto import functions as f
from pykusto.expressions import column_generator as col
@@ -372,7 +371,6 @@ class TestFunction(TestBase):
Query().extend(f.strcat_delim('-', ',', col.foo)).render()
)
- @unittest.skip("Enabled after #40 is fixed")
def test_strcat_array(self):
self.assertEqual(
" | where (strcat_array(foo, \",\")) == \"A,B,C\"",
@@ -425,20 +423,6 @@ class TestFunction(TestBase):
Query().where(f.substring(col.foo, col.bar, 4) == 'ABC,ABC').render()
)
- def test_split(self):
- self.assertEqual(
- ' | extend foo = (split(bar, "_", 3))',
- Query().extend(foo=f.split(col.bar, "_", 3)).render()
- )
- self.assertEqual(
- ' | extend foo = (split(bar, "_")[3])',
- Query().extend(foo=f.split(col.bar, "_")[3]).render()
- )
- self.assertEqual(
- ' | extend foo = (split("1_2", "_")[3])',
- Query().extend(foo=f.split("1_2", "_")[3]).render()
- )
-
def test_tobool(self):
self.assertEqual(
" | where tobool(foo)",
diff --git a/test/test_utils.py b/test/test_utils.py
index 2b2e7aa..fc774c4 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -1,81 +1,16 @@
from pykusto.expressions import to_kql
-from pykusto.type_utils import TypeRegistrar
from test.test_base import TestBase
class TestUtils(TestBase):
def test_dynamic_to_kql(self):
- test_dict = {
+ dict ={
"name": "Alan",
"age": 21,
"address": ("NY", 36),
"pets": ["Libby", "Panda", "]", "["]
}
self.assertEqual(
- "{\"name\": \"Alan\", \"age\": 21, \"address\": (\"NY\", 36), "
- "\"pets\": (\"Libby\", \"Panda\", \"]\", \"[\")}",
- to_kql(test_dict)
- )
-
- def test_type_registrar_for_type(self):
- test_annotation = TypeRegistrar("Test annotation")
-
- @test_annotation(str)
- def str_annotated(s: str) -> str:
- return "response to " + s
-
- self.assertEqual(
- "response to test for_type",
- test_annotation.for_type(str)("test for_type")
- )
-
- def test_type_registrar_for_obj(self):
- test_annotation = TypeRegistrar("Test annotation")
-
- @test_annotation(str)
- def str_annotated(s: str) -> str:
- return "response to " + s
-
- self.assertEqual(
- "response to test for_obj",
- test_annotation.for_obj("test for_obj")
- )
-
- def test_type_registrar_for_type_not_found(self):
- test_annotation = TypeRegistrar("Test annotation")
-
- @test_annotation(str)
- def str_annotated(s: str) -> str:
- return "response to " + s
-
- self.assertRaises(
- ValueError("Test annotation: no registered callable for type bool"),
- lambda: test_annotation.for_type(bool)("test for_type")
- )
-
- def test_type_registrar_for_obj_not_found(self):
- test_annotation = TypeRegistrar("Test annotation")
-
- @test_annotation(str)
- def str_annotated(s: str) -> str:
- return "response to " + s
-
- self.assertRaises(
- ValueError("Test annotation: no registered callable for object True of type bool"),
- lambda: test_annotation.for_obj(True)
- )
-
- def test_type_registrar_collision(self):
- test_annotation = TypeRegistrar("Test annotation")
-
- @test_annotation(str)
- def str_annotated_1(s: str) -> str:
- return "response to " + s
-
- def str_annotated_2(s: str) -> str:
- return "response to " + s
-
- self.assertRaises(
- TypeError("Test annotation: type already registered: str"),
- lambda: test_annotation(str)(str_annotated_2)
+ "{\"name\": \"Alan\", \"age\": 21, \"address\": (\"NY\", 36), \"pets\": (\"Libby\", \"Panda\", \"]\", \"[\")}",
+ to_kql(dict)
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 6
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==1.2.7
azure-kusto-data==0.0.36
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
pluggy==1.2.0
pycparser==2.21
PyJWT==2.8.0
-e git+https://github.com/Azure/pykusto.git@8783b166ce0ee3236ad6fcda81d9355a80438cd2#egg=pykusto
pytest==7.4.4
python-dateutil==2.9.0.post0
requests==2.31.0
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: pykusto
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==1.2.7
- azure-kusto-data==0.0.36
- cffi==1.15.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pycparser==2.21
- pyjwt==2.8.0
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- requests==2.31.0
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/pykusto
| [
"test/test_functions.py::TestFunction::test_acos",
"test/test_functions.py::TestFunction::test_ago",
"test/test_functions.py::TestFunction::test_any",
"test/test_functions.py::TestFunction::test_arg_max",
"test/test_functions.py::TestFunction::test_arg_min",
"test/test_functions.py::TestFunction::test_array_length",
"test/test_functions.py::TestFunction::test_avg",
"test/test_functions.py::TestFunction::test_avgif",
"test/test_functions.py::TestFunction::test_bag_keys",
"test/test_functions.py::TestFunction::test_bin",
"test/test_functions.py::TestFunction::test_bin_at",
"test/test_functions.py::TestFunction::test_bin_auto",
"test/test_functions.py::TestFunction::test_case",
"test/test_functions.py::TestFunction::test_ceiling",
"test/test_functions.py::TestFunction::test_cos",
"test/test_functions.py::TestFunction::test_count",
"test/test_functions.py::TestFunction::test_countif",
"test/test_functions.py::TestFunction::test_dcount",
"test/test_functions.py::TestFunction::test_endofday",
"test/test_functions.py::TestFunction::test_endofmonth",
"test/test_functions.py::TestFunction::test_endofweek",
"test/test_functions.py::TestFunction::test_exp",
"test/test_functions.py::TestFunction::test_exp10",
"test/test_functions.py::TestFunction::test_exp2",
"test/test_functions.py::TestFunction::test_floor",
"test/test_functions.py::TestFunction::test_format_datetime",
"test/test_functions.py::TestFunction::test_format_timespan",
"test/test_functions.py::TestFunction::test_getmonth",
"test/test_functions.py::TestFunction::test_gettype",
"test/test_functions.py::TestFunction::test_getyear",
"test/test_functions.py::TestFunction::test_hash",
"test/test_functions.py::TestFunction::test_hash_sha256",
"test/test_functions.py::TestFunction::test_hourofday",
"test/test_functions.py::TestFunction::test_iff",
"test/test_functions.py::TestFunction::test_iif",
"test/test_functions.py::TestFunction::test_isempty",
"test/test_functions.py::TestFunction::test_isfinite",
"test/test_functions.py::TestFunction::test_isinf",
"test/test_functions.py::TestFunction::test_isnan",
"test/test_functions.py::TestFunction::test_isnotempty",
"test/test_functions.py::TestFunction::test_isnotnull",
"test/test_functions.py::TestFunction::test_isnull",
"test/test_functions.py::TestFunction::test_isutf8",
"test/test_functions.py::TestFunction::test_log",
"test/test_functions.py::TestFunction::test_log10",
"test/test_functions.py::TestFunction::test_log2",
"test/test_functions.py::TestFunction::test_loggamma",
"test/test_functions.py::TestFunction::test_make_bag",
"test/test_functions.py::TestFunction::test_make_datetime",
"test/test_functions.py::TestFunction::test_make_list",
"test/test_functions.py::TestFunction::test_make_set",
"test/test_functions.py::TestFunction::test_max",
"test/test_functions.py::TestFunction::test_min",
"test/test_functions.py::TestFunction::test_nesting",
"test/test_functions.py::TestFunction::test_now",
"test/test_functions.py::TestFunction::test_parse_json_array",
"test/test_functions.py::TestFunction::test_parse_json_brackets",
"test/test_functions.py::TestFunction::test_parse_json_dot",
"test/test_functions.py::TestFunction::test_parse_json_nesting",
"test/test_functions.py::TestFunction::test_parse_json_number_expression",
"test/test_functions.py::TestFunction::test_parse_json_to_string",
"test/test_functions.py::TestFunction::test_percentile",
"test/test_functions.py::TestFunction::test_percentiles",
"test/test_functions.py::TestFunction::test_pow",
"test/test_functions.py::TestFunction::test_round",
"test/test_functions.py::TestFunction::test_sign",
"test/test_functions.py::TestFunction::test_sqrt",
"test/test_functions.py::TestFunction::test_startofday",
"test/test_functions.py::TestFunction::test_startofmonth",
"test/test_functions.py::TestFunction::test_startofweek",
"test/test_functions.py::TestFunction::test_startofyear",
"test/test_functions.py::TestFunction::test_stdev",
"test/test_functions.py::TestFunction::test_stdevif",
"test/test_functions.py::TestFunction::test_stdevp",
"test/test_functions.py::TestFunction::test_strcat",
"test/test_functions.py::TestFunction::test_strcat_array",
"test/test_functions.py::TestFunction::test_strcat_delim",
"test/test_functions.py::TestFunction::test_strcmp",
"test/test_functions.py::TestFunction::test_string_size",
"test/test_functions.py::TestFunction::test_strlen",
"test/test_functions.py::TestFunction::test_strrep",
"test/test_functions.py::TestFunction::test_substring",
"test/test_functions.py::TestFunction::test_sum",
"test/test_functions.py::TestFunction::test_sumif",
"test/test_functions.py::TestFunction::test_tobool",
"test/test_functions.py::TestFunction::test_toboolean",
"test/test_functions.py::TestFunction::test_todouble",
"test/test_functions.py::TestFunction::test_variance",
"test/test_functions.py::TestFunction::test_varianceif",
"test/test_functions.py::TestFunction::test_variancep",
"test/test_utils.py::TestUtils::test_dynamic_to_kql"
] | [] | [] | [] | MIT License | 6,124 | 2,799 | [
"pykusto/expressions.py",
"pykusto/functions.py",
"pykusto/kql_converters.py",
"pykusto/logger.py",
"pykusto/query.py",
"pykusto/type_utils.py",
"setup.py"
] |
|
einsteinpy__einsteinpy-357 | 094d896d431b92ef2e20646a4dac9eab1420d195 | 2020-01-02 07:51:33 | 27aaa3d80d74a56dd7edebc2e721b5bdfdf3058b | pep8speaks: Hello @michiboo! Thanks for opening this PR. We checked the lines you've touched for [PEP 8](https://www.python.org/dev/peps/pep-0008) issues, and found:
* In the file [`src/einsteinpy/symbolic/einstein.py`](https://github.com/einsteinpy/einsteinpy/blob/b45e0f548e4e27273c4ab0b41245896157828a5d/src/einsteinpy/symbolic/einstein.py):
> [Line 132:10](https://github.com/einsteinpy/einsteinpy/blob/b45e0f548e4e27273c4ab0b41245896157828a5d/src/einsteinpy/symbolic/einstein.py#L132): [W292](https://duckduckgo.com/?q=pep8%20W292) no newline at end of file
shreyasbapat: Good thing @michiboo .
Can you join our chatroom, we can discuyss why this fails there :D
ritzvik: @michiboo Are you still on it??
michiboo: > @michiboo Are you still on it??
Hi Yes, sorry for the delay, been busy lately :/
ritzvik: @michiboo Can you rebase the commits. Then the ‘docs’ test would pass.
After that lemme take care of the unit tests.
codecov[bot]: # [Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=h1) Report
> Merging [#357](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=desc) into [master](https://codecov.io/gh/einsteinpy/einsteinpy/commit/17580ec82dae91bf39226df89115bb4876d5c199?src=pr&el=desc) will **decrease** coverage by `0.1%`.
> The diff coverage is `33.33%`.
[](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #357 +/- ##
=========================================
- Coverage 94.5% 94.4% -0.11%
=========================================
Files 47 47
Lines 1765 1768 +3
=========================================
+ Hits 1668 1669 +1
- Misses 97 99 +2
```
| [Impacted Files](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/einsteinpy/symbolic/einstein.py](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357/diff?src=pr&el=tree#diff-c3JjL2VpbnN0ZWlucHkvc3ltYm9saWMvZWluc3RlaW4ucHk=) | `92% <33.33%> (-8%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=footer). Last update [17580ec...2c6660e](https://codecov.io/gh/einsteinpy/einsteinpy/pull/357?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/src/einsteinpy/symbolic/einstein.py b/src/einsteinpy/symbolic/einstein.py
index 10fa945..1eb2dc1 100644
--- a/src/einsteinpy/symbolic/einstein.py
+++ b/src/einsteinpy/symbolic/einstein.py
@@ -90,3 +90,23 @@ class EinsteinTensor(BaseRelativityTensor):
new_tensor, self.syms, config=newconfig, parent_metric=metric
)
return new_obj
+
+ def lorentz_transform(self, transformation_matrix):
+ """
+ Performs a Lorentz transform on the tensor.
+
+ Parameters
+ ----------
+ transformation_matrix : ~sympy.tensor.array.dense_ndim_array.ImmutableDenseNDimArray or list
+ Sympy Array or multi-dimensional list containing Sympy Expressions
+
+ Returns
+ -------
+ ~einsteinpy.symbolic.tensor.BaseRelativityTensor
+ lorentz transformed tensor(or vector)
+
+ """
+ t = super(EinsteinTensor, self).lorentz_transform(transformation_matrix)
+ return EinsteinTensor(
+ t.tensor(), syms=self.syms, config=self._config, parent_metric=None
+ )
| Add lorentz_tranform to all the classes in symbolic module
🐞 **Problem**
Many classes like `MetricTensor`, `RiemannCurvatureTensor` does not have the `lorentz_transform()` function as of now!
🎯 **Goal**
Implement it in a similar way as that in `GenericVector` class in `symbolic/vector.py`
- [x] `GenericVector`
- [ ] `MetricTensor`
- [ ] `ChristoffelSymbols`
- [ ] `RiemannCurvatureTensor`
- [ ] `RicciTensor`
- [ ] `StressEnergyMomentumTensor`
- [x] `EinsteinTensor`
- [ ] `WeylTensor`
- [ ] `SchoutenTensor`
📋 **Steps to solve the problem**
* Comment below about what you've started working on.
* Add, commit, push your changes
* Submit a pull request and add this in comments - `Addresses #<put issue number here>`
* Ask for a review in comments section of pull request
* Celebrate your contribution to this project 🎉
| einsteinpy/einsteinpy | diff --git a/src/einsteinpy/tests/test_symbolic/test_einstein.py b/src/einsteinpy/tests/test_symbolic/test_einstein.py
index a68d75d..79c5450 100644
--- a/src/einsteinpy/tests/test_symbolic/test_einstein.py
+++ b/src/einsteinpy/tests/test_symbolic/test_einstein.py
@@ -1,9 +1,14 @@
import numpy as np
import pytest
import sympy
-from sympy import cos, simplify, sin, sinh, tensorcontraction
+from sympy import cos, cosh, simplify, sin, sinh, symbols, tensorcontraction
-from einsteinpy.symbolic import EinsteinTensor, MetricTensor, RicciScalar
+from einsteinpy.symbolic import (
+ EinsteinTensor,
+ MetricTensor,
+ RicciScalar,
+ simplify_sympy_array,
+)
def schwarzschild_metric():
@@ -66,3 +71,32 @@ def test_EinsteinTensor_symbols_parent_metric_wrong_change_config():
except Exception:
boolstore = True
assert boolstore
+
+
+def test_lorentz_transform():
+ # currently testing correct instance, proper theoretical tests needed
+ def get_lorentz_matrix():
+ list2d = [[0 for t1 in range(4)] for t2 in range(4)]
+ phi = symbols("phi")
+ list2d[0][0], list2d[0][1], list2d[1][0], list2d[1][1] = (
+ cosh(phi),
+ -sinh(phi),
+ -sinh(phi),
+ cosh(phi),
+ )
+ list2d[2][2], list2d[3][3] = 1, 1
+ return list2d
+
+ def get_tensor():
+ x, y, z, w = symbols("x y z w")
+ list2d = [[0 for t1 in range(4)] for t2 in range(4)]
+ list2d[0][0], list2d[0][1], list2d[1][0], list2d[1][1] = x, z, z, x
+ list2d[2][2], list2d[3][3] = y, y
+ return EinsteinTensor(
+ list2d, syms=(x, y, z, w), config="lu", parent_metric=None
+ )
+
+ tm = get_lorentz_matrix()
+ t0 = get_tensor()
+ t1 = t0.lorentz_transform(tm)
+ assert isinstance(t1, EinsteinTensor)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
astropy==4.3.1
attrs @ file:///croot/attrs_1668696182826/work
Babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.13.3
black==23.3.0
bleach==6.0.0
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
comm==0.1.4
coverage==7.2.7
cycler==0.11.0
debugpy==1.7.0
decorator==5.1.1
defusedxml==0.7.1
distlib==0.3.9
docutils==0.19
-e git+https://github.com/einsteinpy/einsteinpy.git@094d896d431b92ef2e20646a4dac9eab1420d195#egg=einsteinpy
entrypoints==0.4
execnet==2.0.2
fastjsonschema==2.21.1
filelock==3.12.2
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
fonttools==4.38.0
idna==3.10
imagesize==1.4.1
importlib-metadata==6.7.0
importlib-resources==5.12.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipykernel==6.16.2
ipython==7.34.0
ipywidgets==8.1.5
isort==5.11.5
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.17.3
jupyter-sphinx==0.4.0
jupyter_client==7.4.9
jupyter_core==4.12.0
jupyterlab-pygments==0.2.2
jupyterlab_widgets==3.0.13
kiwisolver==1.4.5
llvmlite==0.39.1
MarkupSafe==2.1.5
matplotlib==3.5.3
matplotlib-inline==0.1.6
mistune==3.0.2
mpmath==1.3.0
mypy-extensions==1.0.0
nbclient==0.7.4
nbconvert==7.6.0
nbformat==5.8.0
nbsphinx==0.9.7
nest-asyncio==1.6.0
numba==0.56.4
numpy==1.21.6
packaging==24.0
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.11.2
pexpect==4.9.0
pickleshare==0.7.5
Pillow==9.5.0
pkgutil_resolve_name==1.3.10
platformdirs==4.0.0
plotly==5.18.0
pluggy==1.2.0
prompt_toolkit==3.0.48
psutil==7.0.0
ptyprocess==0.7.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pyerfa==2.0.0.3
Pygments==2.17.2
pyparsing==3.1.4
pyproject-api==1.5.3
pyrsistent==0.19.3
pytest==7.1.2
pytest-asyncio==0.21.2
pytest-cov==2.5.1
pytest-mock==3.11.1
pytest-xdist==3.5.0
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==26.2.1
requests==2.31.0
scipy==1.7.3
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.4.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
sympy==1.10.1
tenacity==8.2.3
tinycss2==1.2.1
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==6.2
tox==4.8.0
traitlets==5.9.0
typed-ast==1.5.5
typing_extensions==4.7.1
urllib3==2.0.7
virtualenv==20.26.6
wcwidth==0.2.13
webencodings==0.5.1
widgetsnbextension==4.0.13
zipp @ file:///croot/zipp_1672387121353/work
| name: einsteinpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- astropy==4.3.1
- babel==2.14.0
- backcall==0.2.0
- beautifulsoup4==4.13.3
- black==23.3.0
- bleach==6.0.0
- cachetools==5.5.2
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- comm==0.1.4
- coverage==7.2.7
- cycler==0.11.0
- debugpy==1.7.0
- decorator==5.1.1
- defusedxml==0.7.1
- distlib==0.3.9
- docutils==0.19
- einsteinpy==0.3.dev0
- entrypoints==0.4
- execnet==2.0.2
- fastjsonschema==2.21.1
- filelock==3.12.2
- fonttools==4.38.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==6.7.0
- importlib-resources==5.12.0
- ipykernel==6.16.2
- ipython==7.34.0
- ipywidgets==8.1.5
- isort==5.11.5
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.17.3
- jupyter-client==7.4.9
- jupyter-core==4.12.0
- jupyter-sphinx==0.4.0
- jupyterlab-pygments==0.2.2
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.5
- llvmlite==0.39.1
- markupsafe==2.1.5
- matplotlib==3.5.3
- matplotlib-inline==0.1.6
- mistune==3.0.2
- mpmath==1.3.0
- mypy-extensions==1.0.0
- nbclient==0.7.4
- nbconvert==7.6.0
- nbformat==5.8.0
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- numba==0.56.4
- numpy==1.21.6
- packaging==24.0
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.11.2
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==9.5.0
- pkgutil-resolve-name==1.3.10
- platformdirs==4.0.0
- plotly==5.18.0
- pluggy==1.2.0
- prompt-toolkit==3.0.48
- psutil==7.0.0
- ptyprocess==0.7.0
- pycodestyle==2.10.0
- pyerfa==2.0.0.3
- pygments==2.17.2
- pyparsing==3.1.4
- pyproject-api==1.5.3
- pyrsistent==0.19.3
- pytest-asyncio==0.21.2
- pytest-cov==2.5.1
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==26.2.1
- requests==2.31.0
- scipy==1.7.3
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.4.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sympy==1.10.1
- tenacity==8.2.3
- tinycss2==1.2.1
- tornado==6.2
- tox==4.8.0
- traitlets==5.9.0
- typed-ast==1.5.5
- typing-extensions==4.7.1
- urllib3==2.0.7
- virtualenv==20.26.6
- wcwidth==0.2.13
- webencodings==0.5.1
- widgetsnbextension==4.0.13
prefix: /opt/conda/envs/einsteinpy
| [
"src/einsteinpy/tests/test_symbolic/test_einstein.py::test_lorentz_transform"
] | [] | [
"src/einsteinpy/tests/test_symbolic/test_einstein.py::test_EinsteinTensor_ValueError_wrong_config_length",
"src/einsteinpy/tests/test_symbolic/test_einstein.py::test_EinsteinTensor_trace_negetive_of_Ricci_Scalar_in_4D[metric0]",
"src/einsteinpy/tests/test_symbolic/test_einstein.py::test_EinsteinTensor_trace_negetive_of_Ricci_Scalar_in_4D[metric1]",
"src/einsteinpy/tests/test_symbolic/test_einstein.py::test_EinsteinTensor_symbols_parent_metric_wrong_change_config"
] | [] | MIT License | 6,128 | 280 | [
"src/einsteinpy/symbolic/einstein.py"
] |
jabesq__netatmo-api-python-98 | dbc69dc23708cfc5c717909f3627655dd0082676 | 2020-01-02 09:36:47 | dbc69dc23708cfc5c717909f3627655dd0082676 | diff --git a/src/pyatmo/weather_station.py b/src/pyatmo/weather_station.py
index 79be749..df30eed 100644
--- a/src/pyatmo/weather_station.py
+++ b/src/pyatmo/weather_station.py
@@ -193,7 +193,8 @@ class WeatherStationData:
for st in stations:
s = self.stationById(st) if byId else self.stationByName(st)
if not s or "dashboard_data" not in s:
- return None
+ LOG.info("Not dashboard data for station %s", st)
+ continue
# Define oldest acceptable sensor measure event
limit = (time.time() - exclude) if exclude else 0
ds = s["dashboard_data"]
| If one netatmo weather station lost, all stations are lost
https://github.com/home-assistant/home-assistant/issues/30291 | jabesq/netatmo-api-python | diff --git a/tests/test_pyatmo_weatherstation.py b/tests/test_pyatmo_weatherstation.py
index 5072931..c2e8267 100644
--- a/tests/test_pyatmo_weatherstation.py
+++ b/tests/test_pyatmo_weatherstation.py
@@ -354,7 +354,7 @@ def test_WeatherStationData_monitoredConditions(
"Yard",
],
),
- ("NoValidStation", None, False, None),
+ ("NoValidStation", None, False, {}),
(
None,
1000000,
@@ -395,7 +395,7 @@ def test_WeatherStationData_lastData(
if mod:
assert sorted(mod) == expected
else:
- assert mod is expected
+ assert mod == expected
@freeze_time("2019-06-11")
@@ -414,8 +414,8 @@ def test_WeatherStationData_lastData(
"12:34:56:37:11:ca",
],
),
- ("", None, None,),
- ("NoValidStation", None, None),
+ ("", None, {},),
+ ("NoValidStation", None, {},),
(
"12:34:56:37:11:ca",
1000000,
@@ -448,7 +448,7 @@ def test_WeatherStationData_lastData_byId(
if mod:
assert sorted(mod) == expected
else:
- assert mod is expected
+ assert mod == expected
@freeze_time("2019-06-11")
@@ -567,3 +567,44 @@ def test_WeatherStationData_lastData_measurements(weatherStationData):
assert mod["NetatmoIndoor"]["max_temp"] == 25.6
assert mod["NetatmoIndoor"]["Temperature"] == 24.6
assert mod["NetatmoIndoor"]["Pressure"] == 1017.3
+
+
+@freeze_time("2019-06-11")
[email protected](
+ "station, exclude, expected",
+ [
+ (
+ "12:34:56:37:11:ca",
+ None,
+ [
+ "12:34:56:03:1b:e4",
+ "12:34:56:05:51:20",
+ "12:34:56:07:bb:0e",
+ "12:34:56:07:bb:3e",
+ "12:34:56:36:fc:de",
+ "12:34:56:37:11:ca",
+ ],
+ ),
+ (
+ None,
+ None,
+ [
+ "12:34:56:03:1b:e4",
+ "12:34:56:05:51:20",
+ "12:34:56:07:bb:0e",
+ "12:34:56:07:bb:3e",
+ "12:34:56:36:fc:de",
+ "12:34:56:37:11:ca",
+ ],
+ ),
+ ("12:34:56:00:aa:01", None, {},),
+ ],
+)
+def test_WeatherStationData_lastData_bug_97(
+ weatherStationData, station, exclude, expected
+):
+ mod = weatherStationData.lastData(station, exclude, byId=True)
+ if mod:
+ assert sorted(mod) == expected
+ else:
+ assert mod == expected
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | 3.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock",
"requests-mock",
"freezegun"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==7.2.7
exceptiongroup==1.2.2
freezegun==1.5.1
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
oauthlib==3.2.2
packaging==24.0
pluggy==1.2.0
-e git+https://github.com/jabesq/netatmo-api-python.git@dbc69dc23708cfc5c717909f3627655dd0082676#egg=pyatmo
pytest==7.4.4
pytest-cov==4.1.0
pytest-mock==3.11.1
python-dateutil==2.9.0.post0
requests==2.31.0
requests-mock==1.12.1
requests-oauthlib==2.0.0
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: netatmo-api-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==3.4.1
- coverage==7.2.7
- exceptiongroup==1.2.2
- freezegun==1.5.1
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- oauthlib==3.2.2
- packaging==24.0
- pluggy==1.2.0
- pyatmo==3.2.0
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- python-dateutil==2.9.0.post0
- requests==2.31.0
- requests-mock==1.12.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/netatmo-api-python
| [
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[NoValidStation-None-False-expected2]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_byId[-None-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_byId[NoValidStation-None-expected2]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_bug_97[12:34:56:00:aa:01-None-expected2]"
] | [] | [
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_no_response",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_no_body",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_no_data",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_modulesNamesList[None-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_modulesNamesList[MyStation-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_getModules[None-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_getModules[MyStation-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_stationByName",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[Kitchen-None-12:34:56:07:bb:3e]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[Kitchen-MyStation-12:34:56:07:bb:3e]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[Kitchen-NoValidStation-None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[NetatmoIndoor-None-12:34:56:37:11:ca]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[NetatmoIndoor-MyStation-12:34:56:37:11:ca]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[-None-None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[--None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleByName[None-None-None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleById[12:34:56:07:bb:3e-None-12:34:56:07:bb:3e]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleById[12:34:56:07:bb:3e-12:34:56:37:11:ca-12:34:56:07:bb:3e]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleById[-None-None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleById[--None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_moduleById[None-None-None]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_monitoredConditions[None-12:34:56:07:bb:3e-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_monitoredConditions[Kitchen-None-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_monitoredConditions[Garden-None-expected2]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_monitoredConditions[Yard-None-expected3]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_monitoredConditions[NetatmoIndoor-None-expected4]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[MyStation-None-False-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[-None-False-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[None-1000000-False-expected3]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[None-798103-False-expected4]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData[None-798103-True-expected5]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_byId[12:34:56:37:11:ca-None-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_byId[12:34:56:37:11:ca-1000000-expected3]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_byId[12:34:56:37:11:ca-798103-expected4]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_checkNotUpdated[MyStation-3600-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_checkNotUpdated[None-3600-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_checkNotUpdated[-3600-expected2]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_checkUpdated[MyStation-798500-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_checkUpdated[None-798500-expected1]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_getMeasure[MyStation-scale-type-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_measurements",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_bug_97[12:34:56:37:11:ca-None-expected0]",
"tests/test_pyatmo_weatherstation.py::test_WeatherStationData_lastData_bug_97[None-None-expected1]"
] | [] | MIT License | 6,129 | 173 | [
"src/pyatmo/weather_station.py"
] |
|
briggySmalls__pyflipdot-5 | 1de9944550a1d5f49a29b5499ce4a44dba81d2cc | 2020-01-03 18:47:49 | 1de9944550a1d5f49a29b5499ce4a44dba81d2cc | diff --git a/pyflipdot/data.py b/pyflipdot/data.py
index b26b05f..2326941 100644
--- a/pyflipdot/data.py
+++ b/pyflipdot/data.py
@@ -16,13 +16,7 @@ _COMMAND_CODES = {
def _to_ascii_hex(value: bytes) -> bytes:
- def _bytes_to_ascii_hex(val: bytes) -> bytes:
- return val.hex().upper().encode('ASCII')
-
- try:
- return _bytes_to_ascii_hex(value)
- except AttributeError:
- return _bytes_to_ascii_hex(bytes([value]))
+ return value.hex().upper().encode('ASCII')
def _bytes_to_int(data: bytes) -> int:
@@ -125,7 +119,11 @@ class ImagePacket(Packet):
image_bytes = self.image_to_bytes(image)
# Start with the resolution (image byte count)
- payload = _to_ascii_hex(len(image_bytes))
+ # Note: we only ever send a single bytes-worth of info, even if the
+ # resolution is an integer bigger than 255
+ resolution_bytes = (len(image_bytes) & 0xFF).to_bytes(
+ 1, byteorder='big')
+ payload = _to_ascii_hex(resolution_bytes)
# Add the image bytes
payload += _to_ascii_hex(image_bytes)
| The script cannot support board like 128 * 32
the image will be too large, see below error
File "/usr/local/lib/python3.6/dist-packages/pyflipdot/data.py", line 23, in _to_ascii_hex
return _bytes_to_ascii_hex(value)
File "/usr/local/lib/python3.6/dist-packages/pyflipdot/data.py", line 20, in _bytes_to_ascii_hex
return val.hex().upper().encode('ASCII') | briggySmalls/pyflipdot | diff --git a/tests/test_data.py b/tests/test_data.py
index 011a49f..710d4ae 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -5,54 +5,68 @@ import numpy as np
from pyflipdot.data import ImagePacket, Packet
-class TestPackets(object):
- def test_no_payload(self):
- packet = Packet(1, 2)
- packet_data = packet.get_bytes()
-
- assert packet_data == b'\x0212\x039A'
-
- def test_with_payload(self):
- payload = b'345'
- packet = Packet(1, 2, payload)
- packet_data = packet.get_bytes()
-
- assert packet_data == b'\x0212345\x03FE'
-
- def test_image(self):
- # Send an image as below ('p' indicates byte alignment padding)
- # (0) | 1, 0 |
- # (1) | 0, 0 | -> [0x01, 0x00]
- # (2) | 0, 0 |
- # (3) | 0, 0 |
- image = np.full((3, 2), False)
- image[0, 0] = True
-
- packet = ImagePacket(1, image)
- packet_data = packet.get_bytes()
- assert packet_data == b'\x0211020100\x0378'
-
- def test_tall_image(self):
- # Send an image as below ('p' indicates byte alignment padding)
- # (0) | 1, 0 |
- # (1) | 0, 0 |
- # (2) | 0, 0 |
- # (3) | 0, 0 |
- # (4) | 0, 0 |
- # (5) | 0, 0 |
- # (6) | 0, 0 |
- # (7) | 0, 0 | -> | 0x01, 0x00 | -> [0x01, 0x02, 0x00, 0x00]
- # (8) | 0, 0 | | 0x02, 0x00 |
- # (9) | 1, 0 |
- # (10) | 0, 0 |
- # (11) | 0, 0 |
- # (12) | 0, 0 |
- # (13) | 0, 0 |
- # (14) | 0, 0 |
- image = np.full((15, 2), False)
- image[0, 0] = True
- image[9, 0] = True
-
- packet = ImagePacket(1, image)
- packet_data = packet.get_bytes()
- assert packet_data == b'\x02110401020000\x03B4'
+def test_no_payload():
+ packet = Packet(1, 2)
+ packet_data = packet.get_bytes()
+
+ assert packet_data == b'\x0212\x039A'
+
+
+def test_with_payload():
+ payload = b'345'
+ packet = Packet(1, 2, payload)
+ packet_data = packet.get_bytes()
+
+ assert packet_data == b'\x0212345\x03FE'
+
+
+def test_simple_image():
+ # Send an image as below ('p' indicates byte alignment padding)
+ # (0) | 1, 0 |
+ # (1) | 0, 0 | -> [0x01, 0x00]
+ # (2) | 0, 0 |
+ # (3) | 0, 0 |
+ image = np.full((3, 2), False)
+ image[0, 0] = True
+
+ packet = ImagePacket(1, image)
+ packet_data = packet.get_bytes()
+ assert packet_data == b'\x0211020100\x0378'
+
+
+def test_tall_image():
+ # Send an image as below ('p' indicates byte alignment padding)
+ # (0) | 1, 0 |
+ # (1) | 0, 0 |
+ # (2) | 0, 0 |
+ # (3) | 0, 0 |
+ # (4) | 0, 0 |
+ # (5) | 0, 0 |
+ # (6) | 0, 0 |
+ # (7) | 0, 0 | -> | 0x01, 0x00 | -> [0x01, 0x02, 0x00, 0x00]
+ # (8) | 0, 0 | | 0x02, 0x00 |
+ # (9) | 1, 0 |
+ # (10) | 0, 0 |
+ # (11) | 0, 0 |
+ # (12) | 0, 0 |
+ # (13) | 0, 0 |
+ # (14) | 0, 0 |
+ image = np.full((15, 2), False)
+ image[0, 0] = True
+ image[9, 0] = True
+
+ packet = ImagePacket(1, image)
+ packet_data = packet.get_bytes()
+ assert packet_data == b'\x02110401020000\x03B4'
+
+
+def test_large_image():
+ # Create an image that is 128x32 pixels
+ image = np.full((16, 128), True)
+
+ packet = ImagePacket(1, image)
+ packet_data = packet.get_bytes()
+ assert packet_data[:5] == b'\x021100'
+ for val in packet_data[7:-3]:
+ assert val.to_bytes(1, byteorder='big') == b'F'
+ assert packet_data[-3:] == b'\x033B'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "Pipfile",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
numpy==1.21.6
packaging==24.0
pipfile==0.0.2
pluggy==1.2.0
-e git+https://github.com/briggySmalls/pyflipdot.git@1de9944550a1d5f49a29b5499ce4a44dba81d2cc#egg=pyflipdot
pyserial==3.5
pytest==7.4.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: pyflipdot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- pipfile=0.0.2=py_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- numpy==1.21.6
- packaging==24.0
- pluggy==1.2.0
- pyserial==3.5
- pytest==7.4.4
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/pyflipdot
| [
"tests/test_data.py::test_large_image"
] | [] | [
"tests/test_data.py::test_no_payload",
"tests/test_data.py::test_with_payload",
"tests/test_data.py::test_simple_image",
"tests/test_data.py::test_tall_image"
] | [] | MIT License | 6,133 | 318 | [
"pyflipdot/data.py"
] |
|
hamcrest__PyHamcrest-129 | eb746246a13c4abd6141c6ab45724ec1c4a2efbe | 2020-01-06 15:42:02 | eb746246a13c4abd6141c6ab45724ec1c4a2efbe | coveralls:
[](https://coveralls.io/builds/27937258)
Coverage remained the same at ?% when pulling **ade263377bad8f2dc077de53a1c7c2f54023f511 on bittrance:raises-with-matchers** into **eb746246a13c4abd6141c6ab45724ec1c4a2efbe on hamcrest:master**.
coveralls:
[](https://coveralls.io/builds/27937258)
Coverage remained the same at ?% when pulling **ade263377bad8f2dc077de53a1c7c2f54023f511 on bittrance:raises-with-matchers** into **eb746246a13c4abd6141c6ab45724ec1c4a2efbe on hamcrest:master**.
coveralls:
[](https://coveralls.io/builds/27937258)
Coverage remained the same at ?% when pulling **ade263377bad8f2dc077de53a1c7c2f54023f511 on bittrance:raises-with-matchers** into **eb746246a13c4abd6141c6ab45724ec1c4a2efbe on hamcrest:master**.
coveralls:
[](https://coveralls.io/builds/27937258)
Coverage remained the same at ?% when pulling **ade263377bad8f2dc077de53a1c7c2f54023f511 on bittrance:raises-with-matchers** into **eb746246a13c4abd6141c6ab45724ec1c4a2efbe on hamcrest:master**.
| diff --git a/src/hamcrest/core/core/raises.py b/src/hamcrest/core/core/raises.py
index efe9e6c..67ec46c 100644
--- a/src/hamcrest/core/core/raises.py
+++ b/src/hamcrest/core/core/raises.py
@@ -13,8 +13,11 @@ __license__ = "BSD, see License.txt"
class Raises(BaseMatcher[Callable[..., Any]]):
- def __init__(self, expected: Exception, pattern: Optional[str] = None) -> None:
+ def __init__(
+ self, expected: Exception, pattern: Optional[str] = None, matching: Optional[Matcher] = None
+ ) -> None:
self.pattern = pattern
+ self.matcher = matching
self.expected = expected
self.actual = None # type: Optional[BaseException]
self.function = None # type: Optional[Callable[..., Any]]
@@ -35,7 +38,11 @@ class Raises(BaseMatcher[Callable[..., Any]]):
if isinstance(self.actual, cast(type, self.expected)):
if self.pattern is not None:
- return re.search(self.pattern, str(self.actual)) is not None
+ if re.search(self.pattern, str(self.actual)) is None:
+ return False
+ if self.matcher is not None:
+ if not self.matcher.matches(self.actual):
+ return False
return True
return False
@@ -55,12 +62,17 @@ class Raises(BaseMatcher[Callable[..., Any]]):
if self.actual is None:
description.append_text("No exception raised.")
- elif isinstance(self.actual, cast(type, self.expected)) and self.pattern is not None:
- description.append_text(
- 'Correct assertion type raised, but the expected pattern ("%s") not found.'
- % self.pattern
- )
- description.append_text('\n message was: "%s"' % str(self.actual))
+ elif isinstance(self.actual, cast(type, self.expected)):
+ if self.pattern is not None or self.matcher is not None:
+ description.append_text("Correct assertion type raised, but ")
+ if self.pattern is not None:
+ description.append_text('the expected pattern ("%s") ' % self.pattern)
+ if self.pattern is not None and self.matcher is not None:
+ description.append_text("and ")
+ if self.matcher is not None:
+ description.append_description_of(self.matcher)
+ description.append_text(" ")
+ description.append_text('not found. Exception message was: "%s"' % str(self.actual))
else:
description.append_text(
"%r of type %s was raised instead" % (self.actual, type(self.actual))
@@ -73,11 +85,12 @@ class Raises(BaseMatcher[Callable[..., Any]]):
)
-def raises(exception: Exception, pattern=None) -> Matcher[Callable[..., Any]]:
+def raises(exception: Exception, pattern=None, matching=None) -> Matcher[Callable[..., Any]]:
"""Matches if the called function raised the expected exception.
:param exception: The class of the expected exception
:param pattern: Optional regular expression to match exception message.
+ :param matching: Optional Hamcrest matchers to apply to the exception.
Expects the actual to be wrapped by using :py:func:`~hamcrest.core.core.raises.calling`,
or a callable taking no arguments.
@@ -88,8 +101,12 @@ def raises(exception: Exception, pattern=None) -> Matcher[Callable[..., Any]]:
assert_that(calling(int).with_args('q'), raises(TypeError))
assert_that(calling(parse, broken_input), raises(ValueError))
+ assert_that(
+ calling(valid_user, bad_json),
+ raises(HTTPError, matching=has_properties(status_code=500)
+ )
"""
- return Raises(exception, pattern)
+ return Raises(exception, pattern, matching)
class DeferredCallable(object):
| How to match exception properties?
I have a webapp with helper methods that raise `bottle.HTTPError` and I want to ensure that a raised exception has a `status_code` property, but it seems the calling/raises pair cannot directly assert this? E.g. something like
```
assert_that(
calling(helper).with_args(broken_input),
raises(HTTPError, has_properties(status_code=500))
)
```
Am I missing something obvious or is there no way to match structured details of an exception short of manually catching the exception first?
If this is indeed the case, would you be amenable to a PR implementing `raises(HTTPError, matching=has_properties(status_code=500))` or perhaps `raises(HTTPError).matching(has_properties(status_code=500))`? | hamcrest/PyHamcrest | diff --git a/tests/hamcrest_unit_test/core/raises_test.py b/tests/hamcrest_unit_test/core/raises_test.py
index e4fee61..519b7a2 100644
--- a/tests/hamcrest_unit_test/core/raises_test.py
+++ b/tests/hamcrest_unit_test/core/raises_test.py
@@ -2,7 +2,7 @@ import sys
import unittest
import pytest
-from hamcrest import not_
+from hamcrest import has_properties, not_
from hamcrest.core.core.raises import calling, raises
from hamcrest_unit_test.matcher_test import MatcherTest, assert_mismatch_description
@@ -28,6 +28,13 @@ def raise_baseException(*args, **kwargs):
raise SystemExit(str(args) + str(kwargs))
+def raise_exception_with_properties(**kwargs):
+ err = AssertionError("boom")
+ for k, v in kwargs.items():
+ setattr(err, k, v)
+ raise err
+
+
class RaisesTest(MatcherTest):
def testMatchesIfFunctionRaisesTheExactExceptionExpected(self):
self.assert_matches("Right exception", raises(AssertionError), calling(raise_exception))
@@ -72,6 +79,11 @@ class RaisesTest(MatcherTest):
self.assert_does_not_match(
"Bad regex", raises(AssertionError, "Phrase not found"), calling(raise_exception)
)
+ self.assert_mismatch_description(
+ '''Correct assertion type raised, but the expected pattern ("Phrase not found") not found. Exception message was: "(){}"''',
+ raises(AssertionError, "Phrase not found"),
+ calling(raise_exception),
+ )
def testMatchesRegularExpressionToStringifiedException(self):
self.assert_matches(
@@ -86,6 +98,37 @@ class RaisesTest(MatcherTest):
calling(raise_exception).with_args(3, 1, 4),
)
+ def testMachesIfRaisedExceptionMatchesAdditionalMatchers(self):
+ self.assert_matches(
+ "Properties",
+ raises(AssertionError, matching=has_properties(prip="prop")),
+ calling(raise_exception_with_properties).with_args(prip="prop"),
+ )
+
+ def testDoesNotMatchIfAdditionalMatchersDoesNotMatch(self):
+ self.assert_does_not_match(
+ "Bad properties",
+ raises(AssertionError, matching=has_properties(prop="prip")),
+ calling(raise_exception_with_properties).with_args(prip="prop"),
+ )
+ self.assert_mismatch_description(
+ '''Correct assertion type raised, but an object with a property 'prop' matching 'prip' not found. Exception message was: "boom"''',
+ raises(AssertionError, matching=has_properties(prop="prip")),
+ calling(raise_exception_with_properties).with_args(prip="prop"),
+ )
+
+ def testDoesNotMatchIfNeitherPatternOrMatcherMatch(self):
+ self.assert_does_not_match(
+ "Bad pattern and properties",
+ raises(AssertionError, pattern="asdf", matching=has_properties(prop="prip")),
+ calling(raise_exception_with_properties).with_args(prip="prop"),
+ )
+ self.assert_mismatch_description(
+ '''Correct assertion type raised, but the expected pattern ("asdf") and an object with a property 'prop' matching 'prip' not found. Exception message was: "boom"''',
+ raises(AssertionError, pattern="asdf", matching=has_properties(prop="prip")),
+ calling(raise_exception_with_properties).with_args(prip="prop"),
+ )
+
def testDescribeMismatchWillCallItemIfNotTheOriginalMatch(self):
function = Callable()
matcher = raises(AssertionError)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
-e git+https://github.com/hamcrest/PyHamcrest.git@eb746246a13c4abd6141c6ab45724ec1c4a2efbe#egg=PyHamcrest
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli==2.2.1
tzdata==2025.2
| name: PyHamcrest
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
prefix: /opt/conda/envs/PyHamcrest
| [
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchExceptionIfRegularExpressionDoesNotMatch",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchIfAdditionalMatchersDoesNotMatch",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchIfNeitherPatternOrMatcherMatch",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testMachesIfRaisedExceptionMatchesAdditionalMatchers"
] | [] | [
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDescribeMismatchWillCallItemIfNotTheOriginalMatch",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchIfFunctionDoesNotRaiseException",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchIfTheWrongExceptionTypeIsRaisedPy37",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testDoesNotMatchTypeErrorIfActualIsNotCallable",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testMatchesIfFunctionRaisesASubclassOfTheExpectedBaseException",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testMatchesIfFunctionRaisesASubclassOfTheExpectedException",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testMatchesIfFunctionRaisesTheExactExceptionExpected",
"tests/hamcrest_unit_test/core/raises_test.py::RaisesTest::testMatchesRegularExpressionToStringifiedException",
"tests/hamcrest_unit_test/core/raises_test.py::test_gives_correct_message_when_wrapped_with_is_not[but",
"tests/hamcrest_unit_test/core/raises_test.py::CallingTest::testCallingDoesNotImmediatelyExecuteFunction",
"tests/hamcrest_unit_test/core/raises_test.py::CallingTest::testCallingObjectCallsProvidedFunction",
"tests/hamcrest_unit_test/core/raises_test.py::CallingTest::testCallingWithFunctionReturnsObject",
"tests/hamcrest_unit_test/core/raises_test.py::CallingTest::testCallingWithFunctionSetsArgumentList"
] | [] | BSD License | 6,146 | 874 | [
"src/hamcrest/core/core/raises.py"
] |
spacetx__starfish-1718 | 38fb2d7e2eb7810300e4dbf9a469b84c701b1ffe | 2020-01-06 23:11:35 | 6ffbbbfd0bcc9f69379135d3bdc264f14a8dd1c5 | codecov-io: # [Codecov](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=h1) Report
> Merging [#1718](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=desc) into [master](https://codecov.io/gh/spacetx/starfish/commit/0f1420e521e7fae0561af6dff8b7894961641332?src=pr&el=desc) will **increase** coverage by `<.01%`.
> The diff coverage is `90.9%`.
[](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #1718 +/- ##
==========================================
+ Coverage 89.96% 89.97% +<.01%
==========================================
Files 246 246
Lines 9170 9176 +6
==========================================
+ Hits 8250 8256 +6
Misses 920 920
```
| [Impacted Files](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [starfish/core/test/test\_display.py](https://codecov.io/gh/spacetx/starfish/pull/1718/diff?src=pr&el=tree#diff-c3RhcmZpc2gvY29yZS90ZXN0L3Rlc3RfZGlzcGxheS5weQ==) | `60.86% <0%> (ø)` | :arrow_up: |
| [starfish/core/morphology/Filter/areafilter.py](https://codecov.io/gh/spacetx/starfish/pull/1718/diff?src=pr&el=tree#diff-c3RhcmZpc2gvY29yZS9tb3JwaG9sb2d5L0ZpbHRlci9hcmVhZmlsdGVyLnB5) | `100% <100%> (ø)` | :arrow_up: |
| [...ish/core/morphology/Filter/test/test\_areafilter.py](https://codecov.io/gh/spacetx/starfish/pull/1718/diff?src=pr&el=tree#diff-c3RhcmZpc2gvY29yZS9tb3JwaG9sb2d5L0ZpbHRlci90ZXN0L3Rlc3RfYXJlYWZpbHRlci5weQ==) | `100% <100%> (ø)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=footer). Last update [0f1420e...f9f1dd0](https://codecov.io/gh/spacetx/starfish/pull/1718?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/starfish/core/morphology/Filter/areafilter.py b/starfish/core/morphology/Filter/areafilter.py
index 963f18f5..f3f9c399 100644
--- a/starfish/core/morphology/Filter/areafilter.py
+++ b/starfish/core/morphology/Filter/areafilter.py
@@ -19,10 +19,15 @@ class AreaFilter(FilterAlgorithm):
collection.
"""
- def __init__(self, min_area: Optional[int], max_area: Optional[int]):
+ def __init__(self, *, min_area: Optional[int] = None, max_area: Optional[int] = None):
self._min_area = min_area
self._max_area = max_area
+ if (self._min_area is not None
+ and self._max_area is not None
+ and self._min_area > self._max_area):
+ raise ValueError(f"min_area ({min_area}) should be smaller than max_area ({max_area})")
+
def run(
self,
binary_mask_collection: BinaryMaskCollection,
| MinDistanceLabel needs kwargs and sanity checking
#### Description
<!-- Example: Joblib Error thrown when calling fit on LatentDirichletAllocation with evaluate_every > 0-->
#### Steps/Code to Reproduce
<!--
If the code is too long, feel free to put it in a public gist and link
it in the issue: https://gist.github.com
-->
#### Expected Results
<!-- Example: No error is thrown. Please paste or describe the expected results.-->
#### Actual Results
<!-- Please paste or specifically describe the actual output or traceback. -->
#### Versions
<!--
import platform; print(platform.platform())
import sys; print("Python", sys.version)
import numpy; print("NumPy", numpy.__version__)
import scipy; print("SciPy", scipy.__version__)
import skimage; print("scikit-image", skimage.__version__)
import pandas; print("pandas", pandas.__version__)
import sklearn; print("sklearn", sklearn.__version__)
import xarray; print("xarray", xarray.__version__)
import sympy; print("sympy", sympy.__version__)
import starfish; print("starfish", starfish.__version__)
-->
<!-- Thanks for contributing! -->
| spacetx/starfish | diff --git a/starfish/core/morphology/Filter/test/test_areafilter.py b/starfish/core/morphology/Filter/test/test_areafilter.py
index 32d85b1d..1a52c0e1 100644
--- a/starfish/core/morphology/Filter/test/test_areafilter.py
+++ b/starfish/core/morphology/Filter/test/test_areafilter.py
@@ -1,10 +1,12 @@
+import pytest
+
from starfish.core.morphology.binary_mask.test.factories import binary_mask_collection_2d
from ..areafilter import AreaFilter
def test_empty_filter():
input_mask_collection = binary_mask_collection_2d()
- output_mask_collection = AreaFilter(None, None).run(input_mask_collection)
+ output_mask_collection = AreaFilter().run(input_mask_collection)
assert len(output_mask_collection) == len(input_mask_collection)
for mask_num in range(len(input_mask_collection)):
@@ -15,11 +17,16 @@ def test_empty_filter():
def test_min_area():
input_mask_collection = binary_mask_collection_2d()
- output_mask_collection = AreaFilter(6, None).run(input_mask_collection)
+ output_mask_collection = AreaFilter(min_area=6).run(input_mask_collection)
assert len(output_mask_collection) == 1
def test_max_area():
input_mask_collection = binary_mask_collection_2d()
- output_mask_collection = AreaFilter(None, 5).run(input_mask_collection)
+ output_mask_collection = AreaFilter(max_area=5).run(input_mask_collection)
assert len(output_mask_collection) == 1
+
+
+def test_illegal_areas():
+ with pytest.raises(ValueError):
+ AreaFilter(min_area=7, max_area=1)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"REQUIREMENTS.txt",
"REQUIREMENTS-CI.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
backcall==0.2.0
bleach==4.1.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
comm==0.1.4
coverage==6.2
cycler==0.11.0
dataclasses==0.6
decorator==4.4.2
defusedxml==0.7.1
diskcache==5.6.3
entrypoints==0.4
execnet==1.9.0
idna==3.10
imageio==2.15.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
jmespath==0.10.0
joblib==1.1.1
jsonschema==3.2.0
jupyter-client==7.1.2
jupyter-core==4.9.2
jupyterlab-pygments==0.1.2
jupyterlab_widgets==1.1.11
kiwisolver==1.3.1
MarkupSafe==2.0.1
matplotlib==3.3.4
mistune==0.8.4
mpmath==1.3.0
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
networkx==2.5.1
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
Pillow==8.4.0
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
pytest-xdist==3.0.2
python-dateutil==2.8.0
pytz==2025.2
PyWavelets==1.1.1
PyYAML==6.0.1
pyzmq==25.1.2
regional==1.1.2
requests==2.27.1
s3transfer==0.5.2
scikit-image==0.17.2
scikit-learn==0.24.2
scipy==1.5.4
semantic-version==2.10.0
Send2Trash==1.8.3
showit==1.1.4
six==1.17.0
slicedimage==4.1.1
-e git+https://github.com/spacetx/starfish.git@38fb2d7e2eb7810300e4dbf9a469b84c701b1ffe#egg=starfish
sympy==1.9
terminado==0.12.1
testpath==0.6.0
threadpoolctl==3.1.0
tifffile==2020.9.3
tomli==1.2.3
tornado==6.1
tqdm==4.64.1
trackpy==0.5.0
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
validators==0.20.0
wcwidth==0.2.13
webencodings==0.5.1
widgetsnbextension==3.6.10
xarray==0.16.2
zipp==3.6.0
| name: starfish
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- backcall==0.2.0
- bleach==4.1.0
- boto3==1.23.10
- botocore==1.26.10
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- comm==0.1.4
- coverage==6.2
- cycler==0.11.0
- dataclasses==0.6
- decorator==4.4.2
- defusedxml==0.7.1
- diskcache==5.6.3
- entrypoints==0.4
- execnet==1.9.0
- idna==3.10
- imageio==2.15.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- jmespath==0.10.0
- joblib==1.1.1
- jsonschema==3.2.0
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- jupyterlab-pygments==0.1.2
- jupyterlab-widgets==1.1.11
- kiwisolver==1.3.1
- markupsafe==2.0.1
- matplotlib==3.3.4
- mistune==0.8.4
- mpmath==1.3.0
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- networkx==2.5.1
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==8.4.0
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-xdist==3.0.2
- python-dateutil==2.8.0
- pytz==2025.2
- pywavelets==1.1.1
- pyyaml==6.0.1
- pyzmq==25.1.2
- regional==1.1.2
- requests==2.27.1
- s3transfer==0.5.2
- scikit-image==0.17.2
- scikit-learn==0.24.2
- scipy==1.5.4
- semantic-version==2.10.0
- send2trash==1.8.3
- showit==1.1.4
- six==1.17.0
- slicedimage==4.1.1
- sympy==1.9
- terminado==0.12.1
- testpath==0.6.0
- threadpoolctl==3.1.0
- tifffile==2020.9.3
- tomli==1.2.3
- tornado==6.1
- tqdm==4.64.1
- trackpy==0.5.0
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- validators==0.20.0
- wcwidth==0.2.13
- webencodings==0.5.1
- widgetsnbextension==3.6.10
- xarray==0.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/starfish
| [
"starfish/core/morphology/Filter/test/test_areafilter.py::test_empty_filter",
"starfish/core/morphology/Filter/test/test_areafilter.py::test_min_area",
"starfish/core/morphology/Filter/test/test_areafilter.py::test_max_area",
"starfish/core/morphology/Filter/test/test_areafilter.py::test_illegal_areas"
] | [] | [] | [] | MIT License | 6,148 | 258 | [
"starfish/core/morphology/Filter/areafilter.py"
] |
antirotor__speedcopy-6 | 78dee74fa0f0aef2c61f9f48f6389969f63272f7 | 2020-01-09 11:44:18 | 78dee74fa0f0aef2c61f9f48f6389969f63272f7 | diff --git a/speedcopy/__init__.py b/speedcopy/__init__.py
index c7b2c99..e681fd7 100644
--- a/speedcopy/__init__.py
+++ b/speedcopy/__init__.py
@@ -144,8 +144,8 @@ if not sys.platform.startswith("win32"):
os.symlink(os.readlink(src), dst)
else:
fs_src_type = FilesystemInfo().filesystem(src.encode('utf-8'))
- fs_dst_type = FilesystemInfo().filesystem(
- os.path.dirname(dst.encode('utf-8')))
+ dst_dir_path = os.path.normpath(os.path.dirname(dst.encode('utf-8'))) # noqa: E501
+ fs_dst_type = FilesystemInfo().filesystem(dst_dir_path)
supported_fs = ['CIFS', 'SMB2']
debug(">>> Source FS: {}".format(fs_src_type))
debug(">>> Destination FS: {}".format(fs_dst_type))
@@ -218,8 +218,8 @@ else:
ctypes.c_void_p)
copyfile.restype = ctypes.HRESULT
- source_file = os.path.normpath(src)
- dest_file = os.path.normpath(dst)
+ source_file = os.path.abspath(os.path.normpath(src))
+ dest_file = os.path.abspath(os.path.normpath(dst))
if source_file.startswith('\\\\'):
source_file = 'UNC\\' + source_file[2:]
if dest_file.startswith('\\\\'):
| Copying doesn't work if non-absolute filenames supplied
```
speedcopy.copyfile("C:/Temp/asdf", "C:/Temp/fdsa")
```
should work exactly the same as
```
os.chdir("C:/Temp")
speedcopy.copyfile("asdf", "fdsa")
```
| antirotor/speedcopy | diff --git a/tests/test_speedcopy.py b/tests/test_speedcopy.py
index bea8ba4..ec69756 100644
--- a/tests/test_speedcopy.py
+++ b/tests/test_speedcopy.py
@@ -3,6 +3,7 @@ import speedcopy
import os
speedcopy.SPEEDCOPY_DEBUG = True
+_FILE_SIZE = 5 * 1024 * 1024
def setup_function(function):
@@ -13,11 +14,11 @@ def teadown_function(function):
speedcopy.unpatch_copyfile()
-def test_copy(tmpdir):
+def test_copy_abs(tmpdir):
src = tmpdir.join("source")
dst = tmpdir.join("destination")
with open(str(src), "wb") as f:
- f.write(os.urandom(5 * 1024 * 1024))
+ f.write(os.urandom(_FILE_SIZE))
f.close()
shutil.copyfile(str(src), str(dst))
@@ -25,6 +26,24 @@ def test_copy(tmpdir):
assert os.path.isfile(str(dst))
+def test_copy_rel(tmpdir):
+ cwd = os.getcwd()
+ os.chdir(str(tmpdir))
+
+ try:
+ src = "source"
+ dst = "destination"
+ with open(str(src), "wb") as f:
+ f.write(os.urandom(_FILE_SIZE))
+ f.close()
+
+ shutil.copyfile(str(src), str(dst))
+
+ assert os.path.isfile(str(dst))
+ finally:
+ os.chdir(cwd)
+
+
def test_patch():
assert shutil.copyfile == speedcopy.copyfile
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pysendfile",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pysendfile==2.0.1
pytest==6.2.4
-e git+https://github.com/antirotor/speedcopy.git@78dee74fa0f0aef2c61f9f48f6389969f63272f7#egg=speedcopy
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: speedcopy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- pysendfile==2.0.1
prefix: /opt/conda/envs/speedcopy
| [
"tests/test_speedcopy.py::test_copy_rel"
] | [] | [
"tests/test_speedcopy.py::test_copy_abs",
"tests/test_speedcopy.py::test_patch",
"tests/test_speedcopy.py::test_unpatch"
] | [] | Apache License 2.0 | 6,172 | 337 | [
"speedcopy/__init__.py"
] |
|
googleapis__google-auth-library-python-421 | af29c1a9fd9282b38867961e4053f74f018a3815 | 2020-01-09 21:41:07 | 1b9de8dfbe4523f3170e47985ab523cb7865de48 | diff --git a/google/auth/iam.py b/google/auth/iam.py
index a438726..0ab5b55 100644
--- a/google/auth/iam.py
+++ b/google/auth/iam.py
@@ -70,7 +70,9 @@ class Signer(crypt.Signer):
method = "POST"
url = _SIGN_BLOB_URI.format(self._service_account_email)
headers = {}
- body = json.dumps({"bytesToSign": base64.b64encode(message).decode("utf-8")})
+ body = json.dumps(
+ {"bytesToSign": base64.b64encode(message).decode("utf-8")}
+ ).encode("utf-8")
self._credentials.before_request(self._request, method, url, headers)
response = self._request(url=url, method=method, body=body, headers=headers)
diff --git a/google/auth/impersonated_credentials.py b/google/auth/impersonated_credentials.py
index 70fa5dc..bc7031e 100644
--- a/google/auth/impersonated_credentials.py
+++ b/google/auth/impersonated_credentials.py
@@ -84,7 +84,7 @@ def _make_iam_token_request(request, principal, headers, body):
"""
iam_endpoint = _IAM_ENDPOINT.format(principal)
- body = json.dumps(body)
+ body = json.dumps(body).encode("utf-8")
response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
diff --git a/google/oauth2/_client.py b/google/oauth2/_client.py
index 4cf7a7f..4ba31a8 100644
--- a/google/oauth2/_client.py
+++ b/google/oauth2/_client.py
@@ -95,7 +95,7 @@ def _token_endpoint_request(request, token_uri, body):
google.auth.exceptions.RefreshError: If the token endpoint returned
an error.
"""
- body = urllib.parse.urlencode(body)
+ body = urllib.parse.urlencode(body).encode("utf-8")
headers = {"content-type": _URLENCODED_CONTENT_TYPE}
retry = 0
| google.auth.transport.Request is called with type(body) == str
According to eg https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.default, classes implementing google.auth.transport.Request will be called with a *body* parameter of type bytes. This makes sense, because the request agent is in no position to decide how a str body should be encoded for transport.
Unfortunately, in some cases the google.auth module does pass a str value. One such call is made in https://github.com/googleapis/google-auth-library-python/blob/137b43ba002d422c68e7bfacdafa54986f38500b/google/oauth2/_client.py#L100, but there may be more.
index 9cf59eb..052390a 100644
--- a/tests/oauth2/test__client.py
+++ b/tests/oauth2/test__client.py
@@ -96,7 +96,7 @@ def test__token_endpoint_request():
method="POST",
url="http://example.com",
headers={"content-type": "application/x-www-form-urlencoded"},
- body="test=params",
+ body="test=params".encode("utf-8"),
)
# Check result
@@ -131,7 +131,7 @@ def test__token_endpoint_request_internal_failure_error():
def verify_request_params(request, params):
- request_body = request.call_args[1]["body"]
+ request_body = request.call_args[1]["body"].decode("utf-8")
request_params = urllib.parse.parse_qs(request_body)
for key, value in six.iteritems(params):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flask",
"freezegun",
"mock",
"oauth2client",
"pytest",
"pytest-cov",
"pytest-localserver",
"requests",
"urllib3",
"cryptography",
"grpcio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | blinker==1.9.0
cachetools==4.2.4
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
Flask==3.1.0
freezegun==1.5.1
-e git+https://github.com/googleapis/google-auth-library-python.git@af29c1a9fd9282b38867961e4053f74f018a3815#egg=google_auth
grpcio==1.71.0
httplib2==0.22.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
itsdangerous==2.2.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
oauth2client==4.1.3
packaging==24.2
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
pyparsing==3.2.3
pytest==8.3.5
pytest-cov==6.0.0
pytest-localserver==0.9.0.post0
python-dateutil==2.9.0.post0
requests==2.32.3
rsa==4.0
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
zipp==3.21.0
| name: google-auth-library-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- blinker==1.9.0
- cachetools==4.2.4
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- flask==3.1.0
- freezegun==1.5.1
- grpcio==1.71.0
- httplib2==0.22.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- itsdangerous==2.2.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.2
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-localserver==0.9.0.post0
- python-dateutil==2.9.0.post0
- requests==2.32.3
- rsa==4.0
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/google-auth-library-python
| [
"tests/oauth2/test__client.py::test__token_endpoint_request",
"tests/oauth2/test__client.py::test_jwt_grant",
"tests/oauth2/test__client.py::test_id_token_jwt_grant",
"tests/oauth2/test__client.py::test_refresh_grant",
"tests/oauth2/test__client.py::test_refresh_grant_with_scopes"
] | [] | [
"tests/oauth2/test__client.py::test__handle_error_response",
"tests/oauth2/test__client.py::test__handle_error_response_non_json",
"tests/oauth2/test__client.py::test__parse_expiry",
"tests/oauth2/test__client.py::test__parse_expiry_none",
"tests/oauth2/test__client.py::test__token_endpoint_request_error",
"tests/oauth2/test__client.py::test__token_endpoint_request_internal_failure_error",
"tests/oauth2/test__client.py::test_jwt_grant_no_access_token",
"tests/oauth2/test__client.py::test_id_token_jwt_grant_no_access_token",
"tests/oauth2/test__client.py::test_refresh_grant_no_access_token"
] | [] | Apache License 2.0 | 6,174 | 504 | [
"google/auth/iam.py",
"google/auth/impersonated_credentials.py",
"google/oauth2/_client.py"
] |
|
iterative__dvc-3132 | fe635a5040c9d593cd1bdef3fa31f4df7af85259 | 2020-01-14 00:23:27 | 7f5e4a9fe03f2695219fa6cde0fea208f2a81cf3 | diff --git a/dvc/remote/config.py b/dvc/remote/config.py
index 87572bd1d..b409e7eab 100644
--- a/dvc/remote/config.py
+++ b/dvc/remote/config.py
@@ -3,8 +3,7 @@ import os
import posixpath
from urllib.parse import urlparse
-from dvc.config import Config
-from dvc.config import ConfigError
+from dvc.config import Config, ConfigError
from dvc.utils import relpath
@@ -134,6 +133,8 @@ class RemoteConfig(object):
break
def modify(self, name, option, value, level=None):
+ self.get_settings(name)
+
self.config.set(
Config.SECTION_REMOTE_FMT.format(name), option, value, level=level
)
| remote: allow modifying only existing entries
**DVC version**: `0.81.0`
Using `dvc remote modify` on a non-existing entry creates an invalid remote configuration, since it creates a remote without a URL.
```bash
dvc init --no-scm
dvc remote modify missing <option> <value>
```
Ideally, raising an error should prevent the user from introducing an invalid remote entry.
[Discord discussion](https://discordapp.com/channels/485586884165107732/485596304961962003/666411899499315221) | iterative/dvc | diff --git a/tests/func/test_remote.py b/tests/func/test_remote.py
index 777666bd9..09674eff3 100644
--- a/tests/func/test_remote.py
+++ b/tests/func/test_remote.py
@@ -6,7 +6,7 @@ import configobj
import pytest
from mock import patch
-from dvc.config import Config
+from dvc.config import Config, ConfigError
from dvc.exceptions import DownloadError, UploadError
from dvc.main import main
from dvc.path_info import PathInfo
@@ -31,10 +31,10 @@ class TestRemote(TestDvc):
self.assertEqual(main(["remote", "list"]), 0)
- self.assertEqual(main(["remote", "remove", remotes[0]]), 0)
self.assertEqual(
main(["remote", "modify", remotes[0], "option", "value"]), 0
)
+ self.assertEqual(main(["remote", "remove", remotes[0]]), 0)
self.assertEqual(main(["remote", "list"]), 0)
@@ -250,3 +250,10 @@ def test_raise_on_too_many_open_files(tmp_dir, dvc, tmp_path_factory, mocker):
with pytest.raises(OSError) as e:
dvc.push()
assert e.errno == errno.EMFILE
+
+
+def test_modify_missing_remote(dvc):
+ remote_config = RemoteConfig(dvc.config)
+
+ with pytest.raises(ConfigError, match=r"unable to find remote section"):
+ remote_config.modify("myremote", "gdrive_client_id", "xxx")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.81 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@fe635a5040c9d593cd1bdef3fa31f4df7af85259#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive @ git+https://github.com/gsuitedevs/PyDrive@42022f9a1c48f435438fce74ad4032ec9f34cfd1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.81.0+fe635a
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive==1.3.1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_remote.py::test_modify_missing_remote"
] | [] | [
"tests/func/test_remote.py::TestRemote::test",
"tests/func/test_remote.py::TestRemote::test_overwrite",
"tests/func/test_remote.py::TestRemote::test_referencing_other_remotes",
"tests/func/test_remote.py::TestRemote::test_relative_path",
"tests/func/test_remote.py::TestRemoteRemoveDefault::test",
"tests/func/test_remote.py::TestRemoteRemove::test",
"tests/func/test_remote.py::TestRemoteDefault::test",
"tests/func/test_remote.py::test_show_default",
"tests/func/test_remote.py::TestRemoteShouldHandleUppercaseRemoteName::test",
"tests/func/test_remote.py::test_dir_checksum_should_be_key_order_agnostic",
"tests/func/test_remote.py::test_partial_push_n_pull",
"tests/func/test_remote.py::test_raise_on_too_many_open_files"
] | [] | Apache License 2.0 | 6,207 | 189 | [
"dvc/remote/config.py"
] |
|
keis__base58-46 | 9418945639480b9a428ccabe6cd0c9becac24df6 | 2020-01-14 10:05:45 | 9418945639480b9a428ccabe6cd0c9becac24df6 | diff --git a/base58.py b/base58.py
index f1bded6..71c49c4 100644
--- a/base58.py
+++ b/base58.py
@@ -103,10 +103,13 @@ def b58decode(
return b'\0' * (origlen - newlen) + bytes(reversed(result))
-def b58encode_check(v: bytes, alphabet: bytes = BITCOIN_ALPHABET) -> bytes:
+def b58encode_check(
+ v: Union[str, bytes], alphabet: bytes = BITCOIN_ALPHABET
+) -> bytes:
"""
Encode a string using Base58 with a 4 character checksum
"""
+ v = scrub_input(v)
digest = sha256(sha256(v).digest()).digest()
return b58encode(v + digest[:4], alphabet=alphabet)
| b58encode_check does not take a str as input
b58encode_check only takes `bytes` as input. This is inconsistent with b58encode, which can handle a `Union[str, bytes]` as input. | keis/base58 | diff --git a/test_base58.py b/test_base58.py
index f007ee8..98d921b 100644
--- a/test_base58.py
+++ b/test_base58.py
@@ -76,6 +76,14 @@ def test_check_identity():
assert_that(out, equal_to(data))
+def test_check_str():
+ data = 'hello world'
+ out = b58encode_check(data)
+ assert_that(out, equal_to(b'3vQB7B6MrGQZaxCuFg4oh'))
+ back = b58decode_check(out)
+ assert_that(back, b'hello world')
+
+
def test_check_failure():
data = '3vQB7B6MrGQZaxCuFg4oH'
with assert_raises(ValueError):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-flake8",
"pytest-cov",
"PyHamcrest",
"coveralls"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/keis/base58.git@9418945639480b9a428ccabe6cd0c9becac24df6#egg=base58
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.13.0
pyflakes==3.3.2
PyHamcrest==2.1.0
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
pytest-flake8==1.3.0
requests==2.32.3
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
| name: base58
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- flake8==7.2.0
- idna==3.10
- mccabe==0.7.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pyhamcrest==2.1.0
- pytest-cov==6.0.0
- pytest-flake8==1.3.0
- requests==2.32.3
- urllib3==2.3.0
prefix: /opt/conda/envs/base58
| [
"test_base58.py::test_check_str"
] | [] | [
"test_base58.py::test_simple_encode",
"test_base58.py::test_leadingz_encode",
"test_base58.py::test_encode_empty",
"test_base58.py::test_simple_decode",
"test_base58.py::test_simple_decode_bytes",
"test_base58.py::test_leadingz_decode",
"test_base58.py::test_leadingz_decode_bytes",
"test_base58.py::test_empty_decode",
"test_base58.py::test_empty_decode_bytes",
"test_base58.py::test_check_identity",
"test_base58.py::test_check_failure",
"test_base58.py::test_round_trips",
"test_base58.py::test_simple_integers",
"test_base58.py::test_large_integer",
"test_base58.py::test_alphabet_alias_exists_and_equals_bitcoin_alphabet"
] | [] | MIT License | 6,211 | 213 | [
"base58.py"
] |
|
iterative__dvc-3150 | 0be0931fcda067f79233a6e13c2b34c5a7fc62f7 | 2020-01-14 18:03:35 | 7f5e4a9fe03f2695219fa6cde0fea208f2a81cf3 | diff --git a/dvc/dependency/repo.py b/dvc/dependency/repo.py
index 3e1878f9c..3bcfd3e65 100644
--- a/dvc/dependency/repo.py
+++ b/dvc/dependency/repo.py
@@ -9,8 +9,10 @@ from dvc.external_repo import cached_clone
from dvc.external_repo import external_repo
from dvc.exceptions import NotDvcRepoError
from dvc.exceptions import OutputNotFoundError
+from dvc.exceptions import NoOutputInExternalRepoError
from dvc.exceptions import PathMissingError
from dvc.utils.fs import fs_copy
+from dvc.path_info import PathInfo
from dvc.scm import SCM
@@ -46,14 +48,31 @@ class DependencyREPO(DependencyLOCAL):
with external_repo(**merge(self.def_repo, overrides)) as repo:
yield repo
- def status(self):
- with self._make_repo() as repo:
- current = repo.find_out_by_relpath(self.def_path).info
+ def _get_checksum(self, updated=False):
+ rev_lock = None
+ if not updated:
+ rev_lock = self.def_repo.get(self.PARAM_REV_LOCK)
- with self._make_repo(rev_lock=None) as repo:
- updated = repo.find_out_by_relpath(self.def_path).info
+ try:
+ with self._make_repo(rev_lock=rev_lock) as repo:
+ return repo.find_out_by_relpath(self.def_path).info["md5"]
+ except (NotDvcRepoError, NoOutputInExternalRepoError):
+ # Fall through and clone
+ pass
+
+ repo_path = cached_clone(
+ self.def_repo[self.PARAM_URL],
+ rev=rev_lock or self.def_repo.get(self.PARAM_REV),
+ )
+ path = PathInfo(os.path.join(repo_path, self.def_path))
+
+ return self.repo.cache.local.get_checksum(path)
+
+ def status(self):
+ current_checksum = self._get_checksum(updated=False)
+ updated_checksum = self._get_checksum(updated=True)
- if current != updated:
+ if current_checksum != updated_checksum:
return {str(self): "update available"}
return {}
diff --git a/dvc/repo/status.py b/dvc/repo/status.py
index 2e9a7a948..3528e6243 100644
--- a/dvc/repo/status.py
+++ b/dvc/repo/status.py
@@ -10,16 +10,11 @@ from . import locked
logger = logging.getLogger(__name__)
-def _local_status(self, targets=None, with_deps=False):
+def _joint_status(stages):
status = {}
- if targets:
- stages = cat(self.collect(t, with_deps=with_deps) for t in targets)
- else:
- stages = self.collect(None, with_deps=with_deps)
-
for stage in stages:
- if stage.locked:
+ if stage.locked and not stage.is_repo_import:
logger.warning(
"DVC-file '{path}' is locked. Its dependencies are"
" not going to be shown in the status output.".format(
@@ -27,11 +22,20 @@ def _local_status(self, targets=None, with_deps=False):
)
)
- status.update(stage.status())
+ status.update(stage.status(check_updates=True))
return status
+def _local_status(self, targets=None, with_deps=False):
+ if targets:
+ stages = cat(self.collect(t, with_deps=with_deps) for t in targets)
+ else:
+ stages = self.collect(None, with_deps=with_deps)
+
+ return _joint_status(stages)
+
+
def _cloud_status(
self,
targets=None,
diff --git a/dvc/stage.py b/dvc/stage.py
index 449cb261f..28076bad6 100644
--- a/dvc/stage.py
+++ b/dvc/stage.py
@@ -1006,10 +1006,12 @@ class Stage(object):
return ret
@rwlocked(read=["deps", "outs"])
- def status(self):
+ def status(self, check_updates=False):
ret = []
- if not self.locked:
+ show_import = self.is_repo_import and check_updates
+
+ if not self.locked or show_import:
deps_status = self._status(self.deps)
if deps_status:
ret.append({"changed deps": deps_status})
| status: implement support for import-ed files
See https://github.com/iterative/dvc/pull/2889#discussion_r357950906 for context | iterative/dvc | diff --git a/tests/func/test_status.py b/tests/func/test_status.py
index e4de03852..8b8d3d2ba 100644
--- a/tests/func/test_status.py
+++ b/tests/func/test_status.py
@@ -1,8 +1,12 @@
import os
+import shutil
from mock import patch
+from dvc.repo import Repo
from dvc.main import main
+from dvc.compat import fspath
+from dvc.external_repo import clean_repos
from tests.basic_env import TestDvc
@@ -23,3 +27,72 @@ class TestStatus(TestDvc):
def test_implied_cloud(self, mock_status):
main(["status", "--remote", "something"])
mock_status.assert_called()
+
+
+def test_status_non_dvc_repo_import(tmp_dir, dvc, erepo_dir):
+ with erepo_dir.branch("branch", new=True), erepo_dir.chdir():
+ erepo_dir.scm.repo.index.remove([".dvc"], r=True)
+ shutil.rmtree(".dvc")
+ erepo_dir.scm_gen("file", "first version")
+ erepo_dir.scm.add(["file"])
+ erepo_dir.scm.commit("first version")
+
+ dvc.imp(fspath(erepo_dir), "file", "file", rev="branch")
+
+ status = dvc.status(["file.dvc"])
+
+ assert status == {}
+
+ # Caching in external repos doesn't see upstream updates within single
+ # cli call, so we need to clean the caches to see the changes.
+ clean_repos()
+
+ with erepo_dir.branch("branch", new=False), erepo_dir.chdir():
+ erepo_dir.scm_gen("file", "second_version", commit="update file")
+ erepo_dir.scm.add(["file"])
+ erepo_dir.scm.commit("first version")
+
+ status, = dvc.status(["file.dvc"])["file.dvc"]
+
+ assert status == {
+ "changed deps": {
+ "file ({})".format(fspath(erepo_dir)): "update available"
+ }
+ }
+
+
+def test_status_before_and_after_dvc_init(tmp_dir, dvc, erepo_dir):
+ with erepo_dir.chdir():
+ erepo_dir.scm.repo.index.remove([".dvc"], r=True)
+ shutil.rmtree(".dvc")
+ erepo_dir.scm_gen("file", "first version")
+ erepo_dir.scm.add(["file"])
+ erepo_dir.scm.commit("first version")
+ old_rev = erepo_dir.scm.get_rev()
+
+ dvc.imp(fspath(erepo_dir), "file", "file")
+
+ assert dvc.status(["file.dvc"]) == {}
+
+ with erepo_dir.chdir():
+ Repo.init()
+ erepo_dir.scm.repo.index.remove(["file"])
+ os.remove("file")
+ erepo_dir.dvc_gen("file", "second version")
+ erepo_dir.scm.add([".dvc", "file.dvc"])
+ erepo_dir.scm.commit("version with dvc")
+ new_rev = erepo_dir.scm.get_rev()
+
+ assert old_rev != new_rev
+
+ # Caching in external repos doesn't see upstream updates within single
+ # cli call, so we need to clean the caches to see the changes.
+ clean_repos()
+
+ status, = dvc.status(["file.dvc"])["file.dvc"]
+
+ assert status == {
+ "changed deps": {
+ "file ({})".format(fspath(erepo_dir)): "update available"
+ }
+ }
diff --git a/tests/func/test_update.py b/tests/func/test_update.py
index 5afbac048..6f483fff0 100644
--- a/tests/func/test_update.py
+++ b/tests/func/test_update.py
@@ -1,5 +1,8 @@
import pytest
+import os
+import shutil
+from dvc.repo import Repo
from dvc.stage import Stage
from dvc.compat import fspath
from dvc.external_repo import clean_repos
@@ -36,8 +39,64 @@ def test_update_import(tmp_dir, dvc, erepo_dir, cached):
# cli call, so we need to clean the caches to see the changes.
clean_repos()
+ status, = dvc.status([stage.path])["version.dvc"]
+ changed_dep, = list(status["changed deps"].items())
+ assert changed_dep[0].startswith("version ")
+ assert changed_dep[1] == "update available"
+
+ dvc.update(stage.path)
+
assert dvc.status([stage.path]) == {}
+
+ assert imported.is_file()
+ assert imported.read_text() == "updated"
+
+ stage = Stage.load(dvc, stage.path)
+ assert stage.deps[0].def_repo == {
+ "url": fspath(erepo_dir),
+ "rev": "branch",
+ "rev_lock": new_rev,
+ }
+
+
+def test_update_import_after_remote_updates_to_dvc(tmp_dir, dvc, erepo_dir):
+ old_rev = None
+ with erepo_dir.branch("branch", new=True), erepo_dir.chdir():
+ erepo_dir.scm_gen("version", "branch", commit="add version file")
+ old_rev = erepo_dir.scm.get_rev()
+
+ stage = dvc.imp(fspath(erepo_dir), "version", "version", rev="branch")
+
+ imported = tmp_dir / "version"
+ assert imported.is_file()
+ assert imported.read_text() == "branch"
+ assert stage.deps[0].def_repo == {
+ "url": fspath(erepo_dir),
+ "rev": "branch",
+ "rev_lock": old_rev,
+ }
+
+ new_rev = None
+ with erepo_dir.branch("branch", new=False), erepo_dir.chdir():
+ erepo_dir.scm.repo.index.remove("version")
+ erepo_dir.dvc_gen("version", "updated")
+ erepo_dir.scm.add(["version", "version.dvc"])
+ erepo_dir.scm.commit("upgrade to DVC tracking")
+ new_rev = erepo_dir.scm.get_rev()
+
+ assert old_rev != new_rev
+
+ # Caching in external repos doesn't see upstream updates within single
+ # cli call, so we need to clean the caches to see the changes.
+ clean_repos()
+
+ status, = dvc.status([stage.path])["version.dvc"]
+ changed_dep, = list(status["changed deps"].items())
+ assert changed_dep[0].startswith("version ")
+ assert changed_dep[1] == "update available"
+
dvc.update(stage.path)
+
assert dvc.status([stage.path]) == {}
assert imported.is_file()
@@ -51,6 +110,48 @@ def test_update_import(tmp_dir, dvc, erepo_dir, cached):
}
+def test_update_before_and_after_dvc_init(tmp_dir, dvc, erepo_dir):
+ with erepo_dir.chdir():
+ erepo_dir.scm.repo.index.remove([".dvc"], r=True)
+ shutil.rmtree(".dvc")
+ erepo_dir.scm_gen("file", "first version")
+ erepo_dir.scm.add(["file"])
+ erepo_dir.scm.commit("first version")
+ old_rev = erepo_dir.scm.get_rev()
+
+ stage = dvc.imp(fspath(erepo_dir), "file", "file")
+
+ with erepo_dir.chdir():
+ Repo.init()
+ erepo_dir.scm.repo.index.remove(["file"])
+ os.remove("file")
+ erepo_dir.dvc_gen("file", "second version")
+ erepo_dir.scm.add([".dvc", "file.dvc"])
+ erepo_dir.scm.commit("version with dvc")
+ new_rev = erepo_dir.scm.get_rev()
+
+ assert old_rev != new_rev
+
+ # Caching in external repos doesn't see upstream updates within single
+ # cli call, so we need to clean the caches to see the changes.
+ clean_repos()
+
+ assert dvc.status([stage.path]) == {
+ "file.dvc": [
+ {
+ "changed deps": {
+ "file ({})".format(fspath(erepo_dir)): "update available"
+ }
+ }
+ ]
+ }
+
+ dvc.update(stage.path)
+
+ assert (tmp_dir / "file").read_text() == "second version"
+ assert dvc.status([stage.path]) == {}
+
+
def test_update_import_url(tmp_dir, dvc, tmp_path_factory):
import_src = tmp_path_factory.mktemp("import_url_source")
src = import_src / "file"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 3
} | 0.81 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
configparser==5.3.0
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@0be0931fcda067f79233a6e13c2b34c5a7fc62f7#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flatten-dict==0.4.2
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive==1.3.1
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
texttable==1.7.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- configparser==5.3.0
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.81.3+0be093
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flatten-dict==0.4.2
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive==1.3.1
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- texttable==1.7.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_status.py::test_status_non_dvc_repo_import",
"tests/func/test_status.py::test_status_before_and_after_dvc_init",
"tests/func/test_update.py::test_update_import[True]",
"tests/func/test_update.py::test_update_import[False]",
"tests/func/test_update.py::test_update_import_after_remote_updates_to_dvc",
"tests/func/test_update.py::test_update_before_and_after_dvc_init"
] | [] | [
"tests/func/test_status.py::TestStatus::test_implied_cloud",
"tests/func/test_status.py::TestStatus::test_quiet",
"tests/func/test_update.py::test_update_import_url"
] | [] | Apache License 2.0 | 6,216 | 1,035 | [
"dvc/dependency/repo.py",
"dvc/repo/status.py",
"dvc/stage.py"
] |
|
pytorch__ignite-703 | d5c10ce69b16919f1fc45c79d9faddcc6f864b82 | 2020-01-15 14:05:28 | 8c8c3c2e9a007673dca1db68e27f0bdff2edf643 | diff --git a/ignite/handlers/checkpoint.py b/ignite/handlers/checkpoint.py
index 3cb61be0..13720adb 100644
--- a/ignite/handlers/checkpoint.py
+++ b/ignite/handlers/checkpoint.py
@@ -27,7 +27,8 @@ class Checkpoint:
retained.
score_name (str, optional): If `score_function` not None, it is possible to store its absolute value using
`score_name`. See Notes for more details.
- n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed.
+ n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed. If set to
+ `None`, all objects are kept.
global_step_transform (callable, optional): global step transform function to output a desired global step.
Input of the function is `(engine, event_name)`. Output of function should be an integer.
Default is None, global_step based on attached engine. If provided, uses function output as global_step.
@@ -154,6 +155,11 @@ class Checkpoint:
return None
return self._saved[0].filename
+ def _check_lt_n_saved(self, or_equal=False):
+ if self._n_saved is None:
+ return True
+ return len(self._saved) < self._n_saved + int(or_equal)
+
def __call__(self, engine):
suffix = ""
@@ -166,8 +172,7 @@ class Checkpoint:
else:
priority = engine.state.get_event_attrib_value(Events.ITERATION_COMPLETED)
- if len(self._saved) < self._n_saved or \
- self._saved[0].priority < priority:
+ if self._check_lt_n_saved() or self._saved[0].priority < priority:
if self._score_name is not None:
if len(suffix) > 0:
@@ -194,7 +199,7 @@ class Checkpoint:
self._saved.append(Checkpoint.Item(priority, filename))
self._saved.sort(key=lambda item: item[0])
- if len(self._saved) > self._n_saved:
+ if not self._check_lt_n_saved(or_equal=True):
item = self._saved.pop(0)
self.save_handler.remove(item.filename)
@@ -316,7 +321,8 @@ class ModelCheckpoint(Checkpoint):
retained.
score_name (str, optional): if `score_function` not None, it is possible to store its absolute value using
`score_name`. See Notes for more details.
- n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed.
+ n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed. If set to
+ `None`, all objects are kept.
atomic (bool, optional): If True, objects are serialized to a temporary file, and then moved to final
destination, so that files are guaranteed to not be damaged (for example if exception
occurs during saving).
| Add option to Checkpoint, ModelCheckpoint to disable n_saved
Following the discussion [here](https://github.com/Project-MONAI/MONAI/issues/5#issuecomment-574636483), idea is to provide the option to disable `n_saved` argument and thus save all objects without removing previously stored ones.
cc @ericspod | pytorch/ignite | diff --git a/tests/ignite/handlers/test_checkpoint.py b/tests/ignite/handlers/test_checkpoint.py
index 96529333..f11e9a6f 100644
--- a/tests/ignite/handlers/test_checkpoint.py
+++ b/tests/ignite/handlers/test_checkpoint.py
@@ -403,6 +403,27 @@ def test_last_k(dirname):
assert sorted(os.listdir(dirname)) == expected, "{} vs {}".format(sorted(os.listdir(dirname)), expected)
+def test_disabled_n_saved(dirname):
+
+ h = ModelCheckpoint(dirname, _PREFIX, create_dir=False, n_saved=None)
+ engine = Engine(lambda e, b: None)
+ engine.state = State(epoch=0, iteration=0)
+
+ model = DummyModel()
+ to_save = {'model': model}
+
+ num_iters = 100
+ for i in range(num_iters):
+ engine.state.iteration = i
+ h(engine, to_save)
+
+ saved_files = sorted(os.listdir(dirname))
+ assert len(saved_files) == num_iters, "{}".format(saved_files)
+
+ expected = sorted(['{}_{}_{}.pth'.format(_PREFIX, 'model', i) for i in range(num_iters)])
+ assert saved_files == expected, "{} vs {}".format(saved_files, expected)
+
+
def test_best_k(dirname):
scores = iter([1.2, -2., 3.1, -4.0])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"numpy",
"mock",
"pytest",
"codecov",
"pytest-cov",
"matplotlib",
"pandas",
"gym",
"tqdm",
"scikit-learn",
"tensorboardX",
"visdom",
"polyaxon-client",
"mlflow"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.7.7
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
charset-normalizer==2.0.12
click==7.1.2
cloudpickle==2.2.1
codecov==2.1.13
coverage==6.2
cycler==0.11.0
databricks-cli==0.17.8
dataclasses==0.8
decorator==4.4.2
docker==5.0.3
entrypoints==0.4
Flask==1.1.4
gitdb==4.0.9
GitPython==3.1.18
greenlet==2.0.2
gunicorn==21.2.0
gym==0.26.2
gym-notices==0.0.8
hestia==0.6.0
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
itsdangerous==1.1.0
Jinja2==2.10.3
joblib==1.1.1
jsonpatch==1.32
jsonpointer==2.3
kiwisolver==1.3.1
Mako==1.1.6
MarkupSafe==2.0.1
marshmallow==3.0.0rc5
matplotlib==3.3.4
mlflow==1.23.1
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
networkx==2.5.1
numpy==1.19.5
oauthlib==3.2.2
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
Pillow==8.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
polyaxon-client==0.6.1
polyaxon-schemas==0.6.1
polystores==0.2.5
prometheus-client==0.17.1
prometheus_flask_exporter==0.23.2
protobuf==4.21.0
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
PyJWT==2.4.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
-e git+https://github.com/pytorch/ignite.git@d5c10ce69b16919f1fc45c79d9faddcc6f864b82#egg=pytorch_ignite
pytz==2025.2
PyYAML==6.0.1
querystring-parser==1.2.4
requests==2.27.1
requests-toolbelt==1.0.0
rhea==0.5.5
scikit-learn==0.24.2
scipy==1.5.4
six==1.17.0
smmap==5.0.0
SQLAlchemy==1.4.54
sqlparse==0.4.4
tabulate==0.8.10
tensorboardX==2.6.2.2
threadpoolctl==3.1.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
torch==1.10.2
tornado==6.1
tqdm==4.64.1
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
visdom==0.2.4
websocket-client==1.3.1
Werkzeug==1.0.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: ignite
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.7.7
- charset-normalizer==2.0.12
- click==7.1.2
- cloudpickle==2.2.1
- codecov==2.1.13
- coverage==6.2
- cycler==0.11.0
- databricks-cli==0.17.8
- dataclasses==0.8
- decorator==4.4.2
- docker==5.0.3
- entrypoints==0.4
- flask==1.1.4
- gitdb==4.0.9
- gitpython==3.1.18
- greenlet==2.0.2
- gunicorn==21.2.0
- gym==0.26.2
- gym-notices==0.0.8
- hestia==0.6.0
- idna==3.10
- importlib-resources==5.4.0
- itsdangerous==1.1.0
- jinja2==2.10.3
- joblib==1.1.1
- jsonpatch==1.32
- jsonpointer==2.3
- kiwisolver==1.3.1
- mako==1.1.6
- markupsafe==2.0.1
- marshmallow==3.0.0rc5
- matplotlib==3.3.4
- mlflow==1.23.1
- mock==5.2.0
- networkx==2.5.1
- numpy==1.19.5
- oauthlib==3.2.2
- pandas==1.1.5
- pillow==8.4.0
- polyaxon-client==0.6.1
- polyaxon-schemas==0.6.1
- polystores==0.2.5
- prometheus-client==0.17.1
- prometheus-flask-exporter==0.23.2
- protobuf==4.21.0
- psutil==7.0.0
- pyjwt==2.4.0
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- querystring-parser==1.2.4
- requests==2.27.1
- requests-toolbelt==1.0.0
- rhea==0.5.5
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.17.0
- smmap==5.0.0
- sqlalchemy==1.4.54
- sqlparse==0.4.4
- tabulate==0.8.10
- tensorboardx==2.6.2.2
- threadpoolctl==3.1.0
- tomli==1.2.3
- torch==1.10.2
- tornado==6.1
- tqdm==4.64.1
- urllib3==1.26.20
- visdom==0.2.4
- websocket-client==1.3.1
- werkzeug==1.0.1
prefix: /opt/conda/envs/ignite
| [
"tests/ignite/handlers/test_checkpoint.py::test_disabled_n_saved"
] | [] | [
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_wrong_input",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_default",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_global_step_transform",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_function",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_name_and_function",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_function_and_trainer_epoch",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_name_and_function_and_trainer_epoch",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_args_validation",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_simple_recovery",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_simple_recovery_from_existing_non_empty",
"tests/ignite/handlers/test_checkpoint.py::test_disk_saver_atomic",
"tests/ignite/handlers/test_checkpoint.py::test_last_k",
"tests/ignite/handlers/test_checkpoint.py::test_best_k",
"tests/ignite/handlers/test_checkpoint.py::test_best_k_with_suffix",
"tests/ignite/handlers/test_checkpoint.py::test_with_engine",
"tests/ignite/handlers/test_checkpoint.py::test_with_state_dict",
"tests/ignite/handlers/test_checkpoint.py::test_valid_state_dict_save",
"tests/ignite/handlers/test_checkpoint.py::test_save_model_optimizer_lr_scheduler_with_state_dict",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_load_objects",
"tests/ignite/handlers/test_checkpoint.py::test_disksaver_wrong_input"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,222 | 715 | [
"ignite/handlers/checkpoint.py"
] |
|
ESMValGroup__ESMValCore-432 | a0552bd1a33108156d9453ddf884e21087932150 | 2020-01-15 15:29:25 | 5ef3833f2afb163735f6da5b61e0d4ec7b29b682 | diff --git a/esmvalcore/_config.py b/esmvalcore/_config.py
index 6f44f1673..c8e08381d 100644
--- a/esmvalcore/_config.py
+++ b/esmvalcore/_config.py
@@ -168,7 +168,10 @@ def configure_logging(cfg_file=None, output=None, console_log_level=None):
def get_project_config(project):
"""Get developer-configuration for project."""
logger.debug("Retrieving %s configuration", project)
- return CFG[project]
+ if project in CFG:
+ return CFG[project]
+ else:
+ raise ValueError(f"Project '{project}' not in config-developer.yml")
def get_institutes(variable):
diff --git a/esmvalcore/_recipe.py b/esmvalcore/_recipe.py
index 7b5f194f8..c94022b5b 100644
--- a/esmvalcore/_recipe.py
+++ b/esmvalcore/_recipe.py
@@ -386,26 +386,45 @@ def _add_fxvar_keys(fx_var_dict, variable):
def _get_correct_fx_file(variable, fx_varname, config_user):
- """Wrapper to standard file getter to recover the correct fx file."""
+ """Get fx files (searching all possible mips)."""
+ # TODO: allow user to specify certain mip if desired
var = dict(variable)
- if var['project'] in ['CMIP5', 'OBS', 'OBS6', 'obs4mips']:
- fx_var = _add_fxvar_keys({'short_name': fx_varname, 'mip': 'fx'}, var)
- elif var['project'] == 'CMIP6':
- if fx_varname == 'sftlf':
- fx_var = _add_fxvar_keys({'short_name': fx_varname, 'mip': 'fx'},
- var)
- elif fx_varname == 'sftof':
- fx_var = _add_fxvar_keys({'short_name': fx_varname, 'mip': 'Ofx'},
- var)
- # TODO allow availability for multiple mip's for sftgif
- elif fx_varname == 'sftgif':
- fx_var = _add_fxvar_keys({'short_name': fx_varname, 'mip': 'fx'},
- var)
+ var_project = variable['project']
+ cmor_table = CMOR_TABLES[var_project]
+
+ # Get all fx-related mips ('fx' always first, original mip last)
+ fx_mips = ['fx']
+ fx_mips.extend(
+ [key for key in cmor_table.tables if 'fx' in key and key != 'fx'])
+ fx_mips.append(variable['mip'])
+
+ # Search all mips for available variables
+ searched_mips = []
+ for fx_mip in fx_mips:
+ fx_variable = cmor_table.get_variable(fx_mip, fx_varname)
+ if fx_variable is not None:
+ searched_mips.append(fx_mip)
+ fx_var = _add_fxvar_keys(
+ {'short_name': fx_varname, 'mip': fx_mip}, var)
+ logger.debug("For CMIP6 fx variable '%s', found table '%s'",
+ fx_varname, fx_mip)
+ fx_files = _get_input_files(fx_var, config_user)
+
+ # If files found, return them
+ if fx_files:
+ logger.debug("Found CMIP6 fx variables '%s':\n%s",
+ fx_varname, pformat(fx_files))
+ break
else:
+ # No files found
+ fx_files = []
+
+ # If fx variable was not found in any table, raise exception
+ if not searched_mips:
raise RecipeError(
- f"Project {var['project']} not supported with fx variables")
+ f"Requested fx variable '{fx_varname}' not available in "
+ f"any 'fx'-related CMOR table ({fx_mips}) for '{var_project}'")
- fx_files = _get_input_files(fx_var, config_user)
# allow for empty lists corrected for by NE masks
if fx_files:
fx_files = fx_files[0]
@@ -726,7 +745,7 @@ def _get_single_preprocessor_task(variables,
order = _extract_preprocessor_order(profile)
ancestor_products = [p for task in ancestor_tasks for p in task.products]
- if variables[0]['frequency'] == 'fx':
+ if variables[0].get('frequency') == 'fx':
check.check_for_temporal_preprocs(profile)
ancestor_products = None
| Use of areacella and areacello in area_statistics not supported for CMIP6
Using the following preprocessor
```yaml
spatial_mean_sea:
mask_landsea:
mask_out: land
area_statistics:
operator: mean
fx_files: ['areacello']
```
fails at the moment:
```
2020-01-15 13:48:55,511 UTC [22398] ERROR Program terminated abnormally, see stack trace below for more information
Traceback (most recent call last):
File "ESMValCore/esmvalcore/_main.py", line 229, in run
conf = main(args)
File "ESMValCore/esmvalcore/_main.py", line 157, in main
process_recipe(recipe_file=recipe, config_user=cfg)
File "ESMValCore/esmvalcore/_main.py", line 203, in process_recipe
recipe = read_recipe_file(recipe_file, config_user)
File "ESMValCore/esmvalcore/_recipe.py", line 73, in read_recipe_file
recipe_file=filename)
File "ESMValCore/esmvalcore/_recipe.py", line 909, in __init__
self.tasks = self.initialize_tasks() if initialize_tasks else None
File "ESMValCore/esmvalcore/_recipe.py", line 1255, in initialize_tasks
task_name=task_name,
File "ESMValCore/esmvalcore/_recipe.py", line 880, in _get_preprocessor_task
name=task_name,
File "ESMValCore/esmvalcore/_recipe.py", line 738, in _get_single_preprocessor_task
config_user=config_user,
File "ESMValCore/esmvalcore/_recipe.py", line 689, in _get_preprocessor_products
config_user=config_user)
File "ESMValCore/esmvalcore/_recipe.py", line 478, in _update_fx_settings
for fxvar in var['fx_files']}
File "ESMValCore/esmvalcore/_recipe.py", line 478, in <dictcomp>
for fxvar in var['fx_files']}
File "ESMValCore/esmvalcore/_recipe.py", line 408, in _get_correct_fx_file
fx_files = _get_input_files(fx_var, config_user)
UnboundLocalError: local variable 'fx_var' referenced before assignment
```
because certain fx variables are hardcoded in `_recipe.py` for CMIP6. I will take care of that. | ESMValGroup/ESMValCore | diff --git a/tests/integration/test_recipe.py b/tests/integration/test_recipe.py
index f2dce12c9..682885cb8 100644
--- a/tests/integration/test_recipe.py
+++ b/tests/integration/test_recipe.py
@@ -1420,7 +1420,6 @@ def test_extract_shape_raises(tmp_path, patched_datafinder, config_user,
def test_weighting_landsea_fraction(tmp_path, patched_datafinder, config_user):
-
content = dedent("""
preprocessors:
landfrac_weighting:
@@ -1471,7 +1470,6 @@ def test_weighting_landsea_fraction(tmp_path, patched_datafinder, config_user):
def test_weighting_landsea_fraction_no_fx(tmp_path, patched_failing_datafinder,
config_user):
-
content = dedent("""
preprocessors:
landfrac_weighting:
@@ -1524,7 +1522,6 @@ def test_weighting_landsea_fraction_no_fx(tmp_path, patched_failing_datafinder,
def test_weighting_landsea_fraction_exclude(tmp_path, patched_datafinder,
config_user):
-
content = dedent("""
preprocessors:
landfrac_weighting:
@@ -1577,7 +1574,6 @@ def test_weighting_landsea_fraction_exclude(tmp_path, patched_datafinder,
def test_weighting_landsea_fraction_exclude_fail(tmp_path, patched_datafinder,
config_user):
-
content = dedent("""
preprocessors:
landfrac_weighting:
@@ -1611,7 +1607,6 @@ def test_weighting_landsea_fraction_exclude_fail(tmp_path, patched_datafinder,
def test_landmask(tmp_path, patched_datafinder, config_user):
-
content = dedent("""
preprocessors:
landmask:
@@ -1658,13 +1653,12 @@ def test_landmask(tmp_path, patched_datafinder, config_user):
def test_landmask_no_fx(tmp_path, patched_failing_datafinder, config_user):
-
content = dedent("""
preprocessors:
landmask:
mask_landsea:
mask_out: sea
- always_use_ne_mask: true
+ always_use_ne_mask: false
diagnostics:
diagnostic_name:
@@ -1679,6 +1673,8 @@ def test_landmask_no_fx(tmp_path, patched_failing_datafinder, config_user):
ensemble: r1i1p1
additional_datasets:
- {dataset: CanESM2}
+ - {dataset: CanESM5, project: CMIP6, grid: gn,
+ ensemble: r1i1p1f1}
- {dataset: TEST, project: obs4mips, level: 1, version: 1,
tier: 1}
scripts: null
@@ -1690,14 +1686,363 @@ def test_landmask_no_fx(tmp_path, patched_failing_datafinder, config_user):
task = recipe.tasks.pop()
assert task.name == 'diagnostic_name' + TASKSEP + 'gpp'
- # Check weighting
- assert len(task.products) == 2
+ # Check masking
+ assert len(task.products) == 3
for product in task.products:
assert 'mask_landsea' in product.settings
settings = product.settings['mask_landsea']
assert len(settings) == 3
assert settings['mask_out'] == 'sea'
- assert settings['always_use_ne_mask'] is True
+ assert settings['always_use_ne_mask'] is False
fx_files = settings['fx_files']
assert isinstance(fx_files, list)
assert fx_files == []
+
+
+def test_fx_vars_mip_change_cmip6(tmp_path, patched_datafinder, config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ area_statistics:
+ operator: mean
+ fx_files: [
+ 'areacella',
+ 'areacello',
+ 'clayfrac',
+ 'sftlf',
+ 'sftgif',
+ 'sftof',
+ ]
+ mask_landsea:
+ mask_out: sea
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tas:
+ preprocessor: preproc
+ project: CMIP6
+ mip: Amon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ recipe = get_recipe(tmp_path, content, config_user)
+
+ # Check generated tasks
+ assert len(recipe.tasks) == 1
+ task = recipe.tasks.pop()
+ assert task.name == 'diagnostic_name' + TASKSEP + 'tas'
+ assert len(task.products) == 1
+ product = task.products.pop()
+
+ # Check area_statistics
+ assert 'area_statistics' in product.settings
+ settings = product.settings['area_statistics']
+ assert len(settings) == 2
+ assert settings['operator'] == 'mean'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, dict)
+ assert len(fx_files) == 6
+ assert '_fx_' in fx_files['areacella']
+ assert '_Ofx_' in fx_files['areacello']
+ assert '_Efx_' in fx_files['clayfrac']
+ assert '_fx_' in fx_files['sftlf']
+ assert '_fx_' in fx_files['sftgif']
+ assert '_Ofx_' in fx_files['sftof']
+
+ # Check mask_landsea
+ assert 'mask_landsea' in product.settings
+ settings = product.settings['mask_landsea']
+ assert len(settings) == 2
+ assert settings['mask_out'] == 'sea'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, list)
+ assert len(fx_files) == 2
+ for fx_file in fx_files:
+ if 'sftlf' in fx_file:
+ assert '_fx_' in fx_file
+ elif 'sftof' in fx_file:
+ assert '_Ofx_' in fx_file
+ else:
+ assert False
+
+
+def test_fx_vars_volcello_in_ofx_cmip6(tmp_path, patched_datafinder,
+ config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ volume_statistics:
+ operator: mean
+ fx_files: ['volcello']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tos:
+ preprocessor: preproc
+ project: CMIP6
+ mip: Omon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ recipe = get_recipe(tmp_path, content, config_user)
+
+ # Check generated tasks
+ assert len(recipe.tasks) == 1
+ task = recipe.tasks.pop()
+ assert task.name == 'diagnostic_name' + TASKSEP + 'tos'
+ assert len(task.products) == 1
+ product = task.products.pop()
+
+ # Check volume_statistics
+ assert 'volume_statistics' in product.settings
+ settings = product.settings['volume_statistics']
+ assert len(settings) == 2
+ assert settings['operator'] == 'mean'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, dict)
+ assert len(fx_files) == 1
+ assert '_Ofx_' in fx_files['volcello']
+ assert '_Omon_' not in fx_files['volcello']
+
+
+def test_fx_vars_volcello_in_omon_cmip6(tmp_path, patched_failing_datafinder,
+ config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ volume_statistics:
+ operator: mean
+ fx_files: ['volcello']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tos:
+ preprocessor: preproc
+ project: CMIP6
+ mip: Omon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ recipe = get_recipe(tmp_path, content, config_user)
+
+ # Check generated tasks
+ assert len(recipe.tasks) == 1
+ task = recipe.tasks.pop()
+ assert task.name == 'diagnostic_name' + TASKSEP + 'tos'
+ assert len(task.products) == 1
+ product = task.products.pop()
+
+ # Check volume_statistics
+ assert 'volume_statistics' in product.settings
+ settings = product.settings['volume_statistics']
+ assert len(settings) == 2
+ assert settings['operator'] == 'mean'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, dict)
+ assert len(fx_files) == 1
+ assert '_Ofx_' not in fx_files['volcello']
+ assert '_Omon_' in fx_files['volcello']
+
+
+def test_fx_vars_volcello_in_oyr_cmip6(tmp_path, patched_failing_datafinder,
+ config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ volume_statistics:
+ operator: mean
+ fx_files: ['volcello']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ o2:
+ preprocessor: preproc
+ project: CMIP6
+ mip: Oyr
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ recipe = get_recipe(tmp_path, content, config_user)
+
+ # Check generated tasks
+ assert len(recipe.tasks) == 1
+ task = recipe.tasks.pop()
+ assert task.name == 'diagnostic_name' + TASKSEP + 'o2'
+ assert len(task.products) == 1
+ product = task.products.pop()
+
+ # Check volume_statistics
+ assert 'volume_statistics' in product.settings
+ settings = product.settings['volume_statistics']
+ assert len(settings) == 2
+ assert settings['operator'] == 'mean'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, dict)
+ assert len(fx_files) == 1
+ assert '_Ofx_' not in fx_files['volcello']
+ assert '_Oyr_' in fx_files['volcello']
+
+
+def test_fx_vars_volcello_in_fx_cmip5(tmp_path, patched_datafinder,
+ config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ volume_statistics:
+ operator: mean
+ fx_files: ['volcello']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tos:
+ preprocessor: preproc
+ project: CMIP5
+ mip: Omon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1
+ additional_datasets:
+ - {dataset: CanESM2}
+ scripts: null
+ """)
+ recipe = get_recipe(tmp_path, content, config_user)
+
+ # Check generated tasks
+ assert len(recipe.tasks) == 1
+ task = recipe.tasks.pop()
+ assert task.name == 'diagnostic_name' + TASKSEP + 'tos'
+ assert len(task.products) == 1
+ product = task.products.pop()
+
+ # Check volume_statistics
+ assert 'volume_statistics' in product.settings
+ settings = product.settings['volume_statistics']
+ assert len(settings) == 2
+ assert settings['operator'] == 'mean'
+ fx_files = settings['fx_files']
+ assert isinstance(fx_files, dict)
+ assert len(fx_files) == 1
+ assert '_fx_' in fx_files['volcello']
+ assert '_Omon_' not in fx_files['volcello']
+
+
+def test_wrong_project(tmp_path, patched_datafinder, config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ volume_statistics:
+ operator: mean
+ fx_files: ['volcello']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tos:
+ preprocessor: preproc
+ project: CMIP7
+ mip: Omon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1
+ additional_datasets:
+ - {dataset: CanESM2}
+ scripts: null
+ """)
+ with pytest.raises(ValueError) as wrong_proj:
+ get_recipe(tmp_path, content, config_user)
+ assert wrong_proj == "Project CMIP7 not in config-developer"
+
+
+def test_invalid_fx_var_cmip6(tmp_path, patched_datafinder, config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ area_statistics:
+ operator: mean
+ fx_files: [
+ 'areacella',
+ 'wrong_fx_variable',
+ ]
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tas:
+ preprocessor: preproc
+ project: CMIP6
+ mip: Amon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ msg = ("Requested fx variable 'wrong_fx_variable' for CMIP6 not "
+ "available in any 'fx'-related CMOR table")
+ with pytest.raises(RecipeError) as rec_err_exp:
+ get_recipe(tmp_path, content, config_user)
+ assert msg in rec_err_exp
+
+
+def test_fx_var_invalid_project(tmp_path, patched_datafinder, config_user):
+ content = dedent("""
+ preprocessors:
+ preproc:
+ area_statistics:
+ operator: mean
+ fx_files: ['areacella']
+
+ diagnostics:
+ diagnostic_name:
+ variables:
+ tas:
+ preprocessor: preproc
+ project: EMAC
+ mip: Amon
+ exp: historical
+ start_year: 2000
+ end_year: 2005
+ ensemble: r1i1p1f1
+ grid: gn
+ additional_datasets:
+ - {dataset: CanESM5}
+ scripts: null
+ """)
+ msg = 'Project EMAC not supported with fx variables'
+ with pytest.raises(RecipeError) as rec_err_exp:
+ get_recipe(tmp_path, content, config_user)
+ assert msg in rec_err_exp
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[develop]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
antlr4-python3-runtime @ file:///home/conda/feedstock_root/build_artifacts/antlr-python-runtime_1636143373368/work
astroid==2.15.8
attrs==24.2.0
Babel==2.14.0
build==0.10.0
Cartopy @ file:///home/conda/feedstock_root/build_artifacts/cartopy_1642060950241/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1725278078093/work/certifi
cf-units @ file:///home/conda/feedstock_root/build_artifacts/cf-units_1640986232540/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1663606412550/work
charset-normalizer==3.4.1
click==8.1.8
click-plugins==1.1.1
cligj==0.7.2
cloudpickle @ file:///home/conda/feedstock_root/build_artifacts/cloudpickle_1674202310934/work
colorama==0.4.6
coverage==7.2.7
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1635519461629/work
Cython @ file:///home/conda/feedstock_root/build_artifacts/cython_1659101942790/work
dask @ file:///home/conda/feedstock_root/build_artifacts/dask-core_1644602974678/work
dill==0.3.7
docutils==0.17.1
dodgy==0.2.1
easytest==0.1.5
ESMPy==8.2.0
-e git+https://github.com/ESMValGroup/ESMValCore.git@a0552bd1a33108156d9453ddf884e21087932150#egg=ESMValCore
exceptiongroup==1.2.2
execnet==2.0.2
fiona==1.9.6
flake8==5.0.4
flake8-polyfill==1.0.2
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1666389892786/work
fsspec @ file:///home/conda/feedstock_root/build_artifacts/fsspec_1674184942191/work
gitdb==4.0.12
GitPython==3.1.44
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
iniconfig==2.0.0
isodate==0.6.1
isort==5.11.5
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1715127149914/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1657953088445/work
lazy-object-proxy==1.9.0
llvmlite==0.39.1
locket @ file:///home/conda/feedstock_root/build_artifacts/locket_1650660393415/work
lxml==5.3.1
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1648737551960/work
matplotlib @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-suite_1661439848456/work
mccabe==0.7.0
mock==5.2.0
mpi4py @ file:///home/conda/feedstock_root/build_artifacts/mpi4py_1660326262210/work
munkres==1.1.4
nc-time-axis==1.4.1
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1661987221388/work
networkx==2.6.3
nose==1.3.7
numba==0.56.4
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1649806299270/work
olefile @ file:///home/conda/feedstock_root/build_artifacts/olefile_1701735466804/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1696202382185/work
partd @ file:///home/conda/feedstock_root/build_artifacts/partd_1695667515973/work
pathspec==0.11.2
pep8-naming==0.10.0
Pillow @ file:///tmp/build/80754af9/pillow_1625655818400/work
platformdirs==4.0.0
pluggy==1.2.0
prospector==1.10.3
prov==2.0.1
psutil==7.0.0
py==1.11.0
pycodestyle==2.9.1
pydocstyle==6.3.0
pydot==2.0.0
pyflakes==2.5.0
Pygments==2.17.2
pylint==2.17.7
pylint-celery==0.3
pylint-django==2.5.3
pylint-flask==0.6
pylint-plugin-utils==0.7
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1724616129934/work
pyproj @ file:///home/conda/feedstock_root/build_artifacts/pyproj_1636547699801/work
pyproject_hooks==1.2.0
pyroma==4.2
pyshp @ file:///home/conda/feedstock_root/build_artifacts/pyshp_1659002966020/work
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-cov==4.1.0
pytest-env==1.0.1
pytest-flake8==1.1.3
pytest-html==3.2.0
pytest-metadata==3.0.0
pytest-mock==3.11.1
pytest-xdist==3.5.0
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1709299778482/work
pytz==2025.2
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1648757092905/work
rdflib==6.3.2
requests==2.31.0
requirements-detector==1.2.2
scipy @ file:///home/conda/feedstock_root/build_artifacts/scipy_1637806658031/work
scitools-iris @ file:///home/conda/feedstock_root/build_artifacts/iris_1637936208876/work
semver==3.0.4
setoptconf-tmp==0.3.1
Shapely @ file:///home/conda/feedstock_root/build_artifacts/shapely_1637399855493/work
six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
smmap==5.0.2
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinx-rtd-theme==1.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
stratify @ file:///home/conda/feedstock_root/build_artifacts/python-stratify_1620938361732/work
toml==0.10.2
tomli==2.0.1
tomlkit==0.12.5
toolz @ file:///home/conda/feedstock_root/build_artifacts/toolz_1706112571092/work
trove-classifiers==2025.3.19.19
typed-ast==1.5.5
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/typing_extensions_1688315532570/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1649111917568/work
urllib3==2.0.7
vmprof==0.4.18.1
wrapt==1.16.0
xxhash @ file:///home/conda/feedstock_root/build_artifacts/python-xxhash_1649442453935/work
yamale==4.0.4
yamllint==1.32.0
yapf==0.43.0
zipp==3.15.0
| name: ESMValCore
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- antlr-python-runtime=4.7.2=py37h89c1867_1003
- atk-1.0=2.38.0=h04ea711_2
- binutils_impl_linux-64=2.43=h4bf12b8_4
- binutils_linux-64=2.43=h4852527_4
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cairo=1.18.0=h3faef2a_0
- cartopy=0.20.2=py37h9a08e6e_0
- certifi=2024.8.30=pyhd8ed1ab_0
- cf-units=3.0.1=py37hb1e94ed_2
- cftime=1.6.2=py37hc105733_0
- cloudpickle=2.2.1=pyhd8ed1ab_0
- curl=8.8.0=he654da7_1
- cycler=0.11.0=pyhd8ed1ab_0
- cython=0.29.32=py37hd23a5d3_0
- dask-core=2022.2.0=pyhd8ed1ab_0
- esmf=8.2.0=mpi_mpich_h5a1934d_102
- esmpy=8.2.0=mpi_mpich_py37h7352969_101
- expat=2.6.4=h5888daf_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.14.2=h14ed4e7_0
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.38.0=py37h540881e_0
- freetype=2.12.1=h267a509_2
- fribidi=1.0.10=h36c2ea0_0
- fsspec=2023.1.0=pyhd8ed1ab_0
- gcc_impl_linux-64=14.2.0=hdb7739f_2
- gcc_linux-64=14.2.0=h5910c8f_8
- gdk-pixbuf=2.42.10=h5eee18b_1
- geos=3.10.1=h9c3ff4c_1
- glib=2.80.2=hf974151_0
- glib-tools=2.80.2=hb6ce0ca_0
- graphite2=1.3.13=h59595ed_1003
- graphviz=9.0.0=h78e8752_1
- gtk2=2.24.33=h280cfa0_4
- gts=0.7.6=h977cf35_4
- harfbuzz=8.5.0=hfac3d4d_0
- hdf4=4.2.15=h9772cbc_5
- hdf5=1.12.2=mpi_mpich_h5d83325_1
- icu=73.2=h59595ed_0
- iris=3.1.0=pyhd8ed1ab_3
- jinja2=3.1.4=pyhd8ed1ab_0
- jpeg=9e=h0b41bf4_3
- kernel-headers_linux-64=3.10.0=he073ed8_18
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.4=py37h7cecad7_0
- krb5=1.21.3=h659f571_0
- lcms2=2.14=h6ed2654_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- libaec=1.1.3=h59595ed_0
- libblas=3.9.0=20_linux64_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcblas=3.9.0=20_linux64_openblas
- libcurl=8.8.0=hca28451_1
- libdeflate=1.14=h166bdaf_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libgcc=14.2.0=h767d61c_2
- libgcc-devel_linux-64=14.2.0=h9c4974d_102
- libgcc-ng=14.2.0=h69a702a_2
- libgd=2.3.3=h695aa2c_1
- libgfortran=14.2.0=h69a702a_2
- libgfortran-ng=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libglib=2.80.2=hf974151_0
- libgomp=14.2.0=h767d61c_2
- libiconv=1.18=h4ce23a2_1
- liblapack=3.9.0=20_linux64_openblas
- liblzma=5.6.4=hb9d3cd8_0
- liblzma-devel=5.6.4=hb9d3cd8_0
- libnetcdf=4.8.1=mpi_mpich_hcd871d9_6
- libnghttp2=1.58.0=h47da74e_1
- libnsl=2.0.1=hd590300_0
- libopenblas=0.3.25=pthreads_h413a1c8_0
- libpng=1.6.43=h2797004_0
- librsvg=2.58.0=hadf69e7_1
- libsanitizer=14.2.0=hed042b8_2
- libsqlite=3.46.0=hde9e2c9_0
- libssh2=1.11.0=h0841786_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libtiff=4.4.0=h82bc61c_5
- libudunits2=2.2.28=h40f5838_3
- libunwind=1.7.2=he02047a_0
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.15=h0b41bf4_0
- libxml2=2.12.7=hc051c1a_1
- libzip=1.10.1=h2629f0a_3
- libzlib=1.2.13=h4ab18f5_6
- locket=1.0.0=pyhd8ed1ab_0
- markupsafe=2.1.1=py37h540881e_1
- matplotlib-base=3.5.3=py37hf395dca_2
- mpi=1.0=mpich
- mpi4py=3.1.3=py37h52370cb_2
- mpich=4.0.3=h846660c_100
- munkres=1.1.4=pyh9f0ad1d_0
- ncurses=6.5=h2d0b736_3
- netcdf-fortran=4.6.0=mpi_mpich_h1e13492_2
- netcdf4=1.6.0=nompi_py37h7187172_102
- numpy=1.21.6=py37h976b520_0
- olefile=0.47=pyhd8ed1ab_0
- openjpeg=2.5.0=h7d73246_1
- openssl=3.4.1=h7b32b05_0
- packaging=23.2=pyhd8ed1ab_0
- pango=1.54.0=h84a9a3c_0
- partd=1.4.1=pyhd8ed1ab_0
- pcre2=10.43=hcad00b1_0
- pillow=8.3.1=py37h2c7a002_0
- pip=24.0=pyhd8ed1ab_0
- pixman=0.44.2=h29eaf8c_0
- proj=8.2.0=h277dcde_0
- pthread-stubs=0.4=hb9d3cd8_1002
- pyparsing=3.1.4=pyhd8ed1ab_0
- pyproj=3.2.1=py37hb589d83_5
- pyshp=2.3.1=pyhd8ed1ab_0
- python=3.7.12=hf930737_100_cpython
- python-dateutil=2.9.0=pyhd8ed1ab_0
- python-stratify=0.1.1=py37h6f94858_1004
- python-xxhash=3.0.0=py37h540881e_1
- python_abi=3.7=4_cp37m
- pyyaml=6.0=py37h540881e_4
- readline=8.2=h8c095d6_2
- scipy=1.7.3=py37hf2a6cf1_0
- setuptools=69.0.3=pyhd8ed1ab_0
- shapely=1.8.0=py37h9b0f7a3_4
- six=1.16.0=pyh6c4a22f_0
- sqlite=3.46.0=h6d4b2fc_0
- sysroot_linux-64=2.17=h0157908_18
- tk=8.6.13=noxft_h4845f30_101
- toolz=0.12.1=pyhd8ed1ab_0
- typing-extensions=4.7.1=hd8ed1ab_0
- typing_extensions=4.7.1=pyha770c72_0
- tzdata=2025b=h78e105d_0
- udunits2=2.2.28=h40f5838_3
- unicodedata2=14.0.0=py37h540881e_1
- wheel=0.42.0=pyhd8ed1ab_0
- xorg-kbproto=1.0.7=hb9d3cd8_1003
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.9=h8ee46fc_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.4=h0b41bf4_2
- xorg-libxrender=0.9.11=hd590300_0
- xorg-renderproto=0.11.1=hb9d3cd8_1003
- xorg-xextproto=7.3.0=hb9d3cd8_1004
- xorg-xproto=7.0.31=hb9d3cd8_1008
- xxhash=0.8.0=h7f98852_3
- xz=5.6.4=hbcc6ac9_0
- xz-gpl-tools=5.6.4=hbcc6ac9_0
- xz-tools=5.6.4=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- zlib=1.2.13=h4ab18f5_6
- zstd=1.5.6=ha6fb4c9_0
- pip:
- alabaster==0.7.13
- astroid==2.15.8
- attrs==24.2.0
- babel==2.14.0
- build==0.10.0
- charset-normalizer==3.4.1
- click==8.1.8
- click-plugins==1.1.1
- cligj==0.7.2
- colorama==0.4.6
- coverage==7.2.7
- dill==0.3.7
- docutils==0.17.1
- dodgy==0.2.1
- easytest==0.1.5
- exceptiongroup==1.2.2
- execnet==2.0.2
- fiona==1.9.6
- flake8==5.0.4
- flake8-polyfill==1.0.2
- gitdb==4.0.12
- gitpython==3.1.44
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- isodate==0.6.1
- isort==5.11.5
- lazy-object-proxy==1.9.0
- llvmlite==0.39.1
- lxml==5.3.1
- mccabe==0.7.0
- mock==5.2.0
- nc-time-axis==1.4.1
- networkx==2.6.3
- nose==1.3.7
- numba==0.56.4
- pathspec==0.11.2
- pep8-naming==0.10.0
- platformdirs==4.0.0
- pluggy==1.2.0
- prospector==1.10.3
- prov==2.0.1
- psutil==7.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pydocstyle==6.3.0
- pydot==2.0.0
- pyflakes==2.5.0
- pygments==2.17.2
- pylint==2.17.7
- pylint-celery==0.3
- pylint-django==2.5.3
- pylint-flask==0.6
- pylint-plugin-utils==0.7
- pyproject-hooks==1.2.0
- pyroma==4.2
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-cov==4.1.0
- pytest-env==1.0.1
- pytest-flake8==1.1.3
- pytest-html==3.2.0
- pytest-metadata==3.0.0
- pytest-mock==3.11.1
- pytest-xdist==3.5.0
- pytz==2025.2
- rdflib==6.3.2
- requests==2.31.0
- requirements-detector==1.2.2
- semver==3.0.4
- setoptconf-tmp==0.3.1
- smmap==5.0.2
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinx-rtd-theme==1.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==2.0.1
- tomlkit==0.12.5
- trove-classifiers==2025.3.19.19
- typed-ast==1.5.5
- urllib3==2.0.7
- vmprof==0.4.18.1
- wrapt==1.16.0
- yamale==4.0.4
- yamllint==1.32.0
- yapf==0.43.0
- zipp==3.15.0
prefix: /opt/conda/envs/ESMValCore
| [
"tests/integration/test_recipe.py::test_wrong_project"
] | [
"tests/integration/test_recipe.py::test_simple_recipe",
"tests/integration/test_recipe.py::test_cmip3_variable_autocomplete",
"tests/integration/test_recipe.py::test_cmip6_variable_autocomplete",
"tests/integration/test_recipe.py::test_custom_preproc_order",
"tests/integration/test_recipe.py::test_derive",
"tests/integration/test_recipe.py::test_derive_not_needed",
"tests/integration/test_recipe.py::test_derive_with_fx_ohc",
"tests/integration/test_recipe.py::test_derive_with_fx_ohc_fail",
"tests/integration/test_recipe.py::test_derive_with_optional_var",
"tests/integration/test_recipe.py::test_derive_with_optional_var_nodata",
"tests/integration/test_recipe.py::test_diagnostic_task_provenance",
"tests/integration/test_recipe.py::test_alias_generation",
"tests/integration/test_recipe.py::test_concatenation",
"tests/integration/test_recipe.py::test_ensemble_expansion",
"tests/integration/test_recipe.py::test_weighting_landsea_fraction",
"tests/integration/test_recipe.py::test_weighting_landsea_fraction_no_fx",
"tests/integration/test_recipe.py::test_weighting_landsea_fraction_exclude",
"tests/integration/test_recipe.py::test_landmask",
"tests/integration/test_recipe.py::test_landmask_no_fx",
"tests/integration/test_recipe.py::test_fx_vars_mip_change_cmip6",
"tests/integration/test_recipe.py::test_fx_vars_volcello_in_ofx_cmip6",
"tests/integration/test_recipe.py::test_fx_vars_volcello_in_omon_cmip6",
"tests/integration/test_recipe.py::test_fx_vars_volcello_in_oyr_cmip6",
"tests/integration/test_recipe.py::test_invalid_fx_var_cmip6",
"tests/integration/test_recipe.py::test_fx_var_invalid_project"
] | [
"tests/integration/test_recipe.py::flake-8::FLAKE8",
"tests/integration/test_recipe.py::test_fx_preproc_error",
"tests/integration/test_recipe.py::test_default_preprocessor",
"tests/integration/test_recipe.py::test_default_fx_preprocessor",
"tests/integration/test_recipe.py::test_empty_variable",
"tests/integration/test_recipe.py::test_cmip5_variable_autocomplete",
"tests/integration/test_recipe.py::test_reference_dataset",
"tests/integration/test_recipe.py::test_extract_shape",
"tests/integration/test_recipe.py::test_extract_shape_raises[crop]",
"tests/integration/test_recipe.py::test_extract_shape_raises[shapefile]",
"tests/integration/test_recipe.py::test_extract_shape_raises[method]",
"tests/integration/test_recipe.py::test_weighting_landsea_fraction_exclude_fail",
"tests/integration/test_recipe.py::test_fx_vars_volcello_in_fx_cmip5"
] | [] | Apache License 2.0 | 6,223 | 1,075 | [
"esmvalcore/_config.py",
"esmvalcore/_recipe.py"
] |
|
PyCQA__pyflakes-508 | 1911c203a13826d2eb03d582d60874b91e36f4fc | 2020-01-17 18:59:12 | 0af480e3351ae40b4ae7f3ce7272a46fd4265dbd | mikeholler: @asottile I've confirmed this one locally, so :+1:
adamstueckrath: @asottile Also verified this works locally. | diff --git a/pyflakes/checker.py b/pyflakes/checker.py
index c8ccf56..d157008 100644
--- a/pyflakes/checker.py
+++ b/pyflakes/checker.py
@@ -1799,6 +1799,10 @@ class Checker(object):
addArgs(node.args.args)
defaults = node.args.defaults
else:
+ if PY38_PLUS:
+ for arg in node.args.posonlyargs:
+ args.append(arg.arg)
+ annotations.append(arg.annotation)
for arg in node.args.args + node.args.kwonlyargs:
args.append(arg.arg)
annotations.append(arg.annotation)
| F401 (imported but unused) false positive on Foo when using def x(a: Foo, /, *, b: Bar)
```python
from datetime import datetime as Foo, time as Bar
def x(a: Foo, /, *, b: Bar):
pass
```
Running this on the file results in:
```
› flake8 --version
3.7.9 (mccabe: 0.6.1, pycodestyle: 2.5.0, pyflakes: 2.1.1) CPython 3.8.1 on Linux
› pyflakes --version
2.1.1 Python 3.8.1 on Linux
› flake8 example.py
example.py:1:1: F401 'datetime.datetime as Foo' imported but unused
› pyflakes example.py
example.py:1: 'datetime.datetime as Foo' imported but unused
``` | PyCQA/pyflakes | diff --git a/pyflakes/test/test_type_annotations.py b/pyflakes/test/test_type_annotations.py
index 289535d..1fa4f5e 100644
--- a/pyflakes/test/test_type_annotations.py
+++ b/pyflakes/test/test_type_annotations.py
@@ -418,3 +418,11 @@ class TestTypeAnnotations(TestCase):
Y = 2
return Y
""", m.UndefinedName)
+
+ @skipIf(version_info < (3, 8), 'new in Python 3.8')
+ def test_positional_only_argument_annotations(self):
+ self.flakes("""
+ from x import C
+
+ def f(c: C, /): ...
+ """)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"flake8==3.6.0",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==3.6.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.6.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.4.0
-e git+https://github.com/PyCQA/pyflakes.git@1911c203a13826d2eb03d582d60874b91e36f4fc#egg=pyflakes
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: pyflakes
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- flake8==3.6.0
- mccabe==0.6.1
- pycodestyle==2.4.0
prefix: /opt/conda/envs/pyflakes
| [
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_positional_only_argument_annotations"
] | [] | [
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_annotated_async_def",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_not_a_typing_overload",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_overload_in_class",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_overload_with_multiple_decorators",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_postponed_annotations",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_return_annotation_is_class_scope_variable",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_return_annotation_is_function_body_variable",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsAdditionalComment",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsAssignedToPreviousNode",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsFullSignature",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsFullSignatureWithDocstring",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsInvalidDoesNotMarkAsUsed",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsMarkImportsAsUsed",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsNoWhitespaceAnnotation",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsStarArgs",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsSyntaxError",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeCommentsSyntaxErrorCorrectLine",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeIgnore",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeIgnoreBogus",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typeIgnoreBogusUnicode",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typingExtensionsOverload",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typingOverload",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_typingOverloadAsync",
"pyflakes/test/test_type_annotations.py::TestTypeAnnotations::test_variable_annotations"
] | [] | MIT License | 6,237 | 157 | [
"pyflakes/checker.py"
] |
pydata__sparse-309 | 9e1213aa34f1e4bcf1994b0be6a895060b9fce6f | 2020-01-18 07:13:28 | 9e1213aa34f1e4bcf1994b0be6a895060b9fce6f | codecov[bot]: # [Codecov](https://codecov.io/gh/pydata/sparse/pull/309?src=pr&el=h1) Report
> Merging [#309](https://codecov.io/gh/pydata/sparse/pull/309?src=pr&el=desc) into [master](https://codecov.io/gh/pydata/sparse/commit/9e1213aa34f1e4bcf1994b0be6a895060b9fce6f?src=pr&el=desc) will **decrease** coverage by `0.02%`.
> The diff coverage is `75%`.
```diff
@@ Coverage Diff @@
## master #309 +/- ##
==========================================
- Coverage 93.5% 93.47% -0.03%
==========================================
Files 17 17
Lines 2140 2146 +6
==========================================
+ Hits 2001 2006 +5
- Misses 139 140 +1
```
| diff --git a/sparse/_sparse_array.py b/sparse/_sparse_array.py
index 4418b2e..0737fe4 100644
--- a/sparse/_sparse_array.py
+++ b/sparse/_sparse_array.py
@@ -229,6 +229,7 @@ class SparseArray:
def __array_function__(self, func, types, args, kwargs):
import sparse as module
+ sparse_func = None
try:
submodules = getattr(func, "__module__", "numpy").split(".")[1:]
for submodule in submodules:
@@ -242,9 +243,19 @@ class SparseArray:
try:
sparse_func = getattr(type(self), func.__name__)
except AttributeError:
- return NotImplemented
+ pass
- if not isinstance(sparse_func, Callable):
- return getattr(self, func.__name__)
+ if (
+ not isinstance(sparse_func, Callable)
+ and len(args) == 1
+ and len(kwargs) == 0
+ ):
+ try:
+ return getattr(self, func.__name__)
+ except AttributeError:
+ pass
+
+ if sparse_func is None:
+ return NotImplemented
return sparse_func(*args, **kwargs)
| sparse.COO does not support np.shape
scipy.sparse array support np.shape, but sparse.COO does not.
```python
In [1]: import sparse
In [2]: import numpy as np
In [3]: import scipy.sparse
In [4]: x = sparse.random((3, 4, 5))
In [5]: x.shape
Out[5]: (3, 4, 5)
In [6]: np.shape(x)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-6-b17f5597101a> in <module>
----> 1 np.shape(x)
<__array_function__ internals> in shape(*args, **kwargs)
TypeError: no implementation found for 'numpy.shape' on types that implement __array_function__: [<class 'sparse._coo.core.COO'>]
In [7]: y = scipy.sparse.random(3, 4)
In [8]: np.shape(y)
Out[8]: (3, 4)
``` | pydata/sparse | diff --git a/sparse/tests/test_array_function.py b/sparse/tests/test_array_function.py
index a0ee724..b31d4fa 100644
--- a/sparse/tests/test_array_function.py
+++ b/sparse/tests/test_array_function.py
@@ -66,3 +66,12 @@ def test_ternary(func, arg_order):
args = [(x, y)[i] for i in arg_order]
yy = func(*args)
assert_eq(xx, yy)
+
+
[email protected]("func", [np.shape, np.size, np.ndim])
+def test_property(func):
+ y = sparse.random((50, 50), density=0.25)
+ x = y.todense()
+ xx = func(x)
+ yy = func(y)
+ assert xx == yy
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-flake8"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
Babel==2.14.0
black==23.3.0
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
coverage==7.2.7
distlib==0.3.9
docutils==0.19
exceptiongroup==1.2.2
filelock==3.12.2
flake8==5.0.4
idna==3.10
imagesize==1.4.1
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==3.1.6
llvmlite==0.39.1
MarkupSafe==2.1.5
mccabe==0.7.0
mypy-extensions==1.0.0
numba==0.56.4
numpy==1.21.6
packaging==24.0
pathspec==0.11.2
platformdirs==4.0.0
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.17.2
pyproject-api==1.5.3
pytest==7.4.4
pytest-black==0.6.0
pytest-cov==4.1.0
pytest-flake8==1.1.3
pytz==2025.2
requests==2.31.0
scipy==1.7.3
snowballstemmer==2.2.0
-e git+https://github.com/pydata/sparse.git@9e1213aa34f1e4bcf1994b0be6a895060b9fce6f#egg=sparse
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml==0.10.2
tomli==2.0.1
tox==4.8.0
typed-ast==1.5.5
typing_extensions==4.7.1
urllib3==2.0.7
virtualenv==20.26.6
zipp==3.15.0
| name: sparse
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.14.0
- black==23.3.0
- cachetools==5.5.2
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- coverage==7.2.7
- distlib==0.3.9
- docutils==0.19
- exceptiongroup==1.2.2
- filelock==3.12.2
- flake8==5.0.4
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==3.1.6
- llvmlite==0.39.1
- markupsafe==2.1.5
- mccabe==0.7.0
- mypy-extensions==1.0.0
- numba==0.56.4
- numpy==1.21.6
- packaging==24.0
- pathspec==0.11.2
- platformdirs==4.0.0
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.17.2
- pyproject-api==1.5.3
- pytest==7.4.4
- pytest-black==0.6.0
- pytest-cov==4.1.0
- pytest-flake8==1.1.3
- pytz==2025.2
- requests==2.31.0
- scipy==1.7.3
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==2.0.1
- tox==4.8.0
- typed-ast==1.5.5
- typing-extensions==4.7.1
- urllib3==2.0.7
- virtualenv==20.26.6
- zipp==3.15.0
prefix: /opt/conda/envs/sparse
| [
"sparse/tests/test_array_function.py::test_property[shape]"
] | [
"sparse/tests/test_array_function.py::black",
"sparse/tests/test_array_function.py::test_binary[dot-arg_order2]",
"sparse/tests/test_array_function.py::test_binary[tensordot-arg_order2]",
"sparse/tests/test_array_function.py::test_binary[matmul-arg_order2]"
] | [
"sparse/tests/test_array_function.py::test_unary[mean]",
"sparse/tests/test_array_function.py::test_unary[std]",
"sparse/tests/test_array_function.py::test_unary[var]",
"sparse/tests/test_array_function.py::test_unary[sum]",
"sparse/tests/test_array_function.py::test_unary[<lambda>0]",
"sparse/tests/test_array_function.py::test_unary[<lambda>1]",
"sparse/tests/test_array_function.py::test_binary[dot-arg_order0]",
"sparse/tests/test_array_function.py::test_binary[dot-arg_order1]",
"sparse/tests/test_array_function.py::test_binary[result_type-arg_order0]",
"sparse/tests/test_array_function.py::test_binary[result_type-arg_order1]",
"sparse/tests/test_array_function.py::test_binary[result_type-arg_order2]",
"sparse/tests/test_array_function.py::test_binary[tensordot-arg_order0]",
"sparse/tests/test_array_function.py::test_binary[tensordot-arg_order1]",
"sparse/tests/test_array_function.py::test_binary[matmul-arg_order0]",
"sparse/tests/test_array_function.py::test_binary[matmul-arg_order1]",
"sparse/tests/test_array_function.py::test_stack",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order0]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order1]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order2]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order3]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order4]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order5]",
"sparse/tests/test_array_function.py::test_ternary[<lambda>-arg_order6]",
"sparse/tests/test_array_function.py::test_property[size]",
"sparse/tests/test_array_function.py::test_property[ndim]"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,242 | 288 | [
"sparse/_sparse_array.py"
] |
asottile__pyupgrade-247 | 711b1cdfa1b92eb035fe379b35d73a23859b5bc3 | 2020-01-18 16:15:24 | 93a8c0f191e3ff269f5824bf1010f2920f0b1284 | diff --git a/pyupgrade.py b/pyupgrade.py
index 2f9170e..4a0f9c1 100644
--- a/pyupgrade.py
+++ b/pyupgrade.py
@@ -1137,10 +1137,9 @@ SIX_CALLS = {
SIX_B_TMPL = 'b{args[0]}'
WITH_METACLASS_NO_BASES_TMPL = 'metaclass={args[0]}'
WITH_METACLASS_BASES_TMPL = '{rest}, metaclass={args[0]}'
-SIX_RAISES = {
- 'raise_from': 'raise {args[0]} from {rest}',
- 'reraise': 'raise {args[1]}.with_traceback({args[2]})',
-}
+RAISE_FROM_TMPL = 'raise {args[0]} from {rest}'
+RERAISE_2_TMPL = 'raise {args[1]}.with_traceback(None)'
+RERAISE_3_TMPL = 'raise {args[1]}.with_traceback({args[2]})'
def _all_isinstance(vals, tp):
@@ -1234,7 +1233,8 @@ class FindPy3Plus(ast.NodeVisitor):
self.six_calls = {} # type: Dict[Offset, ast.Call]
self.six_iter = {} # type: Dict[Offset, ast.Call]
self._previous_node = None # type: Optional[ast.AST]
- self.six_raises = {} # type: Dict[Offset, ast.Call]
+ self.six_raise_from = set() # type: Set[Offset]
+ self.six_reraise = set() # type: Set[Offset]
self.six_remove_decorators = set() # type: Set[Offset]
self.six_simple = {} # type: Dict[Offset, NameOrAttr]
self.six_type_ctx = {} # type: Dict[Offset, NameOrAttr]
@@ -1448,10 +1448,16 @@ class FindPy3Plus(ast.NodeVisitor):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr) and
- self._is_six(node.func, SIX_RAISES) and
+ self._is_six(node.func, ('raise_from',)) and
+ not _starargs(node)
+ ):
+ self.six_raise_from.add(_ast_to_offset(node))
+ elif (
+ isinstance(self._previous_node, ast.Expr) and
+ self._is_six(node.func, ('reraise',)) and
not _starargs(node)
):
- self.six_raises[_ast_to_offset(node)] = node
+ self.six_reraise.add(_ast_to_offset(node))
elif (
not self._in_comp and
self._class_info_stack and
@@ -1882,7 +1888,8 @@ def _fix_py3_plus(contents_text): # type: (str) -> str
visitor.six_b,
visitor.six_calls,
visitor.six_iter,
- visitor.six_raises,
+ visitor.six_raise_from,
+ visitor.six_reraise,
visitor.six_remove_decorators,
visitor.six_simple,
visitor.six_type_ctx,
@@ -1975,13 +1982,17 @@ def _fix_py3_plus(contents_text): # type: (str) -> str
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = _get_tmpl(SIX_CALLS, call.func)
_replace_call(tokens, i, end, func_args, template)
- elif token.offset in visitor.six_raises:
+ elif token.offset in visitor.six_raise_from:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
- call = visitor.six_raises[token.offset]
- assert isinstance(call.func, (ast.Name, ast.Attribute))
- template = _get_tmpl(SIX_RAISES, call.func)
- _replace_call(tokens, i, end, func_args, template)
+ _replace_call(tokens, i, end, func_args, RAISE_FROM_TMPL)
+ elif token.offset in visitor.six_reraise:
+ j = _find_open_paren(tokens, i)
+ func_args, end = _parse_call_args(tokens, j)
+ if len(func_args) == 2:
+ _replace_call(tokens, i, end, func_args, RERAISE_2_TMPL)
+ else:
+ _replace_call(tokens, i, end, func_args, RERAISE_3_TMPL)
elif token.offset in visitor.six_add_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
| six.reraise: IndexError: list index out of range
pyupgrade 1.26.0
Minimal test case. Requires using the `--py3-plus` argument.
```
def foo(func, arg):
et, ev, _ = sys.exc_info()
six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
```
```
Traceback (most recent call last):
File ".../venv/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2318, in main
ret |= _fix_file(filename, args)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2280, in _fix_file
contents_text = _fix_py3_plus(contents_text)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1984, in _fix_py3_plus
_replace_call(tokens, i, end, func_args, template)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1849, in _replace_call
src = tmpl.format(args=arg_strs, rest=rest)
IndexError: list index out of range
```
Discovered by running on setuptools:
https://github.com/pypa/setuptools/blob/682b6511ac67e021b542e74ce30e13fe52bc2da9/setuptools/command/easy_install.py#L1730-L1735 | asottile/pyupgrade | diff --git a/tests/six_test.py b/tests/six_test.py
index 83ce558..c58f5fc 100644
--- a/tests/six_test.py
+++ b/tests/six_test.py
@@ -221,6 +221,10 @@ def test_fix_six_noop(s):
'six.reraise(tp, exc, tb)\n',
'raise exc.with_traceback(tb)\n',
),
+ (
+ 'six.reraise(tp, exc)\n',
+ 'raise exc.with_traceback(None)\n',
+ ),
(
'from six import raise_from\n'
'raise_from(exc, exc_from)\n',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.26 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/asottile/pyupgrade.git@711b1cdfa1b92eb035fe379b35d73a23859b5bc3#egg=pyupgrade
tokenize_rt==6.1.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: pyupgrade
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- tokenize-rt==6.1.0
prefix: /opt/conda/envs/pyupgrade
| [
"tests/six_test.py::test_fix_six[six.reraise(tp,"
] | [] | [
"tests/six_test.py::test_fix_six_noop[x",
"tests/six_test.py::test_fix_six_noop[from",
"tests/six_test.py::test_fix_six_noop[@mydec\\nclass",
"tests/six_test.py::test_fix_six_noop[print(six.raise_from(exc,",
"tests/six_test.py::test_fix_six_noop[print(six.b(\"\\xa3\"))]",
"tests/six_test.py::test_fix_six_noop[print(six.b(",
"tests/six_test.py::test_fix_six_noop[class",
"tests/six_test.py::test_fix_six_noop[six.reraise(*err)]",
"tests/six_test.py::test_fix_six_noop[six.b(*a)]",
"tests/six_test.py::test_fix_six_noop[six.u(*a)]",
"tests/six_test.py::test_fix_six_noop[@six.add_metaclass(*a)\\nclass",
"tests/six_test.py::test_fix_six_noop[(\\n",
"tests/six_test.py::test_fix_six_noop[next()]",
"tests/six_test.py::test_fix_six[isinstance(s,",
"tests/six_test.py::test_fix_six[weird",
"tests/six_test.py::test_fix_six[issubclass(tp,",
"tests/six_test.py::test_fix_six[STRING_TYPES",
"tests/six_test.py::test_fix_six[from",
"tests/six_test.py::test_fix_six[six.b(\"123\")-b\"123\"]",
"tests/six_test.py::test_fix_six[six.b(r\"123\")-br\"123\"]",
"tests/six_test.py::test_fix_six[six.b(\"\\\\x12\\\\xef\")-b\"\\\\x12\\\\xef\"]",
"tests/six_test.py::test_fix_six[six.ensure_binary(\"foo\")-b\"foo\"]",
"tests/six_test.py::test_fix_six[six.byte2int(b\"f\")-b\"f\"[0]]",
"tests/six_test.py::test_fix_six[@six.python_2_unicode_compatible\\nclass",
"tests/six_test.py::test_fix_six[@six.python_2_unicode_compatible\\n@other_decorator\\nclass",
"tests/six_test.py::test_fix_six[six.get_unbound_function(meth)\\n-meth\\n]",
"tests/six_test.py::test_fix_six[six.indexbytes(bs,",
"tests/six_test.py::test_fix_six[six.assertCountEqual(\\n",
"tests/six_test.py::test_fix_six[six.raise_from(exc,",
"tests/six_test.py::test_fix_six[six.reraise(\\n",
"tests/six_test.py::test_fix_six[class",
"tests/six_test.py::test_fix_six[basic",
"tests/six_test.py::test_fix_six[add_metaclass,",
"tests/six_test.py::test_fix_six[six.itervalues]",
"tests/six_test.py::test_fix_six[six.itervalues",
"tests/six_test.py::test_fix_base_classes[import",
"tests/six_test.py::test_fix_base_classes[from",
"tests/six_test.py::test_fix_base_classes[class",
"tests/six_test.py::test_fix_base_classes_py3only[class",
"tests/six_test.py::test_fix_base_classes_py3only[from"
] | [] | MIT License | 6,245 | 1,099 | [
"pyupgrade.py"
] |
|
einsteinpy__einsteinpy-383 | ad3a490b332185d19042c8a34f9ccedca37171fe | 2020-01-19 10:59:53 | 27aaa3d80d74a56dd7edebc2e721b5bdfdf3058b | pep8speaks: Hello @Bhavam! Thanks for opening this PR. We checked the lines you've touched for [PEP 8](https://www.python.org/dev/peps/pep-0008) issues, and found:
* In the file [`src/einsteinpy/geodesic.py`](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py):
> [Line 36:23](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L36): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ':'
> [Line 36:48](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L36): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ':'
> [Line 36:65](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L36): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ','
> [Line 36:89](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L36): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ','
> [Line 36:121](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L36): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (123 > 120 characters)
> [Line 39:44](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L39): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ','
> [Line 39:85](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L39): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ','
> [Line 39:119](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L39): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ','
> [Line 39:121](https://github.com/einsteinpy/einsteinpy/blob/514576453f59eac704d8c54109797e5e0908664d/src/einsteinpy/geodesic.py#L39): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (163 > 120 characters)
shreyasbapat: Hi @Bhavam You need to fix the code quality.
Run `tox -e reformat`
while in the repo.
shreyasbapat: Also, can you define a example geodesic and show how the representation looks like? Pick anyone from the example notebook
Bhavam: Yes sure ill do that and get back to you.
shreyasbapat: Good. Now I believe, the representations won't pop up. So, please try to run it once.
codecov[bot]: # [Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=h1) Report
> Merging [#383](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=desc) into [master](https://codecov.io/gh/einsteinpy/einsteinpy/commit/4df5a19e51da5aabff19c50e231d6f8b83e3d5a7?src=pr&el=desc) will **decrease** coverage by `0.09%`.
> The diff coverage is `50%`.
[](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #383 +/- ##
=========================================
- Coverage 94.13% 94.04% -0.1%
=========================================
Files 52 52
Lines 1843 1847 +4
=========================================
+ Hits 1735 1737 +2
- Misses 108 110 +2
```
| [Impacted Files](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/einsteinpy/geodesic.py](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383/diff?src=pr&el=tree#diff-c3JjL2VpbnN0ZWlucHkvZ2VvZGVzaWMucHk=) | `85.71% <50%> (-14.29%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=footer). Last update [4df5a19...363c4f1](https://codecov.io/gh/einsteinpy/einsteinpy/pull/383?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
shreyasbapat: And later you will have to write up a unit test that tests this change to take care of the failing coverage check.
Bhavam: Sure and ill follow up with the example geodesic representation also.
Bhavam: > And later you will have to write up a unit test that tests this change to take care of the failing coverage check.
Do I insert the unit tests in the testing module or somewhere else
ritzvik: > > And later you will have to write up a unit test that tests this change to take care of the failing coverage check.
>
> Do I insert the unit tests in the testing module or somewhere else
Kindly put in new tests in `tests/test_geodesic.py`
Also, run `tox -e reformat` to lint the code and pass quality checks :)
shreyasbapat: I guess all you have to do is,
Squash this in one commit.
And rebase.
If you need any help in that front, ping me and I'll be happy to help. | diff --git a/src/einsteinpy/geodesic.py b/src/einsteinpy/geodesic.py
index 31b0700..7537371 100644
--- a/src/einsteinpy/geodesic.py
+++ b/src/einsteinpy/geodesic.py
@@ -32,6 +32,22 @@ class Geodesic:
return_cartesian=True,
)[1]
+ def __repr__(self):
+ return "body name= ({0}) , metric=({1}) , parent name=({2}) , parent mass=({3})".format(
+ self.body.name,
+ self.metric.name,
+ self.body.parent.name,
+ self.body.parent.mass,
+ )
+
+ def __str__(self):
+ return "body name= ({0}) , metric=({1}) , parent name=({2}) , parent mass=({3})".format(
+ self.body.name,
+ self.metric.name,
+ self.body.parent.name,
+ self.body.parent.mass,
+ )
+
@property
def trajectory(self):
return self._trajectory
diff --git a/src/einsteinpy/metric/kerr.py b/src/einsteinpy/metric/kerr.py
index c5230b3..b2a3467 100644
--- a/src/einsteinpy/metric/kerr.py
+++ b/src/einsteinpy/metric/kerr.py
@@ -20,6 +20,7 @@ class Kerr:
@u.quantity_input(time=u.s, M=u.kg)
def __init__(self, bl_coords, M, time):
self.input_coords = bl_coords
+ self.name = "Kerr"
self.M = M
self.a = self.input_coords.a.to(u.m)
self.time = time
diff --git a/src/einsteinpy/metric/kerrnewman.py b/src/einsteinpy/metric/kerrnewman.py
index 5a6f9e2..0c8cde9 100644
--- a/src/einsteinpy/metric/kerrnewman.py
+++ b/src/einsteinpy/metric/kerrnewman.py
@@ -22,6 +22,7 @@ class KerrNewman:
@u.quantity_input(time=u.s, M=u.kg, Q=u.C)
def __init__(self, bl_coords, q, M, Q, time):
self.input_coords = bl_coords
+ self.name = "KerrNewman"
self.M = M
self.a = self.input_coords.a.to(u.m)
self.Q = Q
diff --git a/src/einsteinpy/metric/schwarzschild.py b/src/einsteinpy/metric/schwarzschild.py
index b446b28..a49e126 100644
--- a/src/einsteinpy/metric/schwarzschild.py
+++ b/src/einsteinpy/metric/schwarzschild.py
@@ -19,6 +19,7 @@ class Schwarzschild:
@u.quantity_input(time=u.s, M=u.kg)
def __init__(self, sph_coords, M, time):
+ self.name = "Schwarzschild"
self.M = M
self.input_coords = sph_coords
self.time = time
| Add __repr__ and __str__ for Geodesic Class
🐞 **Problem**
The Geodesic class is missing these methods. More can be discusses in comments.
🎯 **Goal**
<!--- Why is this change important to you? How would you use it? -->
<!--- How can it benefit other users? -->
💡 **Possible solutions**
<!--- Not obligatory, but suggest an idea for implementing addition or change -->
📋 **Steps to solve the problem**
* Comment below about what you've started working on.
* Add, commit, push your changes
* Submit a pull request and add this in comments - `Addresses #<put issue number here>`
* Ask for a review in comments section of pull request
* Celebrate your contribution to this project 🎉
| einsteinpy/einsteinpy | diff --git a/src/einsteinpy/tests/test_geodesics.py b/src/einsteinpy/tests/test_geodesics.py
index 7050e40..97b93c6 100644
--- a/src/einsteinpy/tests/test_geodesics.py
+++ b/src/einsteinpy/tests/test_geodesics.py
@@ -37,3 +37,21 @@ def test_Geodesics_has_trajectory(dummy_data):
body, t, _, end_lambda, stepsize = dummy_data
geo = Geodesic(body, time=t, end_lambda=end_lambda, step_size=stepsize)
assert isinstance(geo.trajectory, np.ndarray)
+
+
+def test_Geodesics_str_returns_members(dummy_data):
+ body, t, _, end_lambda, stepsize = dummy_data
+ geo = Geodesic(body, time=t, end_lambda=end_lambda, step_size=stepsize)
+ assert (
+ geo.__str__()
+ == "body name= (obj) , metric=(Schwarzschild) , parent name=(attractor) , parent mass=(6e+24 kg)"
+ )
+
+
+def test_Geodesics_repr_returns_members(dummy_data):
+ body, t, _, end_lambda, stepsize = dummy_data
+ geo = Geodesic(body, time=t, end_lambda=end_lambda, step_size=stepsize)
+ assert (
+ geo.__repr__()
+ == "body name= (obj) , metric=(Schwarzschild) , parent name=(attractor) , parent mass=(6e+24 kg)"
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest-cov",
"ipywidgets",
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
astropy==4.3.1
attrs @ file:///croot/attrs_1668696182826/work
Babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.13.3
black==23.3.0
bleach==6.0.0
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
comm==0.1.4
coverage==7.2.7
cycler==0.11.0
debugpy==1.7.0
decorator==5.1.1
defusedxml==0.7.1
distlib==0.3.9
docutils==0.19
-e git+https://github.com/einsteinpy/einsteinpy.git@ad3a490b332185d19042c8a34f9ccedca37171fe#egg=einsteinpy
entrypoints==0.4
execnet==2.0.2
fastjsonschema==2.21.1
filelock==3.12.2
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
fonttools==4.38.0
idna==3.10
imagesize==1.4.1
importlib-metadata==6.7.0
importlib-resources==5.12.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipykernel==6.16.2
ipython==7.34.0
ipywidgets==8.1.5
isort==5.11.5
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.17.3
jupyter-sphinx==0.4.0
jupyter_client==7.4.9
jupyter_core==4.12.0
jupyterlab-pygments==0.2.2
jupyterlab_widgets==3.0.13
kiwisolver==1.4.5
llvmlite==0.39.1
MarkupSafe==2.1.5
matplotlib==3.5.3
matplotlib-inline==0.1.6
mistune==3.0.2
mpmath==1.3.0
mypy-extensions==1.0.0
nbclient==0.7.4
nbconvert==7.6.0
nbformat==5.8.0
nbsphinx==0.9.7
nest-asyncio==1.6.0
numba==0.56.4
numpy==1.21.6
packaging==24.0
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.11.2
pexpect==4.9.0
pickleshare==0.7.5
Pillow==9.5.0
pkgutil_resolve_name==1.3.10
platformdirs==4.0.0
plotly==5.18.0
pluggy==1.2.0
prompt_toolkit==3.0.48
psutil==7.0.0
ptyprocess==0.7.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pyerfa==2.0.0.3
Pygments==2.17.2
pyparsing==3.1.4
pyproject-api==1.5.3
pyrsistent==0.19.3
pytest==7.1.2
pytest-cov==2.5.1
pytest-xdist==3.5.0
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==26.2.1
requests==2.31.0
scipy==1.7.3
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.4.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
sympy==1.10.1
tenacity==8.2.3
tinycss2==1.2.1
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==6.2
tox==4.8.0
traitlets==5.9.0
typed-ast==1.5.5
typing_extensions==4.7.1
urllib3==2.0.7
virtualenv==20.26.6
wcwidth==0.2.13
webencodings==0.5.1
widgetsnbextension==4.0.13
zipp @ file:///croot/zipp_1672387121353/work
| name: einsteinpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- astropy==4.3.1
- babel==2.14.0
- backcall==0.2.0
- beautifulsoup4==4.13.3
- black==23.3.0
- bleach==6.0.0
- cachetools==5.5.2
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- comm==0.1.4
- coverage==7.2.7
- cycler==0.11.0
- debugpy==1.7.0
- decorator==5.1.1
- defusedxml==0.7.1
- distlib==0.3.9
- docutils==0.19
- einsteinpy==0.3.dev0
- entrypoints==0.4
- execnet==2.0.2
- fastjsonschema==2.21.1
- filelock==3.12.2
- fonttools==4.38.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==6.7.0
- importlib-resources==5.12.0
- ipykernel==6.16.2
- ipython==7.34.0
- ipywidgets==8.1.5
- isort==5.11.5
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.17.3
- jupyter-client==7.4.9
- jupyter-core==4.12.0
- jupyter-sphinx==0.4.0
- jupyterlab-pygments==0.2.2
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.5
- llvmlite==0.39.1
- markupsafe==2.1.5
- matplotlib==3.5.3
- matplotlib-inline==0.1.6
- mistune==3.0.2
- mpmath==1.3.0
- mypy-extensions==1.0.0
- nbclient==0.7.4
- nbconvert==7.6.0
- nbformat==5.8.0
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- numba==0.56.4
- numpy==1.21.6
- packaging==24.0
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.11.2
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==9.5.0
- pkgutil-resolve-name==1.3.10
- platformdirs==4.0.0
- plotly==5.18.0
- pluggy==1.2.0
- prompt-toolkit==3.0.48
- psutil==7.0.0
- ptyprocess==0.7.0
- pycodestyle==2.10.0
- pyerfa==2.0.0.3
- pygments==2.17.2
- pyparsing==3.1.4
- pyproject-api==1.5.3
- pyrsistent==0.19.3
- pytest-cov==2.5.1
- pytest-xdist==3.5.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==26.2.1
- requests==2.31.0
- scipy==1.7.3
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.4.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sympy==1.10.1
- tenacity==8.2.3
- tinycss2==1.2.1
- tornado==6.2
- tox==4.8.0
- traitlets==5.9.0
- typed-ast==1.5.5
- typing-extensions==4.7.1
- urllib3==2.0.7
- virtualenv==20.26.6
- wcwidth==0.2.13
- webencodings==0.5.1
- widgetsnbextension==4.0.13
prefix: /opt/conda/envs/einsteinpy
| [
"src/einsteinpy/tests/test_geodesics.py::test_Geodesics_str_returns_members",
"src/einsteinpy/tests/test_geodesics.py::test_Geodesics_repr_returns_members"
] | [] | [
"src/einsteinpy/tests/test_geodesics.py::test_Geodesics_conserves_the_attractor",
"src/einsteinpy/tests/test_geodesics.py::test_Geodesics_has_trajectory"
] | [] | MIT License | 6,248 | 748 | [
"src/einsteinpy/geodesic.py",
"src/einsteinpy/metric/kerr.py",
"src/einsteinpy/metric/kerrnewman.py",
"src/einsteinpy/metric/schwarzschild.py"
] |
stigok__ruterstop-43 | 53b7887fb64918ec8c8d57d494c033afd06def14 | 2020-01-19 16:20:28 | 21572158bf6ed855b3ec1403d3af3451cc9fdee8 | diff --git a/ruterstop/__init__.py b/ruterstop/__init__.py
index 7217385..cfb9f42 100644
--- a/ruterstop/__init__.py
+++ b/ruterstop/__init__.py
@@ -45,13 +45,13 @@ ENTUR_GRAPHQL_QUERY = """
webapp = bottle.Bottle()
def not_found_error_handler(res):
- res.content_type = "text/plain"
+ res.set_header("Content-Type", "text/plain")
return "Ugyldig stoppested"
webapp.error(code=404)(not_found_error_handler)
def default_error_handler(res):
- res.content_type = "text/plain"
+ res.set_header("Content-Type", "text/plain")
return "Feil på serveren"
webapp.default_error_handler = default_error_handler
@@ -152,6 +152,7 @@ def serve_departures(stop_id):
kw["grouped"] = True
deps = get_departures(stop_id=stop_id)
+ bottle.response.set_header("Content-Type", "text/plain")
return format_departure_list(deps, **kw)
| Returner text/plain fra server
Nå er det text/html
```
HTTP/1.0 200 OK
Date: Fri, 17 Jan 2020 22:50:33 GMT
Server: WSGIServer/0.2 CPython/3.7.5
Content-Length: 88
Content-Type: text/html; charset=UTF-8
25 Furuset T 3 min
31 Grorud T 8 min
31 Fornebu 9 min
31 Tonsenhagen 14 min
``` | stigok/ruterstop | diff --git a/ruterstop/tests/test_webapp.py b/ruterstop/tests/test_webapp.py
index 24d4175..ab82f79 100644
--- a/ruterstop/tests/test_webapp.py
+++ b/ruterstop/tests/test_webapp.py
@@ -19,6 +19,7 @@ class WebAppTestCase(TestCase):
@patch("ruterstop.get_departures", return_value=dict(a="foo"))
def test_calls_api_on_proper_path(self, get_mock, format_mock):
res = self.app.get("/1234")
+ self.assertEqual(res.content_type, "text/plain")
get_mock.assert_called_once_with(stop_id=1234)
format_mock.assert_called_once_with(dict(a="foo"))
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | beautifulsoup4==4.13.3
bottle==0.13.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==7.2.7
exceptiongroup==1.2.2
freezegun==1.5.1
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
python-dateutil==2.9.0.post0
requests==2.31.0
-e git+https://github.com/stigok/ruterstop.git@53b7887fb64918ec8c8d57d494c033afd06def14#egg=ruterstop
six==1.17.0
soupsieve==2.4.1
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
waitress==2.1.2
WebOb==1.8.9
WebTest==3.0.1
zipp==3.15.0
| name: ruterstop
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.13.3
- bottle==0.13.2
- charset-normalizer==3.4.1
- coverage==7.2.7
- exceptiongroup==1.2.2
- freezegun==1.5.1
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- requests==2.31.0
- six==1.17.0
- soupsieve==2.4.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- waitress==2.1.2
- webob==1.8.9
- webtest==3.0.1
- zipp==3.15.0
prefix: /opt/conda/envs/ruterstop
| [
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_calls_api_on_proper_path"
] | [] | [
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_calls_api_with_querystring_params",
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_simple_404_error",
"ruterstop/tests/test_webapp.py::WebAppTestCase::test_simple_500_error"
] | [] | MIT License | 6,250 | 268 | [
"ruterstop/__init__.py"
] |
|
iterative__dvc-3200 | d942542c841b054865760afc2b212db10d5b901f | 2020-01-20 16:25:24 | e7b3297c2d6ae1ad633cd0435cca81093cac86ff | diff --git a/dvc/config.py b/dvc/config.py
index 65250c76d..0759395d0 100644
--- a/dvc/config.py
+++ b/dvc/config.py
@@ -198,6 +198,7 @@ class Config(object): # pylint: disable=too-many-instance-attributes
SECTION_REMOTE_ASK_PASSWORD = "ask_password"
SECTION_REMOTE_GSS_AUTH = "gss_auth"
SECTION_REMOTE_NO_TRAVERSE = "no_traverse"
+ SECTION_REMOTE_VERIFY = "verify"
SECTION_REMOTE_SCHEMA = {
Required(SECTION_REMOTE_URL): str,
SECTION_AWS_REGION: str,
@@ -232,6 +233,7 @@ class Config(object): # pylint: disable=too-many-instance-attributes
SECTION_GDRIVE_USER_CREDENTIALS_FILE: str,
PRIVATE_CWD: str,
SECTION_REMOTE_NO_TRAVERSE: Bool,
+ SECTION_REMOTE_VERIFY: Bool,
}
SECTION_STATE = "state"
diff --git a/dvc/data_cloud.py b/dvc/data_cloud.py
index fadd536bd..e85c8b280 100644
--- a/dvc/data_cloud.py
+++ b/dvc/data_cloud.py
@@ -73,13 +73,25 @@ class DataCloud(object):
show_checksums (bool): show checksums instead of file names in
information messages.
"""
- return self.repo.cache.local.pull(
- cache,
- jobs=jobs,
- remote=self.get_remote(remote, "pull"),
- show_checksums=show_checksums,
+ remote = self.get_remote(remote, "pull")
+ downloaded_items_num = self.repo.cache.local.pull(
+ cache, jobs=jobs, remote=remote, show_checksums=show_checksums
)
+ if not remote.verify:
+ self._save_pulled_checksums(cache)
+
+ return downloaded_items_num
+
+ def _save_pulled_checksums(self, cache):
+ for checksum in cache["local"].keys():
+ cache_file = self.repo.cache.local.checksum_to_path_info(checksum)
+ if self.repo.cache.local.exists(cache_file):
+ # We can safely save here, as existing corrupted files will be
+ # removed upon status, while files corrupted during download
+ # will not be moved from tmp_file (see `RemoteBASE.download()`)
+ self.repo.state.save(cache_file, checksum)
+
def status(self, cache, jobs=None, remote=None, show_checksums=False):
"""Check status of data items in a cloud-agnostic way.
diff --git a/dvc/remote/base.py b/dvc/remote/base.py
index 788742c8c..7262ba1a9 100644
--- a/dvc/remote/base.py
+++ b/dvc/remote/base.py
@@ -80,6 +80,7 @@ class RemoteBASE(object):
CHECKSUM_JOBS = max(1, min(4, cpu_count() // 2))
DEFAULT_CACHE_TYPES = ["copy"]
DEFAULT_NO_TRAVERSE = True
+ DEFAULT_VERIFY = False
state = StateNoop()
@@ -93,6 +94,9 @@ class RemoteBASE(object):
self.no_traverse = config.get(
Config.SECTION_REMOTE_NO_TRAVERSE, self.DEFAULT_NO_TRAVERSE
)
+ self.verify = config.get(
+ Config.SECTION_REMOTE_VERIFY, self.DEFAULT_VERIFY
+ )
self._dir_info = {}
types = config.get(Config.SECTION_CACHE_TYPE, None)
diff --git a/dvc/remote/gdrive.py b/dvc/remote/gdrive.py
index a38b2a1a2..24d65aa25 100644
--- a/dvc/remote/gdrive.py
+++ b/dvc/remote/gdrive.py
@@ -63,6 +63,7 @@ class RemoteGDrive(RemoteBASE):
path_cls = CloudURLInfo
REQUIRES = {"pydrive2": "pydrive2"}
DEFAULT_NO_TRAVERSE = False
+ DEFAULT_VERIFY = True
GDRIVE_USER_CREDENTIALS_DATA = "GDRIVE_USER_CREDENTIALS_DATA"
DEFAULT_USER_CREDENTIALS_FILE = "gdrive-user-credentials.json"
| option to have dvc pull trust the remote and not calculate its own MD5 hashes
The dvc pull command takes the time (which can be significant it seems) to calculate its own MD5 hashes. It would be nice if there were an option to tell it to trust the remote and skip this so that it would complete faster. Or it could trust the remote by default and make you opt-in to recalculating the hashes if that is what you really wanted. It seems unlikely that it would be a bad breaking change to flip the default behavior here. Or, maybe, we just don't need dvc pull to _ever_ calculate MD5 hashes. Thoughts? | iterative/dvc | diff --git a/tests/func/test_data_cloud.py b/tests/func/test_data_cloud.py
index 748a7fe63..137fd674c 100644
--- a/tests/func/test_data_cloud.py
+++ b/tests/func/test_data_cloud.py
@@ -9,10 +9,11 @@ import pytest
from mock import patch
from dvc.cache import NamedCache
+from dvc.compat import fspath
from dvc.config import Config
from dvc.data_cloud import DataCloud
from dvc.main import main
-from dvc.remote import RemoteAZURE
+from dvc.remote import RemoteAZURE, RemoteConfig
from dvc.remote import RemoteGDrive
from dvc.remote import RemoteGS
from dvc.remote import RemoteHDFS
@@ -653,3 +654,37 @@ class TestShouldWarnOnNoChecksumInLocalAndRemoteCache(TestDvc):
assert self.message_header in self._caplog.text
assert self.message_foo_part in self._caplog.text
assert self.message_bar_part in self._caplog.text
+
+
+def test_verify_checksums(tmp_dir, scm, dvc, mocker, tmp_path_factory):
+ tmp_dir.dvc_gen({"file": "file1 content"}, commit="add file")
+ tmp_dir.dvc_gen({"dir": {"subfile": "file2 content"}}, commit="add dir")
+
+ RemoteConfig(dvc.config).add(
+ "local_remote",
+ fspath(tmp_path_factory.mktemp("local_remote")),
+ default=True,
+ )
+ dvc.push()
+
+ # remove artifacts and cache to trigger fetching
+ os.remove("file")
+ shutil.rmtree("dir")
+ shutil.rmtree(dvc.cache.local.cache_dir)
+
+ checksum_spy = mocker.spy(dvc.cache.local, "get_file_checksum")
+
+ dvc.pull()
+ assert checksum_spy.call_count == 0
+
+ # Removing cache will invalidate existing state entries
+ shutil.rmtree(dvc.cache.local.cache_dir)
+
+ dvc.config.set(
+ Config.SECTION_REMOTE_FMT.format("local_remote"),
+ Config.SECTION_REMOTE_VERIFY,
+ "True",
+ )
+
+ dvc.pull()
+ assert checksum_spy.call_count == 3
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 0.82 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==5.5.2
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@d942542c841b054865760afc2b212db10d5b901f#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flatten-json==0.1.14
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==1.34.1
google-api-python-client==2.166.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud-core==1.5.0
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==3.20.3
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
PyDrive2==1.15.4
pyfastcopy==1.0.3
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyOpenSSL==25.0.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
speedcopy==2.1.5
texttable==1.7.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==5.5.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.82.1+d94254
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flatten-json==0.1.14
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==1.34.1
- google-api-python-client==2.166.0
- google-auth==2.38.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.5.0
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==3.20.3
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydrive2==1.15.4
- pyfastcopy==1.0.3
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyopenssl==25.0.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- speedcopy==2.1.5
- texttable==1.7.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_data_cloud.py::test_verify_checksums"
] | [
"tests/func/test_data_cloud.py::TestRemoteLOCALCLI::test"
] | [
"tests/func/test_data_cloud.py::TestDataCloud::test",
"tests/func/test_data_cloud.py::TestRemoteLOCAL::test",
"tests/func/test_data_cloud.py::TestRemoteSSHMocked::test",
"tests/func/test_data_cloud.py::TestDataCloudErrorCLI::test_error",
"tests/func/test_data_cloud.py::TestWarnOnOutdatedStage::test",
"tests/func/test_data_cloud.py::TestRecursiveSyncOperations::test",
"tests/func/test_data_cloud.py::TestCheckSumRecalculation::test",
"tests/func/test_data_cloud.py::TestShouldWarnOnNoChecksumInLocalAndRemoteCache::test"
] | [] | Apache License 2.0 | 6,258 | 968 | [
"dvc/config.py",
"dvc/data_cloud.py",
"dvc/remote/base.py",
"dvc/remote/gdrive.py"
] |
|
pytorch__ignite-715 | a94725b83b8d6aefc9382f08e40c19976e3f8fc1 | 2020-01-22 14:07:02 | 479659a9436e41d4e725f3f9492b6b644e69aa8f | diff --git a/ignite/engine/engine.py b/ignite/engine/engine.py
index 52febd51..9f17ac9a 100644
--- a/ignite/engine/engine.py
+++ b/ignite/engine/engine.py
@@ -863,19 +863,20 @@ class Engine:
# if input data is torch dataloader we replace batch sampler by a batch sampler
# such that its random sampling indices are reproducible by prefetching them before data iteration
if isinstance(self.state.dataloader, torch.utils.data.DataLoader):
-
- if (self._dataloader_len is not None) and hasattr(self.state.dataloader.sampler, "epoch"):
- if self._dataloader_len != self.state.epoch_length:
- warnings.warn("When defined engine's epoch length is different of input dataloader length, "
- "distributed sampler indices can not be setup in a reproducible manner")
-
- batch_sampler = self.state.dataloader.batch_sampler
- if not isinstance(batch_sampler, ReproducibleBatchSampler):
- self.state.dataloader = _update_dataloader(self.state.dataloader,
- ReproducibleBatchSampler(batch_sampler))
+ _dataloader_kind = self.state.dataloader._dataset_kind
+ if _dataloader_kind == torch.utils.data.dataloader._DatasetKind.Map:
+ if (self._dataloader_len is not None) and hasattr(self.state.dataloader.sampler, "epoch"):
+ if self._dataloader_len != self.state.epoch_length:
+ warnings.warn("When defined engine's epoch length is different of input dataloader length, "
+ "distributed sampler indices can not be setup in a reproducible manner")
+
+ batch_sampler = self.state.dataloader.batch_sampler
+ if not isinstance(batch_sampler, ReproducibleBatchSampler):
+ self.state.dataloader = _update_dataloader(self.state.dataloader,
+ ReproducibleBatchSampler(batch_sampler))
iteration = self.state.iteration
- self._dataloader_iter = self._from_iteration(self.state.dataloader, iteration, self.state.epoch_length)
+ self._dataloader_iter = self._from_iteration(self.state.dataloader, iteration)
# Below we define initial counter value for _run_once_on_dataset to measure a single epoch
if self.state.epoch_length is not None:
@@ -883,12 +884,17 @@ class Engine:
self._init_iter.append(iteration)
@staticmethod
- def _from_iteration(data, iteration, epoch_length):
+ def _from_iteration(data, iteration):
if isinstance(data, torch.utils.data.DataLoader):
- iteration %= len(data.batch_sampler)
- if iteration > 0:
- # batch sampler is ReproducibleBatchSampler
- data.batch_sampler.start_iteration = iteration
+ try:
+ # following is unsafe for IterableDatasets
+ iteration %= len(data.batch_sampler)
+ if iteration > 0:
+ # batch sampler is ReproducibleBatchSampler
+ data.batch_sampler.start_iteration = iteration
+ except TypeError:
+ # Probably we can do nothing with DataLoader built upon IterableDatasets
+ pass
data_iter = iter(data)
else:
if hasattr(data, "__len__"):
| Issue with DataLoader with _dataset_kind = Iterable
The following code wont work:
```python
import torch
class MyIterableDataset(torch.utils.data.IterableDataset):
def __init__(self, start, end):
super(MyIterableDataset).__init__()
assert end > start, "this example code only works with end >= start"
self.start = start
self.end = end
def __iter__(self):
return iter(range(self.start, self.end))
ds = MyIterableDataset(0, 1000)
data_loader = torch.utils.data.DataLoader(ds, num_workers=2)
from ignite.engine import Engine
def foo(e, b):
print("{}-{}: {}".format(e.state.epoch, e.state.iteration, b))
engine = Engine(foo)
engine.run(data_loader, epoch_length=10)
```
and gives the error
```
ValueErrorTraceback (most recent call last)
<ipython-input-19-1c8004fbf46e> in <module>
21
22 engine = Engine(foo)
---> 23 engine.run(data_loader, epoch_length=10)
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in run(self, data, max_epochs, epoch_length, seed)
848
849 self.state.dataloader = data
--> 850 return self._internal_run()
851
852 def _setup_engine(self):
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in _internal_run(self)
950 self._dataloader_iter = self._dataloader_len = None
951 self.logger.error("Engine run is terminating due to exception: %s.", str(e))
--> 952 self._handle_exception(e)
953
954 self._dataloader_iter = self._dataloader_len = None
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in _handle_exception(self, e)
714 self._fire_event(Events.EXCEPTION_RAISED, e)
715 else:
--> 716 raise e
717
718 def state_dict(self):
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in _internal_run(self)
933
934 if self._dataloader_iter is None:
--> 935 self._setup_engine()
936
937 hours, mins, secs = self._run_once_on_dataset()
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in _setup_engine(self)
873 if not isinstance(batch_sampler, ReproducibleBatchSampler):
874 self.state.dataloader = _update_dataloader(self.state.dataloader,
--> 875 ReproducibleBatchSampler(batch_sampler))
876
877 iteration = self.state.iteration
/opt/conda/lib/python3.7/site-packages/ignite/engine/engine.py in _update_dataloader(dataloader, new_batch_sampler)
963 params = {k: getattr(dataloader, k) for k in params_keys}
964 params['batch_sampler'] = new_batch_sampler
--> 965 return torch.utils.data.DataLoader(**params)
966
967
/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in __init__(self, dataset, batch_size, shuffle, sampler, batch_sampler, num_workers, collate_fn, pin_memory, drop_last, timeout, worker_init_fn, multiprocessing_context)
182 raise ValueError(
183 "DataLoader with IterableDataset: expected unspecified "
--> 184 "batch_sampler option, but got batch_sampler={}".format(batch_sampler))
185 else:
186 self._dataset_kind = _DatasetKind.Map
ValueError: DataLoader with IterableDataset: expected unspecified batch_sampler option, but got batch_sampler=<ignite.engine.engine.ReproducibleBatchSampler object at 0x7f6000e3fad0>
```
Patching it like [here](https://github.com/pytorch/ignite/issues/618#issuecomment-577183132) can be a workaround:
```python
engine.run(map(lambda x: x, data_loader), epoch_length=10)
``` | pytorch/ignite | diff --git a/tests/ignite/engine/test_engine.py b/tests/ignite/engine/test_engine.py
index 638415bf..40aeea13 100644
--- a/tests/ignite/engine/test_engine.py
+++ b/tests/ignite/engine/test_engine.py
@@ -496,3 +496,30 @@ def test_multinode_distrib_cpu(distributed_context_multi_node_gloo):
def test_multinode_distrib_gpu(distributed_context_multi_node_nccl):
_test_run_check_triggered_events_on_iterator()
_test_run_check_triggered_events()
+
+
+def test_engine_with_iterable_dataloader():
+
+ class MyIterableDataset(torch.utils.data.IterableDataset):
+ def __init__(self, start, end):
+ super(MyIterableDataset).__init__()
+ assert end > start, "this example code only works with end >= start"
+ self.start = start
+ self.end = end
+
+ def __iter__(self):
+ return iter(range(self.start, self.end))
+
+ ds = MyIterableDataset(0, 1000)
+ data_loader = torch.utils.data.DataLoader(ds, num_workers=2)
+
+ counter = [0]
+
+ def foo(e, b):
+ print("{}-{}: {}".format(e.state.epoch, e.state.iteration, b))
+ counter[0] += 1
+
+ engine = Engine(foo)
+ engine.run(data_loader, epoch_length=10, max_epochs=5)
+
+ assert counter[0] == 50
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"pip install torch torchvision -f https://download.pytorch.org/whl/cpu/torch_stable.html -U"
],
"python": "3.7",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
numpy==1.21.6
packaging==24.0
Pillow==9.5.0
pluggy==1.2.0
pytest==7.4.4
-e git+https://github.com/pytorch/ignite.git@a94725b83b8d6aefc9382f08e40c19976e3f8fc1#egg=pytorch_ignite
requests==2.31.0
tomli==2.0.1
torch==1.13.1+cpu
torchvision==0.14.1+cpu
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: ignite
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- numpy==1.21.6
- packaging==24.0
- pillow==9.5.0
- pluggy==1.2.0
- pytest==7.4.4
- requests==2.31.0
- tomli==2.0.1
- torch==1.13.1+cpu
- torchvision==0.14.1+cpu
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/ignite
| [
"tests/ignite/engine/test_engine.py::test_engine_with_iterable_dataloader"
] | [] | [
"tests/ignite/engine/test_engine.py::test_terminate",
"tests/ignite/engine/test_engine.py::test_invalid_process_raises_with_invalid_signature",
"tests/ignite/engine/test_engine.py::test_current_epoch_counter_increases_every_epoch",
"tests/ignite/engine/test_engine.py::test_current_iteration_counter_increases_every_iteration",
"tests/ignite/engine/test_engine.py::test_stopping_criterion_is_max_epochs",
"tests/ignite/engine/test_engine.py::test_terminate_at_end_of_epoch_stops_run",
"tests/ignite/engine/test_engine.py::test_terminate_at_start_of_epoch_stops_run_after_completing_iteration",
"tests/ignite/engine/test_engine.py::test_terminate_stops_run_mid_epoch",
"tests/ignite/engine/test_engine.py::test_terminate_epoch_stops_mid_epoch",
"tests/ignite/engine/test_engine.py::test_iteration_events_are_fired",
"tests/ignite/engine/test_engine.py::test_last_event_name",
"tests/ignite/engine/test_engine.py::test_reset_should_terminate",
"tests/ignite/engine/test_engine.py::test_batch_values",
"tests/ignite/engine/test_engine.py::test_state_repr",
"tests/ignite/engine/test_engine.py::test_alter_batch",
"tests/ignite/engine/test_engine.py::test__is_done",
"tests/ignite/engine/test_engine.py::test__setup_engine",
"tests/ignite/engine/test_engine.py::test_run_asserts",
"tests/ignite/engine/test_engine.py::test_state_get_event_attrib_value",
"tests/ignite/engine/test_engine.py::test_run_check_triggered_events",
"tests/ignite/engine/test_engine.py::test_run_check_triggered_events_on_iterator"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,270 | 712 | [
"ignite/engine/engine.py"
] |
|
asottile__pyupgrade-250 | 465effb0a65f3cd05d8b8201be4c8b899941b6e1 | 2020-01-22 18:14:01 | 93a8c0f191e3ff269f5824bf1010f2920f0b1284 | diff --git a/pyupgrade.py b/pyupgrade.py
index 4a0f9c1..6f99694 100644
--- a/pyupgrade.py
+++ b/pyupgrade.py
@@ -1140,6 +1140,7 @@ WITH_METACLASS_BASES_TMPL = '{rest}, metaclass={args[0]}'
RAISE_FROM_TMPL = 'raise {args[0]} from {rest}'
RERAISE_2_TMPL = 'raise {args[1]}.with_traceback(None)'
RERAISE_3_TMPL = 'raise {args[1]}.with_traceback({args[2]})'
+SIX_NATIVE_STR = frozenset(('ensure_str', 'ensure_text', 'text_type'))
def _all_isinstance(vals, tp):
@@ -1473,7 +1474,7 @@ class FindPy3Plus(ast.NodeVisitor):
self.super_calls[_ast_to_offset(node)] = node
elif (
(
- self._is_six(node.func, ('ensure_str', 'ensure_text')) or
+ self._is_six(node.func, SIX_NATIVE_STR) or
isinstance(node.func, ast.Name) and node.func.id == 'str'
) and
not node.keywords and
@@ -1953,6 +1954,15 @@ def _fix_py3_plus(contents_text): # type: (str) -> str
if_block, else_block = _find_if_else_block(tokens, j)
del tokens[if_block.end:else_block.end]
if_block.replace_condition(tokens, [Token('NAME', 'else')])
+ elif token.offset in visitor.native_literals:
+ j = _find_open_paren(tokens, i)
+ func_args, end = _parse_call_args(tokens, j)
+ if any(tok.name == 'NL' for tok in tokens[i:end]):
+ continue
+ if func_args:
+ _replace_call(tokens, i, end, func_args, '{args[0]}')
+ else:
+ tokens[i:end] = [token._replace(name='STRING', src="''")]
elif token.offset in visitor.six_type_ctx:
_replace(i, SIX_TYPE_CTX_ATTRS, visitor.six_type_ctx[token.offset])
elif token.offset in visitor.six_simple:
@@ -2042,15 +2052,6 @@ def _fix_py3_plus(contents_text): # type: (str) -> str
call = visitor.encode_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1:victims.ends[-1]]
- elif token.offset in visitor.native_literals:
- j = _find_open_paren(tokens, i)
- func_args, end = _parse_call_args(tokens, j)
- if any(tok.name == 'NL' for tok in tokens[i:end]):
- continue
- if func_args:
- _replace_call(tokens, i, end, func_args, '{args[0]}')
- else:
- tokens[i:end] = [token._replace(name='STRING', src="''")]
elif token.offset in visitor.io_open_calls:
j = _find_open_paren(tokens, i)
tokens[i:j] = [token._replace(name='NAME', src='open')]
| six.text_type('') takes two passes to resolve
For example:
```console
$ pip freeze | grep pyupgrade
pyupgrade==1.26.1
$ cat 1.py
six.text_type('')
$ pyupgrade 1.py --py3-plus
Rewriting 1.py
$ cat 1.py
str('')
$ pyupgrade 1.py --py3-plus
Rewriting 1.py
$ cat 1.py
''
$ pyupgrade 1.py --py3-plus
$
```
Perhaps rewrite directly to `''`? | asottile/pyupgrade | diff --git a/tests/native_literals_test.py b/tests/native_literals_test.py
index 7b6c8f0..7f52126 100644
--- a/tests/native_literals_test.py
+++ b/tests/native_literals_test.py
@@ -29,6 +29,7 @@ def test_fix_native_literals_noop(s):
('str("""\nfoo""")', '"""\nfoo"""'),
('six.ensure_str("foo")', '"foo"'),
('six.ensure_text("foo")', '"foo"'),
+ ('six.text_type("foo")', '"foo"'),
),
)
def test_fix_native_literals(s, expected):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.26 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/asottile/pyupgrade.git@465effb0a65f3cd05d8b8201be4c8b899941b6e1#egg=pyupgrade
tokenize_rt==6.1.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: pyupgrade
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- tokenize-rt==6.1.0
prefix: /opt/conda/envs/pyupgrade
| [
"tests/native_literals_test.py::test_fix_native_literals[six.text_type(\"foo\")-\"foo\"]"
] | [] | [
"tests/native_literals_test.py::test_fix_native_literals_noop[str(1)]",
"tests/native_literals_test.py::test_fix_native_literals_noop[str(\"foo\"\\n\"bar\")]",
"tests/native_literals_test.py::test_fix_native_literals_noop[str(*a)]",
"tests/native_literals_test.py::test_fix_native_literals_noop[str(\"foo\",",
"tests/native_literals_test.py::test_fix_native_literals_noop[str(**k)]",
"tests/native_literals_test.py::test_fix_native_literals[str()-'']",
"tests/native_literals_test.py::test_fix_native_literals[str(\"foo\")-\"foo\"]",
"tests/native_literals_test.py::test_fix_native_literals[str(\"\"\"\\nfoo\"\"\")-\"\"\"\\nfoo\"\"\"]",
"tests/native_literals_test.py::test_fix_native_literals[six.ensure_str(\"foo\")-\"foo\"]",
"tests/native_literals_test.py::test_fix_native_literals[six.ensure_text(\"foo\")-\"foo\"]"
] | [] | MIT License | 6,273 | 741 | [
"pyupgrade.py"
] |
|
lace__vg-103 | eb29bab81fe17b0a66b262ae6f2b2d78d9186482 | 2020-01-22 21:09:04 | 4bbfb179da51a24f23bcb48c57f22b19154b3fe1 | now[bot]:
This pull request is being automatically deployed with ZEIT Now ([learn more](https://zeit.co/docs/v2/integrations/now-for-github?utm_source=automated&utm_medium=github&utm_campaign=now_bot)).
To see the status of your deployment, click on the icon next to each commit.
| diff --git a/vg/core.py b/vg/core.py
index a0be949..0c3867b 100644
--- a/vg/core.py
+++ b/vg/core.py
@@ -28,6 +28,7 @@ __all__ = [
"farthest",
"basis",
"within",
+ "average",
"cross",
"dot",
]
@@ -601,6 +602,33 @@ def within(points, radius, of_point, atol=1e-08, ret_indices=False):
return points_within_radius
+def average(values, weights=None, ret_sum_of_weights=False):
+ """
+ Compute a weighted or unweighted average of the 3D input values. The
+ inputs could be points or vectors.
+
+ Args:
+ values (np.arraylike): A `kx3` stack of vectors.
+ weights (array-convertible): An optional `k` array of weights.
+ ret_sum_of_weights (bool): When `True`, the sum of the weights is
+ returned. When `weights` is `None`, this is the number of
+ elements over which the average is taken.
+
+ Returns:
+ np.ndarray: A `(3,)` vector with the weighted or unweighted average.
+ """
+ k = check(locals(), "values", (-1, 3))
+ if weights is not None:
+ weights = np.array(weights)
+ check(locals(), "weights", (k,))
+ result = np.average(values, axis=0, weights=weights)
+ if ret_sum_of_weights:
+ sum_of_weights = np.sum(weights)
+ return result, sum_of_weights
+ else:
+ return result
+
+
def dot(v1, v2):
"""
Compute individual or pairwise dot products.
| Add `vg.mean` method
To replace code removed in lace/polliwog#96. | lace/vg | diff --git a/vg/test_average.py b/vg/test_average.py
new file mode 100644
index 0000000..9a39ab8
--- /dev/null
+++ b/vg/test_average.py
@@ -0,0 +1,22 @@
+import numpy as np
+from . import core as vg
+
+
+def test_average():
+ np.testing.assert_array_equal(
+ vg.average(np.array([[1.0, 2.0, 3.0], [-6.0, -9.0, -15.0]])),
+ np.array([-2.5, -3.5, -6.0]),
+ )
+ np.testing.assert_array_equal(
+ vg.average(np.array([[1.0, 2.0, 3.0], [-6.0, -9.0, -15.0]]), weights=(3, 5)),
+ np.array([-3.375, -4.875, -8.25]),
+ )
+ result, sum_of_weights = vg.average(
+ np.array([[1.0, 2.0, 3.0], [-6.0, -9.0, -15.0]]),
+ weights=(3, 5),
+ ret_sum_of_weights=True,
+ )
+ np.testing.assert_array_equal(
+ result, np.array([-3.375, -4.875, -8.25]),
+ )
+ assert sum_of_weights == 8.0
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 1.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
-e git+https://github.com/lace/vg.git@eb29bab81fe17b0a66b262ae6f2b2d78d9186482#egg=vg
zipp==3.6.0
| name: vg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/vg
| [
"vg/test_average.py::test_average"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 6,277 | 416 | [
"vg/core.py"
] |
googleapis__google-auth-library-python-435 | 1b9de8dfbe4523f3170e47985ab523cb7865de48 | 2020-01-23 11:45:24 | 1b9de8dfbe4523f3170e47985ab523cb7865de48 | plamut: Well go figure, running the `blacken` session can cause syntax errors, because a trailing comma is not allowed after **kwargs in Python <= 3.5. :smile: | diff --git a/google/auth/transport/requests.py b/google/auth/transport/requests.py
index ce78e63..1c709d4 100644
--- a/google/auth/transport/requests.py
+++ b/google/auth/transport/requests.py
@@ -42,6 +42,8 @@ from google.auth import transport
_LOGGER = logging.getLogger(__name__)
+_DEFAULT_TIMEOUT = 120 # in seconds
+
class _Response(transport.Response):
"""Requests transport response adapter.
@@ -141,7 +143,13 @@ class Request(transport.Request):
self.session = session
def __call__(
- self, url, method="GET", body=None, headers=None, timeout=120, **kwargs
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
):
"""Make an HTTP request using requests.
@@ -246,7 +254,7 @@ class AuthorizedSession(requests.Session):
data=None,
headers=None,
max_allowed_time=None,
- timeout=None,
+ timeout=_DEFAULT_TIMEOUT,
**kwargs
):
"""Implementation of Requests' request.
| Set the default request timeout to something other than `None`
The [AuthorizedSession.requests()](https://github.com/googleapis/google-auth-library-python/blob/1b9de8dfbe4523f3170e47985ab523cb7865de48/google/auth/transport/requests.py#L242-L251) method does not specify a default timeout.
Since production code would almost always want to have some sort of a timeout, let's add one. The libraries that rely on `google-auth`, but do (yet) not specify a timeout themselves, would benefit from that.
Example: [storage issue](https://github.com/googleapis/google-cloud-python/issues/10182)
| googleapis/google-auth-library-python | diff --git a/tests/transport/test_requests.py b/tests/transport/test_requests.py
index 8f73d4b..f0321c8 100644
--- a/tests/transport/test_requests.py
+++ b/tests/transport/test_requests.py
@@ -177,6 +177,21 @@ class TestAuthorizedHttp(object):
assert authed_session._auth_request == auth_request
+ def test_request_default_timeout(self):
+ credentials = mock.Mock(wraps=CredentialsStub())
+ response = make_response()
+ adapter = AdapterStub([response])
+
+ authed_session = google.auth.transport.requests.AuthorizedSession(credentials)
+ authed_session.mount(self.TEST_URL, adapter)
+
+ patcher = mock.patch("google.auth.transport.requests.requests.Session.request")
+ with patcher as patched_request:
+ authed_session.request("GET", self.TEST_URL)
+
+ expected_timeout = google.auth.transport.requests._DEFAULT_TIMEOUT
+ assert patched_request.call_args.kwargs.get("timeout") == expected_timeout
+
def test_request_no_refresh(self):
credentials = mock.Mock(wraps=CredentialsStub())
response = make_response()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-localserver",
"mock",
"freezegun",
"flask",
"oauth2client",
"requests",
"urllib3",
"cryptography",
"grpcio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | blinker==1.9.0
cachetools==4.2.4
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
Flask==3.1.0
freezegun==1.5.1
-e git+https://github.com/googleapis/google-auth-library-python.git@1b9de8dfbe4523f3170e47985ab523cb7865de48#egg=google_auth
grpcio==1.71.0
httplib2==0.22.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
itsdangerous==2.2.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
oauth2client==4.1.3
packaging==24.2
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
pyparsing==3.2.3
pytest==8.3.5
pytest-cov==6.0.0
pytest-localserver==0.9.0.post0
python-dateutil==2.9.0.post0
requests==2.32.3
rsa==4.0
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
zipp==3.21.0
| name: google-auth-library-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- blinker==1.9.0
- cachetools==4.2.4
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- flask==3.1.0
- freezegun==1.5.1
- grpcio==1.71.0
- httplib2==0.22.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- itsdangerous==2.2.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.2
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-localserver==0.9.0.post0
- python-dateutil==2.9.0.post0
- requests==2.32.3
- rsa==4.0
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/google-auth-library-python
| [
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_default_timeout"
] | [] | [
"tests/transport/test_requests.py::TestRequestResponse::test_request_basic",
"tests/transport/test_requests.py::TestRequestResponse::test_request_with_timeout_success",
"tests/transport/test_requests.py::TestRequestResponse::test_request_with_timeout_failure",
"tests/transport/test_requests.py::TestRequestResponse::test_request_headers",
"tests/transport/test_requests.py::TestRequestResponse::test_request_error",
"tests/transport/test_requests.py::TestRequestResponse::test_connection_error",
"tests/transport/test_requests.py::TestRequestResponse::test_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_tracks_elapsed_time_w_numeric_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_tracks_elapsed_time_w_tuple_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_noop_if_no_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_timeout_error_w_numeric_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_timeout_error_w_tuple_timeout",
"tests/transport/test_requests.py::TestTimeoutGuard::test_custom_timeout_error_type",
"tests/transport/test_requests.py::TestTimeoutGuard::test_lets_suite_errors_bubble_up",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_constructor",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_constructor_with_auth_request",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_no_refresh",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_refresh",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_max_allowed_time_timeout_error",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_max_allowed_time_w_transport_timeout_no_error",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_max_allowed_time_w_refresh_timeout_no_error",
"tests/transport/test_requests.py::TestAuthorizedHttp::test_request_timeout_w_refresh_timeout_timeout_error"
] | [] | Apache License 2.0 | 6,282 | 285 | [
"google/auth/transport/requests.py"
] |
kblin__ncbi-genome-download-106 | a46446733e70b7754b2bcb847cb6be3544ce73bc | 2020-01-23 15:56:12 | a46446733e70b7754b2bcb847cb6be3544ce73bc | kblin: Hi there, thanks for the patch! I'll try to make some time to look at it this week. | diff --git a/ncbi_genome_download/core.py b/ncbi_genome_download/core.py
index 69efc2b..c99fd6b 100644
--- a/ncbi_genome_download/core.py
+++ b/ncbi_genome_download/core.py
@@ -192,15 +192,19 @@ def config_download(config):
if config.parallel == 1:
for entry, group in download_candidates:
- download_jobs.extend(create_downloadjob(entry, group, config))
+ curr_jobs = create_downloadjob(entry, group, config)
+ fill_metadata(curr_jobs, entry)
+ download_jobs.extend(curr_jobs)
+
for dl_job in download_jobs:
worker(dl_job)
else: # pragma: no cover
# Testing multiprocessing code is annoying
pool = Pool(processes=config.parallel)
- for created_dl_job in pool.imap_unordered(downloadjob_creator_caller, [ (entry, group, config) for entry, group in download_candidates ]):
+ for index, created_dl_job in enumerate(pool.imap(downloadjob_creator_caller, [ (entry, group, config) for entry, group in download_candidates ])):
download_jobs.extend(created_dl_job)
+ fill_metadata(created_dl_job, download_candidates[index][0])#index is conserved from download_candidates with the use of imap
jobs = pool.map_async(worker, download_jobs)
try:
@@ -222,6 +226,11 @@ def config_download(config):
return 75
return 0
+def fill_metadata(jobs, entry):
+ for job in jobs:
+ if job.full_url is not None:#if it is None, it's a symlink making, so nothing to write
+ mtable = metadata.get()
+ mtable.add(entry, job.local_file)
def select_candidates(config):
"""Select candidates to download.
@@ -534,10 +543,6 @@ def download_file_job(entry, directory, checksums, filetype='genbank', symlink_p
if symlink_path is not None:
full_symlink = os.path.join(symlink_path, filename)
- # Keep metadata around
- mtable = metadata.get()
- mtable.add(entry, local_file)
-
return DownloadJob(full_url, local_file, expected_checksum, full_symlink)
# pylint: enable=too-many-arguments,too-many-locals
| metadata option unworking
# version of ncbi-genome-download
`0.2.11`
# command
```
python3 ncbi-genome-download/ncbi-genome-download-runner.py --verbose --genus "Kluyvera" --metadata
-table data.tsv --format fasta -p 10 --retries 1 bacteria
```
# result
All genomes are downloaded but the data.tsv file doesnt contains the metadata, only the header.
```
cat data.tsv
assembly_accession bioproject biosample wgs_master excluded_from_refseq refseq_category relation_to_type_material taxid species_taxid organism_name infraspecific_name isolate version_status assembly_level release_type genome_rep seq_rel_date asm_name submitter gbrs_paired_asm paired_asm_comp ftp_path local_filename
``` | kblin/ncbi-genome-download | diff --git a/tests/test_core.py b/tests/test_core.py
index dda0f74..4210c57 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -4,12 +4,13 @@ from argparse import Namespace
from collections import OrderedDict
import os
from os import path
+from multiprocessing import Pool
import pytest
import requests_mock
from requests.exceptions import ConnectionError
-from ncbi_genome_download import core
+from ncbi_genome_download import core, metadata
from ncbi_genome_download import NgdConfig, SUPPORTED_TAXONOMIC_GROUPS
@@ -132,6 +133,28 @@ def test_download_metadata(monkeypatch, mocker, req, tmpdir):
assert core.create_downloadjob.call_count == 4
assert metadata_file.check()
+def test_metadata_fill(req, tmpdir):
+ entry, config, _ = prepare_create_downloadjob(req, tmpdir)
+ metadata.clear()#clear it, otherwise operations realized in other tests might impact it
+ assert len(core.metadata.get().rows) == 0
+ jobs = core.create_downloadjob(entry, 'bacteria', config)
+ core.fill_metadata(jobs, entry)
+ assert len(core.metadata.get().rows) == 1
+
+def test_metadata_fill_multi(req, tmpdir):
+ entry, config, joblist = prepare_create_downloadjob(req, tmpdir)
+ metadata.clear()#clear it, otherwise operations realized in other tests might impact it
+ jobs = []
+ assert len(core.metadata.get().rows) == 0
+ download_candidates = [(entry, 'bacteria')]
+ p = Pool(processes=1)
+ for index, created_dl_job in enumerate(p.imap(core.downloadjob_creator_caller, [ (curr_entry, curr_group, config) for curr_entry, curr_group in download_candidates ])):
+ jobs.extend(created_dl_job)
+ assert download_candidates[index][0] == entry
+ core.fill_metadata(created_dl_job, download_candidates[index][0])
+ expected = [j for j in joblist if j.local_file.endswith('_genomic.gbff.gz')]
+ assert len(core.metadata.get().rows) == 1
+ assert jobs == expected
def test_download_complete(monkeypatch, mocker, req):
summary_contents = open(_get_file('assembly_status.txt'), 'r').read()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"pytest-cov",
"requests-mock",
"pytest-mock"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | appdirs==1.4.4
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==7.2.7
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/kblin/ncbi-genome-download.git@a46446733e70b7754b2bcb847cb6be3544ce73bc#egg=ncbi_genome_download
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
pytest-cov==4.1.0
pytest-mock==3.11.1
requests==2.31.0
requests-mock==1.12.1
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: ncbi-genome-download
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- appdirs==1.4.4
- charset-normalizer==3.4.1
- coverage==7.2.7
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- requests==2.31.0
- requests-mock==1.12.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/ncbi-genome-download
| [
"tests/test_core.py::test_metadata_fill",
"tests/test_core.py::test_metadata_fill_multi"
] | [] | [
"tests/test_core.py::test_download_defaults",
"tests/test_core.py::test_args_download_defaults",
"tests/test_core.py::test_download_defaults_nomatch",
"tests/test_core.py::test_download_dry_run",
"tests/test_core.py::test_download_one",
"tests/test_core.py::test_download_connection_err",
"tests/test_core.py::test_download",
"tests/test_core.py::test_download_metadata",
"tests/test_core.py::test_download_complete",
"tests/test_core.py::test_download_chromosome",
"tests/test_core.py::test_download_scaffold",
"tests/test_core.py::test_download_contig",
"tests/test_core.py::test_download_genus",
"tests/test_core.py::test_download_genus_lowercase",
"tests/test_core.py::test_download_genus_fuzzy",
"tests/test_core.py::test_download_taxid",
"tests/test_core.py::test_download_species_taxid",
"tests/test_core.py::test_download_refseq_category",
"tests/test_core.py::test_download_type_material",
"tests/test_core.py::test_download_type_material_no_match",
"tests/test_core.py::test_get_summary",
"tests/test_core.py::test_get_summary_error_handling",
"tests/test_core.py::test_parse_summary",
"tests/test_core.py::test_filter_entries",
"tests/test_core.py::test_create_downloadjob_genbank",
"tests/test_core.py::test_create_downloadjob_all",
"tests/test_core.py::test_create_downloadjob_missing",
"tests/test_core.py::test_create_downloadjob_human_readable",
"tests/test_core.py::test_create_downloadjob_symlink_only",
"tests/test_core.py::test_create_dir",
"tests/test_core.py::test_create_dir_exists",
"tests/test_core.py::test_create_dir_isfile",
"tests/test_core.py::test_create_dir_flat",
"tests/test_core.py::test_create_readable_dir",
"tests/test_core.py::test_create_readable_dir_exists",
"tests/test_core.py::test_create_readable_dir_isfile",
"tests/test_core.py::test_create_readable_dir_virus",
"tests/test_core.py::test_grab_checksums_file",
"tests/test_core.py::test_parse_checksums",
"tests/test_core.py::test_has_file_changed_no_file",
"tests/test_core.py::test_has_file_changed",
"tests/test_core.py::test_has_file_changed_unchanged",
"tests/test_core.py::test_need_to_create_symlink_no_symlink",
"tests/test_core.py::test_need_to_create_symlink_correct_link",
"tests/test_core.py::test_need_to_create_symlink",
"tests/test_core.py::test_md5sum",
"tests/test_core.py::test_download_file_genbank",
"tests/test_core.py::test_download_file_genbank_mismatch",
"tests/test_core.py::test_download_file_fasta",
"tests/test_core.py::test_download_file_cds_fasta",
"tests/test_core.py::test_download_file_rna_fasta",
"tests/test_core.py::test_download_file_rna_fna",
"tests/test_core.py::test_download_file_rm_out",
"tests/test_core.py::test_download_file_symlink_path",
"tests/test_core.py::test_create_symlink_job",
"tests/test_core.py::test_create_symlink_job_remove_symlink",
"tests/test_core.py::test_download_file_symlink_path_existed",
"tests/test_core.py::test_get_genus_label",
"tests/test_core.py::test_get_species_label",
"tests/test_core.py::test_get_strain_label"
] | [] | Apache License 2.0 | 6,284 | 535 | [
"ncbi_genome_download/core.py"
] |
pybamm-team__PyBaMM-794 | ae1e6ae452b8aa499936e6ede8f118aee0118b7a | 2020-01-23 18:24:38 | 4bf599043c9bd070333e224180abc455c8127940 | diff --git a/pybamm/expression_tree/functions.py b/pybamm/expression_tree/functions.py
index be98461a7..5be8e15a1 100644
--- a/pybamm/expression_tree/functions.py
+++ b/pybamm/expression_tree/functions.py
@@ -159,7 +159,7 @@ class Function(pybamm.Symbol):
evaluated_children = [None] * len(self.children)
for i, child in enumerate(self.children):
evaluated_children[i], known_evals = child.evaluate(
- t, y, known_evals=known_evals
+ t, y, u, known_evals=known_evals
)
known_evals[self.id] = self._function_evaluate(evaluated_children)
return known_evals[self.id], known_evals
diff --git a/pybamm/processed_variable.py b/pybamm/processed_variable.py
index 77aef5bfe..847fdef80 100644
--- a/pybamm/processed_variable.py
+++ b/pybamm/processed_variable.py
@@ -41,12 +41,14 @@ class ProcessedVariable(object):
self.base_eval, self.known_evals[solution.t[0]] = base_variable.evaluate(
solution.t[0],
solution.y[:, 0],
- solution.inputs,
+ {name: inp[0] for name, inp in solution.inputs.items()},
known_evals=self.known_evals[solution.t[0]],
)
else:
self.base_eval = base_variable.evaluate(
- solution.t[0], solution.y[:, 0], solution.inputs
+ solution.t[0],
+ solution.y[:, 0],
+ {name: inp[0] for name, inp in solution.inputs.items()},
)
# handle 2D (in space) finite element variables differently
@@ -90,14 +92,14 @@ class ProcessedVariable(object):
# Evaluate the base_variable index-by-index
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
+ u = self.u_sol[:, idx]
+ inputs = {name: inp[0] for name, inp in self.inputs.items()}
if self.known_evals:
entries[idx], self.known_evals[t] = self.base_variable.evaluate(
- t, self.u_sol[:, idx], self.inputs, known_evals=self.known_evals[t]
+ t, u, inputs, known_evals=self.known_evals[t]
)
else:
- entries[idx] = self.base_variable.evaluate(
- t, self.u_sol[:, idx], self.inputs
- )
+ entries[idx] = self.base_variable.evaluate(t, u, inputs)
# No discretisation provided, or variable has no domain (function of t only)
self._interpolation_function = interp.interp1d(
@@ -115,14 +117,15 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
+ inputs = {name: inp[0] for name, inp in self.inputs.items()}
if self.known_evals:
eval_and_known_evals = self.base_variable.evaluate(
- t, u, self.inputs, known_evals=self.known_evals[t]
+ t, u, inputs, known_evals=self.known_evals[t]
)
entries[:, idx] = eval_and_known_evals[0][:, 0]
self.known_evals[t] = eval_and_known_evals[1]
else:
- entries[:, idx] = self.base_variable.evaluate(t, u, self.inputs)[:, 0]
+ entries[:, idx] = self.base_variable.evaluate(t, u, inputs)[:, 0]
# Process the discretisation to get x values
nodes = self.mesh[0].nodes
@@ -218,9 +221,10 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
+ inputs = {name: inp[0] for name, inp in self.inputs.items()}
if self.known_evals:
eval_and_known_evals = self.base_variable.evaluate(
- t, u, self.inputs, known_evals=self.known_evals[t]
+ t, u, inputs, known_evals=self.known_evals[t]
)
entries[:, :, idx] = np.reshape(
eval_and_known_evals[0],
@@ -230,7 +234,7 @@ class ProcessedVariable(object):
self.known_evals[t] = eval_and_known_evals[1]
else:
entries[:, :, idx] = np.reshape(
- self.base_variable.evaluate(t, u, self.inputs),
+ self.base_variable.evaluate(t, u, inputs),
[first_dim_size, second_dim_size],
order="F",
)
| Solution[<variable>] doesn't work when some parameters are inputs
**Describe the bug**
The inputs are not making their way to all the things that need to be evaluated somehow. The example below will work if current is the only input but fails when electrode height is one. Something to do with mesh
**To Reproduce**
```
import pybamm
import numpy as np
pybamm.set_logging_level('DEBUG')
I_typical = 1.0
def current_function(t):
return pybamm.InputParameter("Current")
model = pybamm.lithium_ion.SPM()
geometry = model.default_geometry
param = model.default_parameter_values
param.update(
{
"Typical current [A]": I_typical,
"Current function [A]": current_function,
"Current": "[input]",
"Electrode height [m]": "[input]",
}
)
param.process_model(model)
param.process_geometry(geometry)
var = pybamm.standard_spatial_vars
var_pts = {var.x_n: 5, var.x_s: 5, var.x_p: 5, var.r_n: 10, var.r_p: 10}
spatial_methods = model.default_spatial_methods
solver = model.default_solver
sim = pybamm.Simulation(
model=model,
geometry=geometry,
parameter_values=param,
var_pts=var_pts,
spatial_methods=spatial_methods,
solver=solver,
)
inputs = {
'Current': I_typical,
'Electrode height [m]': 0.1
}
sim.solve(t_eval=np.linspace(0, 0.01, 100), inputs=inputs)
print(sim.solution['Time [h]'](sim.solution.t))
```
| pybamm-team/PyBaMM | diff --git a/tests/unit/test_solvers/test_solution.py b/tests/unit/test_solvers/test_solution.py
index 32e81cfbc..7aa4db34f 100644
--- a/tests/unit/test_solvers/test_solution.py
+++ b/tests/unit/test_solvers/test_solution.py
@@ -95,6 +95,30 @@ class TestSolution(unittest.TestCase):
self.assertEqual(solution.model.name, solution_load.model.name)
np.testing.assert_array_equal(solution["c"].entries, solution_load["c"].entries)
+ def test_solution_evals_with_inputs(self):
+ model = pybamm.lithium_ion.SPM()
+ geometry = model.default_geometry
+ param = model.default_parameter_values
+ param.update({"Electrode height [m]": "[input]"})
+ param.process_model(model)
+ param.process_geometry(geometry)
+ var = pybamm.standard_spatial_vars
+ var_pts = {var.x_n: 5, var.x_s: 5, var.x_p: 5, var.r_n: 10, var.r_p: 10}
+ spatial_methods = model.default_spatial_methods
+ solver = model.default_solver
+ sim = pybamm.Simulation(
+ model=model,
+ geometry=geometry,
+ parameter_values=param,
+ var_pts=var_pts,
+ spatial_methods=spatial_methods,
+ solver=solver,
+ )
+ inputs = {"Electrode height [m]": 0.1}
+ sim.solve(t_eval=np.linspace(0, 0.01, 10), inputs=inputs)
+ time = sim.solution["Time [h]"](sim.solution.t)
+ self.assertEqual(len(time), 10)
+
if __name__ == "__main__":
print("Add -v for more debug output")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev,docs]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc gfortran libopenblas-dev"
],
"python": "3.9",
"reqs_path": [
"requirements-docs.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
anyio==4.9.0
anytree==2.12.1
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==3.0.0
async-lru==2.0.5
attrs==25.3.0
autograd==1.7.0
babel==2.17.0
beautifulsoup4==4.13.3
black==25.1.0
bleach==6.2.0
casadi==3.7.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
comm==0.2.2
contourpy==1.3.0
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
docutils==0.21.2
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
flake8==7.2.0
fonttools==4.56.0
fqdn==1.5.1
guzzle_sphinx_theme==0.7.11
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kiwisolver==1.4.7
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mccabe==0.7.0
mistune==3.1.3
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
notebook==7.3.3
notebook_shim==0.2.4
numpy==2.0.2
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
-e git+https://github.com/pybamm-team/PyBaMM.git@ae1e6ae452b8aa499936e6ede8f118aee0118b7a#egg=pybamm
pycodestyle==2.13.0
pycparser==2.22
pyflakes==3.3.1
Pygments==2.19.1
pyparsing==3.2.3
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
scikit-fem==10.0.2
scipy==1.13.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: PyBaMM
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- anyio==4.9.0
- anytree==2.12.1
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asttokens==3.0.0
- async-lru==2.0.5
- attrs==25.3.0
- autograd==1.7.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- black==25.1.0
- bleach==6.2.0
- casadi==3.7.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- comm==0.2.2
- contourpy==1.3.0
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- docutils==0.21.2
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- flake8==7.2.0
- fonttools==4.56.0
- fqdn==1.5.1
- guzzle-sphinx-theme==0.7.11
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.7
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mistune==3.1.3
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==2.0.2
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.12.1
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pycodestyle==2.13.0
- pycparser==2.22
- pyflakes==3.3.1
- pygments==2.19.1
- pyparsing==3.2.3
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- scikit-fem==10.0.2
- scipy==1.13.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/PyBaMM
| [
"tests/unit/test_solvers/test_solution.py::TestSolution::test_solution_evals_with_inputs"
] | [] | [
"tests/unit/test_solvers/test_solution.py::TestSolution::test_append",
"tests/unit/test_solvers/test_solution.py::TestSolution::test_getitem",
"tests/unit/test_solvers/test_solution.py::TestSolution::test_init",
"tests/unit/test_solvers/test_solution.py::TestSolution::test_save",
"tests/unit/test_solvers/test_solution.py::TestSolution::test_total_time"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,286 | 1,117 | [
"pybamm/expression_tree/functions.py",
"pybamm/processed_variable.py"
] |
|
pybamm-team__PyBaMM-797 | f7725171de2b718f56ea48983dbaf39a7d7864f9 | 2020-01-24 13:50:33 | 4bf599043c9bd070333e224180abc455c8127940 | diff --git a/pybamm/processed_variable.py b/pybamm/processed_variable.py
index 847fdef80..96f97f18e 100644
--- a/pybamm/processed_variable.py
+++ b/pybamm/processed_variable.py
@@ -93,7 +93,7 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
- inputs = {name: inp[0] for name, inp in self.inputs.items()}
+ inputs = {name: inp[idx] for name, inp in self.inputs.items()}
if self.known_evals:
entries[idx], self.known_evals[t] = self.base_variable.evaluate(
t, u, inputs, known_evals=self.known_evals[t]
@@ -117,7 +117,7 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
- inputs = {name: inp[0] for name, inp in self.inputs.items()}
+ inputs = {name: inp[idx] for name, inp in self.inputs.items()}
if self.known_evals:
eval_and_known_evals = self.base_variable.evaluate(
t, u, inputs, known_evals=self.known_evals[t]
@@ -221,7 +221,7 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
- inputs = {name: inp[0] for name, inp in self.inputs.items()}
+ inputs = {name: inp[idx] for name, inp in self.inputs.items()}
if self.known_evals:
eval_and_known_evals = self.base_variable.evaluate(
t, u, inputs, known_evals=self.known_evals[t]
@@ -258,7 +258,11 @@ class ProcessedVariable(object):
len_z = len(z_sol)
# Evaluate the base_variable
- entries = np.reshape(self.base_variable.evaluate(0, self.u_sol), [len_y, len_z])
+ inputs = {name: inp[0] for name, inp in self.inputs.items()}
+
+ entries = np.reshape(
+ self.base_variable.evaluate(0, self.u_sol, inputs), [len_y, len_z]
+ )
# assign attributes for reference
self.entries = entries
@@ -284,15 +288,17 @@ class ProcessedVariable(object):
for idx in range(len(self.t_sol)):
t = self.t_sol[idx]
u = self.u_sol[:, idx]
+ inputs = {name: inp[idx] for name, inp in self.inputs.items()}
+
if self.known_evals:
eval_and_known_evals = self.base_variable.evaluate(
- t, u, self.inputs, known_evals=self.known_evals[t]
+ t, u, inputs, known_evals=self.known_evals[t]
)
entries[:, :, idx] = np.reshape(eval_and_known_evals[0], [len_y, len_z])
self.known_evals[t] = eval_and_known_evals[1]
else:
entries[:, :, idx] = np.reshape(
- self.base_variable.evaluate(t, u, self.inputs), [len_y, len_z]
+ self.base_variable.evaluate(t, u, inputs), [len_y, len_z]
)
# assign attributes for reference
diff --git a/pybamm/simulation.py b/pybamm/simulation.py
index 7eafa8fb2..f7080bf11 100644
--- a/pybamm/simulation.py
+++ b/pybamm/simulation.py
@@ -225,22 +225,10 @@ class Simulation:
elif self._solution.t[-1] == solution.t[-1]:
pass
else:
- self._update_solution(solution)
+ self._solution.append(solution)
self._made_first_step = True
- def _update_solution(self, solution):
-
- self._solution.set_up_time += solution.set_up_time
- self._solution.solve_time += solution.solve_time
- self._solution.t = np.append(self._solution.t, solution.t[-1])
- self._solution.t_event = solution.t_event
- self._solution.termination = solution.termination
- self._solution.y = np.concatenate(
- [self._solution.y, solution.y[:, -1][:, np.newaxis]], axis=1
- )
- self._solution.y_event = solution.y_event
-
def get_variable_array(self, *variables):
"""
A helper function to easily obtain a dictionary of arrays of values
diff --git a/pybamm/solvers/solution.py b/pybamm/solvers/solution.py
index 64ab97f35..b3f798544 100644
--- a/pybamm/solvers/solution.py
+++ b/pybamm/solvers/solution.py
@@ -142,6 +142,10 @@ class Solution(object):
self.inputs[name] = np.concatenate((inp, solution_inp[1:]))
# Update solution time
self.solve_time += solution.solve_time
+ # Update events
+ self.termination = solution.termination
+ self.t_event = solution.t_event
+ self.y_event = solution.y_event
# Update known_evals
for t, evals in solution.known_evals.items():
| Solution[<variable>] doesn't work when some parameters are inputs
**Describe the bug**
The inputs are not making their way to all the things that need to be evaluated somehow. The example below will work if current is the only input but fails when electrode height is one. Something to do with mesh
**To Reproduce**
```
import pybamm
import numpy as np
pybamm.set_logging_level('DEBUG')
I_typical = 1.0
def current_function(t):
return pybamm.InputParameter("Current")
model = pybamm.lithium_ion.SPM()
geometry = model.default_geometry
param = model.default_parameter_values
param.update(
{
"Typical current [A]": I_typical,
"Current function [A]": current_function,
"Current": "[input]",
"Electrode height [m]": "[input]",
}
)
param.process_model(model)
param.process_geometry(geometry)
var = pybamm.standard_spatial_vars
var_pts = {var.x_n: 5, var.x_s: 5, var.x_p: 5, var.r_n: 10, var.r_p: 10}
spatial_methods = model.default_spatial_methods
solver = model.default_solver
sim = pybamm.Simulation(
model=model,
geometry=geometry,
parameter_values=param,
var_pts=var_pts,
spatial_methods=spatial_methods,
solver=solver,
)
inputs = {
'Current': I_typical,
'Electrode height [m]': 0.1
}
sim.solve(t_eval=np.linspace(0, 0.01, 100), inputs=inputs)
print(sim.solution['Time [h]'](sim.solution.t))
```
| pybamm-team/PyBaMM | diff --git a/tests/unit/test_simulation.py b/tests/unit/test_simulation.py
index fcfaf2831..ee7fa188d 100644
--- a/tests/unit/test_simulation.py
+++ b/tests/unit/test_simulation.py
@@ -223,6 +223,31 @@ class TestSimulation(unittest.TestCase):
self.assertEqual(sim.solution.t[0], 2 * dt)
self.assertEqual(sim.solution.t[1], 3 * dt)
+ def test_step_with_inputs(self):
+ def current_function(t):
+ return pybamm.InputParameter("Current")
+
+ dt = 0.001
+ model = pybamm.lithium_ion.SPM()
+ param = model.default_parameter_values
+ param.update({"Current function [A]": current_function, "Current": "[input]"})
+ sim = pybamm.Simulation(model, parameter_values=param)
+ sim.step(dt, inputs={"Current": 1}) # 1 step stores first two points
+ self.assertEqual(sim.solution.t.size, 2)
+ self.assertEqual(sim.solution.y[0, :].size, 2)
+ self.assertEqual(sim.solution.t[0], 0)
+ self.assertEqual(sim.solution.t[1], dt)
+ np.testing.assert_array_equal(sim.solution.inputs["Current"], 1)
+ sim.step(dt, inputs={"Current": 2}) # automatically append the next step
+ self.assertEqual(sim.solution.t.size, 3)
+ self.assertEqual(sim.solution.y[0, :].size, 3)
+ self.assertEqual(sim.solution.t[0], 0)
+ self.assertEqual(sim.solution.t[1], dt)
+ self.assertEqual(sim.solution.t[2], 2 * dt)
+ np.testing.assert_array_equal(
+ sim.solution.inputs["Current"], np.array([1, 1, 2])
+ )
+
def test_save_load(self):
model = pybamm.lead_acid.LOQS()
model.use_jacobian = True
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev,docs]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev gfortran gcc cmake libopenblas-dev"
],
"python": "3.7",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
anyio==3.7.1
anytree==2.12.1
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
attrs==24.2.0
autograd==1.6.2
Babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.13.3
black==23.3.0
bleach==6.0.0
casadi==3.7.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
click==8.1.8
comm==0.1.4
cycler==0.11.0
debugpy==1.7.0
decorator==5.1.1
defusedxml==0.7.1
docutils==0.17.1
entrypoints==0.4
exceptiongroup==1.2.2
fastjsonschema==2.21.1
flake8==5.0.4
fonttools==4.38.0
future==1.0.0
guzzle-sphinx-theme==0.7.11
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
importlib-resources==5.12.0
iniconfig==2.0.0
ipykernel==6.16.2
ipython==7.34.0
ipython-genutils==0.2.0
ipywidgets==8.1.5
jedi==0.19.2
Jinja2==3.1.6
json5==0.9.16
jsonschema==4.17.3
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-server==1.24.0
jupyter_client==7.4.9
jupyter_core==4.12.0
jupyterlab==3.5.3
jupyterlab-pygments==0.2.2
jupyterlab_server==2.15.2
jupyterlab_widgets==3.0.13
kiwisolver==1.4.5
MarkupSafe==2.1.5
matplotlib==3.5.3
matplotlib-inline==0.1.6
mccabe==0.7.0
mistune==3.0.2
mypy-extensions==1.0.0
nbclassic==1.2.0
nbclient==0.7.4
nbconvert==7.6.0
nbformat==5.8.0
nest-asyncio==1.6.0
notebook==6.5.7
notebook_shim==0.2.4
numpy==1.21.6
packaging==24.0
pandas==1.3.5
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.11.2
pexpect==4.9.0
pickleshare==0.7.5
Pillow==9.5.0
pkgutil_resolve_name==1.3.10
platformdirs==4.0.0
pluggy==1.2.0
prometheus-client==0.17.1
prompt_toolkit==3.0.48
psutil==7.0.0
ptyprocess==0.7.0
-e git+https://github.com/pybamm-team/PyBaMM.git@f7725171de2b718f56ea48983dbaf39a7d7864f9#egg=pybamm
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.17.2
pyparsing==3.1.4
pyrsistent==0.19.3
pytest==7.4.4
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==26.2.1
requests==2.31.0
scikit-fem==8.1.0
scipy==1.7.3
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.4.1
Sphinx==4.3.2
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
terminado==0.17.1
tinycss2==1.2.1
tomli==2.0.1
tornado==6.2
traitlets==5.9.0
typed-ast==1.5.5
typing_extensions==4.7.1
urllib3==2.0.7
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.6.1
widgetsnbextension==4.0.13
zipp==3.15.0
| name: PyBaMM
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- anyio==3.7.1
- anytree==2.12.1
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- attrs==24.2.0
- autograd==1.6.2
- babel==2.14.0
- backcall==0.2.0
- beautifulsoup4==4.13.3
- black==23.3.0
- bleach==6.0.0
- casadi==3.7.0
- cffi==1.15.1
- charset-normalizer==3.4.1
- click==8.1.8
- comm==0.1.4
- cycler==0.11.0
- debugpy==1.7.0
- decorator==5.1.1
- defusedxml==0.7.1
- docutils==0.17.1
- entrypoints==0.4
- exceptiongroup==1.2.2
- fastjsonschema==2.21.1
- flake8==5.0.4
- fonttools==4.38.0
- future==1.0.0
- guzzle-sphinx-theme==0.7.11
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- iniconfig==2.0.0
- ipykernel==6.16.2
- ipython==7.34.0
- ipython-genutils==0.2.0
- ipywidgets==8.1.5
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.9.16
- jsonschema==4.17.3
- jupyter==1.1.1
- jupyter-client==7.4.9
- jupyter-console==6.6.3
- jupyter-core==4.12.0
- jupyter-server==1.24.0
- jupyterlab==3.5.3
- jupyterlab-pygments==0.2.2
- jupyterlab-server==2.15.2
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.5
- markupsafe==2.1.5
- matplotlib==3.5.3
- matplotlib-inline==0.1.6
- mccabe==0.7.0
- mistune==3.0.2
- mypy-extensions==1.0.0
- nbclassic==1.2.0
- nbclient==0.7.4
- nbconvert==7.6.0
- nbformat==5.8.0
- nest-asyncio==1.6.0
- notebook==6.5.7
- notebook-shim==0.2.4
- numpy==1.21.6
- packaging==24.0
- pandas==1.3.5
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.11.2
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==9.5.0
- pkgutil-resolve-name==1.3.10
- platformdirs==4.0.0
- pluggy==1.2.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.48
- psutil==7.0.0
- ptyprocess==0.7.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.17.2
- pyparsing==3.1.4
- pyrsistent==0.19.3
- pytest==7.4.4
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==26.2.1
- requests==2.31.0
- scikit-fem==8.1.0
- scipy==1.7.3
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.4.1
- sphinx==4.3.2
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- terminado==0.17.1
- tinycss2==1.2.1
- tomli==2.0.1
- tornado==6.2
- traitlets==5.9.0
- typed-ast==1.5.5
- typing-extensions==4.7.1
- urllib3==2.0.7
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.6.1
- widgetsnbextension==4.0.13
- zipp==3.15.0
prefix: /opt/conda/envs/PyBaMM
| [
"tests/unit/test_simulation.py::TestSimulation::test_step_with_inputs"
] | [] | [
"tests/unit/test_simulation.py::TestSimulation::test_basic_ops",
"tests/unit/test_simulation.py::TestSimulation::test_get_variable_array",
"tests/unit/test_simulation.py::TestSimulation::test_plot",
"tests/unit/test_simulation.py::TestSimulation::test_reuse_commands",
"tests/unit/test_simulation.py::TestSimulation::test_save_load",
"tests/unit/test_simulation.py::TestSimulation::test_save_load_dae",
"tests/unit/test_simulation.py::TestSimulation::test_set_crate",
"tests/unit/test_simulation.py::TestSimulation::test_set_defaults",
"tests/unit/test_simulation.py::TestSimulation::test_set_defaults2",
"tests/unit/test_simulation.py::TestSimulation::test_set_external_variable",
"tests/unit/test_simulation.py::TestSimulation::test_solve",
"tests/unit/test_simulation.py::TestSimulation::test_specs",
"tests/unit/test_simulation.py::TestSimulation::test_step"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,293 | 1,259 | [
"pybamm/processed_variable.py",
"pybamm/simulation.py",
"pybamm/solvers/solution.py"
] |
|
civisanalytics__civis-python-353 | dd27663851748fdfac7a29b7fc4cb496b1ba38b3 | 2020-01-24 22:11:58 | dd27663851748fdfac7a29b7fc4cb496b1ba38b3 | elsander: @stephen-hoover Wanted to flag this for you, since you might have more context on the original decision here.
@salilgupta1 , can you note in the description that this closes #345 ?
stephen-hoover: > @stephen-hoover Wanted to flag this for you, since you might have more context on the original decision here.
Thanks for the flag, but I don't recall being involved in any discussions around the initial implementation of this part of the code.
| diff --git a/civis/resources/_resources.py b/civis/resources/_resources.py
index 0a10aa3..cbe0ec5 100644
--- a/civis/resources/_resources.py
+++ b/civis/resources/_resources.py
@@ -132,6 +132,16 @@ def docs_from_properties(properties, level=0):
return docs
+def deprecated_notice(deprecation_warning):
+ """ Return a doc string element for the deprecation notice. The
+ doc string can be an empty string if the warning is None
+ """
+ if deprecation_warning is None:
+ return ""
+
+ return "Deprecation warning!\n------------------\n" + deprecation_warning
+
+
def doc_from_responses(responses):
""" Return a doc string element from a responses object. The
doc string describes the returned objects of a function.
@@ -411,13 +421,14 @@ def parse_method(verb, operation, path):
summary = operation["summary"]
params = operation.get("parameters", [])
responses = operation["responses"]
- deprecated = operation.get('deprecated', False)
- if 'deprecated' in summary.lower() or deprecated:
+ deprecation_warning = operation.get("x-deprecation-warning", None)
+ if 'deprecated' in summary.lower():
return None
args, param_doc = parse_params(params, summary, verb)
response_doc = doc_from_responses(responses)
- docs = join_doc_elements(param_doc, response_doc)
+ deprecation_notice = deprecated_notice(deprecation_warning)
+ docs = join_doc_elements(deprecation_notice, param_doc, response_doc)
name = parse_method_name(verb, path)
method = create_method(args, verb, name, path, docs)
| Add `ignore_deprecated` flag to `APIClient`
The API client currently ignores any endpoints with a deprecation warning. This means that endpoints effectively cannot be deprecated-- once they are tagged deprecated, they are inaccessible from the client.
One option would be to automatically pull in these endpoints, which would allow for deprecation, but would make deprecated endpoints visible to new users, which may not be desirable.
The alternative I'd propose is to add an `ignore_deprecated` flag when instantiating the client, which defaults to `True`. This means that existing code based on newly deprecated endpoints would still break, but could be updated to work until the endpoint is removed entirely. | civisanalytics/civis-python | diff --git a/civis/tests/test_resources.py b/civis/tests/test_resources.py
index 4c46091..2ce2d59 100644
--- a/civis/tests/test_resources.py
+++ b/civis/tests/test_resources.py
@@ -126,6 +126,18 @@ def test_docs_from_properties():
assert sorted(y) == sorted([' - a : string', ' - b : integer'])
+def test_deprecated_notice():
+ deprecation_warning = "This endpoint is no longer supported"
+ notice = _resources.deprecated_notice(deprecation_warning)
+
+ assert "Deprecation warning!" in notice
+ assert deprecation_warning in notice
+
+
+def test_deprecated_notice_handles_none():
+ assert _resources.deprecated_notice(None) == ""
+
+
def test_doc_from_responses():
responses = OrderedDict([('200', OrderedDict([('description', 'success'), ('schema', OrderedDict([('type', 'array'), ('items', OrderedDict([('type', 'object'), ('properties', OrderedDict([('id', OrderedDict([('description', 'The ID of the credential.'), ('type', 'integer')])), ('name', OrderedDict([('description', 'The name identifying the credential'), ('type', 'string')])), ('type', OrderedDict([('description', "The credential's type."), ('type', 'string')])), ('username', OrderedDict([('description', 'The username for the credential.'), ('type', 'string')])), ('description', OrderedDict([('description', 'A long description of the credential.'), ('type', 'string')])), ('owner', OrderedDict([('description', 'The name of the user who this credential belongs to.'), ('type', 'string')])), ('remoteHostId', OrderedDict([('description', 'The ID of the remote host associated with this credential.'), ('type', 'integer')])), ('remoteHostName', OrderedDict([('description', 'The name of the remote host associated with this credential.'), ('type', 'string')])), ('createdAt', OrderedDict([('description', 'The creation time for this credential.'), ('type', 'string'), ('format', 'time')])), ('updatedAt', OrderedDict([('description', 'The last modification time for this credential.'), ('type', 'string'), ('format', 'time')]))]))]))]))]))]) # noqa: E501
x = _resources.doc_from_responses(responses)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
cbor2==5.4.2.post1
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/civisanalytics/civis-python.git@dd27663851748fdfac7a29b7fc4cb496b1ba38b3#egg=civis
Click==7.0
cloudpickle==1.6.0
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
joblib==0.14.1
jsonref==0.2
jsonschema==3.0.0
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
pubnub==4.8.1
py==1.11.0
pycodestyle==2.9.1
pycryptodomex==3.21.0
pyflakes==2.5.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
PyYAML==5.4.1
requests==2.27.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: civis-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cbor2==5.4.2.post1
- charset-normalizer==2.0.12
- click==7.0
- cloudpickle==1.6.0
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- joblib==0.14.1
- jsonref==0.2
- jsonschema==3.0.0
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- pubnub==4.8.1
- py==1.11.0
- pycodestyle==2.9.1
- pycryptodomex==3.21.0
- pyflakes==2.5.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- pyyaml==5.4.1
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/civis-python
| [
"civis/tests/test_resources.py::test_deprecated_notice",
"civis/tests/test_resources.py::test_deprecated_notice_handles_none"
] | [] | [
"civis/tests/test_resources.py::test_create_method_iterator_kwarg",
"civis/tests/test_resources.py::test_create_method_no_iterator_kwarg",
"civis/tests/test_resources.py::test_exclude_resource",
"civis/tests/test_resources.py::test_property_type",
"civis/tests/test_resources.py::test_name_and_type_doc",
"civis/tests/test_resources.py::test_docs_from_property",
"civis/tests/test_resources.py::test_docs_from_properties",
"civis/tests/test_resources.py::test_doc_from_responses",
"civis/tests/test_resources.py::test_iterable_method",
"civis/tests/test_resources.py::test_split_method_params",
"civis/tests/test_resources.py::test_parse_param",
"civis/tests/test_resources.py::test_parse_params",
"civis/tests/test_resources.py::test_parse_param_body",
"civis/tests/test_resources.py::test_parse_method_name",
"civis/tests/test_resources.py::test_duplicate_names_generated_from_api_spec",
"civis/tests/test_resources.py::test_expired_api_key",
"civis/tests/test_resources.py::test_create_method_unexpected_kwargs",
"civis/tests/test_resources.py::test_create_method_too_many_pos_args",
"civis/tests/test_resources.py::test_create_method_multiple_values",
"civis/tests/test_resources.py::test_create_method_keyword_only",
"civis/tests/test_resources.py::test_generate_classes_maybe_cached",
"civis/tests/test_resources.py::test_parse_api_spec_names",
"civis/tests/test_resources.py::test_add_no_underscore_compatibility"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,299 | 398 | [
"civis/resources/_resources.py"
] |
MycroftAI__lingua-franca-76 | a45b9d12a2a8adace210055a38f432dd51e7456d | 2020-01-27 05:02:35 | d726b0562400769577e724c6fcb12307934435b6 | diff --git a/lingua_franca/lang/format_en.py b/lingua_franca/lang/format_en.py
index 5634a95..bba9bc1 100644
--- a/lingua_franca/lang/format_en.py
+++ b/lingua_franca/lang/format_en.py
@@ -97,19 +97,20 @@ def pronounce_number_en(num, places=2, short_scale=True, scientific=False,
# handling since each call disables the scientific flag
return '{}{} times ten to the {}{} power'.format(
'negative ' if float(n) < 0 else '',
- pronounce_number_en(abs(float(n)), places, short_scale, False, ordinals=False),
+ pronounce_number_en(
+ abs(float(n)), places, short_scale, False, ordinals=False),
'negative ' if power < 0 else '',
- pronounce_number_en(abs(power), places, short_scale, False,ordinals=True))
+ pronounce_number_en(abs(power), places, short_scale, False, ordinals=True))
else:
# This handles negatives of powers separately from the normal
# handling since each call disables the scientific flag
return '{}{} times ten to the power of {}{}'.format(
'negative ' if float(n) < 0 else '',
- pronounce_number_en(abs(float(n)), places, short_scale, False),
+ pronounce_number_en(
+ abs(float(n)), places, short_scale, False),
'negative ' if power < 0 else '',
pronounce_number_en(abs(power), places, short_scale, False))
-
if short_scale:
number_names = _NUM_STRING_EN.copy()
number_names.update(_SHORT_SCALE_EN)
@@ -289,12 +290,13 @@ def pronounce_number_en(num, places=2, short_scale=True, scientific=False,
return pronounce_number_en(num, places, short_scale, scientific=True)
# Deal with fractional part
elif not num == int(num) and places > 0:
+ if abs(num) < 1.0 and (result is "minus " or not result):
+ result += "zero"
result += " point"
- place = 10
- while int(num * place) % 10 > 0 and places > 0:
- result += " " + number_names[int(num * place) % 10]
- place *= 10
- places -= 1
+ _num_str = str(num)
+ _num_str = _num_str.split(".")[1][0:places]
+ for char in _num_str:
+ result += " " + number_names[int(char)]
return result
diff --git a/lingua_franca/lang/format_es.py b/lingua_franca/lang/format_es.py
index a77cf5f..6a4abf8 100644
--- a/lingua_franca/lang/format_es.py
+++ b/lingua_franca/lang/format_es.py
@@ -194,12 +194,13 @@ def pronounce_number_es(num, places=2):
# instead the dot. Decimal part can be written both with comma
# and dot, but when pronounced, its pronounced "coma"
if not num == int(num) and places > 0:
+ if abs(num) < 1.0 and (result is "menos " or not result):
+ result += "cero"
result += " coma"
- place = 10
- while int(num*place) % 10 > 0 and places > 0:
- result += " " + NUM_STRING_ES[int(num*place) % 10]
- place *= 10
- places -= 1
+ _num_str = str(num)
+ _num_str = _num_str.split(".")[1][0:places]
+ for char in _num_str:
+ result += " " + NUM_STRING_ES[int(char)]
return result
diff --git a/lingua_franca/lang/format_fr.py b/lingua_franca/lang/format_fr.py
index 698b317..7b94704 100644
--- a/lingua_franca/lang/format_fr.py
+++ b/lingua_franca/lang/format_fr.py
@@ -192,12 +192,13 @@ def pronounce_number_fr(num, places=2):
# Deal with decimal part
if not num == int(num) and places > 0:
+ if abs(num) < 1.0 and (result is "moins " or not result):
+ result += "zéro"
result += " virgule"
- place = 10
- while int(num*place) % 10 > 0 and places > 0:
- result += " " + NUM_STRING_FR[int(num*place) % 10]
- place *= 10
- places -= 1
+ _num_str = str(num)
+ _num_str = _num_str.split(".")[1][0:places]
+ for char in _num_str:
+ result += " " + NUM_STRING_FR[int(char)]
return result
diff --git a/lingua_franca/lang/format_it.py b/lingua_franca/lang/format_it.py
index 82788ec..3fd3224 100644
--- a/lingua_franca/lang/format_it.py
+++ b/lingua_franca/lang/format_it.py
@@ -384,12 +384,13 @@ def pronounce_number_it(num, places=2, short_scale=False, scientific=False):
# Deal with fractional part
if not num == int(num) and places > 0:
+ if abs(num) < 1.0 and (result is "meno " or not result):
+ result += "zero"
result += " virgola"
- place = 10
- while int(num * place) % 10 > 0 and places > 0:
- result += " " + number_names[int(num * place) % 10]
- place *= 10
- places -= 1
+ _num_str = str(num)
+ _num_str = _num_str.split(".")[1][0:places]
+ for char in _num_str:
+ result += " " + number_names[int(char)]
return result
diff --git a/lingua_franca/lang/format_pt.py b/lingua_franca/lang/format_pt.py
index ccf06c4..49d91b5 100644
--- a/lingua_franca/lang/format_pt.py
+++ b/lingua_franca/lang/format_pt.py
@@ -105,12 +105,13 @@ def pronounce_number_pt(num, places=2):
# instead the dot. Decimal part can be written both with comma
# and dot, but when pronounced, its pronounced "virgula"
if not num == int(num) and places > 0:
+ if abs(num) < 1.0 and (result is "menos " or not result):
+ result += "zero"
result += " vírgula"
- place = 10
- while int(num * place) % 10 > 0 and places > 0:
- result += " " + _NUM_STRING_PT[int(num * place) % 10]
- place *= 10
- places -= 1
+ _num_str = str(num)
+ _num_str = _num_str.split(".")[1][0:places]
+ for char in _num_str:
+ result += " " + _NUM_STRING_PT[int(char)]
return result
diff --git a/lingua_franca/lang/parse_da.py b/lingua_franca/lang/parse_da.py
index 3008e27..405a7f6 100644
--- a/lingua_franca/lang/parse_da.py
+++ b/lingua_franca/lang/parse_da.py
@@ -72,8 +72,10 @@ da_numbers = {
'million': 1000000
}
-
-def extractnumber_da(text):
+# TODO: short_scale and ordinals don't do anything here.
+# The parameters are present in the function signature for API compatibility
+# reasons.
+def extractnumber_da(text, short_scale=True, ordinals=False):
"""
This function prepares the given text for parsing by making
numbers consistent, getting rid of contractions, etc.
@@ -151,10 +153,7 @@ def extractnumber_da(text):
break
- if not val:
- return False
-
- return val
+ return val or False
def extract_datetime_da(string, currentDate, default_time):
diff --git a/lingua_franca/lang/parse_de.py b/lingua_franca/lang/parse_de.py
index b735f33..453df69 100644
--- a/lingua_franca/lang/parse_de.py
+++ b/lingua_franca/lang/parse_de.py
@@ -77,8 +77,10 @@ de_numbers = {
'million': 1000000
}
-
-def extractnumber_de(text):
+# TODO: short_scale and ordinals don't do anything here.
+# The parameters are present in the function signature for API compatibility
+# reasons.
+def extractnumber_de(text, short_scale=True, ordinals=False):
"""
This function prepares the given text for parsing by making
numbers consistent, getting rid of contractions, etc.
@@ -156,10 +158,7 @@ def extractnumber_de(text):
break
- if not val:
- return False
-
- return val
+ return val or False
def extract_datetime_de(string, currentDate, default_time):
diff --git a/lingua_franca/lang/parse_es.py b/lingua_franca/lang/parse_es.py
index ab81e2e..bbbdb0a 100644
--- a/lingua_franca/lang/parse_es.py
+++ b/lingua_franca/lang/parse_es.py
@@ -177,9 +177,6 @@ def extractnumber_es(text, short_scale=True, ordinals=False):
break
count += 1
- if result is None:
- return False
-
# Return the $str with the number related words removed
# (now empty strings, so strlen == 0)
# aWords = [word for word in aWords if len(word) > 0]
@@ -190,7 +187,7 @@ def extractnumber_es(text, short_scale=True, ordinals=False):
if dec == "0":
result = int(integer)
- return result
+ return result or False
# TODO Not parsing 'cero'
diff --git a/lingua_franca/lang/parse_fr.py b/lingua_franca/lang/parse_fr.py
index b3be404..d2ea2ca 100644
--- a/lingua_franca/lang/parse_fr.py
+++ b/lingua_franca/lang/parse_fr.py
@@ -370,8 +370,10 @@ def number_ordinal_fr(words, i):
return None
-
-def extractnumber_fr(text):
+# TODO: short_scale and ordinals don't do anything here.
+# The parameters are present in the function signature for API compatibility
+# reasons.
+def extractnumber_fr(text, short_scale=True, ordinals=False):
"""Takes in a string and extracts a number.
Args:
text (str): the string to extract a number from
@@ -465,11 +467,7 @@ def extractnumber_fr(text):
else:
result = val
- # if result == False:
- if not result:
- return normalize_fr(text, True)
-
- return result
+ return result or False
def extract_datetime_fr(string, currentDate, default_time):
diff --git a/lingua_franca/lang/parse_pt.py b/lingua_franca/lang/parse_pt.py
index e6cc662..62cffeb 100644
--- a/lingua_franca/lang/parse_pt.py
+++ b/lingua_franca/lang/parse_pt.py
@@ -63,8 +63,10 @@ def isFractional_pt(input_str):
return False
-
-def extractnumber_pt(text):
+# TODO: short_scale and ordinals don't do anything here.
+# The parameters are present in the function signature for API compatibility
+# reasons.
+def extractnumber_pt(text, short_scale=True, ordinals=False):
"""
This function prepares the given text for parsing by making
numbers consistent, getting rid of contractions, etc.
@@ -179,9 +181,6 @@ def extractnumber_pt(text):
break
count += 1
- if result is None:
- return False
-
# Return the $str with the number related words removed
# (now empty strings, so strlen == 0)
# aWords = [word for word in aWords if len(word) > 0]
@@ -192,7 +191,7 @@ def extractnumber_pt(text):
if dec == "0":
result = int(integer)
- return result
+ return result or False
class PortugueseNormalizer(Normalizer):
diff --git a/lingua_franca/lang/parse_sv.py b/lingua_franca/lang/parse_sv.py
index 30c2f08..9efe78d 100644
--- a/lingua_franca/lang/parse_sv.py
+++ b/lingua_franca/lang/parse_sv.py
@@ -17,8 +17,10 @@ from datetime import datetime
from dateutil.relativedelta import relativedelta
from .parse_common import is_numeric, look_for_fractions
-
-def extractnumber_sv(text):
+# TODO: short_scale and ordinals don't do anything here.
+# The parameters are present in the function signature for API compatibility
+# reasons.
+def extractnumber_sv(text, short_scale=True, ordinals=False):
"""
This function prepares the given text for parsing by making
numbers consistent, getting rid of contractions, etc.
@@ -117,10 +119,7 @@ def extractnumber_sv(text):
break
- if not val:
- return False
-
- return val
+ return val or False
def extract_datetime_sv(string, currentDate, default_time):
| Fails with 0.0x numbers
```
$ python3
Python 3.7.6 (default, Dec 19 2019, 22:52:49)
[GCC 9.2.1 20190827 (Red Hat 9.2.1-1)] on linux
Type "help", "copyright", "credits" or "license" for more information.
@>>> from lingua_franca.format import pronounce_number
@>>> pronounce_number(0.05)
' point'
@>>> pronounce_number(0.05, places=1)
' point'
@>>> pronounce_number(0.05, places=0)
''
```
This causes trouble in the [travel-time-skill](https://github.com/TheLastProject/travel-time-skill) for short distances:
```
self.speak_dialog(
dialog,
{
'time': time,
'distance': util.format.pronounce_number(distance, places=0),
'from': from_,
'destination': destination
}
)
```
Given `distance = 0.05` and the following dialog options:
```
It is {{distance}} kilometers and {{time}} from {{from}} to {{destination}} by car
```
Output will be as follows:
```
It is kilometers and under a minute from A to B by car
``` | MycroftAI/lingua-franca | diff --git a/test/test_format.py b/test/test_format.py
index f40d71c..450263e 100755
--- a/test/test_format.py
+++ b/test/test_format.py
@@ -118,6 +118,8 @@ class TestPronounceNumber(unittest.TestCase):
self.assertEqual(pronounce_number(-33), "minus thirty three")
def test_convert_decimals(self):
+ self.assertEqual(pronounce_number(0.05), "zero point zero five")
+ self.assertEqual(pronounce_number(-0.05), "minus zero point zero five")
self.assertEqual(pronounce_number(1.234),
"one point two three")
self.assertEqual(pronounce_number(21.234),
diff --git a/test/test_format_es.py b/test/test_format_es.py
index f81fdc8..641a435 100644
--- a/test/test_format_es.py
+++ b/test/test_format_es.py
@@ -132,6 +132,10 @@ class TestPronounceNumber(unittest.TestCase):
"menos noventa y nueve")
def test_convert_decimals(self):
+ self.assertEqual(pronounce_number(
+ 0.05, lang="es"), "cero coma cero cinco")
+ self.assertEqual(pronounce_number(
+ -0.05, lang="es"), "menos cero coma cero cinco")
self.assertEqual(pronounce_number(1.234, lang="es"),
"uno coma dos tres")
self.assertEqual(pronounce_number(21.234, lang="es"),
diff --git a/test/test_format_fr.py b/test/test_format_fr.py
index e762c9d..16d4bda 100644
--- a/test/test_format_fr.py
+++ b/test/test_format_fr.py
@@ -135,6 +135,10 @@ class TestPronounceNumber_fr(unittest.TestCase):
"moins trente-trois")
def test_convert_decimals_fr(self):
+ self.assertEqual(pronounce_number(0.05, lang="fr-fr"),
+ "zéro virgule zéro cinq")
+ self.assertEqual(pronounce_number(-0.05, lang="fr-fr"),
+ "moins zéro virgule zéro cinq")
self.assertEqual(pronounce_number(1.234, lang="fr-fr"),
"un virgule deux trois")
self.assertEqual(pronounce_number(21.234, lang="fr-fr"),
diff --git a/test/test_format_it.py b/test/test_format_it.py
index 93e13bb..1ae5745 100644
--- a/test/test_format_it.py
+++ b/test/test_format_it.py
@@ -105,6 +105,10 @@ class TestPronounceNumber(unittest.TestCase):
self.assertEqual(pronounce_number(-83, lang="it"), "meno ottantatre")
def test_convert_decimals(self):
+ self.assertEqual(pronounce_number(
+ 0.05, lang="it"), "zero virgola zero cinque")
+ self.assertEqual(pronounce_number(
+ -0.05, lang="it"), "meno zero virgola zero cinque")
self.assertEqual(pronounce_number(1.234, lang="it"),
"uno virgola due tre")
self.assertEqual(pronounce_number(21.234, lang="it"),
@@ -352,5 +356,6 @@ class TestPronounceNumber(unittest.TestCase):
self.assertEqual(pronounce_number(float("-inf"),
lang="it"), "meno infinito")
+
if __name__ == "__main__":
unittest.main()
diff --git a/test/test_format_pt.py b/test/test_format_pt.py
index f49be66..edd99fe 100644
--- a/test/test_format_pt.py
+++ b/test/test_format_pt.py
@@ -87,6 +87,10 @@ class TestPronounceNumber(unittest.TestCase):
"menos noventa e nove")
def test_convert_decimals(self):
+ self.assertEqual(pronounce_number(
+ 0.05, lang="pt"), "zero vírgula zero cinco")
+ self.assertEqual(pronounce_number(
+ -0.05, lang="pt"), "menos zero vírgula zero cinco")
self.assertEqual(pronounce_number(1.234, lang="pt"),
"um vírgula dois três")
self.assertEqual(pronounce_number(21.234, lang="pt"),
diff --git a/test/test_parse_fr.py b/test/test_parse_fr.py
index ee19fe2..86d26cd 100644
--- a/test/test_parse_fr.py
+++ b/test/test_parse_fr.py
@@ -71,8 +71,7 @@ class TestNormalize_fr(unittest.TestCase):
2.02)
self.assertEqual(extract_number("ça fait virgule 2 cm", lang="fr-fr"),
0.2)
- self.assertEqual(extract_number("point du tout", lang="fr-fr"),
- "point tout")
+ self.assertEqual(extract_number("point du tout", lang="fr-fr"), False)
self.assertEqual(extract_number("32.00 secondes", lang="fr-fr"), 32)
self.assertEqual(extract_number("mange trente-et-une bougies",
lang="fr-fr"), 31)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 11
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/MycroftAI/lingua-franca.git@a45b9d12a2a8adace210055a38f432dd51e7456d#egg=lingua_franca
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
python-dateutil==2.6.0
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: lingua-franca
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- python-dateutil==2.6.0
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/lingua-franca
| [
"test/test_format.py::TestPronounceNumber::test_convert_decimals",
"test/test_format_es.py::TestPronounceNumber::test_convert_decimals",
"test/test_format_fr.py::TestPronounceNumber_fr::test_convert_decimals_fr",
"test/test_format_it.py::TestPronounceNumber::test_convert_decimals",
"test/test_format_pt.py::TestPronounceNumber::test_convert_decimals",
"test/test_parse_fr.py::TestNormalize_fr::test_extractnumber_fr"
] | [] | [
"test/test_format.py::TestNiceNumberFormat::test_convert_float_to_nice_number",
"test/test_format.py::TestNiceNumberFormat::test_no_speech",
"test/test_format.py::TestNiceNumberFormat::test_specify_denominator",
"test/test_format.py::TestNiceNumberFormat::test_unknown_language",
"test/test_format.py::TestPronounceNumber::test_auto_scientific_notation",
"test/test_format.py::TestPronounceNumber::test_convert_hundreds",
"test/test_format.py::TestPronounceNumber::test_convert_int",
"test/test_format.py::TestPronounceNumber::test_convert_negative_int",
"test/test_format.py::TestPronounceNumber::test_convert_scientific_notation",
"test/test_format.py::TestPronounceNumber::test_large_numbers",
"test/test_format.py::TestPronounceNumber::test_ordinals",
"test/test_format.py::TestNiceDateFormat::test_convert_times",
"test/test_format.py::TestNiceDateFormat::test_join",
"test/test_format.py::TestNiceDateFormat::test_nice_date",
"test/test_format.py::TestNiceDateFormat::test_nice_date_time",
"test/test_format.py::TestNiceDateFormat::test_nice_duration",
"test/test_format.py::TestNiceDateFormat::test_nice_year",
"test/test_format_es.py::TestNiceNumberFormat_es::test_convert_float_to_nice_number_es",
"test/test_format_es.py::TestNiceNumberFormat_es::test_no_speech_es",
"test/test_format_es.py::TestNiceNumberFormat_es::test_specify_denominator_es",
"test/test_format_es.py::TestPronounceNumber::test_convert_int",
"test/test_format_es.py::TestPronounceNumber::test_convert_negative_int",
"test/test_format_es.py::TestNiceDateFormat::test_convert_times",
"test/test_format_fr.py::TestNiceNumberFormat_fr::test_convert_float_to_nice_number_fr",
"test/test_format_fr.py::TestNiceNumberFormat_fr::test_no_speech_fr",
"test/test_format_fr.py::TestNiceNumberFormat_fr::test_specify_denominator_fr",
"test/test_format_fr.py::TestPronounceNumber_fr::test_convert_int_fr",
"test/test_format_fr.py::TestPronounceNumber_fr::test_convert_negative_int_fr",
"test/test_format_fr.py::TestNiceDateFormat_fr::test_convert_times_fr",
"test/test_format_it.py::TestNiceNumberFormat::test_convert_float_to_nice_number_it",
"test/test_format_it.py::TestNiceNumberFormat::test_no_speech",
"test/test_format_it.py::TestNiceNumberFormat::test_specify_denominator",
"test/test_format_it.py::TestPronounceNumber::test_convert_hundreds",
"test/test_format_it.py::TestPronounceNumber::test_convert_int",
"test/test_format_it.py::TestPronounceNumber::test_convert_negative_int",
"test/test_format_it.py::TestPronounceNumber::test_convert_scientific_notation",
"test/test_format_it.py::TestPronounceNumber::test_convert_times",
"test/test_format_it.py::TestPronounceNumber::test_infinity",
"test/test_format_it.py::TestPronounceNumber::test_large_numbers",
"test/test_format_pt.py::TestPronounceNumber::test_convert_int",
"test/test_format_pt.py::TestPronounceNumber::test_convert_negative_int",
"test/test_format_pt.py::TestNiceDateFormat::test_midday",
"test/test_format_pt.py::TestNiceDateFormat::test_midnight",
"test/test_format_pt.py::TestNiceDateFormat::test_minutes_past_hour",
"test/test_format_pt.py::TestNiceDateFormat::test_minutes_to_hour",
"test/test_format_pt.py::TestNiceDateFormat::test_pm",
"test/test_parse_fr.py::TestNormalize_fr::test_articles_fr",
"test/test_parse_fr.py::TestNormalize_fr::test_extractdatetime_default_fr",
"test/test_parse_fr.py::TestNormalize_fr::test_extractdatetime_fr",
"test/test_parse_fr.py::TestNormalize_fr::test_gender_fr",
"test/test_parse_fr.py::TestNormalize_fr::test_numbers_fr",
"test/test_parse_fr.py::TestNormalize_fr::test_spaces_fr"
] | [] | Apache License 2.0 | 6,315 | 3,347 | [
"lingua_franca/lang/format_en.py",
"lingua_franca/lang/format_es.py",
"lingua_franca/lang/format_fr.py",
"lingua_franca/lang/format_it.py",
"lingua_franca/lang/format_pt.py",
"lingua_franca/lang/parse_da.py",
"lingua_franca/lang/parse_de.py",
"lingua_franca/lang/parse_es.py",
"lingua_franca/lang/parse_fr.py",
"lingua_franca/lang/parse_pt.py",
"lingua_franca/lang/parse_sv.py"
] |
|
pytorch__ignite-734 | feff57f42454905e7566d114bcf5c1324ab9e56c | 2020-01-27 12:12:15 | 479659a9436e41d4e725f3f9492b6b644e69aa8f | vfdev-5: Sorry, my changes broke flake8. We need split the line into two.
Joxis: @vfdev-5 I'm on it. I will also add the extra formatting.
Joxis: @vfdev-5 Would you like the decimal places formatting to be explicitly mentioned in the docs?
vfdev-5: To have 100% user-friendly documentation, yes, please, we can mention that :)
Joxis: @vfdev-5 The conflict is due to the fixed 4 decimal places, even when the original value has less decimal places. Is this desired or do you prefer rounding the value to enforce a maximum on the decimal places rather that having it always fixed to 4?
vfdev-5: @Joxis we can simply adapt true values as we expected them:
```
# assert set(os.listdir(dirname)) == set(['best_model_8_val_acc=0.61.pth', 'best_model_9_val_acc=0.7.pth'])
assert set(os.listdir(dirname)) == set(['best_model_8_val_acc=0.6100.pth', 'best_model_9_val_acc=0.7000.pth'])
``` | diff --git a/ignite/handlers/checkpoint.py b/ignite/handlers/checkpoint.py
index db893d6f..d58860f5 100644
--- a/ignite/handlers/checkpoint.py
+++ b/ignite/handlers/checkpoint.py
@@ -31,7 +31,7 @@ class Checkpoint:
score_function (callable, optional): If not None, it should be a function taking a single argument,
:class:`~ignite.engine.Engine` object, and returning a score (`float`). Objects with highest scores will be
retained.
- score_name (str, optional): If `score_function` not None, it is possible to store its absolute value using
+ score_name (str, optional): If `score_function` not None, it is possible to store its value using
`score_name`. See Notes for more details.
n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed. If set to
`None`, all objects are kept.
@@ -61,11 +61,11 @@ class Checkpoint:
The filename will be `{filename_prefix}_{name}_{global_step}_{score}.pth`.
If defined `score_function` and `score_name`, then the filename will
- be `{filename_prefix}_{name}_{score_name}={abs(score)}.{ext}`. If `global_step_transform` is provided, then
- the filename will be `{filename_prefix}_{name}_{global_step}_{score_name}={abs(score)}.{ext}`
+ be `{filename_prefix}_{name}_{score_name}={score}.{ext}`. If `global_step_transform` is provided, then
+ the filename will be `{filename_prefix}_{name}_{global_step}_{score_name}={score}.{ext}`
- For example, `score_name="val_loss"` and `score_function` that returns `-loss` (as objects with
- highest scores will be retained), then saved filename will be `{filename_prefix}_{name}_val_loss=0.1234.pth`.
+ For example, `score_name="neg_val_loss"` and `score_function` that returns `-loss` (as objects with highest
+ scores will be retained), then saved filename will be `{filename_prefix}_{name}_neg_val_loss=-0.1234.pth`.
To get the last stored filename, handler exposes attribute `last_checkpoint`:
@@ -183,11 +183,11 @@ class Checkpoint:
if self._score_name is not None:
if len(suffix) > 0:
suffix += "_"
- suffix = "{}{}={}".format(suffix, self._score_name, priority)
+ suffix = "{}{}={:.4f}".format(suffix, self._score_name, priority)
elif self._score_function is not None:
if len(suffix) > 0:
suffix += "_"
- suffix = "{}{}".format(suffix, priority)
+ suffix = "{}{:.4f}".format(suffix, priority)
elif len(suffix) == 0:
suffix = "{}".format(priority)
@@ -325,7 +325,7 @@ class ModelCheckpoint(Checkpoint):
score_function (callable, optional): if not None, it should be a function taking a single argument, an
:class:`~ignite.engine.Engine` object, and return a score (`float`). Objects with highest scores will be
retained.
- score_name (str, optional): if `score_function` not None, it is possible to store its absolute value using
+ score_name (str, optional): if `score_function` not None, it is possible to store its value using
`score_name`. See Notes for more details.
n_saved (int, optional): Number of objects that should be kept on disk. Older files will be removed. If set to
`None`, all objects are kept.
| Checkpoint filename not using absolute score value
The [Checkpoint documentation](https://pytorch.org/ignite/handlers.html?highlight=checkpoint#ignite.handlers.Checkpoint) mentions the use of the absolute value of the score in the filename.
> If score_function not None, it is possible to store its absolute value using score_name. See Notes for more details.
> If defined score_function and score_name, then the filename will be {filename_prefix}_{name}_{score_name}={abs(score)}.{ext}.
However, it seems the score (priority) is not used as an absolute value:
https://github.com/pytorch/ignite/blob/a1cae3148a92ddc82bfb1412859e04d4b51f0f25/ignite/handlers/checkpoint.py#L186
The `abs` call was removed in #679. | pytorch/ignite | diff --git a/tests/ignite/contrib/engines/test_common.py b/tests/ignite/contrib/engines/test_common.py
index 3be8692e..55ec279c 100644
--- a/tests/ignite/contrib/engines/test_common.py
+++ b/tests/ignite/contrib/engines/test_common.py
@@ -127,7 +127,7 @@ def test_save_best_model_by_val_score(dirname, capsys):
data = [0, ]
trainer.run(data, max_epochs=len(acc_scores))
- assert set(os.listdir(dirname)) == set(['best_model_8_val_acc=0.61.pth', 'best_model_9_val_acc=0.7.pth'])
+ assert set(os.listdir(dirname)) == set(['best_model_8_val_acc=0.6100.pth', 'best_model_9_val_acc=0.7000.pth'])
def test_add_early_stopping_by_val_score():
diff --git a/tests/ignite/handlers/test_checkpoint.py b/tests/ignite/handlers/test_checkpoint.py
index f11e9a6f..5844bb13 100644
--- a/tests/ignite/handlers/test_checkpoint.py
+++ b/tests/ignite/handlers/test_checkpoint.py
@@ -137,7 +137,7 @@ def test_checkpoint_with_score_function():
checkpointer(trainer)
assert save_handler.call_count == 1
- save_handler.assert_called_with(obj, "{}_0.77.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_0.7700.pth".format(name))
trainer.state.epoch = 12
trainer.state.iteration = 1234
@@ -145,10 +145,10 @@ def test_checkpoint_with_score_function():
checkpointer(trainer)
assert save_handler.call_count == 2
- save_handler.assert_called_with(obj, "{}_0.78.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_0.7800.pth".format(name))
assert save_handler.remove.call_count == 1
- save_handler.remove.assert_called_with("{}_0.77.pth".format(name))
- assert checkpointer.last_checkpoint == "{}_0.78.pth".format(name)
+ save_handler.remove.assert_called_with("{}_0.7700.pth".format(name))
+ assert checkpointer.last_checkpoint == "{}_0.7800.pth".format(name)
model = DummyModel()
to_save = {'model': model}
@@ -176,7 +176,7 @@ def test_checkpoint_with_score_name_and_function():
checkpointer(trainer)
assert save_handler.call_count == 1
- save_handler.assert_called_with(obj, "{}_loss=-0.77.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_loss=-0.7700.pth".format(name))
trainer.state.epoch = 12
trainer.state.iteration = 1234
@@ -184,10 +184,10 @@ def test_checkpoint_with_score_name_and_function():
checkpointer(trainer)
assert save_handler.call_count == 2
- save_handler.assert_called_with(obj, "{}_loss=-0.76.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_loss=-0.7600.pth".format(name))
assert save_handler.remove.call_count == 1
- save_handler.remove.assert_called_with("{}_loss=-0.77.pth".format(name))
- assert checkpointer.last_checkpoint == "{}_loss=-0.76.pth".format(name)
+ save_handler.remove.assert_called_with("{}_loss=-0.7700.pth".format(name))
+ assert checkpointer.last_checkpoint == "{}_loss=-0.7600.pth".format(name)
model = DummyModel()
to_save = {'model': model}
@@ -218,17 +218,17 @@ def test_checkpoint_with_score_function_and_trainer_epoch():
checkpointer(evaluator)
assert save_handler.call_count == 1
- save_handler.assert_called_with(obj, "{}_11_0.77.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_11_0.7700.pth".format(name))
trainer.state.epoch = 12
evaluator.state.metrics['val_acc'] = 0.78
checkpointer(evaluator)
assert save_handler.call_count == 2
- save_handler.assert_called_with(obj, "{}_12_0.78.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_12_0.7800.pth".format(name))
assert save_handler.remove.call_count == 1
- save_handler.remove.assert_called_with("{}_11_0.77.pth".format(name))
- assert checkpointer.last_checkpoint == "{}_12_0.78.pth".format(name)
+ save_handler.remove.assert_called_with("{}_11_0.7700.pth".format(name))
+ assert checkpointer.last_checkpoint == "{}_12_0.7800.pth".format(name)
model = DummyModel()
to_save = {'model': model}
@@ -256,17 +256,17 @@ def test_checkpoint_with_score_name_and_function_and_trainer_epoch():
checkpointer(evaluator)
assert save_handler.call_count == 1
- save_handler.assert_called_with(obj, "{}_11_val_acc=0.77.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_11_val_acc=0.7700.pth".format(name))
trainer.state.epoch = 12
evaluator.state.metrics['val_acc'] = 0.78
checkpointer(evaluator)
assert save_handler.call_count == 2
- save_handler.assert_called_with(obj, "{}_12_val_acc=0.78.pth".format(name))
+ save_handler.assert_called_with(obj, "{}_12_val_acc=0.7800.pth".format(name))
assert save_handler.remove.call_count == 1
- save_handler.remove.assert_called_with("{}_11_val_acc=0.77.pth".format(name))
- assert checkpointer.last_checkpoint == "{}_12_val_acc=0.78.pth".format(name)
+ save_handler.remove.assert_called_with("{}_11_val_acc=0.7700.pth".format(name))
+ assert checkpointer.last_checkpoint == "{}_12_val_acc=0.7800.pth".format(name)
model = DummyModel()
to_save = {'model': model}
@@ -440,7 +440,7 @@ def test_best_k(dirname):
for _ in range(4):
h(engine, to_save)
- expected = ['{}_{}_{}.pth'.format(_PREFIX, 'model', i) for i in [1.2, 3.1]]
+ expected = ['{}_{}_{:.4f}.pth'.format(_PREFIX, 'model', i) for i in [1.2, 3.1]]
assert sorted(os.listdir(dirname)) == expected
@@ -464,7 +464,7 @@ def test_best_k_with_suffix(dirname):
engine.state.epoch += 1
h(engine, to_save)
- expected = ['{}_{}_val_loss={:.7}.pth'.format(_PREFIX, 'model', scores[e - 1]) for e in [1, 3]]
+ expected = ['{}_{}_val_loss={:.4}.pth'.format(_PREFIX, 'model', scores[e - 1]) for e in [1, 3]]
assert sorted(os.listdir(dirname)) == expected
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"pip install torch torchvision -f https://download.pytorch.org/whl/cpu/torch_stable.html -U"
],
"python": "3.7",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
numpy==1.21.6
packaging==24.0
Pillow==9.5.0
pluggy==1.2.0
pytest==7.4.4
-e git+https://github.com/pytorch/ignite.git@feff57f42454905e7566d114bcf5c1324ab9e56c#egg=pytorch_ignite
requests==2.31.0
tomli==2.0.1
torch==1.13.1+cpu
torchvision==0.14.1+cpu
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: ignite
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- numpy==1.21.6
- packaging==24.0
- pillow==9.5.0
- pluggy==1.2.0
- pytest==7.4.4
- requests==2.31.0
- tomli==2.0.1
- torch==1.13.1+cpu
- torchvision==0.14.1+cpu
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/ignite
| [
"tests/ignite/contrib/engines/test_common.py::test_save_best_model_by_val_score",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_function",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_name_and_function",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_function_and_trainer_epoch",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_score_name_and_function_and_trainer_epoch",
"tests/ignite/handlers/test_checkpoint.py::test_best_k",
"tests/ignite/handlers/test_checkpoint.py::test_best_k_with_suffix"
] | [
"tests/ignite/contrib/engines/test_common.py::test_asserts_setup_common_training_handlers",
"tests/ignite/contrib/engines/test_common.py::test_setup_common_training_handlers",
"tests/ignite/contrib/engines/test_common.py::test_setup_tb_logging"
] | [
"tests/ignite/contrib/engines/test_common.py::test_add_early_stopping_by_val_score",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_wrong_input",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_default",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_with_global_step_transform",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_args_validation",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_simple_recovery",
"tests/ignite/handlers/test_checkpoint.py::test_model_checkpoint_simple_recovery_from_existing_non_empty",
"tests/ignite/handlers/test_checkpoint.py::test_disk_saver_atomic",
"tests/ignite/handlers/test_checkpoint.py::test_last_k",
"tests/ignite/handlers/test_checkpoint.py::test_disabled_n_saved",
"tests/ignite/handlers/test_checkpoint.py::test_with_engine",
"tests/ignite/handlers/test_checkpoint.py::test_with_state_dict",
"tests/ignite/handlers/test_checkpoint.py::test_valid_state_dict_save",
"tests/ignite/handlers/test_checkpoint.py::test_save_model_optimizer_lr_scheduler_with_state_dict",
"tests/ignite/handlers/test_checkpoint.py::test_checkpoint_load_objects",
"tests/ignite/handlers/test_checkpoint.py::test_disksaver_wrong_input"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,320 | 856 | [
"ignite/handlers/checkpoint.py"
] |
Shopify__shopify_python_api-354 | 3cec005f5af28622fba023c67f739784a23f98a4 | 2020-01-27 15:13:24 | cd049439a62b84475b3693f82d455fb342f4abc7 | diff --git a/shopify/base.py b/shopify/base.py
index bcab899..65349e6 100644
--- a/shopify/base.py
+++ b/shopify/base.py
@@ -208,43 +208,7 @@ class ShopifyResource(ActiveResource, mixins.Countable):
@classmethod
def find(cls, id_=None, from_=None, **kwargs):
"""Checks the resulting collection for pagination metadata."""
-
- collection = super(ShopifyResource, cls).find(id_=id_, from_=from_,
- **kwargs)
-
- # pyactiveresource currently sends all headers from the response with
- # the collection.
- if isinstance(collection, Collection) and \
- "headers" in collection.metadata:
- headers = collection.metadata["headers"]
- if "Link" in headers:
- pagination = cls._parse_pagination(headers["Link"])
- return PaginatedCollection(collection, metadata={
- "pagination": pagination,
- "resource_class": cls
- })
-
+ collection = super(ShopifyResource, cls).find(id_=id_, from_=from_, **kwargs)
+ if isinstance(collection, Collection) and "headers" in collection.metadata:
+ return PaginatedCollection(collection, metadata={"resource_class": cls})
return collection
-
- @classmethod
- def _parse_pagination(cls, data):
- """Parses a Link header into a dict for cursor-based pagination.
-
- Args:
- data: The Link header value as a string.
- Returns:
- A dict with rel names as keys and URLs as values.
- """
-
- # Example Link header:
- # <https://xxx.shopify.com/admin/...>; rel="previous",
- # <https://xxx.shopify.com/admin/...>; rel="next"
-
- values = data.split(", ")
-
- result = {}
- for value in values:
- link, rel = value.split("; ")
- result[rel.split('"')[1]] = link[1:-1]
-
- return result
diff --git a/shopify/collection.py b/shopify/collection.py
index 5fa4a68..a604b1a 100644
--- a/shopify/collection.py
+++ b/shopify/collection.py
@@ -26,11 +26,12 @@ class PaginatedCollection(Collection):
metadata = obj.metadata
super(PaginatedCollection, self).__init__(obj, metadata=metadata)
else:
- super(PaginatedCollection, self).__init__(metadata=metadata or {},
- *args, **kwargs)
- if not ("pagination" in self.metadata and "resource_class" in self.metadata):
- raise AttributeError("Cursor-based pagination requires \"pagination\" and \"resource_class\" attributes in the metadata.")
+ super(PaginatedCollection, self).__init__(metadata=metadata or {}, *args, **kwargs)
+ if not ("resource_class" in self.metadata):
+ raise AttributeError("Cursor-based pagination requires a \"resource_class\" attribute in the metadata.")
+
+ self.metadata["pagination"] = self.__parse_pagination()
self.next_page_url = self.metadata["pagination"].get('next', None)
self.previous_page_url = self.metadata["pagination"].get('previous', None)
@@ -39,6 +40,16 @@ class PaginatedCollection(Collection):
self._current_iter = None
self._no_iter_next = kwargs.pop("no_iter_next", False)
+ def __parse_pagination(self):
+ if "headers" not in self.metadata or "Link" not in self.metadata["headers"]:
+ return {}
+ values = self.metadata["headers"]["Link"].split(", ")
+ result = {}
+ for value in values:
+ link, rel = value.split("; ")
+ result[rel.split('"')[1]] = link[1:-1]
+ return result
+
def has_previous_page(self):
"""Returns true if the current page has any previous pages before it.
"""
| Cursor pagination issue with single page results
I was trying out the new cursor pagination 6.0.0 release and came across an issue.
page1 = shopify.Product.find(limit=100)
while page1.has_next_page():
// do stuff
I can an error
AttributeError: 'Collection' object has no attribute 'has_next_page'
This happens when I have less then limit results returned so there is only 1 page of results. It seems to return a Collection object instead of a PaginatedCollection.
If I reduce the limit so there will be at least 2 pages, it works fine.
For now I will use a limit I know will return multiple pages, but this means always making an extra api call.
| Shopify/shopify_python_api | diff --git a/test/pagination_test.py b/test/pagination_test.py
index 461e8e0..3ee05ad 100644
--- a/test/pagination_test.py
+++ b/test/pagination_test.py
@@ -28,6 +28,13 @@ class PaginationTest(TestCase):
body=json.dumps({ "products": fixture[:2] }),
response_headers=next_headers)
+ def test_nonpaginates_collection(self):
+ self.fake('draft_orders', method='GET', code=200, body=self.load_fixture('draft_orders'))
+ draft_orders = shopify.DraftOrder.find()
+ self.assertEqual(1, len(draft_orders))
+ self.assertEqual(517119332, draft_orders[0].id)
+ self.assertIsInstance(draft_orders, shopify.collection.PaginatedCollection, "find() result is not PaginatedCollection")
+
def test_paginated_collection(self):
items = shopify.Product.find(limit=2)
self.assertIsInstance(items, shopify.collection.PaginatedCollection, "find() result is not PaginatedCollection")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 6.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock>=1.0.1",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pyactiveresource==2.2.2
pytest==8.3.5
PyYAML==6.0.2
-e git+https://github.com/Shopify/shopify_python_api.git@3cec005f5af28622fba023c67f739784a23f98a4#egg=ShopifyAPI
six==1.17.0
tomli==2.2.1
| name: shopify_python_api
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pyactiveresource==2.2.2
- pytest==8.3.5
- pyyaml==6.0.2
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/shopify_python_api
| [
"test/pagination_test.py::PaginationTest::test_nonpaginates_collection"
] | [] | [
"test/pagination_test.py::PaginationTest::test_paginated_collection",
"test/pagination_test.py::PaginationTest::test_paginated_collection_iterator",
"test/pagination_test.py::PaginationTest::test_paginated_collection_no_cache",
"test/pagination_test.py::PaginationTest::test_paginated_iterator",
"test/pagination_test.py::PaginationTest::test_pagination_next_page",
"test/pagination_test.py::PaginationTest::test_pagination_previous"
] | [] | MIT License | 6,321 | 896 | [
"shopify/base.py",
"shopify/collection.py"
] |
|
pypa__virtualenv-1509 | 599907870e9a501224cf603af863c712e581c752 | 2020-01-28 14:11:40 | 368f830bbb5fe4ecc7c24aed9b9d1f99620c3982 | diff --git a/src/virtualenv/activation/bash/__init__.py b/src/virtualenv/activation/bash/__init__.py
index c89b42ed..df3f7727 100644
--- a/src/virtualenv/activation/bash/__init__.py
+++ b/src/virtualenv/activation/bash/__init__.py
@@ -12,3 +12,6 @@ class BashActivator(ViaTemplateActivator):
def templates(self):
yield Path("activate.sh")
+
+ def as_name(self, template):
+ return template.stem
diff --git a/src/virtualenv/activation/via_template.py b/src/virtualenv/activation/via_template.py
index 3f6b46bb..d9d0a14d 100644
--- a/src/virtualenv/activation/via_template.py
+++ b/src/virtualenv/activation/via_template.py
@@ -39,7 +39,11 @@ class ViaTemplateActivator(Activator):
def _generate(self, replacements, templates, to_folder, creator):
for template in templates:
text = self.instantiate_template(replacements, template, creator)
- (to_folder / template).write_text(text, encoding="utf-8")
+ dest = to_folder / self.as_name(template)
+ dest.write_text(text, encoding="utf-8")
+
+ def as_name(self, template):
+ return template.name
def instantiate_template(self, replacements, template, creator):
# read text and do replacements
| Integration with virtualenvwrapper
Congratulations on the rewrite.
I notice that `virtualenvwrapper` doesn't integrate with the new beta:
```
$ mkvirtualenv foo
ERROR: Environment '/Users/brettmz/.virtualenvs/foo' does not contain an activate script.
```
Sure enough - I believe it is looking for a script `activate` - but the script file now depends on the shell - `activate.sh` etc.
It would be good if this could work somehow - would it be reasonable to create / link a default `activate` script for this case? Or should virtualenvwrapper be updated? | pypa/virtualenv | diff --git a/tests/unit/activation/test_bash.py b/tests/unit/activation/test_bash.py
index d5d8ad9b..b5ff9285 100644
--- a/tests/unit/activation/test_bash.py
+++ b/tests/unit/activation/test_bash.py
@@ -7,7 +7,7 @@ def test_bash(raise_on_non_source_class, activation_tester):
class Bash(raise_on_non_source_class):
def __init__(self, session):
super(Bash, self).__init__(
- BashActivator, session, "bash", "activate.sh", "sh", "You must source this script: $ source "
+ BashActivator, session, "bash", "activate", "sh", "You must source this script: $ source "
)
activation_tester(Bash)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 20.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"coverage",
"pytest-mock",
"pytest-env"
],
"pre_install": null,
"python": "3.8",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | appdirs==1.4.4
coverage==7.6.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.16.1
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1720101850331/work
pluggy==1.5.0
pytest==8.3.5
pytest-env==1.1.5
pytest-mock==3.14.0
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/pypa/virtualenv.git@599907870e9a501224cf603af863c712e581c752#egg=virtualenv
| name: virtualenv
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py38h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.1=py38h06a4308_0
- pip=24.2=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py38h06a4308_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- appdirs==1.4.4
- coverage==7.6.1
- filelock==3.16.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-env==1.1.5
- pytest-mock==3.14.0
- six==1.17.0
- virtualenv==20.0.0b1
prefix: /opt/conda/envs/virtualenv
| [
"tests/unit/activation/test_bash.py::test_bash"
] | [] | [] | [] | MIT License | 6,332 | 345 | [
"src/virtualenv/activation/bash/__init__.py",
"src/virtualenv/activation/via_template.py"
] |
|
pre-commit__pre-commit-1299 | f74e3031bda69bc9b0f2a91ea83dc6e7a01e2986 | 2020-01-28 20:43:51 | f74e3031bda69bc9b0f2a91ea83dc6e7a01e2986 | diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py
index 9372176..b2ccc5c 100644
--- a/pre_commit/commands/install_uninstall.py
+++ b/pre_commit/commands/install_uninstall.py
@@ -123,7 +123,7 @@ def install(
skip_on_missing_config: bool = False,
git_dir: Optional[str] = None,
) -> int:
- if git.has_core_hookpaths_set():
+ if git_dir is None and git.has_core_hookpaths_set():
logger.error(
'Cowardly refusing to install hooks with `core.hooksPath` set.\n'
'hint: `git config --unset-all core.hooksPath`',
| core.hooksPath being set makes it not possible to install
I have a laptop which has some mandaotry global settings for git, and I have not been able to enable pre-commit. I also tried doing
```
git config --global init.templateDir ~/.git-template
pre-commit init-templatedir ~/.git-template
```
But still get the message
```
[ERROR] Cowardly refusing to install hooks with `core.hooksPath` set.
hint: `git config --unset-all core.hooksPath`
```
Is there any work arounds for this? | pre-commit/pre-commit | diff --git a/tests/commands/init_templatedir_test.py b/tests/commands/init_templatedir_test.py
index 4e32e75..d14a171 100644
--- a/tests/commands/init_templatedir_test.py
+++ b/tests/commands/init_templatedir_test.py
@@ -79,3 +79,14 @@ def test_init_templatedir_expanduser(tmpdir, tempdir_factory, store, cap_out):
lines = cap_out.get().splitlines()
assert len(lines) == 1
assert lines[0].startswith('pre-commit installed at')
+
+
+def test_init_templatedir_hookspath_set(tmpdir, tempdir_factory, store):
+ target = tmpdir.join('tmpl')
+ tmp_git_dir = git_dir(tempdir_factory)
+ with cwd(tmp_git_dir):
+ cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks')
+ init_templatedir(
+ C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
+ )
+ assert target.join('hooks/pre-commit').exists()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.21 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aspy.yaml==1.3.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
cfgv==3.3.1
coverage==7.2.7
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.12.2
identify==2.5.24
importlib-metadata==6.7.0
iniconfig==2.0.0
nodeenv==1.9.1
packaging==24.0
platformdirs==4.0.0
pluggy==1.2.0
-e git+https://github.com/pre-commit/pre-commit.git@f74e3031bda69bc9b0f2a91ea83dc6e7a01e2986#egg=pre_commit
pytest==7.4.4
pytest-env==1.0.1
PyYAML==6.0.1
toml==0.10.2
tomli==2.0.1
typing_extensions==4.7.1
virtualenv==20.26.6
zipp==3.15.0
| name: pre-commit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aspy-yaml==1.3.0
- cfgv==3.3.1
- coverage==7.2.7
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.12.2
- identify==2.5.24
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- nodeenv==1.9.1
- packaging==24.0
- platformdirs==4.0.0
- pluggy==1.2.0
- pytest==7.4.4
- pytest-env==1.0.1
- pyyaml==6.0.1
- toml==0.10.2
- tomli==2.0.1
- typing-extensions==4.7.1
- virtualenv==20.26.6
- zipp==3.15.0
prefix: /opt/conda/envs/pre-commit
| [
"tests/commands/init_templatedir_test.py::test_init_templatedir_hookspath_set"
] | [] | [
"tests/commands/init_templatedir_test.py::test_init_templatedir",
"tests/commands/init_templatedir_test.py::test_init_templatedir_already_set",
"tests/commands/init_templatedir_test.py::test_init_templatedir_not_set",
"tests/commands/init_templatedir_test.py::test_init_templatedir_expanduser"
] | [] | MIT License | 6,335 | 173 | [
"pre_commit/commands/install_uninstall.py"
] |
|
nipype__pydra-186 | 6d6340500696f68390de551bd168b54d3f88dc21 | 2020-01-30 02:09:17 | 890254771974ed37f6922c8dcd0a349993bc3aac | diff --git a/pydra/engine/core.py b/pydra/engine/core.py
index 11111396..7cd8b7b8 100644
--- a/pydra/engine/core.py
+++ b/pydra/engine/core.py
@@ -247,9 +247,9 @@ class TaskBase:
TODO
"""
+ self.state.prepare_states(self.inputs)
+ self.state.prepare_inputs()
if state_index is not None:
- if self.state is None:
- raise Exception("can't use state_index if no splitter is used")
inputs_copy = deepcopy(self.inputs)
for key, ind in self.state.inputs_ind[state_index].items():
setattr(
@@ -510,6 +510,9 @@ class TaskBase:
@property
def done(self):
"""Check whether the tasks has been finalized and all outputs are stored."""
+ # if any of the field is lazy, there is no need to check results
+ if is_lazy(self.inputs):
+ return False
if self.state:
# TODO: only check for needed state result
if self.result() and all(self.result()):
@@ -866,3 +869,11 @@ def is_task(obj):
def is_workflow(obj):
"""Check whether an object is a :class:`Workflow` instance."""
return isinstance(obj, Workflow)
+
+
+def is_lazy(obj):
+ """Check whether an object has any field that is a Lazy Field"""
+ for f in attr_fields(obj):
+ if isinstance(getattr(obj, f.name), LazyField):
+ return True
+ return False
diff --git a/pydra/engine/task.py b/pydra/engine/task.py
index 6f1d2ebe..0519b78e 100644
--- a/pydra/engine/task.py
+++ b/pydra/engine/task.py
@@ -44,7 +44,7 @@ import inspect
import typing as ty
from pathlib import Path
-from .core import TaskBase
+from .core import TaskBase, is_lazy
from ..utils.messenger import AuditFlag
from .specs import (
File,
@@ -215,17 +215,17 @@ class ShellCommandTask(TaskBase):
return super(ShellCommandTask, cls).__new__(cls)
if len(container_info) == 3:
- type_cont, image, bind = container_info
+ type, image, bind = container_info
elif len(container_info) == 2:
- type_cont, image, bind = container_info + (None,)
+ type, image, bind = container_info + (None,)
else:
raise Exception(
f"container_info has to have 2 or 3 elements, but {container_info} provided"
)
- if type_cont == "docker":
+ if type == "docker":
return DockerTask(image=image, bindings=bind, *args, **kwargs)
- elif type_cont == "singularity":
+ elif type == "singularity":
return SingularityTask(image=image, bindings=bind, *args, **kwargs)
else:
raise Exception(
@@ -288,7 +288,20 @@ class ShellCommandTask(TaskBase):
@property
def command_args(self):
- """Get command line arguments."""
+ """Get command line arguments, returns a list if task has a state"""
+ if is_lazy(self.inputs):
+ raise Exception("can't return cmdline, self.inputs has LazyFields")
+ if self.state:
+ command_args_list = []
+ self.state.prepare_states(self.inputs)
+ for ii, el in enumerate(self.state.states_ind):
+ command_args_list.append(self._command_args_single(el, ind=ii))
+ return command_args_list
+ else:
+ return self._command_args_single(self.inputs)
+
+ def _command_args_single(self, state_ind, ind=None):
+ """Get command line arguments for a single state"""
pos_args = [] # list for (position, command arg)
for f in attr_fields(self.inputs):
if f.name == "executable":
@@ -311,13 +324,16 @@ class ShellCommandTask(TaskBase):
# if f.metadata.get("copyfile") in [True, False]:
# value = str(self.inputs.map_copyfiles[f.name])
# else:
- value = getattr(self.inputs, f.name)
+ if self.state and f"{self.name}.{f.name}" in state_ind:
+ value = getattr(self.inputs, f.name)[state_ind[f"{self.name}.{f.name}"]]
+ else:
+ value = getattr(self.inputs, f.name)
if is_local_file(f):
value = str(value)
# changing path to the cpath (the directory should be mounted)
if getattr(self, "bind_paths", None) and is_local_file(f):
lpath = Path(value)
- cdir = self.bind_paths[lpath.parent][0]
+ cdir = self.bind_paths(ind=ind)[lpath.parent][0]
cpath = cdir.joinpath(lpath.name)
value = str(cpath)
if f.type is bool:
@@ -338,21 +354,32 @@ class ShellCommandTask(TaskBase):
cmd_args += el[1]
return cmd_args
- @command_args.setter
- def command_args(self, args: ty.Dict):
- self.inputs = attr.evolve(self.inputs, **args)
-
@property
def cmdline(self):
- """Get the actual command line that will be submitted."""
+ """ Get the actual command line that will be submitted
+ Returns a list if the task has a state.
+ """
+ if is_lazy(self.inputs):
+ raise Exception("can't return cmdline, self.inputs has LazyFields")
orig_inputs = attr.asdict(self.inputs)
modified_inputs = template_update(self.inputs)
if modified_inputs is not None:
self.inputs = attr.evolve(self.inputs, **modified_inputs)
if isinstance(self, ContainerTask):
- cmdline = " ".join(self.container_args + self.command_args)
+ if self.state:
+ cmdline = []
+ for con, com in zip(self.container_args, self.command_args):
+ cmdline.append(" ".join(con + com))
+ else:
+ cmdline = " ".join(self.container_args + self.command_args)
else:
- cmdline = " ".join(self.command_args)
+ if self.state:
+ cmdline = []
+ for el in self.command_args:
+ cmdline.append(" ".join(el))
+ else:
+ cmdline = " ".join(self.command_args)
+
self.inputs = attr.evolve(self.inputs, **orig_inputs)
return cmdline
@@ -428,23 +455,27 @@ class ContainerTask(ShellCommandTask):
**kwargs,
)
- @property
- def container_args(self):
+ def container_check(self, container_type):
"""Get container-specific CLI arguments."""
if self.inputs.container is None:
raise AttributeError("Container software is not specified")
- cargs = [self.inputs.container]
+ elif self.inputs.container != container_type:
+ raise AttributeError(
+ f"Container type should be {container_type}, but {self.inputs.container} given"
+ )
if self.inputs.image is None:
raise AttributeError("Container image is not specified")
- return cargs
- @property
- def bind_paths(self):
+ def bind_paths(self, ind=None):
"""Return bound mount points: ``dict(lpath: (cpath, mode))``."""
bind_paths = {}
output_dir_cpath = None
if self.inputs.bindings is None:
self.inputs.bindings = []
+ if ind is None:
+ output_dir = self.output_dir
+ else:
+ output_dir = self.output_dir[ind]
for binding in self.inputs.bindings:
if len(binding) == 3:
lpath, cpath, mode = binding
@@ -454,17 +485,17 @@ class ContainerTask(ShellCommandTask):
raise Exception(
f"binding should have length 2, 3, or 4, it has {len(binding)}"
)
- if Path(lpath) == self.output_dir:
+ if Path(lpath) == output_dir:
output_dir_cpath = cpath
if mode is None:
mode = "rw" # default
bind_paths[Path(lpath)] = (Path(cpath), mode)
# output_dir is added to the bindings if not part of self.inputs.bindings
if not output_dir_cpath:
- bind_paths[self.output_dir] = (self.output_cpath, "rw")
+ bind_paths[output_dir] = (self.output_cpath, "rw")
return bind_paths
- def binds(self, opt):
+ def binds(self, opt, ind=None):
"""
Specify mounts to bind from local filesystems to container and working directory.
@@ -472,9 +503,8 @@ class ContainerTask(ShellCommandTask):
"""
bargs = []
- for (key, val) in self.bind_paths.items():
+ for (key, val) in self.bind_paths(ind).items():
bargs.extend([opt, "{0}:{1}:{2}".format(key, val[0], val[1])])
- # TODO: would need changes for singularity
return bargs
@@ -542,16 +572,36 @@ class DockerTask(ContainerTask):
@property
def container_args(self):
- """Get container-specific CLI arguments."""
- cargs = super().container_args
- assert self.inputs.container == "docker"
- cargs.append("run")
+ """Get container-specific CLI arguments, returns a list if the task has a state"""
+ if is_lazy(self.inputs):
+ raise Exception("can't return container_args, self.inputs has LazyFields")
+ self.container_check("docker")
+ if self.state:
+ self.state.prepare_states(self.inputs)
+ cargs_list = []
+ for ii, el in enumerate(self.state.states_ind):
+ if f"{self.name}.image" in el:
+ cargs_list.append(
+ self._container_args_single(
+ self.inputs.image[el[f"{self.name}.image"]], ind=ii
+ )
+ )
+ else:
+ cargs_list.append(
+ self._container_args_single(self.inputs.image, ind=ii)
+ )
+ return cargs_list
+ else:
+ return self._container_args_single(self.inputs.image)
+
+ def _container_args_single(self, image, ind=None):
+ cargs = ["docker", "run"]
if self.inputs.container_xargs is not None:
cargs.extend(self.inputs.container_xargs)
- cargs.extend(self.binds("-v"))
+ cargs.extend(self.binds("-v", ind))
cargs.extend(["-w", str(self.output_cpath)])
- cargs.append(self.inputs.image)
+ cargs.append(image)
return cargs
@@ -615,14 +665,35 @@ class SingularityTask(ContainerTask):
@property
def container_args(self):
"""Get container-specific CLI arguments."""
- cargs = super().container_args
- assert self.inputs.container == "singularity"
- cargs.append("exec")
+ if is_lazy(self.inputs):
+ raise Exception("can't return container_args, self.inputs has LazyFields")
+ self.container_check("singularity")
+ if self.state:
+ self.state.prepare_states(self.inputs)
+ cargs_list = []
+ for ii, el in enumerate(self.state.states_ind):
+ if f"{self.name}.image" in el:
+ cargs_list.append(
+ self._container_args_single(
+ self.inputs.image[el[f"{self.name}.image"]], ind=ii
+ )
+ )
+ else:
+ cargs_list.append(
+ self._container_args_single(self.inputs.image, ind=ii)
+ )
+ return cargs_list
+ else:
+ return self._container_args_single(self.inputs.image)
+
+ def _container_args_single(self, image, ind=None):
+ cargs = ["singularity", "exec"]
+
if self.inputs.container_xargs is not None:
cargs.extend(self.inputs.container_xargs)
- cargs.append(self.inputs.image)
+ cargs.append(image)
# insert bindings before image
idx = len(cargs) - 1
- cargs[idx:-1] = self.binds("-B")
+ cargs[idx:-1] = self.binds("-B", ind)
return cargs
| TODO for Shell Task and ContainerTask
There are some issues tak don't have to be fixed right away, but should be done soon:
(will be updated)
- [x] cmdline for Shell: doesn't make sense for tasks with splitter
- [x] inputs for Containers:
- checking if has image and it's proper image, raising exceptions;
- bindings: allowing for tuple (not only list), tuple should be 3 or 2 elements (without `ro`)
- checking if the local directory exists
- [x] adding an option to `Task` that converts it to the `DockerTask`: see @satra comments in #144
- [x] provide option to have position for arguments | nipype/pydra | diff --git a/pydra/engine/tests/test_dockertask.py b/pydra/engine/tests/test_dockertask.py
index 6a823007..4841cf2a 100644
--- a/pydra/engine/tests/test_dockertask.py
+++ b/pydra/engine/tests/test_dockertask.py
@@ -352,6 +352,12 @@ def test_docker_st_1(plugin):
)
assert docky.state.splitter == "docky.executable"
+ for ii, el in enumerate(docky.cmdline):
+ assert (
+ el
+ == f"docker run --rm -v {docky.output_dir[ii]}:/output_pydra:rw -w /output_pydra {docky.inputs.image} {cmd[ii]}"
+ )
+
res = docky(plugin=plugin)
assert res[0].output.stdout == "/output_pydra\n"
assert res[1].output.stdout == "root\n"
@@ -370,6 +376,12 @@ def test_docker_st_2(plugin):
)
assert docky.state.splitter == "docky.image"
+ for ii, el in enumerate(docky.cmdline):
+ assert (
+ el
+ == f"docker run --rm -v {docky.output_dir[ii]}:/output_pydra:rw -w /output_pydra {docky.inputs.image[ii]} {' '.join(cmd)}"
+ )
+
res = docky(plugin=plugin)
assert "Debian" in res[0].output.stdout
assert "Ubuntu" in res[1].output.stdout
@@ -409,6 +421,17 @@ def test_docker_st_4(plugin):
assert docky.state.combiner == ["docky.image"]
assert docky.state.splitter_final == "docky.executable"
+ for ii, el in enumerate(docky.cmdline):
+ i, j = ii // 2, ii % 2
+ if j == 0:
+ cmd_str = "whoami"
+ else:
+ cmd_str = " ".join(["cat", "/etc/issue"])
+ assert (
+ el
+ == f"docker run --rm -v {docky.output_dir[ii]}:/output_pydra:rw -w /output_pydra {docky.inputs.image[i]} {cmd_str}"
+ )
+
res = docky(plugin=plugin)
# checking the first command
@@ -458,6 +481,10 @@ def test_wf_docker_1(plugin, tmpdir):
)
wf.set_output([("out", wf.docky_echo.lzout.stdout)])
+ with pytest.raises(Exception) as excinfo:
+ wf.docky_echo.cmdline
+ assert "can't return cmdline" in str(excinfo.value)
+
with Submitter(plugin=plugin) as sub:
wf(submitter=sub)
diff --git a/pydra/engine/tests/test_shelltask.py b/pydra/engine/tests/test_shelltask.py
index 0a5d6541..7284a4cd 100644
--- a/pydra/engine/tests/test_shelltask.py
+++ b/pydra/engine/tests/test_shelltask.py
@@ -122,8 +122,7 @@ def test_shell_cmd_3(plugin):
# all args given as executable
shelly = ShellCommandTask(name="shelly", executable=cmd).split("executable")
- # TODO: doesnt make sense for tasks with splitter
- # assert shelly.cmdline == " ".join(cmd)
+ assert shelly.cmdline == ["pwd", "whoami"]
res = shelly(plugin=plugin)
assert res[0].output.stdout == f"{str(shelly.output_dir[0])}\n"
if "USER" in os.environ:
@@ -147,8 +146,7 @@ def test_shell_cmd_4(plugin):
)
assert shelly.inputs.executable == "echo"
assert shelly.inputs.args == ["nipype", "pydra"]
- # this doesnt work, cmdline gives echo nipype pydra
- # assert shelly.cmdline == "echo pydra"
+ assert shelly.cmdline == ["echo nipype", "echo pydra"]
res = shelly(plugin=plugin)
assert res[0].output.stdout == "nipype\n"
@@ -173,8 +171,7 @@ def test_shell_cmd_5(plugin):
)
assert shelly.inputs.executable == "echo"
assert shelly.inputs.args == ["nipype", "pydra"]
- # this doesnt work, cmdline gives echo nipype pydra
- # assert shelly.cmdline == "echo pydra"
+ assert shelly.cmdline == ["echo nipype", "echo pydra"]
res = shelly(plugin=plugin)
assert res[0][0].output.stdout == "nipype\n"
@@ -194,8 +191,12 @@ def test_shell_cmd_6(plugin):
)
assert shelly.inputs.executable == ["echo", ["echo", "-n"]]
assert shelly.inputs.args == ["nipype", "pydra"]
- # this doesnt work, cmdline gives echo nipype pydra
- # assert shelly.cmdline == "echo pydra"
+ assert shelly.cmdline == [
+ "echo nipype",
+ "echo pydra",
+ "echo -n nipype",
+ "echo -n pydra",
+ ]
res = shelly(plugin=plugin)
assert res[0].output.stdout == "nipype\n"
@@ -234,8 +235,7 @@ def test_shell_cmd_7(plugin):
)
assert shelly.inputs.executable == ["echo", ["echo", "-n"]]
assert shelly.inputs.args == ["nipype", "pydra"]
- # this doesnt work, cmdline gives echo nipype pydra
- # assert shelly.cmdline == "echo pydra"
+
res = shelly(plugin=plugin)
assert res[0][0].output.stdout == "nipype\n"
@@ -2251,9 +2251,7 @@ def test_fsl():
# TODO: not sure why this has to be string
in_file = Path(os.path.dirname(os.path.abspath(__file__))) / "data" / "foo.nii"
- out_file = (
- Path(os.path.dirname(os.path.abspath(__file__))) / "data" / "foo_brain.nii"
- )
+
# separate command into exec + args
shelly = ShellCommandTask(
name="bet_task", executable="bet", in_file=in_file, input_spec=bet_input_spec
diff --git a/pydra/engine/tests/test_task.py b/pydra/engine/tests/test_task.py
index ce78efe6..45b47cd5 100644
--- a/pydra/engine/tests/test_task.py
+++ b/pydra/engine/tests/test_task.py
@@ -351,7 +351,7 @@ def test_shell_cmd(tmpdir):
def test_container_cmds(tmpdir):
- containy = ContainerTask(name="containy", executable="pwd")
+ containy = DockerTask(name="containy", executable="pwd")
with pytest.raises(AttributeError):
containy.cmdline
containy.inputs.container = "docker"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asttokens==3.0.0
attrs==25.3.0
black==25.1.0
bleach==6.2.0
cachetools==5.5.2
certifi==2025.1.31
cfgv==3.4.0
charset-normalizer==3.4.1
ci-info==0.3.0
click==8.1.8
cloudpickle==3.1.1
codecov==2.1.13
comm==0.2.2
coverage==7.8.0
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
entrypoints==0.4
etelemetry==0.3.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
execnet==2.1.1
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
frozendict==2.4.6
identify==2.6.9
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipykernel==6.9.2
ipython==8.18.1
ipython-genutils==0.2.0
ipywidgets==8.1.5
jedi==0.19.2
Jinja2==3.0.0
json5==0.10.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-client==6.1.12
jupyter-console==6.4.2
jupyter-contrib-nbextensions==0.5.1
jupyter-highlight-selected-word==0.2.0
jupyter_contrib_core==0.4.2
jupyter_core==5.7.2
jupyter_latex_envs==1.4.6
jupyter_nbextensions_configurator==0.4.1
jupyterlab==2.3.2
jupyterlab-server==1.2.0
jupyterlab_widgets==3.0.13
lxml==5.3.1
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mistune==0.8.4
mypy-extensions==1.0.0
nbconvert==5.6.1
nbformat==5.10.4
nest-asyncio==1.6.0
nodeenv==1.9.1
notebook==5.7.16
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
-e git+https://github.com/nipype/pydra.git@6d6340500696f68390de551bd168b54d3f88dc21#egg=pydra
Pygments==2.19.1
PyLD==2.0.4
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
pytest-env==1.1.5
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
Send2Trash==1.8.3
six==1.17.0
stack-data==0.6.3
terminado==0.13.3
testpath==0.6.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==4.5.3
traitlets==5.14.3
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webencodings==0.5.1
widgetsnbextension==4.0.13
| name: pydra
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asttokens==3.0.0
- attrs==25.3.0
- black==25.1.0
- bleach==6.2.0
- cachetools==5.5.2
- certifi==2025.1.31
- cfgv==3.4.0
- charset-normalizer==3.4.1
- ci-info==0.3.0
- click==8.1.8
- cloudpickle==3.1.1
- codecov==2.1.13
- comm==0.2.2
- coverage==7.8.0
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- entrypoints==0.4
- etelemetry==0.3.1
- execnet==2.1.1
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- frozendict==2.4.6
- identify==2.6.9
- idna==3.10
- ipykernel==6.9.2
- ipython==8.18.1
- ipython-genutils==0.2.0
- ipywidgets==8.1.5
- jedi==0.19.2
- jinja2==3.0.0
- json5==0.10.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==6.1.12
- jupyter-console==6.4.2
- jupyter-contrib-core==0.4.2
- jupyter-contrib-nbextensions==0.5.1
- jupyter-core==5.7.2
- jupyter-highlight-selected-word==0.2.0
- jupyter-latex-envs==1.4.6
- jupyter-nbextensions-configurator==0.4.1
- jupyterlab==2.3.2
- jupyterlab-server==1.2.0
- jupyterlab-widgets==3.0.13
- lxml==5.3.1
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mistune==0.8.4
- mypy-extensions==1.0.0
- nbconvert==5.6.1
- nbformat==5.10.4
- nest-asyncio==1.6.0
- nodeenv==1.9.1
- notebook==5.7.16
- numpy==2.0.2
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.12.1
- pexpect==4.9.0
- platformdirs==4.3.7
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pydra==0.2.2+43.g6d634050
- pygments==2.19.1
- pyld==2.0.4
- pytest-cov==6.0.0
- pytest-env==1.1.5
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- send2trash==1.8.3
- six==1.17.0
- stack-data==0.6.3
- terminado==0.13.3
- testpath==0.6.0
- tornado==4.5.3
- traitlets==5.14.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webencodings==0.5.1
- widgetsnbextension==4.0.13
prefix: /opt/conda/envs/pydra
| [
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_3[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_4[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_5[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_6[cf]"
] | [] | [
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_1_strip[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_1_strip[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2b[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_2b[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_7[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_1[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_2[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_2[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3b_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3c[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_3c[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4b_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4c_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_4d_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_5_nosubm[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_5_nosubm[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_5a_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_6[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_6[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_6a_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_6b[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_6b[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_7[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_7[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_7a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_7a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_1a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_1a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_2[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_2[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_3[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_state_3[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_state_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_inputspec_copyfile_state_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_2[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_2a[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_3[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_3a[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_state_1[cf]",
"pydra/engine/tests/test_shelltask.py::test_wf_shell_cmd_ndst_1[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_1a[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_1a[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_1b_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_2[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_2[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_2a_exception[cf]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_3[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_3[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_4[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_4[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_5[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_5[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_state_outputspec_1[cf-result_no_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_state_outputspec_1[cf-result_submitter]",
"pydra/engine/tests/test_shelltask.py::test_shell_cmd_outputspec_wf_1[cf]",
"pydra/engine/tests/test_task.py::test_output",
"pydra/engine/tests/test_task.py::test_annotated_func",
"pydra/engine/tests/test_task.py::test_annotated_func_multreturn",
"pydra/engine/tests/test_task.py::test_annotated_func_multreturn_exception",
"pydra/engine/tests/test_task.py::test_halfannotated_func",
"pydra/engine/tests/test_task.py::test_halfannotated_func_multreturn",
"pydra/engine/tests/test_task.py::test_notannotated_func",
"pydra/engine/tests/test_task.py::test_notannotated_func_returnlist",
"pydra/engine/tests/test_task.py::test_halfannotated_func_multrun_returnlist",
"pydra/engine/tests/test_task.py::test_notannotated_func_multreturn",
"pydra/engine/tests/test_task.py::test_exception_func",
"pydra/engine/tests/test_task.py::test_audit_prov",
"pydra/engine/tests/test_task.py::test_audit_all",
"pydra/engine/tests/test_task.py::test_shell_cmd",
"pydra/engine/tests/test_task.py::test_container_cmds",
"pydra/engine/tests/test_task.py::test_docker_cmd",
"pydra/engine/tests/test_task.py::test_singularity_cmd",
"pydra/engine/tests/test_task.py::test_functask_callable",
"pydra/engine/tests/test_task.py::test_taskhooks"
] | [] | Apache License 2.0 | 6,345 | 2,829 | [
"pydra/engine/core.py",
"pydra/engine/task.py"
] |
|
stan-dev__cmdstanpy-205 | edd34ed072cfb3cc30f0a9d8d8df2a7fd267d529 | 2020-01-31 16:04:10 | edd34ed072cfb3cc30f0a9d8d8df2a7fd267d529 | diff --git a/cmdstanpy/__init__.py b/cmdstanpy/__init__.py
index 9f1dbbb..45dc8e6 100644
--- a/cmdstanpy/__init__.py
+++ b/cmdstanpy/__init__.py
@@ -5,7 +5,7 @@ import atexit
import shutil
import tempfile
-STANSUMMARY_STATS = [
+_STANSUMMARY_STATS = [
'Mean',
'MCSE',
'StdDev',
@@ -17,20 +17,24 @@ STANSUMMARY_STATS = [
'R_hat',
]
-TMPDIR = tempfile.mkdtemp()
+_TMPDIR = tempfile.mkdtemp()
-def cleanup_tmpdir():
- """Force deletion of TMPDIR."""
- print('deleting tmpfiles dir: {}'.format(TMPDIR))
- shutil.rmtree(TMPDIR, ignore_errors=True)
+def _cleanup_tmpdir():
+ """Force deletion of _TMPDIR."""
+ print('deleting tmpfiles dir: {}'.format(_TMPDIR))
+ shutil.rmtree(_TMPDIR, ignore_errors=True)
print('done')
-atexit.register(cleanup_tmpdir)
+atexit.register(_cleanup_tmpdir)
from .utils import set_cmdstan_path, cmdstan_path, set_make_env, install_cmdstan # noqa
from .stanfit import CmdStanMCMC, CmdStanMLE, CmdStanGQ, CmdStanVB # noqa
from .model import CmdStanModel # noqa
from ._version import __version__ # noqa
+
+__all__ = ['set_cmdstan_path', 'cmdstan_path', 'set_make_env',
+ 'install_cmdstan', 'CmdStanMCMC', 'CmdStanMLE',
+ 'CmdStanGQ', 'CmdStanVB', 'CmdStanModel']
diff --git a/cmdstanpy/stanfit.py b/cmdstanpy/stanfit.py
index 98751b5..13d732e 100644
--- a/cmdstanpy/stanfit.py
+++ b/cmdstanpy/stanfit.py
@@ -11,7 +11,7 @@ from time import time
import numpy as np
import pandas as pd
-from cmdstanpy import TMPDIR
+from cmdstanpy import _TMPDIR
from cmdstanpy.utils import (
check_sampler_csv,
scan_optimize_csv,
@@ -54,7 +54,7 @@ class RunSet:
if args.output_dir is not None:
output_dir = args.output_dir
else:
- output_dir = TMPDIR
+ output_dir = _TMPDIR
self._csv_files = []
self._diagnostic_files = [None for _ in range(chains)]
@@ -77,7 +77,7 @@ class RunSet:
if args.save_diagnostics:
if args.output_dir is None:
diag_file = create_named_text_file(
- dir=TMPDIR,
+ dir=_TMPDIR,
prefix='{}-diagnostic-{}-'.format(file_basename, i + 1),
suffix='.csv',
)
@@ -202,7 +202,7 @@ class RunSet:
)
path, filename = os.path.split(self._csv_files[i])
- if path == TMPDIR: # cleanup tmpstr in filename
+ if path == _TMPDIR: # cleanup tmpstr in filename
root, ext = os.path.splitext(filename)
rlist = root.split('-')
root = '-'.join(rlist[:-1])
@@ -431,7 +431,7 @@ class CmdStanMCMC:
self.runset._args.model_name, self.runset.chains
)
tmp_csv_path = create_named_text_file(
- dir=TMPDIR, prefix=tmp_csv_file, suffix='.csv'
+ dir=_TMPDIR, prefix=tmp_csv_file, suffix='.csv'
)
cmd = [
cmd_path,
diff --git a/cmdstanpy/utils.py b/cmdstanpy/utils.py
index 5f80dac..9aa0ad9 100644
--- a/cmdstanpy/utils.py
+++ b/cmdstanpy/utils.py
@@ -24,7 +24,7 @@ import numpy as np
import pandas as pd
-from cmdstanpy import TMPDIR
+from cmdstanpy import _TMPDIR
EXTENSION = '.exe' if platform.system() == 'Windows' else ''
@@ -66,7 +66,7 @@ class MaybeDictToFilePath():
for obj in objs:
if isinstance(obj, dict):
data_file = create_named_text_file(
- dir=TMPDIR, prefix='', suffix='.json'
+ dir=_TMPDIR, prefix='', suffix='.json'
)
self._logger.debug('input tempfile: %s', data_file)
if any(
| Set __all__ in __init__.py
#### Summary:
Good morning! I think `__all__` should be set in `__init__.py`
#### Description:
I realize the following is not usually considered good practice, but if someone imports as follows it imports everything not prefixed with an underscore:
```
>>>from cmdstanpy import *
>>> dir()
['CmdStanGQ', 'CmdStanMCMC', 'CmdStanMLE', 'CmdStanModel', 'CmdStanVB',
'STANSUMMARY_STATS', 'TMPDIR', '__annotations__', '__builtins__', '__doc__', '__loader__',
'__name__', '__package__', '__spec__', 'atexit', 'cleanup_tmpdir', 'cmdstan_args', 'cmdstan_path',
'install_cmdstan', 'model', 'set_cmdstan_path', 'set_make_env', 'shutil', 'stanfit', 'tempfile', 'utils']
```
To solve this either `__all__` can be set explicitly to those items that the project wants exported for the "public API".
For example, it could be set as:
```
__all__ = ['set_cmdstan_path', 'cmdstan_path', 'set_make_env',
'install_cmdstan', 'set_cmdstan_path', 'CmdStanMLE', 'CmdStanGQ',
'CmdStanVB', 'CmdStanModel' ]
```
Alternatively it could be set to an empty list indicating the module is not designed for this type of import statement:
```
__all__ = []
```
In this case the module can still be imported as usual with:
```
import cmdstanpy
```
or one can indicate what to import explicitly:
```
from cmdstanpy import cmdstan_path
```
Here is the [PEP8 reference](https://www.python.org/dev/peps/pep-0008/#public-and-internal-interfaces).
#### Current Version:
master branch (0.6.0) | stan-dev/cmdstanpy | diff --git a/test/test_cmdstan_args.py b/test/test_cmdstan_args.py
index c709378..cf6f111 100644
--- a/test/test_cmdstan_args.py
+++ b/test/test_cmdstan_args.py
@@ -4,7 +4,7 @@ import os
import platform
import unittest
-from cmdstanpy import TMPDIR
+from cmdstanpy import _TMPDIR
from cmdstanpy.cmdstan_args import (
Method,
SamplerArgs,
@@ -496,7 +496,7 @@ class CmdStanArgsTest(unittest.TestCase):
# TODO: read-only dir test for Windows - set ACLs, not mode
if platform.system() == 'Darwin' or platform.system() == 'Linux':
with self.assertRaises(ValueError):
- read_only = os.path.join(TMPDIR, 'read_only')
+ read_only = os.path.join(_TMPDIR, 'read_only')
os.mkdir(read_only, mode=0o444)
CmdStanArgs(
model_name='bernoulli',
diff --git a/test/test_utils.py b/test/test_utils.py
index d319577..77593e3 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -10,7 +10,7 @@ import random
import numpy as np
-from cmdstanpy import TMPDIR
+from cmdstanpy import _TMPDIR
from cmdstanpy.utils import (
cmdstan_path,
set_cmdstan_path,
@@ -40,13 +40,13 @@ class CmdStanPathTest(unittest.TestCase):
self.assertTrue(cmdstan_path().startswith(abs_rel_path))
def test_non_spaces_location(self):
- good_path = os.path.join(TMPDIR, 'good_dir')
+ good_path = os.path.join(_TMPDIR, 'good_dir')
with TemporaryCopiedFile(good_path) as (pth, is_changed):
self.assertEqual(pth, good_path)
self.assertFalse(is_changed)
# prepare files for test
- bad_path = os.path.join(TMPDIR, 'bad dir')
+ bad_path = os.path.join(_TMPDIR, 'bad dir')
os.makedirs(bad_path, exist_ok=True)
stan = os.path.join(DATAFILES_PATH, 'bernoulli.stan')
stan_bad = os.path.join(bad_path, 'bad name.stan')
@@ -119,32 +119,32 @@ class CmdStanPathTest(unittest.TestCase):
def test_jsondump(self):
dict_list = {'a': [1.0, 2.0, 3.0]}
- file_list = os.path.join(TMPDIR, 'list.json')
+ file_list = os.path.join(_TMPDIR, 'list.json')
jsondump(file_list, dict_list)
with open(file_list) as fd:
self.assertEqual(json.load(fd), dict_list)
dict_vec = {'a': np.repeat(3, 4)}
- file_vec = os.path.join(TMPDIR, 'vec.json')
+ file_vec = os.path.join(_TMPDIR, 'vec.json')
jsondump(file_vec, dict_vec)
with open(file_vec) as fd:
self.assertEqual(json.load(fd), dict_vec)
dict_zero_vec = {'a': []}
- file_zero_vec = os.path.join(TMPDIR, 'empty_vec.json')
+ file_zero_vec = os.path.join(_TMPDIR, 'empty_vec.json')
jsondump(file_zero_vec, dict_zero_vec)
with open(file_zero_vec) as fd:
self.assertEqual(json.load(fd), dict_zero_vec)
dict_zero_matrix = {'a': [[], [], []]}
- file_zero_matrix = os.path.join(TMPDIR, 'empty_matrix.json')
+ file_zero_matrix = os.path.join(_TMPDIR, 'empty_matrix.json')
jsondump(file_zero_matrix, dict_zero_matrix)
with open(file_zero_matrix) as fd:
self.assertEqual(json.load(fd), dict_zero_matrix)
arr = np.zeros(shape=(5, 0))
dict_zero_matrix = {'a': arr}
- file_zero_matrix = os.path.join(TMPDIR, 'empty_matrix.json')
+ file_zero_matrix = os.path.join(_TMPDIR, 'empty_matrix.json')
jsondump(file_zero_matrix, dict_zero_matrix)
with open(file_zero_matrix) as fd:
self.assertEqual(json.load(fd), dict_zero_matrix)
@@ -282,7 +282,7 @@ class WindowsShortPath(unittest.TestCase):
def test_windows_short_path_directory(self):
if platform.system() != 'Windows':
return
- original_path = os.path.join(TMPDIR, 'new path')
+ original_path = os.path.join(_TMPDIR, 'new path')
os.makedirs(original_path, exist_ok=True)
assert os.path.exists(original_path)
assert ' ' in original_path
@@ -294,7 +294,7 @@ class WindowsShortPath(unittest.TestCase):
def test_windows_short_path_file(self):
if platform.system() != 'Windows':
return
- original_path = os.path.join(TMPDIR, 'new path', 'my_file.csv')
+ original_path = os.path.join(_TMPDIR, 'new path', 'my_file.csv')
os.makedirs(os.path.split(original_path)[0], exist_ok=True)
assert os.path.exists(os.path.split(original_path)[0])
assert ' ' in original_path
@@ -309,7 +309,7 @@ class WindowsShortPath(unittest.TestCase):
"""Test that the function doesn't touch filename."""
if platform.system() != 'Windows':
return
- original_path = os.path.join(TMPDIR, 'new path', 'my file.csv')
+ original_path = os.path.join(_TMPDIR, 'new path', 'my file.csv')
os.makedirs(os.path.split(original_path)[0], exist_ok=True)
assert os.path.exists(os.path.split(original_path)[0])
assert ' ' in original_path
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install --upgrade cmdstanpy[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cmdstanpy==1.2.5
exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
stanio==0.5.1
tomli==2.2.1
tqdm==4.67.1
tzdata==2025.2
xarray==2024.7.0
| name: cmdstanpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cmdstanpy==1.2.5
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- stanio==0.5.1
- tomli==2.2.1
- tqdm==4.67.1
- tzdata==2025.2
- xarray==2024.7.0
prefix: /opt/conda/envs/cmdstanpy
| [
"test/test_cmdstan_args.py::OptimizeArgsTest::test_args_algorithm",
"test/test_cmdstan_args.py::OptimizeArgsTest::test_args_algorithm_init_alpha",
"test/test_cmdstan_args.py::OptimizeArgsTest::test_args_algorithm_iter",
"test/test_cmdstan_args.py::SamplerArgsTest::test_adapt",
"test/test_cmdstan_args.py::SamplerArgsTest::test_args_chains",
"test/test_cmdstan_args.py::SamplerArgsTest::test_args_min",
"test/test_cmdstan_args.py::SamplerArgsTest::test_bad",
"test/test_cmdstan_args.py::SamplerArgsTest::test_fixed_param",
"test/test_cmdstan_args.py::SamplerArgsTest::test_good",
"test/test_cmdstan_args.py::SamplerArgsTest::test_metric",
"test/test_cmdstan_args.py::CmdStanArgsTest::test_args_good",
"test/test_cmdstan_args.py::CmdStanArgsTest::test_args_inits",
"test/test_cmdstan_args.py::CmdStanArgsTest::test_compose",
"test/test_cmdstan_args.py::CmdStanArgsTest::test_no_chains",
"test/test_cmdstan_args.py::GenerateQuantitesTest::test_args_fitted_params",
"test/test_cmdstan_args.py::VariationalTest::test_args_bad",
"test/test_cmdstan_args.py::VariationalTest::test_args_variational",
"test/test_utils.py::CmdStanPathTest::test_dict_to_file",
"test/test_utils.py::CmdStanPathTest::test_jsondump",
"test/test_utils.py::CmdStanPathTest::test_non_spaces_location",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_1",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_2",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_3",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_4",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_metric_1",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_metric_2",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_metric_3",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_metric_4",
"test/test_utils.py::ReadMetricTest::test_metric_json_bad",
"test/test_utils.py::ReadMetricTest::test_metric_json_matrix",
"test/test_utils.py::ReadMetricTest::test_metric_json_vec",
"test/test_utils.py::ReadMetricTest::test_metric_missing",
"test/test_utils.py::ReadMetricTest::test_metric_rdump_bad_1",
"test/test_utils.py::ReadMetricTest::test_metric_rdump_bad_2",
"test/test_utils.py::ReadMetricTest::test_metric_rdump_matrix",
"test/test_utils.py::ReadMetricTest::test_metric_rdump_vec",
"test/test_utils.py::WindowsShortPath::test_windows_short_path_directory",
"test/test_utils.py::WindowsShortPath::test_windows_short_path_file",
"test/test_utils.py::WindowsShortPath::test_windows_short_path_file_with_space",
"test/test_utils.py::RloadTest::test_parse_rdump_value",
"test/test_utils.py::RloadTest::test_rload_bad_data_1",
"test/test_utils.py::RloadTest::test_rload_bad_data_2",
"test/test_utils.py::RloadTest::test_rload_bad_data_3",
"test/test_utils.py::RloadTest::test_rload_data",
"test/test_utils.py::RloadTest::test_rload_jags_data",
"test/test_utils.py::RloadTest::test_rload_metric",
"test/test_utils.py::RloadTest::test_rload_wrong_data",
"test/test_utils.py::RloadTest::test_roundtrip_metric"
] | [
"test/test_cmdstan_args.py::CmdStanArgsTest::test_args_bad",
"test/test_utils.py::CmdStanPathTest::test_default_path",
"test/test_utils.py::CmdStanPathTest::test_set_path",
"test/test_utils.py::CmdStanPathTest::test_validate_path",
"test/test_utils.py::ReadStanCsvTest::test_check_sampler_csv_thin"
] | [] | [] | BSD 3-Clause "New" or "Revised" License | 6,351 | 1,067 | [
"cmdstanpy/__init__.py",
"cmdstanpy/stanfit.py",
"cmdstanpy/utils.py"
] |
|
byuccl__spydrnet-67 | 4676e42351fc755708d8b017e39b10a0f5378333 | 2020-02-01 22:11:53 | 5c5a891e246a31a846e59efdf164ca99568d77a1 | diff --git a/spydrnet/ir/element.py b/spydrnet/ir/element.py
index ae406c5..24d33ef 100644
--- a/spydrnet/ir/element.py
+++ b/spydrnet/ir/element.py
@@ -36,6 +36,22 @@ class Element(object):
@property
def data(self):
return DictView(self._data)
+
+ @property
+ def name(self):
+ return self._data.get(".NAME", None)
+
+ @name.setter
+ def name(self, value):
+ if value is None and ".NAME" in self:
+ del self[".NAME"]
+ else:
+ self[".NAME"] = value
+
+ @name.deleter
+ def name(self):
+ if ".NAME" in self:
+ del self[".NAME"]
def __setitem__(self, key, value):
"""
| Add .name to elements as a pointer to ['NAME'] | byuccl/spydrnet | diff --git a/spydrnet/ir/tests/test_element.py b/spydrnet/ir/tests/test_element.py
index c93d096..a9d7c20 100644
--- a/spydrnet/ir/tests/test_element.py
+++ b/spydrnet/ir/tests/test_element.py
@@ -4,6 +4,9 @@ from spydrnet.ir.element import Element
class TestElement(unittest.TestCase):
+ def setUp(self) -> None:
+ self.element = Element()
+
def test_constructor(self):
element1 = Element()
self.assertTrue(element1, "Constructor return None type or empty collection")
@@ -11,19 +14,32 @@ class TestElement(unittest.TestCase):
self.assertNotEqual(element1, element2, "Unique objects are considered equal.")
def test_dictionary(self):
- element = Element()
- self.assertFalse('NAME' in element)
- element['NAME'] = "TestName"
- self.assertTrue('NAME' in element)
- for key in element:
- self.assertEqual(element[key], "TestName")
- del element['NAME']
- self.assertFalse('NAME' in element)
- element['NAME'] = "DifferentName"
- name = element.pop('NAME')
+ self.assertFalse('NAME' in self.element)
+ self.element['NAME'] = "TestName"
+ self.assertTrue('NAME' in self.element)
+ for key in self.element:
+ self.assertEqual(self.element[key], "TestName")
+ del self.element['NAME']
+ self.assertFalse('NAME' in self.element)
+ self.element['NAME'] = "DifferentName"
+ name = self.element.pop('NAME')
self.assertEqual(name, "DifferentName")
+ def test_name(self):
+ self.element.name = "TestName"
+ self.assertTrue(".NAME" in self.element)
+ self.assertEqual(self.element.name, "TestName")
+
+ def test_del_name(self):
+ self.assertIsNone(self.element.name)
+ self.element.name = None
+ self.element.name = "TestName"
+ self.element.name = None
+ self.assertFalse(".NAME" in self.element)
+ self.element.name = "TestName"
+ del self.element.name
+ self.assertFalse(".NAME" in self.element)
+
def test_data_view(self):
- element = Element()
- element['NAME'] = "TestName"
- self.assertEqual(element.data, {'NAME': 'TestName'})
+ self.element['NAME'] = "TestName"
+ self.assertEqual(self.element.data, {'NAME': 'TestName'})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest-cov",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
pytest-cov==4.1.0
-e git+https://github.com/byuccl/spydrnet.git@4676e42351fc755708d8b017e39b10a0f5378333#egg=spydrnet
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: spydrnet
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.2.7
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- pytest-cov==4.1.0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/spydrnet
| [
"spydrnet/ir/tests/test_element.py::TestElement::test_del_name",
"spydrnet/ir/tests/test_element.py::TestElement::test_name"
] | [] | [
"spydrnet/ir/tests/test_element.py::TestElement::test_constructor",
"spydrnet/ir/tests/test_element.py::TestElement::test_data_view",
"spydrnet/ir/tests/test_element.py::TestElement::test_dictionary"
] | [] | BSD 3-Clause "New" or "Revised" License | 6,359 | 224 | [
"spydrnet/ir/element.py"
] |
|
googleapis__python-storage-42 | cecc7ac95a00ab1437a24bee44f8b6c5b8a7189d | 2020-02-04 11:06:59 | 4b654b5bb2bf902f07d04eb753c84d6d564cdef0 | HemangChothani: PTAL.
frankyn: Thanks for your patience @HemangChothani, this LGTM. IIUC, you're making last update consistent with server side after objects are downloaded to a local file.
| diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index 1a1aca8..b649384 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -19,6 +19,7 @@ These are *not* part of the API.
import base64
from hashlib import md5
+from datetime import datetime
import os
from google.cloud.storage.constants import _DEFAULT_TIMEOUT
@@ -297,3 +298,17 @@ def _base64_md5hash(buffer_object):
_write_buffer_to_hash(buffer_object, hash_obj)
digest_bytes = hash_obj.digest()
return base64.b64encode(digest_bytes)
+
+
+def _convert_to_timestamp(value):
+ """Convert non-none datetime to timestamp.
+
+ :type value: :class:`datetime.datetime`
+ :param value: The datetime to convert.
+
+ :rtype: int
+ :returns: The timestamp.
+ """
+ utc_naive = value.replace(tzinfo=None) - value.utcoffset()
+ mtime = (utc_naive - datetime(1970, 1, 1)).total_seconds()
+ return mtime
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index e8299d4..7358d57 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -31,8 +31,8 @@ import hashlib
from io import BytesIO
import mimetypes
import os
-import time
import warnings
+import six
from six.moves.urllib.parse import parse_qsl
from six.moves.urllib.parse import quote
@@ -57,6 +57,7 @@ from google.cloud.exceptions import NotFound
from google.cloud.storage._helpers import _get_storage_host
from google.cloud.storage._helpers import _PropertyMixin
from google.cloud.storage._helpers import _scalar_property
+from google.cloud.storage._helpers import _convert_to_timestamp
from google.cloud.storage._signing import generate_signed_url_v2
from google.cloud.storage._signing import generate_signed_url_v4
from google.cloud.storage.acl import ACL
@@ -846,7 +847,10 @@ class Blob(_PropertyMixin):
updated = self.updated
if updated is not None:
- mtime = time.mktime(updated.timetuple())
+ if six.PY2:
+ mtime = _convert_to_timestamp(updated)
+ else:
+ mtime = updated.timestamp()
os.utime(file_obj.name, (mtime, mtime))
def download_as_string(self, client=None, start=None, end=None, raw_download=False):
| Storage: mtime of downloaded file is incorrect by UTC offset
Google Cloud Storage v1.25.0
Python 3.7.3
OS: OSX & Win7
Issue: If I upload a file to Google Cloud Storage and then immediately download it, the mtime is incorrect - for me, I'm in EST, so I'm 5 hours behind UTC. That's the exact timedelta that occurs between the file's original mtime and the recorded mtime after the file is downloaded.
Here's an example screenshot:

The original file mtime in Google Cloud Storage is 1/23/20 9:04 PM (which is correct from the file I uploaded), but when I download the file, the mtime becomes 1/24/20 2:04 AM, which is 5 hours ahead of what is supposed to be (the UTC offset from my timezone).
The issue is here in `blob.download_to_filename`:
```
updated = self.updated
if updated is not None:
mtime = time.mktime(updated.timetuple())
os.utime(file_obj.name, (mtime, mtime))
```
In my example, `updated` is the timezone-aware datetime corresponding to `2020-01-24 02:04:11.184000+00:00` (it has `tzinfo==UTC`). The `updated.timetuple()` is
```
time.struct_time(tm_year=2020, tm_mon=1, tm_mday=24, tm_hour=2, tm_min=4, tm_sec=9, tm_wday=4, tm_yday=24, tm_isdst=0)
```
The problem, I believe, is that the timetuple doesn't know this is a UTC date, nor did it convert to my timezone. The docs of `mktime` note, "Its argument is the struct_time or full 9-tuple (since the dst flag is needed; use -1 as the dst flag if it is unknown) which expresses the time in local time, not UTC." Perhaps, we should do this instead:
```
if updated is not None:
mtime = updated.timestamp() # For Python3, not sure of the Python2 version
os.utime(file_obj.name, (mtime, mtime))
```
The `timestamp()` function accounts for the timezone information in the datetime object.
I've just been doing this manually in my code after downloading a file because my application is sensitive to mtimes, and it seems to fix the issue. | googleapis/python-storage | diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index f656e64..91af389 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -1060,7 +1060,7 @@ class Test_Blob(unittest.TestCase):
def _download_to_filename_helper(self, updated, raw_download):
import os
- import time
+ from google.cloud.storage._helpers import _convert_to_timestamp
from google.cloud._testing import _NamedTemporaryFile
blob_name = "blob-name"
@@ -1080,7 +1080,10 @@ class Test_Blob(unittest.TestCase):
self.assertIsNone(blob.updated)
else:
mtime = os.path.getmtime(temp.name)
- updated_time = time.mktime(blob.updated.timetuple())
+ if six.PY2:
+ updated_time = _convert_to_timestamp(blob.updated)
+ else:
+ updated_time = blob.updated.timestamp()
self.assertEqual(mtime, updated_time)
headers = {"accept-encoding": "gzip"}
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 2
} | 1.27 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==4.2.4
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
google-api-core==2.10.2
google-auth==1.35.0
google-cloud-core==1.7.3
-e git+https://github.com/googleapis/python-storage.git@cecc7ac95a00ab1437a24bee44f8b6c5b8a7189d#egg=google_cloud_storage
google-resumable-media==0.5.1
googleapis-common-protos==1.69.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
protobuf==4.25.6
pyasn1==0.6.1
pyasn1_modules==0.4.2
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
requests==2.32.3
rsa==4.9
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
| name: python-storage
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==4.2.4
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- google-api-core==2.10.2
- google-auth==1.35.0
- google-cloud-core==1.7.3
- google-resumable-media==0.5.1
- googleapis-common-protos==1.69.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- protobuf==4.25.6
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- requests==2.32.3
- rsa==4.9
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
prefix: /opt/conda/envs/python-storage
| [
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_w_updated_w_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_w_updated_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_wo_updated_w_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_wo_updated_wo_raw"
] | [] | [
"tests/unit/test_blob.py::Test_Blob::test__do_download_w_chunks_w_range_w_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_w_chunks_w_range_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_w_chunks_wo_range_w_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_w_chunks_wo_range_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_wo_chunks_w_range_w_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_wo_chunks_w_range_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_wo_chunks_wo_range_w_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_download_wo_chunks_wo_range_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_bad_size",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_no_size",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_with_kms",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_with_retry",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_with_size",
"tests/unit/test_blob.py::Test_Blob::test__do_multipart_upload_with_user_project",
"tests/unit/test_blob.py::Test_Blob::test__do_resumable_upload_no_size",
"tests/unit/test_blob.py::Test_Blob::test__do_resumable_upload_with_predefined_acl",
"tests/unit/test_blob.py::Test_Blob::test__do_resumable_upload_with_retry",
"tests/unit/test_blob.py::Test_Blob::test__do_resumable_upload_with_size",
"tests/unit/test_blob.py::Test_Blob::test__do_upload_uses_multipart",
"tests/unit/test_blob.py::Test_Blob::test__do_upload_uses_resumable",
"tests/unit/test_blob.py::Test_Blob::test__do_upload_with_retry",
"tests/unit/test_blob.py::Test_Blob::test__encryption_headers_w_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test__encryption_headers_wo_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test__get_content_type_default",
"tests/unit/test_blob.py::Test_Blob::test__get_content_type_explicit",
"tests/unit/test_blob.py::Test_Blob::test__get_content_type_from_blob",
"tests/unit/test_blob.py::Test_Blob::test__get_content_type_from_filename",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_on_the_fly",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_on_the_fly_with_generation",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_on_the_fly_with_kms_key_name",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_on_the_fly_with_user_project",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_with_media_link",
"tests/unit/test_blob.py::Test_Blob::test__get_download_url_with_media_link_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test__get_transport",
"tests/unit/test_blob.py::Test_Blob::test__get_upload_arguments",
"tests/unit/test_blob.py::Test_Blob::test__get_writable_metadata_no_changes",
"tests/unit/test_blob.py::Test_Blob::test__get_writable_metadata_unwritable_field",
"tests/unit/test_blob.py::Test_Blob::test__get_writable_metadata_with_changes",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_no_size",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_chunk_size",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_extra_headers",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_kms",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_predefined_acl",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_retry",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_size",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_with_user_project",
"tests/unit/test_blob.py::Test_Blob::test__initiate_resumable_upload_without_chunk_size",
"tests/unit/test_blob.py::Test_Blob::test__query_params_default",
"tests/unit/test_blob.py::Test_Blob::test__query_params_w_generation",
"tests/unit/test_blob.py::Test_Blob::test__query_params_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test__set_metadata_to_none",
"tests/unit/test_blob.py::Test_Blob::test__set_properties_w_kms_key_name",
"tests/unit/test_blob.py::Test_Blob::test__set_properties_wo_kms_key_name",
"tests/unit/test_blob.py::Test_Blob::test_acl_property",
"tests/unit/test_blob.py::Test_Blob::test_bucket_readonly_property",
"tests/unit/test_blob.py::Test_Blob::test_cache_control_getter",
"tests/unit/test_blob.py::Test_Blob::test_cache_control_setter",
"tests/unit/test_blob.py::Test_Blob::test_chunk_size_ctor",
"tests/unit/test_blob.py::Test_Blob::test_chunk_size_getter",
"tests/unit/test_blob.py::Test_Blob::test_chunk_size_setter",
"tests/unit/test_blob.py::Test_Blob::test_chunk_size_setter_bad_value",
"tests/unit/test_blob.py::Test_Blob::test_client",
"tests/unit/test_blob.py::Test_Blob::test_component_count",
"tests/unit/test_blob.py::Test_Blob::test_component_count_string_val",
"tests/unit/test_blob.py::Test_Blob::test_component_count_unset",
"tests/unit/test_blob.py::Test_Blob::test_compose_minimal_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_compose_w_additional_property_changes",
"tests/unit/test_blob.py::Test_Blob::test_compose_wo_content_type_set",
"tests/unit/test_blob.py::Test_Blob::test_content_disposition_getter",
"tests/unit/test_blob.py::Test_Blob::test_content_disposition_setter",
"tests/unit/test_blob.py::Test_Blob::test_content_encoding_getter",
"tests/unit/test_blob.py::Test_Blob::test_content_encoding_setter",
"tests/unit/test_blob.py::Test_Blob::test_content_language_getter",
"tests/unit/test_blob.py::Test_Blob::test_content_language_setter",
"tests/unit/test_blob.py::Test_Blob::test_content_type_getter",
"tests/unit/test_blob.py::Test_Blob::test_content_type_setter",
"tests/unit/test_blob.py::Test_Blob::test_crc32c_getter",
"tests/unit/test_blob.py::Test_Blob::test_crc32c_setter",
"tests/unit/test_blob.py::Test_Blob::test_create_resumable_upload_session",
"tests/unit/test_blob.py::Test_Blob::test_create_resumable_upload_session_with_failure",
"tests/unit/test_blob.py::Test_Blob::test_create_resumable_upload_session_with_origin",
"tests/unit/test_blob.py::Test_Blob::test_ctor_w_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test_ctor_w_kms_key_name",
"tests/unit/test_blob.py::Test_Blob::test_ctor_w_kms_key_name_and_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test_ctor_with_encoded_unicode",
"tests/unit/test_blob.py::Test_Blob::test_ctor_with_generation",
"tests/unit/test_blob.py::Test_Blob::test_ctor_wo_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test_delete_w_generation",
"tests/unit/test_blob.py::Test_Blob::test_delete_wo_generation",
"tests/unit/test_blob.py::Test_Blob::test_download_as_string_w_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_as_string_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_w_chunks_w_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_w_chunks_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_with_failure",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_wo_chunks_w_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_wo_chunks_wo_raw",
"tests/unit/test_blob.py::Test_Blob::test_download_to_file_wo_media_link",
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_corrupted",
"tests/unit/test_blob.py::Test_Blob::test_download_to_filename_w_key",
"tests/unit/test_blob.py::Test_Blob::test_etag",
"tests/unit/test_blob.py::Test_Blob::test_event_based_hold_getter_false",
"tests/unit/test_blob.py::Test_Blob::test_event_based_hold_getter_missing",
"tests/unit/test_blob.py::Test_Blob::test_event_based_hold_getter_true",
"tests/unit/test_blob.py::Test_Blob::test_event_based_hold_setter",
"tests/unit/test_blob.py::Test_Blob::test_exists_hit_w_generation",
"tests/unit/test_blob.py::Test_Blob::test_exists_hit_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_exists_miss",
"tests/unit/test_blob.py::Test_Blob::test_from_string_w_domain_name_bucket",
"tests/unit/test_blob.py::Test_Blob::test_from_string_w_invalid_uri",
"tests/unit/test_blob.py::Test_Blob::test_from_string_w_valid_uri",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_no_version_passed_warning",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_content_md5",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_content_type",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_credentials",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_csek",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_csek_and_headers",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_defaults",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_endpoint",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_expiration",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_generation",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_headers",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_lowercase_method",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_method",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_non_ascii_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_response_disposition",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_response_type",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_slash_in_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v2_w_tilde_in_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_bucket_bound_hostname_w_bare_hostname",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_bucket_bound_hostname_w_scheme",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_content_md5",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_content_type",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_credentials",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_csek",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_csek_and_headers",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_defaults",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_endpoint",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_generation",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_headers",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_lowercase_method",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_method",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_non_ascii_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_response_disposition",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_response_type",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_slash_in_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_tilde_in_name",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_v4_w_virtual_hostname",
"tests/unit/test_blob.py::Test_Blob::test_generate_signed_url_w_invalid_version",
"tests/unit/test_blob.py::Test_Blob::test_generation",
"tests/unit/test_blob.py::Test_Blob::test_generation_string_val",
"tests/unit/test_blob.py::Test_Blob::test_generation_unset",
"tests/unit/test_blob.py::Test_Blob::test_get_iam_policy",
"tests/unit/test_blob.py::Test_Blob::test_get_iam_policy_w_requested_policy_version",
"tests/unit/test_blob.py::Test_Blob::test_get_iam_policy_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_id",
"tests/unit/test_blob.py::Test_Blob::test_make_private",
"tests/unit/test_blob.py::Test_Blob::test_make_public",
"tests/unit/test_blob.py::Test_Blob::test_md5_hash_getter",
"tests/unit/test_blob.py::Test_Blob::test_md5_hash_setter",
"tests/unit/test_blob.py::Test_Blob::test_media_link",
"tests/unit/test_blob.py::Test_Blob::test_metadata_getter",
"tests/unit/test_blob.py::Test_Blob::test_metadata_setter",
"tests/unit/test_blob.py::Test_Blob::test_metadata_setter_w_nan",
"tests/unit/test_blob.py::Test_Blob::test_metageneration",
"tests/unit/test_blob.py::Test_Blob::test_metageneration_string_val",
"tests/unit/test_blob.py::Test_Blob::test_metageneration_unset",
"tests/unit/test_blob.py::Test_Blob::test_owner",
"tests/unit/test_blob.py::Test_Blob::test_path_bad_bucket",
"tests/unit/test_blob.py::Test_Blob::test_path_no_name",
"tests/unit/test_blob.py::Test_Blob::test_path_normal",
"tests/unit/test_blob.py::Test_Blob::test_path_w_slash_in_name",
"tests/unit/test_blob.py::Test_Blob::test_path_with_non_ascii",
"tests/unit/test_blob.py::Test_Blob::test_public_url",
"tests/unit/test_blob.py::Test_Blob::test_public_url_w_slash_in_name",
"tests/unit/test_blob.py::Test_Blob::test_public_url_w_tilde_in_name",
"tests/unit/test_blob.py::Test_Blob::test_public_url_with_non_ascii",
"tests/unit/test_blob.py::Test_Blob::test_retention_expiration_time",
"tests/unit/test_blob.py::Test_Blob::test_retention_expiration_time_unset",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_other_bucket_other_name_no_encryption_partial",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_response_without_resource",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_same_name_no_key_new_key_w_token",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_same_name_no_old_key_new_key_done_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_same_name_w_old_key_new_kms_key",
"tests/unit/test_blob.py::Test_Blob::test_rewrite_w_generations",
"tests/unit/test_blob.py::Test_Blob::test_self_link",
"tests/unit/test_blob.py::Test_Blob::test_set_iam_policy",
"tests/unit/test_blob.py::Test_Blob::test_set_iam_policy_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_size",
"tests/unit/test_blob.py::Test_Blob::test_size_string_val",
"tests/unit/test_blob.py::Test_Blob::test_size_unset",
"tests/unit/test_blob.py::Test_Blob::test_storage_class_getter",
"tests/unit/test_blob.py::Test_Blob::test_storage_class_setter",
"tests/unit/test_blob.py::Test_Blob::test_temporary_hold_getter_false",
"tests/unit/test_blob.py::Test_Blob::test_temporary_hold_getter_missing",
"tests/unit/test_blob.py::Test_Blob::test_temporary_hold_getter_true",
"tests/unit/test_blob.py::Test_Blob::test_temporary_hold_setter",
"tests/unit/test_blob.py::Test_Blob::test_test_iam_permissions",
"tests/unit/test_blob.py::Test_Blob::test_test_iam_permissions_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_time_created",
"tests/unit/test_blob.py::Test_Blob::test_time_created_unset",
"tests/unit/test_blob.py::Test_Blob::test_time_deleted",
"tests/unit/test_blob.py::Test_Blob::test_time_deleted_unset",
"tests/unit/test_blob.py::Test_Blob::test_update_storage_class_invalid",
"tests/unit/test_blob.py::Test_Blob::test_update_storage_class_large_file",
"tests/unit/test_blob.py::Test_Blob::test_update_storage_class_w_encryption_key_w_user_project",
"tests/unit/test_blob.py::Test_Blob::test_update_storage_class_wo_encryption_key",
"tests/unit/test_blob.py::Test_Blob::test_updated",
"tests/unit/test_blob.py::Test_Blob::test_updated_unset",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_file_failure",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_file_success",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_file_with_retries",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_file_with_rewind",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_filename",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_string_w_bytes",
"tests/unit/test_blob.py::Test_Blob::test_upload_from_string_w_text",
"tests/unit/test_blob.py::Test_Blob::test_user_project",
"tests/unit/test_blob.py::Test__quote::test_bad_type",
"tests/unit/test_blob.py::Test__quote::test_bytes",
"tests/unit/test_blob.py::Test__quote::test_unicode",
"tests/unit/test_blob.py::Test__quote::test_w_slash_default",
"tests/unit/test_blob.py::Test__quote::test_w_slash_w_safe",
"tests/unit/test_blob.py::Test__quote::test_w_tilde",
"tests/unit/test_blob.py::Test__maybe_rewind::test_default",
"tests/unit/test_blob.py::Test__maybe_rewind::test_do_not_rewind",
"tests/unit/test_blob.py::Test__maybe_rewind::test_do_rewind",
"tests/unit/test_blob.py::Test__raise_from_invalid_response::test_default",
"tests/unit/test_blob.py::Test__raise_from_invalid_response::test_w_206_and_args",
"tests/unit/test_blob.py::Test__add_query_parameters::test_w_empty_list",
"tests/unit/test_blob.py::Test__add_query_parameters::test_w_existing_qs",
"tests/unit/test_blob.py::Test__add_query_parameters::test_wo_existing_qs"
] | [] | Apache License 2.0 | 6,373 | 616 | [
"google/cloud/storage/_helpers.py",
"google/cloud/storage/blob.py"
] |
mverleg__pyjson_tricks-67 | 7a309c9dfaf2a43987e5b580fbb30290e4d4f149 | 2020-02-04 20:10:02 | 7a309c9dfaf2a43987e5b580fbb30290e4d4f149 | diff --git a/json_tricks/encoders.py b/json_tricks/encoders.py
index 3082a8f..f6573da 100644
--- a/json_tricks/encoders.py
+++ b/json_tricks/encoders.py
@@ -316,7 +316,7 @@ def pandas_encode(obj, primitives=False):
))
repr['index'] = tuple(obj.index.values)
for k, name in enumerate(obj.columns.values):
- repr[name] = tuple(obj.ix[:, k].values)
+ repr[name] = tuple(obj.iloc[:, k].values)
return repr
if isinstance(obj, Series):
repr = hashodict()
| Pandas DeprecationWarning: .ix is deprecated | mverleg/pyjson_tricks | diff --git a/tests/test_pandas.py b/tests/test_pandas.py
index 4f4a89e..5c3d19c 100644
--- a/tests/test_pandas.py
+++ b/tests/test_pandas.py
@@ -24,7 +24,7 @@ def test_pandas_dataframe():
df = DataFrame(COLUMNS, columns=tuple(COLUMNS.keys()))
txt = dumps(df, allow_nan=True)
back = loads(txt)
- assert isnan(back.ix[0, -1])
+ assert isnan(back.iloc[0, -1])
assert (df.equals(back))
assert (df.dtypes == back.dtypes).all()
df = DataFrame(COLUMNS, columns=tuple(COLUMNS.keys()))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 3.13 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"numpy>=1.16.0",
"pandas>=1.0.0",
"pytz",
"enum34",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | enum34==1.1.10
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/mverleg/pyjson_tricks.git@7a309c9dfaf2a43987e5b580fbb30290e4d4f149#egg=json_tricks
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tzdata==2025.2
| name: pyjson_tricks
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- enum34==1.1.10
- numpy==2.0.2
- pandas==2.2.3
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tzdata==2025.2
prefix: /opt/conda/envs/pyjson_tricks
| [
"tests/test_pandas.py::test_pandas_dataframe",
"tests/test_pandas.py::test_pandas_mixed_with_other_types"
] | [] | [
"tests/test_pandas.py::test_pandas_series"
] | [] | BSD-3-Clause | 6,377 | 161 | [
"json_tricks/encoders.py"
] |
|
iterative__dvc-3284 | 509ce3e16ee6cbae6350c104ce9533c322fccebe | 2020-02-06 11:28:01 | e7b3297c2d6ae1ad633cd0435cca81093cac86ff | efiop: @Aljo-Rovco Thank for the PR! :pray: Looks great! Please see my comment above and also please install `pre-commit` hooks as described in https://dvc.org/doc/user-guide/contributing/core to check the formatting. | diff --git a/dvc/rwlock.py b/dvc/rwlock.py
index c9d15eae2..6b1d4de14 100644
--- a/dvc/rwlock.py
+++ b/dvc/rwlock.py
@@ -66,10 +66,11 @@ def _infos_to_str(infos):
def _check_blockers(lock, info, *, mode, waiters):
for path_info in waiters:
blockers = [
- info
+ blocker
for path, infos in lock[mode].items()
if path_info.overlaps(path)
- if info not in (infos if type(infos) is list else [infos])
+ for blocker in (infos if isinstance(infos, list) else [infos])
+ if blocker != info
]
if not blockers:
diff --git a/dvc/scm/git/__init__.py b/dvc/scm/git/__init__.py
index 79fbdb360..ac63854ff 100644
--- a/dvc/scm/git/__init__.py
+++ b/dvc/scm/git/__init__.py
@@ -18,7 +18,6 @@ from dvc.utils import is_binary
from dvc.utils import relpath
from dvc.utils.fs import path_isin
-
logger = logging.getLogger(__name__)
@@ -133,20 +132,19 @@ class Git(Base):
return entry, gitignore
- @staticmethod
- def _ignored(entry, gitignore_path):
- if os.path.exists(gitignore_path):
- with open(gitignore_path, "r") as fobj:
- ignore_list = fobj.readlines()
- return any(
- filter(lambda x: x.strip() == entry.strip(), ignore_list)
- )
- return False
+ def _ignored(self, path):
+ from git.exc import GitCommandError
+
+ try:
+ self.repo.git.check_ignore(path)
+ return True
+ except GitCommandError:
+ return False
def ignore(self, path):
entry, gitignore = self._get_gitignore(path)
- if self._ignored(entry, gitignore):
+ if self._ignored(path):
return
msg = "Adding '{}' to '{}'.".format(relpath(path), relpath(gitignore))
| Add a way to suppress dvc changes to .gitignore
At the moment, DVC make changes to the .gitignore after commands like `dvc add`. There are cases, like mine, where it's not needed as .gitignore already exclude whole directory where data is kept.
Add a configuration that allow to suppress dvc changes to .gitignore. | iterative/dvc | diff --git a/tests/dir_helpers.py b/tests/dir_helpers.py
index 4fc44e211..19580bbf6 100644
--- a/tests/dir_helpers.py
+++ b/tests/dir_helpers.py
@@ -45,6 +45,7 @@ from global repo template to creating everything inplace, which:
import os
import pathlib
+import logging
from contextlib import contextmanager
import pytest
@@ -65,6 +66,11 @@ __all__ = [
]
+# see https://github.com/iterative/dvc/issues/3167
+git_logger = logging.getLogger("git")
+git_logger.setLevel(logging.CRITICAL)
+
+
class TmpDir(pathlib.Path):
def __new__(cls, *args, **kwargs):
if cls is TmpDir:
diff --git a/tests/func/test_get.py b/tests/func/test_get.py
index 45f839355..aa6f57642 100644
--- a/tests/func/test_get.py
+++ b/tests/func/test_get.py
@@ -163,11 +163,7 @@ def test_get_from_non_dvc_master(tmp_dir, git_dir, caplog):
caplog.clear()
- # removing `git` import in conftest resulted in unexpected logs from
- # that package, see https://github.com/iterative/dvc/issues/3167
- with caplog.at_level(logging.INFO, logger="git"), caplog.at_level(
- logging.INFO, logger="dvc"
- ):
+ with caplog.at_level(logging.INFO, logger="dvc"):
Repo.get(fspath(git_dir), "some_file", out="some_dst", rev="branch")
assert caplog.text == ""
diff --git a/tests/func/test_scm.py b/tests/func/test_scm.py
index 23d303e1e..185c8b60c 100644
--- a/tests/func/test_scm.py
+++ b/tests/func/test_scm.py
@@ -83,6 +83,14 @@ def test_ignore(tmp_dir, scm):
assert _count_gitignore_entries(target) == 0
+def test_ignored(tmp_dir, scm):
+ tmp_dir.gen({"dir1": {"file1.jpg": "cont", "file2.txt": "cont"}})
+ tmp_dir.gen({".gitignore": "dir1/*.jpg"})
+
+ assert scm._ignored(fspath(tmp_dir / "dir1" / "file1.jpg"))
+ assert not scm._ignored(fspath(tmp_dir / "dir1" / "file2.txt"))
+
+
def test_get_gitignore(tmp_dir, scm):
tmp_dir.gen({"file1": "contents", "dir": {}})
diff --git a/tests/unit/test_rwlock.py b/tests/unit/test_rwlock.py
index f44e0303f..4bb3f1969 100644
--- a/tests/unit/test_rwlock.py
+++ b/tests/unit/test_rwlock.py
@@ -62,12 +62,12 @@ def test_rwlock_subdirs(tmp_path):
subfoo = PathInfo("foo/subfoo")
with rwlock(path, "cmd1", [foo], []):
- with pytest.raises(LockError):
+ with pytest.raises(LockError, match=r"subfoo(.|\n)*cmd1"):
with rwlock(path, "cmd2", [], [subfoo]):
pass
with rwlock(path, "cmd1", [], [subfoo]):
- with pytest.raises(LockError):
+ with pytest.raises(LockError, match=r"'foo'(.|\n)*cmd1"):
with rwlock(path, "cmd2", [foo], []):
pass
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 2
} | 0.82 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-timeout",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"flaky",
"mock",
"xmltodict",
"awscli",
"google-compute-engine",
"Pygments",
"collective.checkdocs",
"flake8",
"psutil",
"flake8-docstrings",
"pydocstyle",
"jaraco.windows",
"mock-ssh-server",
"moto",
"rangehttpserver"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aliyun-python-sdk-core==2.16.0
aliyun-python-sdk-core-v3==2.13.33
aliyun-python-sdk-kms==2.16.5
appdirs==1.4.4
atpublic==3.1.2
attrs @ file:///croot/attrs_1668696182826/work
autocommand==2.2.2
awscli==1.31.13
azure-common==1.1.28
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
bcrypt==4.2.1
boto==2.49.0
boto3==1.33.13
botocore==1.33.13
cachetools==4.2.4
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
collective.checkdocs==0.2
colorama==0.4.4
configobj==5.0.9
coverage==7.2.7
crcmod==1.7
cryptography==44.0.2
decorator==5.1.1
distro==1.9.0
docutils==0.16
-e git+https://github.com/iterative/dvc.git@509ce3e16ee6cbae6350c104ce9533c322fccebe#egg=dvc
execnet==2.0.2
flake8==5.0.4
flake8-docstrings==1.7.0
flaky==3.8.1
flatten-json==0.1.14
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
flufl.lock==7.1.1
funcy==2.0
future==1.0.0
gitdb==4.0.12
gitdb2==4.0.2
GitPython==3.1.44
google-api-core==2.10.2
google-api-python-client==2.166.0
google-auth==1.35.0
google-auth-httplib2==0.2.0
google-cloud-core==1.7.3
google-cloud-storage==1.19.0
google-compute-engine==2.8.13
google-crc32c==1.5.0
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
grandalf==0.6
httplib2==0.22.0
humanize==4.6.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.12.0
inflect==3.0.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jaraco.classes==3.2.3
jaraco.collections==4.2.0
jaraco.context==4.3.0
jaraco.functools==3.7.0
jaraco.structures==2.1.0
jaraco.text==3.11.1
jaraco.ui==2.3.0
jaraco.windows==5.7.0
Jinja2==3.1.6
jmespath==0.10.0
jsonpath-ng==1.7.0
MarkupSafe==2.1.5
mccabe==0.7.0
mock==5.2.0
mock-ssh-server==0.9.1
more-itertools==9.1.0
moto==4.2.14
nanotime==0.5.2
networkx==2.3
numpy==1.21.6
oauth2client==4.1.3
oss2==2.6.1
packaging @ file:///croot/packaging_1671697413597/work
paramiko==3.5.1
path==16.6.0
pathspec==0.11.2
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
ply==3.11
protobuf==4.24.4
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyarrow==0.15.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pycparser==2.21
pycryptodome==3.22.0
pydantic==1.10.21
pydocstyle==6.3.0
pydot==2.0.0
PyDrive2==1.15.4
pyfastcopy==1.0.3
pyflakes==2.5.0
Pygments==2.17.2
PyNaCl==1.5.0
pyOpenSSL==25.0.0
pyparsing==3.1.4
pytest==7.1.2
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-timeout==2.3.1
pytest-xdist==3.5.0
python-dateutil==2.8.0
PyYAML==5.1.2
rangehttpserver==1.4.0
requests==2.31.0
responses==0.23.3
rsa==4.7.2
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.8
s3transfer==0.8.2
shortuuid==1.0.13
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
speedcopy==2.1.5
texttable==1.7.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
treelib==1.7.1
types-PyYAML==6.0.12.12
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
uritemplate==4.1.1
urllib3==1.26.20
voluptuous==0.14.1
Werkzeug==2.2.3
xmltodict==0.14.2
zc.lockfile==3.0.post1
zipp @ file:///croot/zipp_1672387121353/work
| name: dvc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aliyun-python-sdk-core==2.16.0
- aliyun-python-sdk-core-v3==2.13.33
- aliyun-python-sdk-kms==2.16.5
- appdirs==1.4.4
- atpublic==3.1.2
- autocommand==2.2.2
- awscli==1.31.13
- azure-common==1.1.28
- azure-storage-blob==2.1.0
- azure-storage-common==2.1.0
- bcrypt==4.2.1
- boto==2.49.0
- boto3==1.33.13
- botocore==1.33.13
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==3.4.1
- collective-checkdocs==0.2
- colorama==0.4.4
- configobj==5.0.9
- coverage==7.2.7
- crcmod==1.7
- cryptography==44.0.2
- decorator==5.1.1
- distro==1.9.0
- docutils==0.16
- dvc==0.82.9+509ce3
- execnet==2.0.2
- flake8==5.0.4
- flake8-docstrings==1.7.0
- flaky==3.8.1
- flatten-json==0.1.14
- flufl-lock==7.1.1
- funcy==2.0
- future==1.0.0
- gitdb==4.0.12
- gitdb2==4.0.2
- gitpython==3.1.44
- google-api-core==2.10.2
- google-api-python-client==2.166.0
- google-auth==1.35.0
- google-auth-httplib2==0.2.0
- google-cloud-core==1.7.3
- google-cloud-storage==1.19.0
- google-compute-engine==2.8.13
- google-crc32c==1.5.0
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- grandalf==0.6
- httplib2==0.22.0
- humanize==4.6.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.12.0
- inflect==3.0.2
- jaraco-classes==3.2.3
- jaraco-collections==4.2.0
- jaraco-context==4.3.0
- jaraco-functools==3.7.0
- jaraco-structures==2.1.0
- jaraco-text==3.11.1
- jaraco-ui==2.3.0
- jaraco-windows==5.7.0
- jinja2==3.1.6
- jmespath==0.10.0
- jsonpath-ng==1.7.0
- markupsafe==2.1.5
- mccabe==0.7.0
- mock==5.2.0
- mock-ssh-server==0.9.1
- more-itertools==9.1.0
- moto==4.2.14
- nanotime==0.5.2
- networkx==2.3
- numpy==1.21.6
- oauth2client==4.1.3
- oss2==2.6.1
- paramiko==3.5.1
- path==16.6.0
- pathspec==0.11.2
- ply==3.11
- protobuf==4.24.4
- psutil==7.0.0
- pyarrow==0.15.1
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pycparser==2.21
- pycryptodome==3.22.0
- pydantic==1.10.21
- pydocstyle==6.3.0
- pydot==2.0.0
- pydrive2==1.15.4
- pyfastcopy==1.0.3
- pyflakes==2.5.0
- pygments==2.17.2
- pynacl==1.5.0
- pyopenssl==25.0.0
- pyparsing==3.1.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- pytest-timeout==2.3.1
- pytest-xdist==3.5.0
- python-dateutil==2.8.0
- pyyaml==5.1.2
- rangehttpserver==1.4.0
- requests==2.31.0
- responses==0.23.3
- rsa==4.7.2
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.8
- s3transfer==0.8.2
- shortuuid==1.0.13
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- speedcopy==2.1.5
- texttable==1.7.0
- tqdm==4.67.1
- treelib==1.7.1
- types-pyyaml==6.0.12.12
- uritemplate==4.1.1
- urllib3==1.26.20
- voluptuous==0.14.1
- werkzeug==2.2.3
- xmltodict==0.14.2
- zc-lockfile==3.0.post1
prefix: /opt/conda/envs/dvc
| [
"tests/func/test_scm.py::test_ignored",
"tests/unit/test_rwlock.py::test_rwlock_subdirs"
] | [] | [
"tests/func/test_get.py::test_get_repo_file",
"tests/func/test_get.py::test_get_repo_dir",
"tests/func/test_get.py::test_get_git_file",
"tests/func/test_get.py::test_get_git_dir",
"tests/func/test_get.py::test_cache_type_is_properly_overridden",
"tests/func/test_get.py::test_get_repo_rev",
"tests/func/test_get.py::test_get_from_non_dvc_repo",
"tests/func/test_get.py::test_get_a_dvc_file",
"tests/func/test_get.py::test_get_full_dvc_path",
"tests/func/test_get.py::test_non_cached_output",
"tests/func/test_get.py::test_absolute_file_outside_repo",
"tests/func/test_get.py::test_absolute_file_outside_git_repo",
"tests/func/test_get.py::test_unknown_path",
"tests/func/test_get.py::test_get_to_dir[.]",
"tests/func/test_get.py::test_get_to_dir[dir]",
"tests/func/test_get.py::test_get_to_dir[dir/subdir]",
"tests/func/test_get.py::test_get_from_non_dvc_master",
"tests/func/test_get.py::test_get_url_positive",
"tests/func/test_get.py::test_get_url_not_existing",
"tests/func/test_get.py::test_get_url_git_only_repo",
"tests/func/test_scm.py::TestSCM::test_git",
"tests/func/test_scm.py::TestSCM::test_none",
"tests/func/test_scm.py::TestSCMGit::test_commit",
"tests/func/test_scm.py::TestSCMGit::test_is_repo",
"tests/func/test_scm.py::TestSCMGit::test_is_tracked",
"tests/func/test_scm.py::TestSCMGitSubmodule::test_commit_in_submodule",
"tests/func/test_scm.py::TestSCMGitSubmodule::test_git_submodule",
"tests/func/test_scm.py::TestSCMGitSubmodule::test_is_submodule",
"tests/func/test_scm.py::test_ignore",
"tests/func/test_scm.py::test_get_gitignore",
"tests/func/test_scm.py::test_get_gitignore_symlink",
"tests/func/test_scm.py::test_get_gitignore_subdir",
"tests/func/test_scm.py::test_gitignore_should_end_with_newline",
"tests/func/test_scm.py::test_gitignore_should_append_newline_to_gitignore",
"tests/unit/test_rwlock.py::test_rwlock",
"tests/unit/test_rwlock.py::test_rwlock_reentrant",
"tests/unit/test_rwlock.py::test_broken_rwlock"
] | [] | Apache License 2.0 | 6,386 | 533 | [
"dvc/rwlock.py",
"dvc/scm/git/__init__.py"
] |
ucfopen__canvasapi-351 | 309210681e28c58256ce5999a513b8a70f8aba31 | 2020-02-07 02:55:19 | e85c5552b8bd611ab87fbf84ba9d752a86567be8 | coveralls:
[](https://coveralls.io/builds/28953722)
Coverage remained the same at 100.0% when pulling **af9c78fe3a33482d5a6d5ff5eb5009316d7b6397 on Mike-Nahmias:issue/347-quiz-submission-data** into **309210681e28c58256ce5999a513b8a70f8aba31 on ucfopen:develop**.
| diff --git a/canvasapi/quiz.py b/canvasapi/quiz.py
index 0cce770..efd945b 100644
--- a/canvasapi/quiz.py
+++ b/canvasapi/quiz.py
@@ -8,6 +8,8 @@ from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.paginated_list import PaginatedList
from canvasapi.quiz_group import QuizGroup
+from canvasapi.submission import Submission
+from canvasapi.user import User
from canvasapi.util import combine_kwargs, obj_or_id
@@ -374,6 +376,22 @@ class Quiz(CanvasObject):
response_json = response.json()["quiz_submissions"][0]
response_json.update({"course_id": self.course_id})
+ if len(response.json().get("quizzes", [])) > 0:
+ response_json.update(
+ {"quiz": Quiz(self._requester, response.json()["quizzes"][0])}
+ )
+ if len(response.json().get("submissions", [])) > 0:
+ response_json.update(
+ {
+ "submission": Submission(
+ self._requester, response.json()["submissions"][0]
+ )
+ }
+ )
+ if len(response.json().get("users", [])) > 0:
+ response_json.update(
+ {"user": User(self._requester, response.json()["users"][0])}
+ )
return QuizSubmission(self._requester, response_json)
| quiz.get_quiz_submission() strips included data
# Describe the bug
quiz.get_quiz_submission() only keeps ["quiz_submissions"][0] from the returned data. If you use the kwarg `include` you can have the request include submission, quiz, or user, but this data is omitted by the function.
# To Reproduce
1. Turn on logging as described in the docs
2. Get a quiz
3. Call `quiz.get_get_quiz_submission(submission_id, include=['quiz'])`
4. The log will show the returned data including more than what is included in the resulting object
# Expected behavior
The raw data returned from the API looks like this:
```
{
'quiz_submissions': [{...}],
'quizzes': [{...}]
}
```
# Environment information
- Python version - 3.7.6
- CanvasAPI version - 0.15.0
| ucfopen/canvasapi | diff --git a/tests/fixtures/quiz.json b/tests/fixtures/quiz.json
index 1a0c09e..253e05c 100644
--- a/tests/fixtures/quiz.json
+++ b/tests/fixtures/quiz.json
@@ -219,6 +219,30 @@
"validation_token": "this is a validation token",
"score": 0
}
+ ],
+ "quizzes": [
+ {
+ "id": 1,
+ "title": "Test Quiz",
+ "quiz_type": "survey"
+ }
+ ],
+ "submissions": [
+ {
+ "id": 1,
+ "body": "user: 1, quiz: 1, score: 1.0, time: 2020-01-01 00:00:00Z",
+ "grade": "1",
+ "score": 1,
+ "assignment_id": 1234,
+ "user_id": 1
+ }
+ ],
+ "users": [
+ {
+ "id": 1,
+ "name": "Test User",
+ "login_id": "[email protected]"
+ }
]
},
"status_code": 200
diff --git a/tests/test_quiz.py b/tests/test_quiz.py
index 4e87972..cf0b010 100644
--- a/tests/test_quiz.py
+++ b/tests/test_quiz.py
@@ -22,6 +22,8 @@ from canvasapi.quiz_group import QuizGroup
from canvasapi.paginated_list import PaginatedList
from tests import settings
from tests.util import register_uris
+from canvasapi.user import User
+from canvasapi.submission import Submission
@requests_mock.Mocker()
@@ -319,19 +321,24 @@ class TestQuiz(unittest.TestCase):
register_uris({"quiz": ["get_quiz_submission"]}, m)
quiz_id = 1
- submission = self.quiz.get_quiz_submission(quiz_id)
+ quiz_submission = self.quiz.get_quiz_submission(
+ quiz_id, include=["quiz", "submission", "user"]
+ )
- self.assertIsInstance(submission, QuizSubmission)
- self.assertTrue(hasattr(submission, "id"))
- self.assertEqual(submission.quiz_id, quiz_id)
- self.assertTrue(hasattr(submission, "quiz_version"))
- self.assertEqual(submission.quiz_version, 1)
- self.assertTrue(hasattr(submission, "user_id"))
- self.assertEqual(submission.user_id, 1)
- self.assertTrue(hasattr(submission, "validation_token"))
- self.assertEqual(submission.validation_token, "this is a validation token")
- self.assertTrue(hasattr(submission, "score"))
- self.assertEqual(submission.score, 0)
+ self.assertIsInstance(quiz_submission, QuizSubmission)
+ self.assertTrue(hasattr(quiz_submission, "id"))
+ self.assertEqual(quiz_submission.quiz_id, quiz_id)
+ self.assertTrue(hasattr(quiz_submission, "quiz_version"))
+ self.assertEqual(quiz_submission.quiz_version, 1)
+ self.assertTrue(hasattr(quiz_submission, "user_id"))
+ self.assertEqual(quiz_submission.user_id, 1)
+ self.assertTrue(hasattr(quiz_submission, "validation_token"))
+ self.assertEqual(quiz_submission.validation_token, "this is a validation token")
+ self.assertTrue(hasattr(quiz_submission, "score"))
+ self.assertEqual(quiz_submission.score, 0)
+ self.assertIsInstance(quiz_submission.quiz, Quiz)
+ self.assertIsInstance(quiz_submission.submission, Submission)
+ self.assertIsInstance(quiz_submission.user, User)
# create_submission
def test_create_submission(self, m):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"requests-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/ucfopen/canvasapi.git@309210681e28c58256ce5999a513b8a70f8aba31#egg=canvasapi
certifi==2021.5.30
charset-normalizer==2.0.12
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytz==2025.2
requests==2.27.1
requests-mock==1.12.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: canvasapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytz==2025.2
- requests==2.27.1
- requests-mock==1.12.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/canvasapi
| [
"tests/test_quiz.py::TestQuiz::test_get_quiz_submission"
] | [
"tests/test_quiz.py::TestQuiz::test_get_all_quiz_submissions"
] | [
"tests/test_quiz.py::TestQuiz::test__str__",
"tests/test_quiz.py::TestQuiz::test_broadcast_message",
"tests/test_quiz.py::TestQuiz::test_broadcast_message_invalid_params",
"tests/test_quiz.py::TestQuiz::test_create_question",
"tests/test_quiz.py::TestQuiz::test_create_question_group",
"tests/test_quiz.py::TestQuiz::test_create_question_group_empty_list",
"tests/test_quiz.py::TestQuiz::test_create_question_group_incorrect_dict",
"tests/test_quiz.py::TestQuiz::test_create_question_group_incorrect_param",
"tests/test_quiz.py::TestQuiz::test_create_report",
"tests/test_quiz.py::TestQuiz::test_create_report_failure",
"tests/test_quiz.py::TestQuiz::test_create_submission",
"tests/test_quiz.py::TestQuiz::test_delete",
"tests/test_quiz.py::TestQuiz::test_edit",
"tests/test_quiz.py::TestQuiz::test_get_all_quiz_reports",
"tests/test_quiz.py::TestQuiz::test_get_question",
"tests/test_quiz.py::TestQuiz::test_get_questions",
"tests/test_quiz.py::TestQuiz::test_get_quiz_group",
"tests/test_quiz.py::TestQuiz::test_get_quiz_report",
"tests/test_quiz.py::TestQuiz::test_get_statistics",
"tests/test_quiz.py::TestQuiz::test_get_submissions",
"tests/test_quiz.py::TestQuiz::test_set_extensions",
"tests/test_quiz.py::TestQuiz::test_set_extensions_empty_list",
"tests/test_quiz.py::TestQuiz::test_set_extensions_missing_key",
"tests/test_quiz.py::TestQuiz::test_set_extensions_non_dicts",
"tests/test_quiz.py::TestQuiz::test_set_extensions_not_list",
"tests/test_quiz.py::TestQuizReport::test__str__",
"tests/test_quiz.py::TestQuizReport::test_abort_or_delete",
"tests/test_quiz.py::TestQuizReport::test_abort_or_delete_failure",
"tests/test_quiz.py::TestQuizSubmission::test__str__",
"tests/test_quiz.py::TestQuizSubmission::test_answer_submission_questions",
"tests/test_quiz.py::TestQuizSubmission::test_answer_submission_questions_manual_validation_token",
"tests/test_quiz.py::TestQuizSubmission::test_answer_submission_questions_no_validation_token",
"tests/test_quiz.py::TestQuizSubmission::test_complete",
"tests/test_quiz.py::TestQuizSubmission::test_complete_no_validation_token",
"tests/test_quiz.py::TestQuizSubmission::test_get_submission_events",
"tests/test_quiz.py::TestQuizSubmission::test_get_submission_questions",
"tests/test_quiz.py::TestQuizSubmission::test_get_times",
"tests/test_quiz.py::TestQuizSubmission::test_submit_events",
"tests/test_quiz.py::TestQuizSubmission::test_submit_events_fail",
"tests/test_quiz.py::TestQuizSubmission::test_update_score_and_comments",
"tests/test_quiz.py::TestQuizExtension::test__str__",
"tests/test_quiz.py::TestQuizQuestion::test__str__",
"tests/test_quiz.py::TestQuizQuestion::test_delete",
"tests/test_quiz.py::TestQuizQuestion::test_edit",
"tests/test_quiz.py::TestQuizStatistic::test__str__",
"tests/test_quiz.py::TestQuizSubmissionEvent::test__str__",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test__str__",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_flag",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_flag_manual_validation_token",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_flag_no_validation_token",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_unflag",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_unflag_manual_validation_token",
"tests/test_quiz.py::TestQuizSubmissionQuestion::test_unflag_no_validation_token",
"tests/test_quiz.py::TestQuizAssignmentOverrideSet::test__str__"
] | [] | MIT License | 6,393 | 345 | [
"canvasapi/quiz.py"
] |
ESMValGroup__ESMValCore-462 | e2404b7924afd040b909e1eaf5d22db3110e86b7 | 2020-02-07 09:03:24 | 5ef3833f2afb163735f6da5b61e0d4ec7b29b682 | diff --git a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py
index ad98dacae..e8c5008a5 100644
--- a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py
+++ b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py
@@ -1,5 +1,7 @@
"""Fixes for GFDL CM2p1 model."""
from copy import deepcopy
+import numpy as np
+import cftime
from ..fix import Fix
from ..cmip5.gfdl_esm2g import AllVars as BaseAllVars
@@ -56,6 +58,48 @@ class Sftof(Fix):
return cube
+class Sit(Fix):
+ """Fixes for sit"""
+
+ def fix_metadata(self, cubes):
+ """
+ Fix metadata.
+
+ Fixes bad bounds
+
+ Parameters
+ ----------
+ cube: iris.cube.Cube
+
+ Returns
+ -------
+ iris.cube.Cube
+
+ """
+ cube = self.get_cube_from_list(cubes)
+ time = cube.coord('time')
+ if self._fix_required(time):
+ times = time.units.num2date(time.points)
+ starts = [
+ cftime.DatetimeJulian(c.year, c.month, 1)
+ for c in times
+ ]
+ ends = [
+ cftime.DatetimeJulian(c.year, c.month + 1, 1)
+ if c.month < 12
+ else cftime.DatetimeJulian(c.year + 1, 1, 1)
+ for c in times
+ ]
+ time.bounds = time.units.date2num(np.stack([starts, ends], -1))
+ return cubes
+
+ def _fix_required(self, time):
+ return (
+ self.vardef.frequency == 'mon' and
+ not (time.bounds[-1, 0] < time.points[-1] < time.bounds[-1, 1])
+ )
+
+
class Tos(Fix):
"""Fixes for tos"""
| Dataset problem: GFDL-CM2p1 sit has invalid time bounds
I have found a sit file for CMIP5 GFDL-CM2p1 that has time bounds of about 1e9 with times of abot 40000. Iris breaks up at the checker, when changing the time units:
```yaml
- {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004}
```
Pull request with the fix is coming soon | ESMValGroup/ESMValCore | diff --git a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py
index 4540645c9..eabead476 100644
--- a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py
+++ b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py
@@ -1,11 +1,14 @@
"""Test GDL-CM2P1 fixes."""
import unittest
+from unittest import mock
from cf_units import Unit
+import iris
from iris.cube import Cube
from esmvalcore.cmor.fix import Fix
-from esmvalcore.cmor._fixes.cmip5.gfdl_cm2p1 import Sftof, AllVars, Areacello
+from esmvalcore.cmor._fixes.cmip5.gfdl_cm2p1 import (Sftof, AllVars,
+ Areacello, Sit)
class TestSftof(unittest.TestCase):
@@ -29,7 +32,8 @@ class TestSftof(unittest.TestCase):
class TestAreacello(unittest.TestCase):
- """Test sftof fixes."""
+ """Test areacello fixes."""
+
def setUp(self):
"""Prepare tests."""
self.cube = Cube([1.0], var_name='areacello', units='m-2')
@@ -53,3 +57,64 @@ class TestAreacello(unittest.TestCase):
cube = self.fix.fix_metadata((self.cube, ))[0]
self.assertEqual(cube.data[0], 1.0)
self.assertEqual(cube.units, Unit('m2'))
+
+
+class TestSit(unittest.TestCase):
+ """Test sit fixes."""
+
+ def setUp(self):
+ """Prepare tests."""
+ self.cube = Cube([1.0, 2.0], var_name='sit', units='m')
+ self.cube.add_dim_coord(
+ iris.coords.DimCoord(
+ points=[45000.5, 45031.5],
+ var_name='time',
+ standard_name='time',
+ long_name='time',
+ units='days since 1850-01-01',
+ bounds=[[1e8, 1.1e8], [1.1e8, 1.2e8]]
+ ),
+ 0
+ )
+ self.var_info_mock = mock.Mock()
+ self.var_info_mock.frequency = 'mon'
+ self.fix = Sit(self.var_info_mock)
+
+ def test_get(self):
+ """Test fix get"""
+ self.assertListEqual(
+ Fix.get_fixes('CMIP5', 'GFDL-CM2P1', 'OImon', 'sit'),
+ [Sit(self.var_info_mock), AllVars(None)])
+
+ def test_fix_metadata_day_do_nothing(self):
+ """Test data fix."""
+ self.var_info_mock.frequency = 'day'
+ fix = Sit(self.var_info_mock)
+ cube = fix.fix_metadata((self.cube,))[0]
+ time = cube.coord('time')
+ self.assertEqual(time.bounds[0, 0], 1e8)
+ self.assertEqual(time.bounds[0, 1], 1.1e8)
+ self.assertEqual(time.bounds[1, 0], 1.1e8)
+ self.assertEqual(time.bounds[1, 1], 1.2e8)
+
+ def test_fix_metadata(self):
+ """Test data fix."""
+ fix = Sit(self.var_info_mock)
+ cube = fix.fix_metadata((self.cube,))[0]
+ time = cube.coord('time')
+ self.assertEqual(time.bounds[0, 0], 44984)
+ self.assertEqual(time.bounds[0, 1], 45015)
+ self.assertEqual(time.bounds[1, 0], 45015)
+ self.assertEqual(time.bounds[1, 1], 45045)
+
+ def test_fix_metadata_not_needed(self):
+ """Test data fix."""
+ fix = Sit(self.var_info_mock)
+ cube = fix.fix_metadata((self.cube,))[0]
+ time = cube.coord('time')
+ new_bounds = [[44985., 45014.], [45016., 45044.]]
+ time.bounds = new_bounds
+ self.assertEqual(time.bounds[0, 0], 44985)
+ self.assertEqual(time.bounds[0, 1], 45014)
+ self.assertEqual(time.bounds[1, 0], 45016)
+ self.assertEqual(time.bounds[1, 1], 45044)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[develop]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest>=3.9",
"pytest-cov",
"pytest-env",
"pytest-flake8",
"pytest-html",
"pytest-metadata>=1.5.1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y build-essential curl unzip"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
antlr4-python3-runtime @ file:///home/conda/feedstock_root/build_artifacts/antlr-python-runtime_1636143373368/work
astroid==2.15.8
attrs==24.2.0
Babel==2.14.0
build==0.10.0
Cartopy @ file:///home/conda/feedstock_root/build_artifacts/cartopy_1642060950241/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1725278078093/work/certifi
cf-units @ file:///home/conda/feedstock_root/build_artifacts/cf-units_1640986232540/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1663606412550/work
charset-normalizer==3.4.1
click==8.1.8
click-plugins==1.1.1
cligj==0.7.2
cloudpickle @ file:///home/conda/feedstock_root/build_artifacts/cloudpickle_1674202310934/work
colorama==0.4.6
coverage==7.2.7
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1635519461629/work
Cython @ file:///home/conda/feedstock_root/build_artifacts/cython_1659101942790/work
dask @ file:///home/conda/feedstock_root/build_artifacts/dask-core_1644602974678/work
dill==0.3.7
docutils==0.17.1
dodgy==0.2.1
ESMPy==8.2.0
-e git+https://github.com/ESMValGroup/ESMValCore.git@e2404b7924afd040b909e1eaf5d22db3110e86b7#egg=ESMValCore
exceptiongroup==1.2.2
fiona==1.9.6
flake8==5.0.4
flake8-polyfill==1.0.2
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1666389892786/work
fsspec @ file:///home/conda/feedstock_root/build_artifacts/fsspec_1674184942191/work
gitdb==4.0.12
GitPython==3.1.44
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
iniconfig==2.0.0
isodate==0.6.1
isort==5.11.5
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1715127149914/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1657953088445/work
lazy-object-proxy==1.9.0
llvmlite==0.39.1
locket @ file:///home/conda/feedstock_root/build_artifacts/locket_1650660393415/work
lxml==5.3.1
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1648737551960/work
matplotlib @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-suite_1661439848456/work
mccabe==0.7.0
mpi4py @ file:///home/conda/feedstock_root/build_artifacts/mpi4py_1660326262210/work
munkres==1.1.4
nc-time-axis==1.4.1
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1661987221388/work
networkx==2.6.3
numba==0.56.4
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1649806299270/work
olefile @ file:///home/conda/feedstock_root/build_artifacts/olefile_1701735466804/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1696202382185/work
partd @ file:///home/conda/feedstock_root/build_artifacts/partd_1695667515973/work
pathspec==0.11.2
pep8-naming==0.10.0
Pillow @ file:///tmp/build/80754af9/pillow_1625655818400/work
platformdirs==4.0.0
pluggy==1.2.0
prospector==1.10.3
prov==2.0.1
psutil==7.0.0
py==1.11.0
pycodestyle==2.9.1
pydocstyle==6.3.0
pydot==2.0.0
pyflakes==2.5.0
Pygments==2.17.2
pylint==2.17.7
pylint-celery==0.3
pylint-django==2.5.3
pylint-flask==0.6
pylint-plugin-utils==0.7
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1724616129934/work
pyproj @ file:///home/conda/feedstock_root/build_artifacts/pyproj_1636547699801/work
pyproject_hooks==1.2.0
pyroma==4.2
pyshp @ file:///home/conda/feedstock_root/build_artifacts/pyshp_1659002966020/work
pytest==7.4.4
pytest-cov==4.1.0
pytest-env==1.0.1
pytest-flake8==1.1.3
pytest-html==3.2.0
pytest-metadata==3.0.0
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1709299778482/work
pytz==2025.2
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1648757092905/work
rdflib==6.3.2
requests==2.31.0
requirements-detector==1.2.2
scipy @ file:///home/conda/feedstock_root/build_artifacts/scipy_1637806658031/work
scitools-iris @ file:///home/conda/feedstock_root/build_artifacts/iris_1637936208876/work
semver==3.0.4
setoptconf-tmp==0.3.1
Shapely @ file:///home/conda/feedstock_root/build_artifacts/shapely_1637399855493/work
six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
smmap==5.0.2
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinx-rtd-theme==1.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
stratify @ file:///home/conda/feedstock_root/build_artifacts/python-stratify_1620938361732/work
toml==0.10.2
tomli==2.0.1
tomlkit==0.12.5
toolz @ file:///home/conda/feedstock_root/build_artifacts/toolz_1706112571092/work
trove-classifiers==2025.3.19.19
typed-ast==1.5.5
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/typing_extensions_1688315532570/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1649111917568/work
urllib3==2.0.7
vmprof==0.4.18.1
wrapt==1.16.0
xxhash @ file:///home/conda/feedstock_root/build_artifacts/python-xxhash_1649442453935/work
yamale==4.0.4
yamllint==1.32.0
yapf==0.43.0
zipp==3.15.0
| name: ESMValCore
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- antlr-python-runtime=4.7.2=py37h89c1867_1003
- atk-1.0=2.38.0=h04ea711_2
- binutils_impl_linux-64=2.43=h4bf12b8_4
- binutils_linux-64=2.43=h4852527_4
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cairo=1.18.0=h3faef2a_0
- cartopy=0.20.2=py37h9a08e6e_0
- certifi=2024.8.30=pyhd8ed1ab_0
- cf-units=3.0.1=py37hb1e94ed_2
- cftime=1.6.2=py37hc105733_0
- cloudpickle=2.2.1=pyhd8ed1ab_0
- curl=8.8.0=he654da7_1
- cycler=0.11.0=pyhd8ed1ab_0
- cython=0.29.32=py37hd23a5d3_0
- dask-core=2022.2.0=pyhd8ed1ab_0
- esmf=8.2.0=mpi_mpich_h5a1934d_102
- esmpy=8.2.0=mpi_mpich_py37h7352969_101
- expat=2.6.4=h5888daf_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.14.2=h14ed4e7_0
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.38.0=py37h540881e_0
- freetype=2.12.1=h267a509_2
- fribidi=1.0.10=h36c2ea0_0
- fsspec=2023.1.0=pyhd8ed1ab_0
- gcc_impl_linux-64=14.2.0=hdb7739f_2
- gcc_linux-64=14.2.0=h5910c8f_8
- gdk-pixbuf=2.42.10=h5eee18b_1
- geos=3.10.1=h9c3ff4c_1
- glib=2.80.2=hf974151_0
- glib-tools=2.80.2=hb6ce0ca_0
- graphite2=1.3.13=h59595ed_1003
- graphviz=9.0.0=h78e8752_1
- gtk2=2.24.33=h280cfa0_4
- gts=0.7.6=h977cf35_4
- harfbuzz=8.5.0=hfac3d4d_0
- hdf4=4.2.15=h9772cbc_5
- hdf5=1.12.2=mpi_mpich_h5d83325_1
- icu=73.2=h59595ed_0
- iris=3.1.0=pyhd8ed1ab_3
- jinja2=3.1.4=pyhd8ed1ab_0
- jpeg=9e=h0b41bf4_3
- kernel-headers_linux-64=3.10.0=he073ed8_18
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.4=py37h7cecad7_0
- krb5=1.21.3=h659f571_0
- lcms2=2.14=h6ed2654_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- libaec=1.1.3=h59595ed_0
- libblas=3.9.0=20_linux64_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcblas=3.9.0=20_linux64_openblas
- libcurl=8.8.0=hca28451_1
- libdeflate=1.14=h166bdaf_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libgcc=14.2.0=h767d61c_2
- libgcc-devel_linux-64=14.2.0=h9c4974d_102
- libgcc-ng=14.2.0=h69a702a_2
- libgd=2.3.3=h695aa2c_1
- libgfortran=14.2.0=h69a702a_2
- libgfortran-ng=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libglib=2.80.2=hf974151_0
- libgomp=14.2.0=h767d61c_2
- libiconv=1.18=h4ce23a2_1
- liblapack=3.9.0=20_linux64_openblas
- liblzma=5.6.4=hb9d3cd8_0
- liblzma-devel=5.6.4=hb9d3cd8_0
- libnetcdf=4.8.1=mpi_mpich_hcd871d9_6
- libnghttp2=1.58.0=h47da74e_1
- libnsl=2.0.1=hd590300_0
- libopenblas=0.3.25=pthreads_h413a1c8_0
- libpng=1.6.43=h2797004_0
- librsvg=2.58.0=hadf69e7_1
- libsanitizer=14.2.0=hed042b8_2
- libsqlite=3.46.0=hde9e2c9_0
- libssh2=1.11.0=h0841786_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libtiff=4.4.0=h82bc61c_5
- libudunits2=2.2.28=h40f5838_3
- libunwind=1.7.2=he02047a_0
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.15=h0b41bf4_0
- libxml2=2.12.7=hc051c1a_1
- libzip=1.10.1=h2629f0a_3
- libzlib=1.2.13=h4ab18f5_6
- locket=1.0.0=pyhd8ed1ab_0
- markupsafe=2.1.1=py37h540881e_1
- matplotlib-base=3.5.3=py37hf395dca_2
- mpi=1.0=mpich
- mpi4py=3.1.3=py37h52370cb_2
- mpich=4.0.3=h846660c_100
- munkres=1.1.4=pyh9f0ad1d_0
- ncurses=6.5=h2d0b736_3
- netcdf-fortran=4.6.0=mpi_mpich_h1e13492_2
- netcdf4=1.6.0=nompi_py37h7187172_102
- numpy=1.21.6=py37h976b520_0
- olefile=0.47=pyhd8ed1ab_0
- openjpeg=2.5.0=h7d73246_1
- openssl=3.4.1=h7b32b05_0
- packaging=23.2=pyhd8ed1ab_0
- pango=1.54.0=h84a9a3c_0
- partd=1.4.1=pyhd8ed1ab_0
- pcre2=10.43=hcad00b1_0
- pillow=8.3.1=py37h2c7a002_0
- pip=24.0=pyhd8ed1ab_0
- pixman=0.44.2=h29eaf8c_0
- proj=8.2.0=h277dcde_0
- pthread-stubs=0.4=hb9d3cd8_1002
- pyparsing=3.1.4=pyhd8ed1ab_0
- pyproj=3.2.1=py37hb589d83_5
- pyshp=2.3.1=pyhd8ed1ab_0
- python=3.7.12=hf930737_100_cpython
- python-dateutil=2.9.0=pyhd8ed1ab_0
- python-stratify=0.1.1=py37h6f94858_1004
- python-xxhash=3.0.0=py37h540881e_1
- python_abi=3.7=4_cp37m
- pyyaml=6.0=py37h540881e_4
- readline=8.2=h8c095d6_2
- scipy=1.7.3=py37hf2a6cf1_0
- setuptools=69.0.3=pyhd8ed1ab_0
- shapely=1.8.0=py37h9b0f7a3_4
- six=1.16.0=pyh6c4a22f_0
- sqlite=3.46.0=h6d4b2fc_0
- sysroot_linux-64=2.17=h0157908_18
- tk=8.6.13=noxft_h4845f30_101
- toolz=0.12.1=pyhd8ed1ab_0
- typing-extensions=4.7.1=hd8ed1ab_0
- typing_extensions=4.7.1=pyha770c72_0
- tzdata=2025b=h78e105d_0
- udunits2=2.2.28=h40f5838_3
- unicodedata2=14.0.0=py37h540881e_1
- wheel=0.42.0=pyhd8ed1ab_0
- xorg-kbproto=1.0.7=hb9d3cd8_1003
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.9=h8ee46fc_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.4=h0b41bf4_2
- xorg-libxrender=0.9.11=hd590300_0
- xorg-renderproto=0.11.1=hb9d3cd8_1003
- xorg-xextproto=7.3.0=hb9d3cd8_1004
- xorg-xproto=7.0.31=hb9d3cd8_1008
- xxhash=0.8.0=h7f98852_3
- xz=5.6.4=hbcc6ac9_0
- xz-gpl-tools=5.6.4=hbcc6ac9_0
- xz-tools=5.6.4=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- zlib=1.2.13=h4ab18f5_6
- zstd=1.5.6=ha6fb4c9_0
- pip:
- alabaster==0.7.13
- astroid==2.15.8
- attrs==24.2.0
- babel==2.14.0
- build==0.10.0
- charset-normalizer==3.4.1
- click==8.1.8
- click-plugins==1.1.1
- cligj==0.7.2
- colorama==0.4.6
- coverage==7.2.7
- dill==0.3.7
- docutils==0.17.1
- dodgy==0.2.1
- exceptiongroup==1.2.2
- fiona==1.9.6
- flake8==5.0.4
- flake8-polyfill==1.0.2
- gitdb==4.0.12
- gitpython==3.1.44
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- isodate==0.6.1
- isort==5.11.5
- lazy-object-proxy==1.9.0
- llvmlite==0.39.1
- lxml==5.3.1
- mccabe==0.7.0
- nc-time-axis==1.4.1
- networkx==2.6.3
- numba==0.56.4
- pathspec==0.11.2
- pep8-naming==0.10.0
- platformdirs==4.0.0
- pluggy==1.2.0
- prospector==1.10.3
- prov==2.0.1
- psutil==7.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pydocstyle==6.3.0
- pydot==2.0.0
- pyflakes==2.5.0
- pygments==2.17.2
- pylint==2.17.7
- pylint-celery==0.3
- pylint-django==2.5.3
- pylint-flask==0.6
- pylint-plugin-utils==0.7
- pyproject-hooks==1.2.0
- pyroma==4.2
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-env==1.0.1
- pytest-flake8==1.1.3
- pytest-html==3.2.0
- pytest-metadata==3.0.0
- pytz==2025.2
- rdflib==6.3.2
- requests==2.31.0
- requirements-detector==1.2.2
- semver==3.0.4
- setoptconf-tmp==0.3.1
- smmap==5.0.2
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinx-rtd-theme==1.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==2.0.1
- tomlkit==0.12.5
- trove-classifiers==2025.3.19.19
- typed-ast==1.5.5
- urllib3==2.0.7
- vmprof==0.4.18.1
- wrapt==1.16.0
- yamale==4.0.4
- yamllint==1.32.0
- yapf==0.43.0
- zipp==3.15.0
prefix: /opt/conda/envs/ESMValCore
| [
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::flake-8::FLAKE8",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSftof::test_fix_data",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSftof::test_get",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestAreacello::test_fix_data",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestAreacello::test_fix_metadata",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestAreacello::test_get",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSit::test_fix_metadata",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSit::test_fix_metadata_day_do_nothing",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSit::test_fix_metadata_not_needed",
"tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py::TestSit::test_get"
] | [] | [] | [] | Apache License 2.0 | 6,395 | 521 | [
"esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py"
] |
|
googleapis__python-storage-48 | 0df8a91cf78d0352b96684228f599e8973a32531 | 2020-02-07 14:35:35 | c99b41604db2866063e3981878cbb3403278f7ff | frankyn: @IlyaFaer this LGTM, but please update relevant conformance tests before merging this code to confirm that it's working as expected.
I updated conformance tests in https://github.com/googleapis/conformance-tests/tree/master/storage/v1
If you have questions PLMK. Specifically for this change you should rely on the following two conformance tests:
* Query Parameter Encoding
* Query Parameter Ordering
tseaver: This PR also addresses part of the change for #12.
tseaver: @frankyn We don't currently do *any* of the cross-language conformance tests inside `python-storage`. I just made #57 to track that.
frankyn: @IlyaFaer please respond to @tseaver's comments before merging
frankyn: Friendly ping and pending Kokoro passing.
frankyn: Merging when green.
frankyn: Pending approval from @tseaver as there is on-going discussion.
frankyn: @tseaver friendly ping | diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py
index 0da075d..8151786 100644
--- a/google/cloud/storage/_signing.py
+++ b/google/cloud/storage/_signing.py
@@ -509,7 +509,7 @@ def generate_signed_url_v4(
:type query_parameters: dict
:param query_parameters:
- (Optional) Additional query paramtersto be included as part of the
+ (Optional) Additional query parameters to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers#query
@@ -585,8 +585,7 @@ def generate_signed_url_v4(
if generation is not None:
query_parameters["generation"] = generation
- ordered_query_parameters = sorted(query_parameters.items())
- canonical_query_string = six.moves.urllib.parse.urlencode(ordered_query_parameters)
+ canonical_query_string = _url_encode(query_parameters)
lowercased_headers = dict(ordered_headers)
@@ -672,3 +671,34 @@ def _sign_message(message, access_token, service_account_email):
data = json.loads(response.data.decode("utf-8"))
return data["signature"]
+
+
+def _url_encode(query_params):
+ """Encode query params into URL.
+
+ :type query_params: dict
+ :param query_params: Query params to be encoded.
+
+ :rtype: str
+ :returns: URL encoded query params.
+ """
+ params = [
+ "{}={}".format(_quote_param(name), _quote_param(value))
+ for name, value in query_params.items()
+ ]
+
+ return "&".join(sorted(params))
+
+
+def _quote_param(param):
+ """Quote query param.
+
+ :type param: Any
+ :param param: Query param to be encoded.
+
+ :rtype: str
+ :returns: URL encoded query param.
+ """
+ if not isinstance(param, bytes):
+ param = str(param)
+ return six.moves.urllib.parse.quote(param, safe="~")
| [Storage] V4 Signature Supplying Additional Query Parameters With GCS Signed URLs
Tracking issue for work defined in companion document.
[New Client Request - Support For Query Params With GCS Signed URLs](https://docs.google.com/document/d/1PnH2K-M-2wkkYkUyLBh7JDDRqYgBNimP5fUFuEeB4u4/edit#heading=h.xgsgzd636vko) | googleapis/python-storage | diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py
index c7231b5..47ed2bd 100644
--- a/tests/unit/test__signing.py
+++ b/tests/unit/test__signing.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+#
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -705,6 +707,61 @@ class Test_sign_message(unittest.TestCase):
)
+class TestCustomURLEncoding(unittest.TestCase):
+ def test_url_encode(self):
+ from google.cloud.storage._signing import _url_encode
+
+ # param1 includes safe symbol ~
+ # param# includes symbols, which must be encoded
+ query_params = {"param1": "value~1-2", "param#": "*value+value/"}
+
+ self.assertEqual(
+ _url_encode(query_params), "param%23=%2Avalue%2Bvalue%2F¶m1=value~1-2"
+ )
+
+
+class TestQuoteParam(unittest.TestCase):
+ def test_ascii_symbols(self):
+ from google.cloud.storage._signing import _quote_param
+
+ encoded_param = _quote_param("param")
+ self.assertIsInstance(encoded_param, str)
+ self.assertEqual(encoded_param, "param")
+
+ def test_quoted_symbols(self):
+ from google.cloud.storage._signing import _quote_param
+
+ encoded_param = _quote_param("!#$%&'()*+,/:;=?@[]")
+ self.assertIsInstance(encoded_param, str)
+ self.assertEqual(
+ encoded_param, "%21%23%24%25%26%27%28%29%2A%2B%2C%2F%3A%3B%3D%3F%40%5B%5D"
+ )
+
+ def test_unquoted_symbols(self):
+ from google.cloud.storage._signing import _quote_param
+ import string
+
+ UNQUOTED = string.ascii_letters + string.digits + ".~_-"
+
+ encoded_param = _quote_param(UNQUOTED)
+ self.assertIsInstance(encoded_param, str)
+ self.assertEqual(encoded_param, UNQUOTED)
+
+ def test_unicode_symbols(self):
+ from google.cloud.storage._signing import _quote_param
+
+ encoded_param = _quote_param("ЁЙЦЯЩЯЩ")
+ self.assertIsInstance(encoded_param, str)
+ self.assertEqual(encoded_param, "%D0%81%D0%99%D0%A6%D0%AF%D0%A9%D0%AF%D0%A9")
+
+ def test_bytes(self):
+ from google.cloud.storage._signing import _quote_param
+
+ encoded_param = _quote_param(b"bytes")
+ self.assertIsInstance(encoded_param, str)
+ self.assertEqual(encoded_param, "bytes")
+
+
_DUMMY_SERVICE_ACCOUNT = None
@@ -731,6 +788,7 @@ def _run_conformance_test(resource, test_data):
method=test_data["method"],
_request_timestamp=test_data["timestamp"],
headers=test_data.get("headers"),
+ query_parameters=test_data.get("queryParameters"),
)
assert url == test_data["expectedUrl"]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | 1.26 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==4.2.4
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
google-api-core==2.10.2
google-auth==1.35.0
google-cloud-core==1.7.3
-e git+https://github.com/googleapis/python-storage.git@0df8a91cf78d0352b96684228f599e8973a32531#egg=google_cloud_storage
google-resumable-media==0.5.1
googleapis-common-protos==1.69.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
protobuf==4.25.6
pyasn1==0.6.1
pyasn1_modules==0.4.2
pytest==8.3.5
pytest-cov==6.0.0
pytest-xdist==3.6.1
requests==2.32.3
rsa==4.9
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: python-storage
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==4.2.4
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- google-api-core==2.10.2
- google-auth==1.35.0
- google-cloud-core==1.7.3
- google-resumable-media==0.5.1
- googleapis-common-protos==1.69.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- protobuf==4.25.6
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- requests==2.32.3
- rsa==4.9
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/python-storage
| [
"tests/unit/test__signing.py::TestCustomURLEncoding::test_url_encode",
"tests/unit/test__signing.py::TestQuoteParam::test_ascii_symbols",
"tests/unit/test__signing.py::TestQuoteParam::test_bytes",
"tests/unit/test__signing.py::TestQuoteParam::test_quoted_symbols",
"tests/unit/test__signing.py::TestQuoteParam::test_unicode_symbols",
"tests/unit/test__signing.py::TestQuoteParam::test_unquoted_symbols"
] | [] | [
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_int",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_naive_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_none",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_other_zone_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_timedelta_days",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_timedelta_seconds",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_expiration_utc_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v2::test_w_invalid_expiration_type",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_int",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_int_gt_seven_days",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_naive_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_none",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_other_zone_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_timedelta",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_expiration_utc_datetime",
"tests/unit/test__signing.py::Test_get_expiration_seconds_v4::test_w_invalid_expiration_type",
"tests/unit/test__signing.py::Test_get_signed_query_params_v2::test_it",
"tests/unit/test__signing.py::Test_get_canonical_headers::test_w_dict",
"tests/unit/test__signing.py::Test_get_canonical_headers::test_w_embedded_ws",
"tests/unit/test__signing.py::Test_get_canonical_headers::test_w_list_and_multiples",
"tests/unit/test__signing.py::Test_get_canonical_headers::test_w_none",
"tests/unit/test__signing.py::Test_canonicalize_v2::test_w_headers_and_resumable",
"tests/unit/test__signing.py::Test_canonicalize_v2::test_w_query_paramters",
"tests/unit/test__signing.py::Test_canonicalize_v2::test_wo_headers_or_query_parameters",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_custom_headers_dict",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_custom_headers_list",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_custom_query_parameters_w_none_value",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_custom_query_parameters_w_string_value",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_endpoint",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_expiration_int",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_generation",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_method",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_method_resumable",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_response_disposition",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_w_response_type",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_with_access_token",
"tests/unit/test__signing.py::Test_generate_signed_url_v2::test_with_google_credentials",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_api_access_endpoint",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_content_md5",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_content_type",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_custom_headers",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_custom_host_header",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_custom_payload_hash_goog",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_custom_query_parameters_w_none_value",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_custom_query_parameters_w_string_value",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_defaults",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_expiration_too_long",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_generation",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_method",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_method_resumable",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_response_disposition",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_w_response_type",
"tests/unit/test__signing.py::Test_generate_signed_url_v4::test_with_access_token",
"tests/unit/test__signing.py::Test_sign_message::test_sign_bytes",
"tests/unit/test__signing.py::Test_sign_message::test_sign_bytes_failure",
"tests/unit/test__signing.py::test_conformance_bucket[test_data0]",
"tests/unit/test__signing.py::test_conformance_blob[test_data0]",
"tests/unit/test__signing.py::test_conformance_blob[test_data1]",
"tests/unit/test__signing.py::test_conformance_blob[test_data2]",
"tests/unit/test__signing.py::test_conformance_blob[test_data3]",
"tests/unit/test__signing.py::test_conformance_blob[test_data4]",
"tests/unit/test__signing.py::test_conformance_blob[test_data5]",
"tests/unit/test__signing.py::test_conformance_blob[test_data6]",
"tests/unit/test__signing.py::test_conformance_blob[test_data7]",
"tests/unit/test__signing.py::test_conformance_blob[test_data8]"
] | [] | Apache License 2.0 | 6,399 | 491 | [
"google/cloud/storage/_signing.py"
] |
Toblerity__rtree-141 | bc6ccc0ed3e6d7a32719e3c6f3ba002b66bb1909 | 2020-02-07 21:30:00 | aecb530df833e663e478e4a6bc14366f9b4553c5 | adriangb: Welp not sure why the `Conda Win64 Python38` tester is failing with `OSError: could not find or load spatialindex_c-64.dll`.
adriangb: I suppose it's safe to ignore then.
sr-murthy: @adriangb I was able to checkout the PR (I was using the wrong remote) - all tests pass. That failure on Conda is an environment problem.
sr-murthy: The 64-bit Libspatialindex 1.9.3 lib. does appear to be installed in the Conda Win64 environment
https://dev.azure.com/hobuinc/Rtree/_build/results?buildId=1300&view=logs&j=c874c4c4-56b1-5d29-a8e3-464d971e2eca&t=fc265a19-c6b2-50d3-d5e9-a5b2f378d8f7&l=46
but the Rtree doesn't seem to be able to detect the corresponding DLL. The loading of the DLL involves these lines in `rtree/core.py`
https://github.com/sr-murthy/rtree/blob/master/rtree/core.py#L126
I think you'd need to work backwards to find out why that exception is triggered.
adriangb: > The 64-bit Libspatialindex 1.9.3 lib. does appear to be installed in the Conda Win64 environment
>
> https://dev.azure.com/hobuinc/Rtree/_build/results?buildId=1300&view=logs&j=c874c4c4-56b1-5d29-a8e3-464d971e2eca&t=fc265a19-c6b2-50d3-d5e9-a5b2f378d8f7&l=46
>
> but the Rtree doesn't seem to be able to detect the corresponding DLL. The loading of the DLL involves these lines in `rtree/core.py`
>
> https://github.com/Toblerity/rtree/blob/master/rtree/core.py#L126
>
> I think you'd need to work backwards to find out why that exception is triggered.
Ok will try to troubleshoot but since I don't have any management access to the Azure pipeline I'm going to be doing it by pushing code with more verbose logging and having the automated build trigger. Sorry if this causes notification bombardment.
hobu: It looks like a bum merge happened in libspatialindex's Conda feedstock that reverted the base library to 1.9.0! https://github.com/conda-forge/libspatialindex-feedstock/blob/master/recipe/meta.yaml#L1
The DLL name was modified after that release I think. This can be merged. @ocefpaf and I'll will fix that issue.
adriangb: Cool, glad we uncovered that issue as well! It also saved me a lot of blind troubleshooting sweat and tears. Merge whenever you are ready.
adriangb: @hobu I don't mean to bother, but do you have a schedule to merge / release? I'm trying to implement `nearest` in `geopandas`, it would be helpful to have this bug fixed. Thanks!
hobu: bouncing to see if CI is now cured with libspatialindex conda-forge refresh.
hobu: CI error is #142 | diff --git a/rtree/index.py b/rtree/index.py
index 69af61b..b90c88d 100644
--- a/rtree/index.py
+++ b/rtree/index.py
@@ -846,6 +846,12 @@ class Index(object):
return self._nearest_obj(coordinates, num_results, objects)
p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
+ # p_num_results is an input and output for C++ lib
+ # as an input it says "get n closest neighbors"
+ # but if multiple neighbors are at the same distance, both will be returned
+ # so the number of returned neighbors may be > p_num_results
+ # thus p_num_results.contents.value gets set as an output by the C++ lib
+ # to indicate the actual number of results for _get_ids to use
p_num_results = ctypes.pointer(ctypes.c_uint64(num_results))
it = ctypes.pointer(ctypes.c_int64())
@@ -857,7 +863,7 @@ class Index(object):
ctypes.byref(it),
p_num_results)
- return self._get_ids(it, min(num_results, p_num_results.contents.value))
+ return self._get_ids(it, p_num_results.contents.value)
def _nearestTP(self, coordinates, velocities, times, num_results=1, objects=False):
p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
| nearest picking just first item
I ran into a case where my index had a large rectangle with a smaller rectangle inside. When I run `nearest` with `num_results=1` on a point inside the smaller rectangle, only 1 result is returned. Since the point is within both (verified by running `intersection`) I would expect that the distance to both items is 0, thus they are both equally near and are both returned. I also noticed that it's always the first one that was inserted into the index that is returned (I swapped the order of insertion and it did not change the result, if one were actually nearer, the result should change).
```python3
from rtree import index
point = (0, 0)
small_box = (-10, -10, 10, 10)
large_box = (-50, -50, 50, 50)
idx = index.Index()
idx.insert(0, small_box)
idx.insert(1, large_box)
print(list(idx.nearest(point, 2)))
print(list(idx.nearest(point, 1)))
print(list(idx.intersection(point)))
print("\n")
# swap order of insertion and index, this should change which one nearest returns
idx = index.Index()
idx.insert(0, large_box)
idx.insert(1, small_box)
print(list(idx.nearest(point, 2)))
print(list(idx.nearest(point, 1)))
print(list(idx.intersection(point)))
print("\n")
```
Output:
```
[0, 1] # expected output for num_results=2
[0] # should also be 0, 1
[0, 1] # intersection confirms point is within both rectangles
[0, 1] # expected output for num_results=2
[0] # inconsistent with above result, swapping indexes should make this [1]
[0, 1] # intersection confirms point is within both rectangles
``` | Toblerity/rtree | diff --git a/tests/test_index.py b/tests/test_index.py
index 833bb3d..751c945 100644
--- a/tests/test_index.py
+++ b/tests/test_index.py
@@ -340,6 +340,38 @@ class IndexNearest(IndexTestCase):
idx.add(i, (start, 1, stop, 1))
hits = sorted(idx.nearest((13, 0, 20, 2), 3))
self.assertEqual(hits, [3, 4, 5])
+
+ def test_nearest_equidistant(self):
+ """Test that if records are equidistant, both are returned."""
+ point = (0, 0)
+ small_box = (-10, -10, 10, 10)
+ large_box = (-50, -50, 50, 50)
+
+ idx = index.Index()
+ idx.insert(0, small_box)
+ idx.insert(1, large_box)
+ self.assertEqual(list(idx.nearest(point, 2)), [0, 1])
+ self.assertEqual(list(idx.nearest(point, 1)), [0, 1])
+
+ idx.insert(2, (0, 0))
+ self.assertEqual(list(idx.nearest(point, 2)), [0, 1, 2])
+ self.assertEqual(list(idx.nearest(point, 1)), [0, 1, 2])
+
+ idx = index.Index()
+ idx.insert(0, small_box)
+ idx.insert(1, large_box)
+ idx.insert(2, (50, 50)) # point on top right vertex of large_box
+ point = (51, 51) # right outside of large_box
+ self.assertEqual(list(idx.nearest(point, 2)), [1, 2])
+ self.assertEqual(list(idx.nearest(point, 1)), [1, 2])
+
+ idx = index.Index()
+ idx.insert(0, small_box)
+ idx.insert(1, large_box)
+ idx.insert(2, (51, 51)) # point right outside on top right vertex of large_box
+ point = (51, 52) # shifted 1 unit up from the point above
+ self.assertEqual(list(idx.nearest(point, 2)), [2, 1])
+ self.assertEqual(list(idx.nearest(point, 1)), [2])
def test_nearest_object(self):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": true,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov",
"numpy",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
numpy==2.0.2
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
-e git+https://github.com/Toblerity/rtree.git@bc6ccc0ed3e6d7a32719e3c6f3ba002b66bb1909#egg=Rtree
tomli==2.2.1
| name: rtree
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libspatialindex=1.9.3=h2531618_0
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- numpy==2.0.2
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/rtree
| [
"tests/test_index.py::IndexNearest::test_nearest_equidistant"
] | [] | [
"tests/test_index.py::IndexVersion::test_libsidx_version",
"tests/test_index.py::IndexBounds::test_invalid_specifications",
"tests/test_index.py::IndexProperties::test_index_properties",
"tests/test_index.py::IndexProperties::test_invalid_properties",
"tests/test_index.py::IndexProperties::test_result_limit",
"tests/test_index.py::IndexProperties::test_result_offset",
"tests/test_index.py::TestPickling::test_index",
"tests/test_index.py::TestPickling::test_property",
"tests/test_index.py::IndexContainer::test_container",
"tests/test_index.py::IndexIntersection::test_double_insertion",
"tests/test_index.py::IndexIntersection::test_intersection",
"tests/test_index.py::IndexIntersection::test_objects",
"tests/test_index.py::IndexSerialization::test_custom_filenames",
"tests/test_index.py::IndexSerialization::test_interleaving",
"tests/test_index.py::IndexSerialization::test_overwrite",
"tests/test_index.py::IndexSerialization::test_pickling",
"tests/test_index.py::IndexSerialization::test_unicode_filenames",
"tests/test_index.py::IndexNearest::test_nearest_basic",
"tests/test_index.py::IndexNearest::test_nearest_object",
"tests/test_index.py::IndexDelete::test_deletion",
"tests/test_index.py::IndexMoreDimensions::test_3d",
"tests/test_index.py::IndexMoreDimensions::test_4d",
"tests/test_index.py::IndexStream::test_empty_stream",
"tests/test_index.py::IndexStream::test_exception_at_beginning_of_generator",
"tests/test_index.py::IndexStream::test_exception_in_generator",
"tests/test_index.py::IndexStream::test_stream_input",
"tests/test_index.py::IndexCustomStorage::test_custom_storage",
"tests/test_index.py::IndexCustomStorage::test_custom_storage_reopening"
] | [] | MIT License | 6,406 | 338 | [
"rtree/index.py"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.