instance_id (string, length 12-57) | base_commit (string, length 40) | created_at (date, 2015-01-06 14:05:07 to 2025-04-29 17:56:51) | environment_setup_commit (string, length 40) | hints_text (string, length 0-158k) | patch (string, length 261-20.8k) | problem_statement (string, length 11-52.5k) | repo (string, length 7-53) | test_patch (string, length 280-206k) | meta (dict) | version (string, 463 classes) | install_config (dict) | requirements (string, length 93-34k, nullable) | environment (string, length 772-20k, nullable) | FAIL_TO_PASS (sequence, length 1-856) | FAIL_TO_FAIL (sequence, length 0-536) | PASS_TO_PASS (sequence, length 0-7.87k) | PASS_TO_FAIL (sequence, length 0-92) | license_name (string, 35 classes) | __index_level_0__ (int64, 11-21.4k) | num_tokens_patch (int64, 103-4.99k) | before_filepaths (sequence, length 0-14) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
tobymao__sqlglot-3725 | f4a28721fd33edb3178c1d99746209dadfbba487 | 2024-07-02 10:34:36 | c790c3b1fa274d7b0faf9f75e7dbc62bc4f55c67 | VaggelisD: CI tests failed on a snowflake executor test case, taking a look now
georgesittas: PR looks good @VaggelisD, couple of comments only | diff --git a/sqlglot/dialects/clickhouse.py b/sqlglot/dialects/clickhouse.py
index 75be9436..75217499 100644
--- a/sqlglot/dialects/clickhouse.py
+++ b/sqlglot/dialects/clickhouse.py
@@ -174,7 +174,6 @@ class ClickHouse(Dialect):
"DATE_FORMAT": _build_date_format,
"DATE_SUB": build_date_delta(exp.DateSub, default_unit=None),
"DATESUB": build_date_delta(exp.DateSub, default_unit=None),
- "EXTRACT": exp.RegexpExtract.from_arg_list,
"FORMATDATETIME": _build_date_format,
"JSONEXTRACTSTRING": build_json_extract_path(
exp.JSONExtractScalar, zero_based_indexing=False
@@ -347,7 +346,6 @@ class ClickHouse(Dialect):
"QUANTILE": lambda self: self._parse_quantile(),
}
- FUNCTION_PARSERS.pop("EXTRACT")
FUNCTION_PARSERS.pop("MATCH")
NO_PAREN_FUNCTION_PARSERS = parser.Parser.NO_PAREN_FUNCTION_PARSERS.copy()
@@ -410,6 +408,23 @@ class ClickHouse(Dialect):
"INDEX",
}
+ def _parse_extract(self) -> exp.Extract | exp.Anonymous:
+ index = self._index
+ this = self._parse_bitwise()
+ if self._match(TokenType.FROM):
+ self._retreat(index)
+ return super()._parse_extract()
+
+ # We return Anonymous here because extract and regexpExtract have different semantics,
+ # so parsing extract(foo, bar) into RegexpExtract can potentially break queries. E.g.,
+ # `extract('foobar', 'b')` works, but CH crashes for `regexpExtract('foobar', 'b')`.
+ #
+ # TODO: can we somehow convert the former into an equivalent `regexpExtract` call?
+ self._match(TokenType.COMMA)
+ return self.expression(
+ exp.Anonymous, this="extract", expressions=[this, self._parse_bitwise()]
+ )
+
def _parse_assignment(self) -> t.Optional[exp.Expression]:
this = super()._parse_assignment()
diff --git a/sqlglot/dialects/databricks.py b/sqlglot/dialects/databricks.py
index e2628dc9..00f63532 100644
--- a/sqlglot/dialects/databricks.py
+++ b/sqlglot/dialects/databricks.py
@@ -43,7 +43,7 @@ class Databricks(Spark):
class Parser(Spark.Parser):
LOG_DEFAULTS_TO_LN = True
STRICT_CAST = True
- COLON_IS_JSON_EXTRACT = True
+ COLON_IS_VARIANT_EXTRACT = True
FUNCTIONS = {
**Spark.Parser.FUNCTIONS,
diff --git a/sqlglot/dialects/dialect.py b/sqlglot/dialects/dialect.py
index 4c394be7..da212869 100644
--- a/sqlglot/dialects/dialect.py
+++ b/sqlglot/dialects/dialect.py
@@ -510,8 +510,28 @@ class Dialect(metaclass=_Dialect):
return dialect
if isinstance(dialect, str):
try:
- dialect_name, *kv_pairs = dialect.split(",")
- kwargs = {k.strip(): v.strip() for k, v in (kv.split("=") for kv in kv_pairs)}
+ dialect_name, *kv_strings = dialect.split(",")
+ kv_pairs = (kv.split("=") for kv in kv_strings)
+ kwargs = {}
+ for pair in kv_pairs:
+ key = pair[0].strip()
+ value: t.Union[bool | str | None] = None
+
+ if len(pair) == 1:
+ # Default initialize standalone settings to True
+ value = True
+ elif len(pair) == 2:
+ value = pair[1].strip()
+
+ # Coerce the value to boolean if it matches to the truthy/falsy values below
+ value_lower = value.lower()
+ if value_lower in ("true", "1"):
+ value = True
+ elif value_lower in ("false", "0"):
+ value = False
+
+ kwargs[key] = value
+
except ValueError:
raise ValueError(
f"Invalid dialect format: '{dialect}'. "
diff --git a/sqlglot/dialects/presto.py b/sqlglot/dialects/presto.py
index 69c1fbec..925af862 100644
--- a/sqlglot/dialects/presto.py
+++ b/sqlglot/dialects/presto.py
@@ -173,6 +173,35 @@ def _unix_to_time_sql(self: Presto.Generator, expression: exp.UnixToTime) -> str
return f"FROM_UNIXTIME(CAST({timestamp} AS DOUBLE) / POW(10, {scale}))"
+def _jsonextract_sql(self: Presto.Generator, expression: exp.JSONExtract) -> str:
+ is_json_extract = self.dialect.settings.get("variant_extract_is_json_extract", True)
+
+ # Generate JSON_EXTRACT unless the user has configured that a Snowflake / Databricks
+ # VARIANT extract (e.g. col:x.y) should map to dot notation (i.e ROW access) in Presto/Trino
+ if not expression.args.get("variant_extract") or is_json_extract:
+ return self.func(
+ "JSON_EXTRACT", expression.this, expression.expression, *expression.expressions
+ )
+
+ this = self.sql(expression, "this")
+
+ # Convert the JSONPath extraction `JSON_EXTRACT(col, '$.x.y) to a ROW access col.x.y
+ segments = []
+ for path_key in expression.expression.expressions[1:]:
+ if not isinstance(path_key, exp.JSONPathKey):
+ # Cannot transpile subscripts, wildcards etc to dot notation
+ self.unsupported(f"Cannot transpile JSONPath segment '{path_key}' to ROW access")
+ continue
+ key = path_key.this
+ if not exp.SAFE_IDENTIFIER_RE.match(key):
+ key = f'"{key}"'
+ segments.append(f".{key}")
+
+ expr = "".join(segments)
+
+ return f"{this}{expr}"
+
+
def _to_int(expression: exp.Expression) -> exp.Expression:
if not expression.type:
from sqlglot.optimizer.annotate_types import annotate_types
@@ -390,6 +419,7 @@ class Presto(Dialect):
exp.If: if_sql(),
exp.ILike: no_ilike_sql,
exp.Initcap: _initcap_sql,
+ exp.JSONExtract: _jsonextract_sql,
exp.Last: _first_last_sql,
exp.LastValue: _first_last_sql,
exp.LastDay: lambda self, e: self.func("LAST_DAY_OF_MONTH", e.this),
diff --git a/sqlglot/dialects/snowflake.py b/sqlglot/dialects/snowflake.py
index c7733801..1eab756b 100644
--- a/sqlglot/dialects/snowflake.py
+++ b/sqlglot/dialects/snowflake.py
@@ -241,7 +241,7 @@ class Snowflake(Dialect):
class Parser(parser.Parser):
IDENTIFY_PIVOT_STRINGS = True
DEFAULT_SAMPLING_METHOD = "BERNOULLI"
- COLON_IS_JSON_EXTRACT = True
+ COLON_IS_VARIANT_EXTRACT = True
ID_VAR_TOKENS = {
*parser.Parser.ID_VAR_TOKENS,
diff --git a/sqlglot/executor/python.py b/sqlglot/executor/python.py
index 674ef789..5988c4d9 100644
--- a/sqlglot/executor/python.py
+++ b/sqlglot/executor/python.py
@@ -447,6 +447,7 @@ class Python(Dialect):
exp.Is: lambda self, e: (
self.binary(e, "==") if isinstance(e.this, exp.Literal) else self.binary(e, "is")
),
+ exp.JSONExtract: lambda self, e: self.func(e.key, e.this, e.expression, *e.expressions),
exp.JSONPath: lambda self, e: f"[{','.join(self.sql(p) for p in e.expressions[1:])}]",
exp.JSONPathKey: lambda self, e: f"'{self.sql(e.this)}'",
exp.JSONPathSubscript: lambda self, e: f"'{e.this}'",
diff --git a/sqlglot/expressions.py b/sqlglot/expressions.py
index 600f57cb..764810dc 100644
--- a/sqlglot/expressions.py
+++ b/sqlglot/expressions.py
@@ -5481,7 +5481,13 @@ class JSONBContains(Binary, Func):
class JSONExtract(Binary, Func):
- arg_types = {"this": True, "expression": True, "only_json_types": False, "expressions": False}
+ arg_types = {
+ "this": True,
+ "expression": True,
+ "only_json_types": False,
+ "expressions": False,
+ "variant_extract": False,
+ }
_sql_names = ["JSON_EXTRACT"]
is_var_len_args = True
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index a9118938..d5fe792b 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -1204,8 +1204,8 @@ class Parser(metaclass=_Parser):
# Whether the -> and ->> operators expect documents of type JSON (e.g. Postgres)
JSON_ARROWS_REQUIRE_JSON_TYPE = False
- # Whether the `:` operator is used to extract a value from a JSON document
- COLON_IS_JSON_EXTRACT = False
+ # Whether the `:` operator is used to extract a value from a VARIANT column
+ COLON_IS_VARIANT_EXTRACT = False
# Whether or not a VALUES keyword needs to be followed by '(' to form a VALUES clause.
# If this is True and '(' is not found, the keyword will be treated as an identifier
@@ -1477,9 +1477,9 @@ class Parser(metaclass=_Parser):
def _try_parse(self, parse_method: t.Callable[[], T], retreat: bool = False) -> t.Optional[T]:
"""
- Attemps to backtrack if a parse function that contains a try/catch internally raises an error. This behavior can
- be different depending on the uset-set ErrorLevel, so _try_parse aims to solve this by setting & resetting
- the parser state accordingly
+ Attemps to backtrack if a parse function that contains a try/catch internally raises an error.
+ This behavior can be different depending on the uset-set ErrorLevel, so _try_parse aims to
+ solve this by setting & resetting the parser state accordingly
"""
index = self._index
error_level = self.error_level
@@ -4557,7 +4557,7 @@ class Parser(metaclass=_Parser):
return this
- def _parse_colon_as_json_extract(
+ def _parse_colon_as_variant_extract(
self, this: t.Optional[exp.Expression]
) -> t.Optional[exp.Expression]:
casts = []
@@ -4590,11 +4590,14 @@ class Parser(metaclass=_Parser):
if path:
json_path.append(self._find_sql(self._tokens[start_index], end_token))
+ # The VARIANT extract in Snowflake/Databricks is parsed as a JSONExtract; Snowflake uses the json_path in GET_PATH() while
+ # Databricks transforms it back to the colon/dot notation
if json_path:
this = self.expression(
exp.JSONExtract,
this=this,
expression=self.dialect.to_json_path(exp.Literal.string(".".join(json_path))),
+ variant_extract=True,
)
while casts:
@@ -4651,7 +4654,7 @@ class Parser(metaclass=_Parser):
this = self._parse_bracket(this)
- return self._parse_colon_as_json_extract(this) if self.COLON_IS_JSON_EXTRACT else this
+ return self._parse_colon_as_variant_extract(this) if self.COLON_IS_VARIANT_EXTRACT else this
def _parse_primary(self) -> t.Optional[exp.Expression]:
if self._match_set(self.PRIMARY_PARSERS):
@@ -5345,7 +5348,7 @@ class Parser(metaclass=_Parser):
order=self._match(TokenType.OVER) and self._parse_wrapped(self._parse_order),
)
- def _parse_extract(self) -> exp.Extract:
+ def _parse_extract(self) -> exp.Extract | exp.Anonymous:
this = self._parse_function() or self._parse_var_or_string(upper=True)
if self._match(TokenType.FROM):
| Snowflake nested fields are incorrectly translated using `JSON_EXTRACT`
**Before you file an issue**
- Make sure you specify the "read" dialect eg. `parse_one(sql, read="spark")`
- Make sure you specify the "write" dialect eg. `ast.sql(dialect="duckdb")`
- Check if the issue still exists on main
**Fully reproducible code snippet**
```
print(sqlglot.transpile("select payload:testField from snowflake_table;", read="snowflake", write="trino", pretty=True)[0])
```
*Output:*
```
SELECT
JSON_EXTRACT(payload, '$.testField')
FROM snowflake_table
```
*Expected output:*
```
SELECT
payload.testField
FROM snowflake_table
```
`payload:testField` is how nested fields of VARIANT type are accessed in Snowflake; they should be translated into `payload.testField` for Trino, since they would be of `row` type in Trino and not JSON.
**Official Documentation**
[Querying VARIANT nested fields in Snowflake](https://docs.snowflake.com/en/user-guide/querying-semistructured#traversing-semi-structured-data)
[Trino nested field query example](https://trino.io/blog/2020/08/14/dereference-pushdown.html#example)
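For context, the attached patch addresses this with a new `variant_extract_is_json_extract` dialect setting; below is a minimal sketch of the intended usage, mirroring the test patch later in this row (the output comments show the expected non-pretty SQL):

```python
from sqlglot import parse_one

# Snowflake VARIANT access: `payload:testField` parses into a JSONExtract node
# that the patch tags as a variant extract.
expr = parse_one("SELECT payload:testField FROM snowflake_table", read="snowflake")

# Default behaviour stays unchanged: JSON_EXTRACT is generated for Trino/Presto.
print(expr.sql("trino"))
# SELECT JSON_EXTRACT(payload, '$.testField') FROM snowflake_table

# Opting out via the new dialect setting yields ROW (dot notation) access instead.
print(expr.sql("trino, variant_extract_is_json_extract=False"))
# SELECT payload.testField FROM snowflake_table
```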
| tobymao/sqlglot | diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index f0452254..ef84d488 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -25,6 +25,7 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)
+ self.validate_identity("SELECT EXTRACT(YEAR FROM toDateTime('2023-02-01'))")
self.validate_identity("extract(haystack, pattern)")
self.validate_identity("SELECT * FROM x LIMIT 1 UNION ALL SELECT * FROM y")
self.validate_identity("SELECT CAST(x AS Tuple(String, Array(Nullable(Float64))))")
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index be262d4a..c0afb2f4 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -102,14 +102,10 @@ class TestDialect(Validator):
lowercase_mysql = Dialect.get_or_raise("mysql, normalization_strategy = lowercase")
self.assertEqual(lowercase_mysql.normalization_strategy.value, "LOWERCASE")
- with self.assertRaises(ValueError) as cm:
+ with self.assertRaises(AttributeError) as cm:
Dialect.get_or_raise("mysql, normalization_strategy")
- self.assertEqual(
- str(cm.exception),
- "Invalid dialect format: 'mysql, normalization_strategy'. "
- "Please use the correct format: 'dialect [, k1 = v2 [, ...]]'.",
- )
+ self.assertEqual(str(cm.exception), "'bool' object has no attribute 'upper'")
with self.assertRaises(ValueError) as cm:
Dialect.get_or_raise("myqsl")
@@ -127,6 +123,12 @@ class TestDialect(Validator):
self.assertEqual(oracle_with_settings.normalization_strategy.value, "LOWERCASE")
self.assertEqual(oracle_with_settings.settings, {"version": "19.5"})
+ bool_settings = Dialect.get_or_raise("oracle, s1=TruE, s2=1, s3=FaLse, s4=0, s5=nonbool")
+ self.assertEqual(
+ bool_settings.settings,
+ {"s1": True, "s2": True, "s3": False, "s4": False, "s5": "nonbool"},
+ )
+
def test_compare_dialects(self):
bigquery_class = Dialect["bigquery"]
bigquery_object = BigQuery()
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index ebb270ab..82b9d626 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -1192,3 +1192,18 @@ MATCH_RECOGNIZE (
"starrocks": "SIGN(x)",
},
)
+
+ def test_json_vs_row_extract(self):
+ for dialect in ("trino", "presto"):
+ s = parse_one('SELECT col:x:y."special string"', read="snowflake")
+
+ dialect_json_extract_setting = f"{dialect}, variant_extract_is_json_extract=True"
+ dialect_row_access_setting = f"{dialect}, variant_extract_is_json_extract=False"
+
+ # By default, Snowflake VARIANT will generate JSON_EXTRACT() in Presto/Trino
+ json_extract_result = """SELECT JSON_EXTRACT(col, '$.x.y["special string"]')"""
+ self.assertEqual(s.sql(dialect), json_extract_result)
+ self.assertEqual(s.sql(dialect_json_extract_setting), json_extract_result)
+
+ # If the setting is overriden to False, then generate ROW access (dot notation)
+ self.assertEqual(s.sql(dialect_row_access_setting), 'SELECT col.x.y."special string"')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 8
} | 25.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@f4a28721fd33edb3178c1d99746209dadfbba487#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_clickhouse.py::TestClickhouse::test_clickhouse",
"tests/dialects/test_dialect.py::TestDialect::test_get_or_raise",
"tests/dialects/test_presto.py::TestPresto::test_json_vs_row_extract"
] | [] | [
"tests/dialects/test_clickhouse.py::TestClickhouse::test_agg_functions",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_cte",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_datetime_funcs",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ddl",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_drop_on_cluster",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_parameterization",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_signed_and_unsigned_types",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ternary",
"tests/dialects/test_dialect.py::TestDialect::test_alias",
"tests/dialects/test_dialect.py::TestDialect::test_array",
"tests/dialects/test_dialect.py::TestDialect::test_array_any",
"tests/dialects/test_dialect.py::TestDialect::test_cast",
"tests/dialects/test_dialect.py::TestDialect::test_cast_to_user_defined_type",
"tests/dialects/test_dialect.py::TestDialect::test_compare_dialects",
"tests/dialects/test_dialect.py::TestDialect::test_count_if",
"tests/dialects/test_dialect.py::TestDialect::test_create_sequence",
"tests/dialects/test_dialect.py::TestDialect::test_cross_join",
"tests/dialects/test_dialect.py::TestDialect::test_ddl",
"tests/dialects/test_dialect.py::TestDialect::test_decode",
"tests/dialects/test_dialect.py::TestDialect::test_enum",
"tests/dialects/test_dialect.py::TestDialect::test_hash_comments",
"tests/dialects/test_dialect.py::TestDialect::test_heredoc_strings",
"tests/dialects/test_dialect.py::TestDialect::test_if_null",
"tests/dialects/test_dialect.py::TestDialect::test_json",
"tests/dialects/test_dialect.py::TestDialect::test_lateral_subquery",
"tests/dialects/test_dialect.py::TestDialect::test_limit",
"tests/dialects/test_dialect.py::TestDialect::test_logarithm",
"tests/dialects/test_dialect.py::TestDialect::test_merge",
"tests/dialects/test_dialect.py::TestDialect::test_nested_ctes",
"tests/dialects/test_dialect.py::TestDialect::test_nullsafe_eq",
"tests/dialects/test_dialect.py::TestDialect::test_nullsafe_neq",
"tests/dialects/test_dialect.py::TestDialect::test_nvl2",
"tests/dialects/test_dialect.py::TestDialect::test_operators",
"tests/dialects/test_dialect.py::TestDialect::test_order_by",
"tests/dialects/test_dialect.py::TestDialect::test_qualify",
"tests/dialects/test_dialect.py::TestDialect::test_random",
"tests/dialects/test_dialect.py::TestDialect::test_reserved_keywords",
"tests/dialects/test_dialect.py::TestDialect::test_safediv",
"tests/dialects/test_dialect.py::TestDialect::test_set_operators",
"tests/dialects/test_dialect.py::TestDialect::test_substring",
"tests/dialects/test_dialect.py::TestDialect::test_time",
"tests/dialects/test_dialect.py::TestDialect::test_transactions",
"tests/dialects/test_dialect.py::TestDialect::test_truncate",
"tests/dialects/test_dialect.py::TestDialect::test_typeddiv",
"tests/dialects/test_dialect.py::TestDialect::test_unsupported_null_ordering",
"tests/dialects/test_presto.py::TestPresto::test_cast",
"tests/dialects/test_presto.py::TestPresto::test_ddl",
"tests/dialects/test_presto.py::TestPresto::test_encode_decode",
"tests/dialects/test_presto.py::TestPresto::test_hex_unhex",
"tests/dialects/test_presto.py::TestPresto::test_interval_plural_to_singular",
"tests/dialects/test_presto.py::TestPresto::test_json",
"tests/dialects/test_presto.py::TestPresto::test_match_recognize",
"tests/dialects/test_presto.py::TestPresto::test_presto",
"tests/dialects/test_presto.py::TestPresto::test_quotes",
"tests/dialects/test_presto.py::TestPresto::test_regex",
"tests/dialects/test_presto.py::TestPresto::test_signum",
"tests/dialects/test_presto.py::TestPresto::test_time",
"tests/dialects/test_presto.py::TestPresto::test_to_char",
"tests/dialects/test_presto.py::TestPresto::test_unicode_string",
"tests/dialects/test_presto.py::TestPresto::test_unnest"
] | [] | MIT License | 18,862 | 3,021 | [
"sqlglot/dialects/clickhouse.py",
"sqlglot/dialects/databricks.py",
"sqlglot/dialects/dialect.py",
"sqlglot/dialects/presto.py",
"sqlglot/dialects/snowflake.py",
"sqlglot/executor/python.py",
"sqlglot/expressions.py",
"sqlglot/parser.py"
] |
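Each row above bundles the issue, gold patch, test patch, and execution environment; a minimal sketch of consuming such rows with the `datasets` library follows (the repository id and split name are placeholders, not taken from this dump):

```python
from datasets import load_dataset

# Placeholder repository id and split; substitute wherever this dataset is hosted.
ds = load_dataset("your-org/your-swe-instances", split="train")

row = ds[0]
print(row["instance_id"])           # e.g. tobymao__sqlglot-3725
print(row["repo"], row["version"])  # repository and package version under test
print(row["FAIL_TO_PASS"])          # tests expected to flip from failing to passing
print(len(row["patch"]), "characters in the gold patch")
```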
ESMValGroup__ESMValCore-2472 | 0dce90cb34b4c0db9e1c23ce37a0f5485629056a | 2024-07-02 13:05:47 | c3a2b6ce6c2f60585581c52c692144ccce71879c | bouweandela: @schlunma I think I may have found a working solution now. Previously I used sorted tuples to make the result hashable and independent of the order in which facets appear. I switched to frozensets now, these are also hashable and independent of the order of the facets and do not need to be sorted. | diff --git a/esmvalcore/_recipe/from_datasets.py b/esmvalcore/_recipe/from_datasets.py
index f68bd9e09..76cf368a4 100644
--- a/esmvalcore/_recipe/from_datasets.py
+++ b/esmvalcore/_recipe/from_datasets.py
@@ -5,9 +5,10 @@ from __future__ import annotations
import itertools
import logging
import re
+from collections.abc import Iterable, Mapping, Sequence
from functools import partial
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Iterable, Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Dict
from nested_lookup import nested_delete
@@ -102,11 +103,13 @@ def _move_datasets_up(recipe: Recipe) -> Recipe:
def _to_frozen(item):
- """Return a frozen and sorted copy of nested dicts and lists."""
- if isinstance(item, list):
- return tuple(sorted(_to_frozen(elem) for elem in item))
- if isinstance(item, dict):
- return tuple(sorted((k, _to_frozen(v)) for k, v in item.items()))
+ """Return a frozen copy of nested dicts and lists."""
+ if isinstance(item, str):
+ return item
+ if isinstance(item, Mapping):
+ return frozenset((k, _to_frozen(v)) for k, v in item.items())
+ if isinstance(item, Iterable):
+ return frozenset(_to_frozen(elem) for elem in item)
return item
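As the hint and the patch above describe, the fix replaces sorted tuples with frozensets so the result is hashable and order-independent without requiring comparable element types; a minimal standalone sketch of the idea (hypothetical facet values, not the ESMValCore implementation itself):

```python
# sorted() cannot order a list mixing strings with nested tuples, e.g.
#   sorted(["land", ("operator", "sum")])
# raises: TypeError: '<' not supported between instances of 'str' and 'tuple'

def to_frozen(item):
    """Return a hashable, order-independent copy of nested dicts and lists."""
    if isinstance(item, str):
        return item
    if isinstance(item, dict):
        return frozenset((key, to_frozen(value)) for key, value in item.items())
    if isinstance(item, (list, tuple, set)):
        return frozenset(to_frozen(element) for element in item)
    return item

# Hypothetical facet dictionaries: the same content in a different order hashes identically.
a = {"dataset": "CanESM2", "mask": {"mask_out": "land"}, "exp": ["esmHistorical"]}
b = {"exp": ["esmHistorical"], "dataset": "CanESM2", "mask": {"mask_out": "land"}}
assert to_frozen(a) == to_frozen(b)
assert hash(to_frozen(a)) == hash(to_frozen(b))
```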
| Writing of filled recipe fails with `TypeError: '<' not supported between instances of 'str' and 'tuple'`
**Describe the bug**
The recipe
```yaml
documentation:
title: Test
description: Test
authors:
- schlund_manuel
preprocessors:
global_ocean:
mask_landsea:
mask_out: land
area_statistics:
operator: sum
diagnostics:
test:
variables:
fgco2:
short_name: fgco2
preprocessor: global_ocean
project: CMIP5
mip: Omon
exp: esmHistorical
ensemble: r1i1p1
additional_datasets:
- {dataset: CanESM2, start_year: 1960, end_year: 2005}
- {dataset: IPSL-CM5A-LR, start_year: 2000, end_year: 2000}
scripts:
null
```
when run with this configuration
```yaml
rootpath:
CMIP5:
/work/bd0854/DATA/ESMValTool2/CMIP5_DKRZ: DKRZ
/work/bd0854/DATA/ESMValTool2/download: ESGF
```
fails with the following error:
```bash
2024-06-27 11:11:33,746 UTC [2721773] ERROR Program terminated abnormally, see stack trace below for more information:
Traceback (most recent call last):
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_main.py", line 533, in run
fire.Fire(ESMValTool())
File "/work/bd0854/b309141/miniforge3/envs/esm/lib/python3.11/site-packages/fire/core.py", line 143, in Fire
component_trace = _Fire(component, args, parsed_flag_args, context, name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/work/bd0854/b309141/miniforge3/envs/esm/lib/python3.11/site-packages/fire/core.py", line 477, in _Fire
component, remaining_args = _CallAndUpdateTrace(
^^^^^^^^^^^^^^^^^^^^
File "/work/bd0854/b309141/miniforge3/envs/esm/lib/python3.11/site-packages/fire/core.py", line 693, in _CallAndUpdateTrace
component = fn(*varargs, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_main.py", line 413, in run
self._run(recipe, session)
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_main.py", line 455, in _run
process_recipe(recipe_file=recipe, session=session)
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_main.py", line 130, in process_recipe
recipe.run()
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/recipe.py", line 1090, in run
filled_recipe = self.write_filled_recipe()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/recipe.py", line 1128, in write_filled_recipe
recipe = datasets_to_recipe(USED_DATASETS, self._raw_recipe)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 341, in datasets_to_recipe
dataset_recipe = _datasets_to_recipe(datasets)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 83, in _datasets_to_recipe
recipe = _move_datasets_up(recipe)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 92, in _move_datasets_up
_move_one_level_up(diagnostic, 'variables', 'additional_datasets')
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 119, in _move_one_level_up
dataset_mapping[name] = {
^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 120, in <dictcomp>
_to_frozen(ds): ds
^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 105, in _to_frozen
return tuple(sorted((k, _to_frozen(v)) for k, v in item.items()))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 105, in <genexpr>
return tuple(sorted((k, _to_frozen(v)) for k, v in item.items()))
^^^^^^^^^^^^^
File "/home/b/b309141/repos/ESMValCore/esmvalcore/_recipe/from_datasets.py", line 103, in _to_frozen
return tuple(sorted(_to_frozen(elem) for elem in item))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: '<' not supported between instances of 'str' and 'tuple'
2024-06-27 11:11:33,763 UTC [2721773] INFO
If you have a question or need help, please start a new discussion on https://github.com/ESMValGroup/ESMValTool/discussions
If you suspect this is a bug, please open an issue on https://github.com/ESMValGroup/ESMValTool/issues
To make it easier to find out what the problem is, please consider attaching the files run/recipe_*.yml and run/main_log_debug.txt from the output directory.
```
See also https://github.com/ESMValGroup/ESMValTool/issues/3693#issuecomment-2194410283. | ESMValGroup/ESMValCore | diff --git a/tests/unit/recipe/test_from_datasets.py b/tests/unit/recipe/test_from_datasets.py
index 599119195..bc4c71408 100644
--- a/tests/unit/recipe/test_from_datasets.py
+++ b/tests/unit/recipe/test_from_datasets.py
@@ -23,24 +23,35 @@ def test_to_frozen():
"d",
"c",
],
+ "bb": "dc",
},
}
result = _to_frozen(data)
- expected = (
- (
- "a",
+ expected = frozenset(
+ {
(
- (
- "b",
- (
- "c",
- "d",
- ),
+ "a",
+ frozenset(
+ {
+ (
+ "b",
+ frozenset(
+ {
+ "c",
+ "d",
+ }
+ ),
+ ),
+ (
+ "bb",
+ "dc",
+ ),
+ }
),
),
- ),
- ("abc", "x"),
+ ("abc", "x"),
+ }
)
assert result == expected
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.11 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[develop]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": null,
"pre_install": null,
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accessible-pygments @ file:///home/conda/feedstock_root/build_artifacts/accessible-pygments_1734956106558/work
alabaster @ file:///home/conda/feedstock_root/build_artifacts/alabaster_1733750398730/work
astroid @ file:///home/conda/feedstock_root/build_artifacts/astroid_1741614575978/work
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
autodocsumm @ file:///home/conda/feedstock_root/build_artifacts/autodocsumm_1729874401169/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
bokeh @ file:///home/conda/feedstock_root/build_artifacts/bokeh_1743516310424/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
Cartopy @ file:///home/conda/feedstock_root/build_artifacts/cartopy_1728342231186/work
cattrs @ file:///home/conda/feedstock_root/build_artifacts/cattrs_1733506656399/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cf-units @ file:///home/conda/feedstock_root/build_artifacts/cf-units_1730445501822/work
cf_xarray @ file:///home/conda/feedstock_root/build_artifacts/cf_xarray_1742393228644/work
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725560520483/work
cfgv @ file:///home/conda/feedstock_root/build_artifacts/cfgv_1734267080977/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400453617/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
click-plugins @ file:///home/conda/feedstock_root/build_artifacts/click-plugins_1733731077999/work
cligj @ file:///home/conda/feedstock_root/build_artifacts/cligj_1733749956636/work
cloudpickle @ file:///home/conda/feedstock_root/build_artifacts/cloudpickle_1736947526808/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1731428322366/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381215370/work
cryptography @ file:///home/conda/feedstock_root/build_artifacts/cryptography-split_1740893544290/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
Cython @ file:///home/conda/feedstock_root/build_artifacts/cython_1739227941089/work
cytoolz @ file:///home/conda/feedstock_root/build_artifacts/cytoolz_1734107196509/work
dask @ file:///home/conda/feedstock_root/build_artifacts/dask-core_1725051073088/work
dask-expr @ file:///home/conda/feedstock_root/build_artifacts/dask-expr_1725320878138/work
dask-jobqueue @ file:///home/conda/feedstock_root/build_artifacts/dask-jobqueue_1724342200950/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
dill @ file:///home/conda/feedstock_root/build_artifacts/dill_1733249551891/work
distlib @ file:///home/conda/feedstock_root/build_artifacts/distlib_1733238395481/work
distributed @ file:///home/conda/feedstock_root/build_artifacts/distributed_1725058233754/work
docutils @ file:///home/conda/feedstock_root/build_artifacts/docutils_1733217766141/work
eccodes @ file:///home/conda/feedstock_root/build_artifacts/python-eccodes_1725958413205/work
esgf-pyclient @ file:///home/conda/feedstock_root/build_artifacts/esgf-pyclient_1736255127143/work
esmf_regrid @ file:///home/conda/feedstock_root/build_artifacts/iris-esmf-regrid_1740589309943/work
esmpy @ file:///home/conda/feedstock_root/build_artifacts/esmpy_1734359272810/work/src/addon/esmpy
-e git+https://github.com/ESMValGroup/ESMValCore.git@0dce90cb34b4c0db9e1c23ce37a0f5485629056a#egg=ESMValCore
ESMValTool-sample-data==0.0.3
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
execnet @ file:///home/conda/feedstock_root/build_artifacts/execnet_1733230988954/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1733569351617/work
fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
filelock @ file:///home/conda/feedstock_root/build_artifacts/filelock_1741969488311/work
findlibs @ file:///home/conda/feedstock_root/build_artifacts/findlibs_1682423883580/work
fiona @ file:///home/conda/feedstock_root/build_artifacts/fiona_1733507421330/work
fire @ file:///home/conda/feedstock_root/build_artifacts/fire_1727808373752/work
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fsspec @ file:///home/conda/feedstock_root/build_artifacts/fsspec_1743437245292/work
geographiclib @ file:///home/conda/feedstock_root/build_artifacts/geographiclib_1734342349249/work
geopy @ file:///home/conda/feedstock_root/build_artifacts/geopy_1734341931581/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
humanfriendly @ file:///home/conda/feedstock_root/build_artifacts/humanfriendly_1733927922002/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
identify @ file:///home/conda/feedstock_root/build_artifacts/identify_1741502659866/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imagesize @ file:///home/conda/feedstock_root/build_artifacts/imagesize_1656939531508/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_ipython_1741457802/work
iris-grib @ file:///home/conda/feedstock_root/build_artifacts/iris-grib_1736338015682/work
isodate @ file:///home/conda/feedstock_root/build_artifacts/isodate_1733230734792/work
isort @ file:///home/conda/feedstock_root/build_artifacts/isort_1740643408806/work
itsdangerous @ file:///home/conda/feedstock_root/build_artifacts/itsdangerous_1733308265247/work
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
jsonschema-specifications @ file:///tmp/tmpk0f344m9/src
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1725459213453/work
latexcodec @ file:///home/conda/feedstock_root/build_artifacts/latexcodec_1592937263153/work
legacy-cgi @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_legacy-cgi_1743085170/work
locket @ file:///home/conda/feedstock_root/build_artifacts/locket_1650660393415/work
lxml @ file:///home/conda/feedstock_root/build_artifacts/lxml_1739211548986/work
lz4 @ file:///home/conda/feedstock_root/build_artifacts/lz4_1733474264078/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.10.1
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
mccabe @ file:///home/conda/feedstock_root/build_artifacts/mccabe_1733216466933/work
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
msgpack @ file:///home/conda/feedstock_root/build_artifacts/msgpack-python_1725974993022/work
munkres==1.1.4
MyProxyClient @ file:///home/conda/feedstock_root/build_artifacts/myproxyclient_1734990056203/work
narwhals @ file:///home/conda/feedstock_root/build_artifacts/narwhals_1743462036727/work
nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
nbsphinx @ file:///home/conda/feedstock_root/build_artifacts/nbsphinx_1741075436613/work
nc-time-axis @ file:///home/conda/feedstock_root/build_artifacts/nc-time-axis_1734603295314/work
nested-lookup @ file:///home/conda/feedstock_root/build_artifacts/nested-lookup_1735368761757/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253079498/work
networkx @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_networkx_1731521053/work
nodeenv @ file:///home/conda/feedstock_root/build_artifacts/nodeenv_1734112117269/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1742254824252/work/dist/numpy-2.2.4-cp310-cp310-linux_x86_64.whl#sha256=89bf69a84640f36d25198a479bdb3885a47026a4604a6e700781b7b89b023bd8
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandas @ file:///home/conda/feedstock_root/build_artifacts/pandas_1726878398774/work
pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
partd @ file:///home/conda/feedstock_root/build_artifacts/partd_1715026491486/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929693232/work
pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pooch @ file:///home/conda/feedstock_root/build_artifacts/pooch_1733421311631/work
pre_commit @ file:///home/conda/feedstock_root/build_artifacts/pre-commit_1742475552107/work
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1737453357274/work
prov @ file:///home/conda/feedstock_root/build_artifacts/prov_1604323926302/work
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663128538/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
py-cordex @ file:///home/conda/feedstock_root/build_artifacts/py-cordex_1734951900345/work
pyarrow==19.0.1
pybtex @ file:///home/conda/feedstock_root/build_artifacts/pybtex_1733928100679/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pydata-sphinx-theme==0.16.1
pydocstyle==6.3.0
pydot @ file:///home/conda/feedstock_root/build_artifacts/pydot_1737244073205/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pylint @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pylint_1741550910/work
pyOpenSSL @ file:///home/conda/feedstock_root/build_artifacts/pyopenssl_1737243356468/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
pyproj @ file:///home/conda/feedstock_root/build_artifacts/pyproj_1742323235700/work
pyshp @ file:///home/conda/feedstock_root/build_artifacts/pyshp_1733821528126/work
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
pytest-env @ file:///home/conda/feedstock_root/build_artifacts/pytest-env_1734663258391/work
pytest-html @ file:///home/conda/feedstock_root/build_artifacts/pytest-html_1734739426954/work
pytest-metadata @ file:///home/conda/feedstock_root/build_artifacts/pytest-metadata_1734146180069/work
pytest-mock @ file:///home/conda/feedstock_root/build_artifacts/pytest-mock_1733364214944/work
pytest-xdist @ file:///home/conda/feedstock_root/build_artifacts/pytest-xdist_1733240780199/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1706886791323/work
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1741805149626/work
rdflib @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rdflib_1743255530/work/dist
referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
requests-cache @ file:///home/conda/feedstock_root/build_artifacts/requests-cache_1734246622535/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037662/work
scipy @ file:///home/conda/feedstock_root/build_artifacts/scipy-split_1739790642651/work/dist/scipy-1.15.2-cp310-cp310-linux_x86_64.whl#sha256=9e52bad6c3294d1a5b04a13632118ca2157130603c6c018c2d710162b223b27e
scitools-iris @ file:///home/conda/feedstock_root/build_artifacts/iris_1737530532875/work
shapely @ file:///home/conda/feedstock_root/build_artifacts/shapely_1743678336697/work
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
snowballstemmer @ file:///home/conda/feedstock_root/build_artifacts/snowballstemmer_1637143057757/work
sortedcontainers @ file:///home/conda/feedstock_root/build_artifacts/sortedcontainers_1738440353519/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
Sphinx @ file:///home/conda/feedstock_root/build_artifacts/sphinx_1733754067767/work
sphinxcontrib-applehelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-applehelp_1733754101641/work
sphinxcontrib-devhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-devhelp_1733754113405/work
sphinxcontrib-htmlhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-htmlhelp_1733754280555/work
sphinxcontrib-jsmath @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-jsmath_1733753744933/work
sphinxcontrib-qthelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-qthelp_1733753408017/work
sphinxcontrib-serializinghtml @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-serializinghtml_1733750479108/work
stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
stratify @ file:///home/conda/feedstock_root/build_artifacts/python-stratify_1725548776579/work
tblib @ file:///home/conda/feedstock_root/build_artifacts/tblib_1743515515538/work
termcolor @ file:///home/conda/feedstock_root/build_artifacts/termcolor_1743676967121/work
tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tomlkit @ file:///home/conda/feedstock_root/build_artifacts/tomlkit_1733230743009/work
toolz @ file:///home/conda/feedstock_root/build_artifacts/toolz_1733736030883/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615898999/work
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
tzdata @ file:///home/conda/feedstock_root/build_artifacts/python-tzdata_1742745135198/work
ujson @ file:///home/conda/feedstock_root/build_artifacts/ujson_1724954402136/work
ukkonen @ file:///home/conda/feedstock_root/build_artifacts/ukkonen_1725784026316/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692496989/work
url-normalize @ file:///home/conda/feedstock_root/build_artifacts/url-normalize_1734246623875/work
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
virtualenv @ file:///home/conda/feedstock_root/build_artifacts/virtualenv_1743473963109/work
vprof==0.38
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
WebOb @ file:///home/conda/feedstock_root/build_artifacts/webob_1733185657139/work
xarray @ file:///home/conda/feedstock_root/build_artifacts/xarray_1743444383176/work
xxhash @ file:///home/conda/feedstock_root/build_artifacts/python-xxhash_1740594790928/work
xyzservices @ file:///home/conda/feedstock_root/build_artifacts/xyzservices_1737234886776/work
yamale @ file:///home/conda/feedstock_root/build_artifacts/yamale_1735891169111/work
zict @ file:///home/conda/feedstock_root/build_artifacts/zict_1733261551178/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: ESMValCore
channels:
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- accessible-pygments=0.0.5=pyhd8ed1ab_1
- adwaita-icon-theme=48.0=unix_0
- alabaster=1.0.0=pyhd8ed1ab_1
- aom=3.9.1=hac33072_0
- astroid=3.3.9=py310hff52083_0
- asttokens=3.0.0=pyhd8ed1ab_1
- at-spi2-atk=2.38.0=h0630a04_3
- at-spi2-core=2.40.3=h0630a04_0
- atk-1.0=2.38.0=h04ea711_2
- attrs=25.3.0=pyh71513ae_0
- autodocsumm=0.2.14=pyhd8ed1ab_0
- aws-c-auth=0.8.7=h7743f02_1
- aws-c-cal=0.8.7=h7d555fd_1
- aws-c-common=0.12.1=hb9d3cd8_0
- aws-c-compression=0.3.1=hcbd9e4e_3
- aws-c-event-stream=0.5.4=h286e7e7_3
- aws-c-http=0.9.5=hbca0721_0
- aws-c-io=0.17.0=ha855f32_8
- aws-c-mqtt=0.12.2=hffac463_3
- aws-c-s3=0.7.13=h4c9fe3b_3
- aws-c-sdkutils=0.2.3=hcbd9e4e_3
- aws-checksums=0.2.3=hcbd9e4e_3
- aws-crt-cpp=0.31.1=h46b750d_1
- aws-sdk-cpp=1.11.510=h1fa5cb7_4
- azure-core-cpp=1.14.0=h5cfcd09_0
- azure-identity-cpp=1.10.0=h113e628_0
- azure-storage-blobs-cpp=12.13.0=h3cf044e_1
- azure-storage-common-cpp=12.8.0=h736e048_1
- azure-storage-files-datalake-cpp=12.12.0=ha633028_1
- babel=2.17.0=pyhd8ed1ab_0
- beautifulsoup4=4.13.3=pyha770c72_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=he440d0b_1
- bokeh=3.7.2=pyhd8ed1ab_1
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py310hf71b8c6_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cairo=1.18.4=h3394656_0
- cartopy=0.24.0=py310h5eaa309_0
- cattrs=24.1.2=pyhd8ed1ab_1
- certifi=2025.1.31=pyhd8ed1ab_0
- cf-units=3.3.0=py310hf462985_0
- cf_xarray=0.10.4=pyhd8ed1ab_0
- cffi=1.17.1=py310h8deb56e_0
- cfgv=3.3.1=pyhd8ed1ab_1
- cftime=1.6.4=py310hf462985_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- click-plugins=1.1.1=pyhd8ed1ab_1
- cligj=0.7.2=pyhd8ed1ab_2
- cloudpickle=3.1.1=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_1
- contourpy=1.3.1=py310h3788b33_0
- coverage=7.8.0=py310h89163eb_0
- cryptography=44.0.2=py310h6c63255_0
- cycler=0.12.1=pyhd8ed1ab_1
- cython=3.0.12=py310had8cdd9_0
- cytoolz=1.0.1=py310ha75aee5_0
- dask=2024.8.2=pyhd8ed1ab_0
- dask-core=2024.8.2=pyhd8ed1ab_0
- dask-expr=1.1.13=pyhd8ed1ab_0
- dask-jobqueue=0.9.0=pyhd8ed1ab_0
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- dill=0.3.9=pyhd8ed1ab_1
- distlib=0.3.9=pyhd8ed1ab_1
- distributed=2024.8.2=pyhd8ed1ab_0
- docutils=0.21.2=pyhd8ed1ab_1
- eccodes=2.39.0=hf413ef6_1
- epoxy=1.5.10=h166bdaf_1
- esgf-pyclient=0.3.1=pyhd8ed1ab_5
- esmf=8.7.0=nompi_h6063b07_1
- esmpy=8.7.0=pyhecae5ae_1
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- execnet=2.1.1=pyhd8ed1ab_1
- executing=2.1.0=pyhd8ed1ab_1
- expat=2.7.0=h5888daf_0
- filelock=3.18.0=pyhd8ed1ab_0
- findlibs=0.0.5=pyhd8ed1ab_0
- fiona=1.10.1=py310h0aed7a2_3
- fire=0.7.0=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py310h89163eb_0
- freeglut=3.2.2=ha6d2627_3
- freetype=2.13.3=h48d6fc4_0
- freexl=2.0.0=h9dce30a_2
- fribidi=1.0.10=h36c2ea0_0
- fsspec=2025.3.2=pyhd8ed1ab_0
- gdk-pixbuf=2.42.12=hb9ae30d_0
- geographiclib=2.0=pyhd8ed1ab_1
- geopy=2.4.1=pyhd8ed1ab_2
- geos=3.13.1=h97f6797_0
- geotiff=1.7.4=h239500f_2
- gflags=2.2.2=h5888daf_1005
- giflib=5.2.2=hd590300_0
- glib-tools=2.84.0=h4833e2c_0
- glog=0.7.1=hbabe93e_0
- graphite2=1.3.13=h59595ed_1003
- graphviz=12.2.1=h5ae0cbf_1
- gtk3=3.24.43=h0c6a113_5
- gts=0.7.6=h977cf35_4
- h2=4.2.0=pyhd8ed1ab_0
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.4=nompi_h2d575fe_105
- hicolor-icon-theme=0.17=ha770c72_2
- hpack=4.1.0=pyhd8ed1ab_0
- humanfriendly=10.0=pyh707e725_8
- hyperframe=6.1.0=pyhd8ed1ab_0
- icu=75.1=he02047a_0
- identify=2.6.9=pyhd8ed1ab_0
- idna=3.10=pyhd8ed1ab_1
- imagesize=1.4.1=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib_metadata=8.6.1=hd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- ipython=8.34.0=pyh907856f_0
- iris=3.11.1=pyha770c72_1
- iris-esmf-regrid=0.11.0=pyhd8ed1ab_2
- iris-grib=0.21.0=pyhd8ed1ab_0
- isodate=0.7.2=pyhd8ed1ab_1
- isort=6.0.1=pyhd8ed1ab_0
- itsdangerous=2.2.0=pyhd8ed1ab_1
- jasper=4.2.5=h1920b20_0
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- json-c=0.18=h6688a6e_0
- jsonschema=4.23.0=pyhd8ed1ab_1
- jsonschema-specifications=2024.10.1=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_core=5.7.2=pyh31011fe_1
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.7=py310h3788b33_0
- krb5=1.21.3=h659f571_0
- latexcodec=2.0.1=pyh9f0ad1d_0
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.43=h712a8e2_4
- legacy-cgi=2.6.3=pyh41aed27_0
- lerc=4.0.0=h27087fc_0
- libabseil=20250127.1=cxx17_hbbce691_0
- libaec=1.1.3=h59595ed_0
- libarchive=3.7.7=h4585015_3
- libarrow=19.0.1=h052fb8e_6_cpu
- libarrow-acero=19.0.1=hcb10f89_6_cpu
- libarrow-dataset=19.0.1=hcb10f89_6_cpu
- libarrow-substrait=19.0.1=h1bed206_6_cpu
- libavif16=1.2.1=hbb36593_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcblas=3.9.0=31_he106b2a_openblas
- libcrc32c=1.1.2=h9c3ff4c_0
- libcups=2.3.3=h4637d8d_4
- libcurl=8.13.0=h332b0f4_0
- libde265=1.0.15=h00ab1b0_0
- libdeflate=1.23=h4ddbbb0_0
- libdrm=2.4.124=hb9d3cd8_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libegl=1.7.0=ha4b6fd6_2
- libev=4.33=hd590300_2
- libevent=2.1.12=hf998b51_1
- libexpat=2.7.0=h5888daf_0
- libffi=3.4.6=h2dba641_1
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgd=2.3.3=h6f5c62b_11
- libgdal-core=3.10.2=hae73b24_5
- libgfortran=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgl=1.7.0=ha4b6fd6_2
- libglib=2.84.0=h2ff4ddf_0
- libglu=9.0.3=h03adeef_0
- libglvnd=1.7.0=ha4b6fd6_2
- libglx=1.7.0=ha4b6fd6_2
- libgomp=14.2.0=h767d61c_2
- libgoogle-cloud=2.36.0=hc4361e1_1
- libgoogle-cloud-storage=2.36.0=h0121fbd_1
- libgrpc=1.71.0=he753a82_0
- libheif=1.19.7=gpl_hc18d805_100
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- libkml=1.3.0=hf539b9f_1021
- liblapack=3.9.0=31_h7ac8fdf_openblas
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h5ddbaa4_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libopentelemetry-cpp=1.19.0=hd1b1c89_0
- libopentelemetry-cpp-headers=1.19.0=ha770c72_0
- libparquet=19.0.1=h081d1f1_6_cpu
- libpciaccess=0.18=hd590300_0
- libpng=1.6.47=h943b412_0
- libprotobuf=5.29.3=h501fc15_0
- libre2-11=2024.07.02=hba17884_3
- librsvg=2.58.4=he92a37e_3
- librttopo=1.1.0=hd718a1a_18
- libsodium=1.0.20=h4ab18f5_0
- libspatialite=5.1.0=he17ca71_14
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libthrift=0.21.0=h0e7cc3e_0
- libtiff=4.7.0=hd9ff511_3
- libudunits2=2.2.28=h40f5838_3
- libutf8proc=2.10.0=h4c51ac1_0
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- locket=1.0.0=pyhd8ed1ab_0
- lxml=5.3.1=py310h6ee67d5_0
- lz4=4.3.3=py310h80b8a69_2
- lz4-c=1.10.0=h5888daf_1
- lzo=2.10=hd590300_1001
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py310h89163eb_1
- matplotlib-base=3.10.1=py310h68603db_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mccabe=0.7.0=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- minizip=4.0.7=h05a5f5f_3
- mistune=3.1.3=pyh29332c3_0
- msgpack-python=1.1.0=py310h3788b33_0
- munkres=1.1.4=pyh9f0ad1d_0
- myproxyclient=2.1.1=pyhd8ed1ab_1
- narwhals=1.33.0=pyhd8ed1ab_0
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert=7.16.6=hb482800_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbconvert-pandoc=7.16.6=hed9df3c_0
- nbformat=5.10.4=pyhd8ed1ab_1
- nbsphinx=0.9.7=pyhd8ed1ab_0
- nc-time-axis=1.4.1=pyhd8ed1ab_1
- ncurses=6.5=h2d0b736_3
- nested-lookup=0.2.25=pyhd8ed1ab_2
- netcdf-fortran=4.6.1=nompi_ha5d1325_108
- netcdf4=1.7.2=nompi_py310h5146f0f_101
- networkx=3.4.2=pyh267e887_2
- nlohmann_json=3.11.3=he02047a_1
- nodeenv=1.9.1=pyhd8ed1ab_1
- numpy=2.2.4=py310hefbff90_0
- openjpeg=2.5.3=h5fbd93e_0
- openssl=3.4.1=h7b32b05_0
- orc=2.1.1=h17f744e_1
- packaging=24.2=pyhd8ed1ab_2
- pandas=2.2.3=py310h5eaa309_1
- pandoc=3.6.4=ha770c72_0
- pandocfilters=1.5.0=pyhd8ed1ab_0
- pango=1.56.3=h9ac818e_1
- parso=0.8.4=pyhd8ed1ab_1
- partd=1.4.2=pyhd8ed1ab_0
- pcre2=10.44=hba22ea6_2
- pexpect=4.9.0=pyhd8ed1ab_1
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.1.0=py310h7e6dc6c_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pooch=1.8.2=pyhd8ed1ab_1
- pre-commit=4.2.0=pyha770c72_0
- proj=9.6.0=h0054346_1
- prometheus-cpp=1.3.0=ha5d0236_0
- prompt-toolkit=3.0.50=pyha770c72_0
- prov=2.0.0=pyhd3deb0d_0
- psutil=7.0.0=py310ha75aee5_0
- pthread-stubs=0.4=hb9d3cd8_1002
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pure_eval=0.2.3=pyhd8ed1ab_1
- py-cordex=0.9.0=pyhd8ed1ab_1
- pyarrow=19.0.1=py310hff52083_0
- pyarrow-core=19.0.1=py310hac404ae_0_cpu
- pybtex=0.24.0=pyhd8ed1ab_3
- pycparser=2.22=pyh29332c3_1
- pydata-sphinx-theme=0.16.1=pyhd8ed1ab_0
- pydot=3.0.4=py310hff52083_0
- pygments=2.19.1=pyhd8ed1ab_0
- pylint=3.3.5=pyh29332c3_0
- pyopenssl=25.0.0=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyproj=3.7.1=py310h71d0299_1
- pyshp=2.3.1=pyhd8ed1ab_1
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- pytest-env=1.1.5=pyhd8ed1ab_1
- pytest-html=4.1.1=pyhd8ed1ab_1
- pytest-metadata=3.1.1=pyhd8ed1ab_1
- pytest-mock=3.14.0=pyhd8ed1ab_1
- pytest-xdist=3.6.1=pyhd8ed1ab_1
- python=3.10.16=habfa6aa_2_cpython
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-eccodes=2.37.0=py310hf462985_0
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-stratify=0.3.0=py310hf462985_3
- python-tzdata=2025.2=pyhd8ed1ab_0
- python-xxhash=3.5.0=py310ha75aee5_2
- python_abi=3.10=6_cp310
- pytz=2024.1=pyhd8ed1ab_0
- pyyaml=6.0.2=py310h89163eb_2
- pyzmq=26.3.0=py310h71f11fc_0
- qhull=2020.2=h434a139_5
- rav1e=0.6.6=he8a937b_2
- rdflib=7.1.4=pyh29332c3_0
- re2=2024.07.02=h9925aae_3
- readline=8.2=h8c095d6_2
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.3=pyhd8ed1ab_1
- requests-cache=1.2.1=pyhd8ed1ab_1
- rich=14.0.0=pyh29332c3_0
- rpds-py=0.24.0=py310hc1293b2_0
- s2n=1.5.15=hd830067_0
- scipy=1.15.2=py310h1d65ade_0
- setuptools=75.8.2=pyhff2d567_0
- shapely=2.1.0=py310h247727d_0
- six=1.17.0=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- snowballstemmer=2.2.0=pyhd8ed1ab_0
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- soupsieve=2.5=pyhd8ed1ab_1
- sphinx=8.1.3=pyhd8ed1ab_1
- sphinxcontrib-applehelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-devhelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-htmlhelp=2.1.0=pyhd8ed1ab_1
- sphinxcontrib-jsmath=1.0.1=pyhd8ed1ab_1
- sphinxcontrib-qthelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_1
- sqlite=3.49.1=h9eae976_2
- stack_data=0.6.3=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tblib=3.1.0=pyhd8ed1ab_0
- termcolor=3.0.1=pyhd8ed1ab_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tomlkit=0.13.2=pyha770c72_1
- toolz=1.0.0=pyhd8ed1ab_1
- tornado=6.4.2=py310ha75aee5_0
- traitlets=5.14.3=pyhd8ed1ab_1
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- tzdata=2025b=h78e105d_0
- udunits2=2.2.28=h40f5838_3
- ujson=5.10.0=py310hf71b8c6_1
- ukkonen=1.0.1=py310h3788b33_5
- unicodedata2=16.0.0=py310ha75aee5_0
- uriparser=0.9.8=hac33072_0
- url-normalize=1.4.3=pyhd8ed1ab_1
- urllib3=2.3.0=pyhd8ed1ab_0
- virtualenv=20.30.0=pyhd8ed1ab_0
- wayland=1.23.1=h3e06ad9_0
- wcwidth=0.2.13=pyhd8ed1ab_1
- webencodings=0.5.1=pyhd8ed1ab_3
- webob=1.8.9=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- x265=3.5=h924138e_3
- xarray=2025.3.1=pyhd8ed1ab_0
- xerces-c=3.2.5=h988505b_2
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxinerama=1.1.5=h5888daf_1
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xorg-libxxf86vm=1.1.6=hb9d3cd8_0
- xxhash=0.8.3=hb9d3cd8_0
- xyzservices=2025.1.0=pyhd8ed1ab_0
- yamale=5.3.0=pyhd8ed1ab_0
- yaml=0.2.5=h7f98852_2
- zeromq=4.3.5=h3b0a872_7
- zict=3.0.0=pyhd8ed1ab_1
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py310ha75aee5_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- esmvalcore==2.12.0.dev130+g0dce90cb3
- esmvaltool-sample-data==0.0.3
- pydocstyle==6.3.0
- vprof==0.38
prefix: /opt/conda/envs/ESMValCore
| [
"tests/unit/recipe/test_from_datasets.py::test_to_frozen"
] | [] | [
"tests/unit/recipe/test_from_datasets.py::test_datasets_to_recipe",
"tests/unit/recipe/test_from_datasets.py::test_update_datasets_in_recipe",
"tests/unit/recipe/test_from_datasets.py::test_supplementary_datasets_to_recipe",
"tests/unit/recipe/test_from_datasets.py::test_datasets_to_recipe_group_ensembles",
"tests/unit/recipe/test_from_datasets.py::test_datasets_to_recipe_no_diagnostic",
"tests/unit/recipe/test_from_datasets.py::test_group_identical_facets",
"tests/unit/recipe/test_from_datasets.py::test_group_ensemble_members",
"tests/unit/recipe/test_from_datasets.py::test_group_ensemble_members_mix_of_versions",
"tests/unit/recipe/test_from_datasets.py::test_group_ensembles_cmip5",
"tests/unit/recipe/test_from_datasets.py::test_group_ensembles_cmip6",
"tests/unit/recipe/test_from_datasets.py::test_move_one_level_up_diagnostic",
"tests/unit/recipe/test_from_datasets.py::test_move_one_level_up_recipe"
] | [] | Apache License 2.0 | 18,863 | 351 | [
"esmvalcore/_recipe/from_datasets.py"
] |
tobymao__sqlglot-3734 | 1e07c4d29a43192fb57c120f3b9c1c2fa27d0fa6 | 2024-07-03 14:17:07 | c790c3b1fa274d7b0faf9f75e7dbc62bc4f55c67 | georgesittas: > Came across Snowflake which supports `NVL()` (alias of `IFNULL`) _and_ `COALESCE()`. Their differences seem very subtle:
>
> * `NVL / IFNULL` only accept 2 arguments, `COALESCE` is varlen
>
> * The docs seem identical except maybe [the last bullet here](https://docs.snowflake.com/en/sql-reference/functions/nvl#usage-notes)
>
>
> [Snowflake NVL/IFNULL](https://docs.snowflake.com/en/sql-reference/functions/nvl#usage-notes) | [Snowflake COALESCE](https://docs.snowflake.com/en/sql-reference/functions/coalesce#usage-notes)
Yeah that makes sense, Oracle and Snowflake are very similar. Let's fix this for Snowflake too then I guess.
VaggelisD: Reverted the Snowflake change, can't find a tangible difference between `NVL/IFNULL` and `COALESCE` except the varlen args.
index e4dad2cc..ed01ee8b 100644
--- a/sqlglot/dialects/oracle.py
+++ b/sqlglot/dialects/oracle.py
@@ -107,6 +107,7 @@ class Oracle(Dialect):
"TO_TIMESTAMP": build_formatted_time(exp.StrToTime, "oracle"),
"TO_DATE": build_formatted_time(exp.StrToDate, "oracle"),
}
+ FUNCTIONS.pop("NVL")
FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
**parser.Parser.FUNCTION_PARSERS,
diff --git a/sqlglot/dialects/snowflake.py b/sqlglot/dialects/snowflake.py
index 1eab756b..a3e84f47 100644
--- a/sqlglot/dialects/snowflake.py
+++ b/sqlglot/dialects/snowflake.py
@@ -504,10 +504,11 @@ class Snowflake(Dialect):
return lateral
- def _parse_at_before(self, table: exp.Table) -> exp.Table:
+ def _parse_historical_data(self) -> t.Optional[exp.HistoricalData]:
# https://docs.snowflake.com/en/sql-reference/constructs/at-before
index = self._index
- if self._match_texts(("AT", "BEFORE")):
+ historical_data = None
+ if self._match_texts(self.HISTORICAL_DATA_PREFIX):
this = self._prev.text.upper()
kind = (
self._match(TokenType.L_PAREN)
@@ -518,14 +519,27 @@ class Snowflake(Dialect):
if expression:
self._match_r_paren()
- when = self.expression(
+ historical_data = self.expression(
exp.HistoricalData, this=this, kind=kind, expression=expression
)
- table.set("when", when)
else:
self._retreat(index)
- return table
+ return historical_data
+
+ def _parse_changes(self) -> t.Optional[exp.Changes]:
+ if not self._match_text_seq("CHANGES", "(", "INFORMATION", "=>"):
+ return None
+
+ information = self._parse_var(any_token=True)
+ self._match_r_paren()
+
+ return self.expression(
+ exp.Changes,
+ information=information,
+ at_before=self._parse_historical_data(),
+ end=self._parse_historical_data(),
+ )
def _parse_table_parts(
self, schema: bool = False, is_db_reference: bool = False, wildcard: bool = False
@@ -559,7 +573,15 @@ class Snowflake(Dialect):
else:
table = super()._parse_table_parts(schema=schema, is_db_reference=is_db_reference)
- return self._parse_at_before(table)
+ changes = self._parse_changes()
+ if changes:
+ table.set("changes", changes)
+
+ at_before = self._parse_historical_data()
+ if at_before:
+ table.set("when", at_before)
+
+ return table
def _parse_id_var(
self,
diff --git a/sqlglot/expressions.py b/sqlglot/expressions.py
index 764810dc..79511da4 100644
--- a/sqlglot/expressions.py
+++ b/sqlglot/expressions.py
@@ -2002,6 +2002,10 @@ class Check(Expression):
pass
+class Changes(Expression):
+ arg_types = {"information": True, "at_before": False, "end": False}
+
+
# https://docs.snowflake.com/en/sql-reference/constructs/connect-by
class Connect(Expression):
arg_types = {"start": False, "connect": True, "nocycle": False}
@@ -3034,6 +3038,7 @@ class Table(Expression):
"when": False,
"only": False,
"partition": False,
+ "changes": False,
}
@property
diff --git a/sqlglot/generator.py b/sqlglot/generator.py
index 6a2589f3..40ad4680 100644
--- a/sqlglot/generator.py
+++ b/sqlglot/generator.py
@@ -1749,7 +1749,10 @@ class Generator(metaclass=_Generator):
if when:
table = f"{table} {when}"
- return f"{only}{table}{partition}{version}{file_format}{alias}{hints}{pivots}{joins}{laterals}{ordinality}"
+ changes = self.sql(expression, "changes")
+ changes = f" {changes}" if changes else ""
+
+ return f"{only}{table}{changes}{partition}{version}{file_format}{alias}{hints}{pivots}{joins}{laterals}{ordinality}"
def tablesample_sql(
self,
@@ -4028,3 +4031,13 @@ class Generator(metaclass=_Generator):
expression.args.get("format"),
expression.args.get("zone"),
)
+
+ def changes_sql(self, expression: exp.Changes) -> str:
+ information = self.sql(expression, "information")
+ information = f"INFORMATION => {information}"
+ at_before = self.sql(expression, "at_before")
+ at_before = f"{self.seg('')}{at_before}" if at_before else ""
+ end = self.sql(expression, "end")
+ end = f"{self.seg('')}{end}" if end else ""
+
+ return f"CHANGES ({information}){at_before}{end}"
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index d5fe792b..d655b849 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -1132,6 +1132,7 @@ class Parser(metaclass=_Parser):
INSERT_ALTERNATIVES = {"ABORT", "FAIL", "IGNORE", "REPLACE", "ROLLBACK"}
CLONE_KEYWORDS = {"CLONE", "COPY"}
+ HISTORICAL_DATA_PREFIX = {"AT", "BEFORE", "END"}
HISTORICAL_DATA_KIND = {"TIMESTAMP", "OFFSET", "STATEMENT", "STREAM"}
OPCLASS_FOLLOW_KEYWORDS = {"ASC", "DESC", "NULLS", "WITH"}
 | NVL should NOT be replaced by COALESCE in Oracle but should stay NVL. Their behavior is not the same in terms of data type conversion.
**Fully reproducible code snippet**
In Oracle DB:
```
CREATE TABLE TEST1 (COL1 INT, COL2 VARCHAR2(10))
INSERT INTO TEST1 VALUES (1,'2')
```
```
SELECT * FROM TEST1 WHERE NVL(COL2, 1) <> COL1
```
--> COL1 COL2
1 2
```
SELECT * FROM TEST1 WHERE COALESCE(COL2, 1) <> COL1
```
--> ORA-00932: inconsistent datatypes: expected : CHAR ; got : NUMBER
As we can see, NVL does NOT care about the type, while COALESCE does; the two functions are therefore not identical.
In Python with sqlglot:
    import sqlglot

    sqlQuery: str = """ SELECT * FROM TEST1 WHERE NVL(COL2, 1) <> COL1"""
    sql = sqlglot.parse_one(sql=sqlQuery, read='oracle')
    print(sql.sql(dialect='oracle'))
-> SELECT * FROM TEST1 WHERE COALESCE(COL2, 1) <> COL1
The output should contain NVL and not COALESCE. These 2 functions are not identical.
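For reference, a minimal round-trip sketch of the desired behaviour (same example query as above; the final comment shows the output this issue asks for, not what the current release prints):

```python
import sqlglot

query = "SELECT * FROM TEST1 WHERE NVL(COL2, 1) <> COL1"
# Desired: the Oracle dialect keeps NVL as-is instead of rewriting it to COALESCE.
print(sqlglot.transpile(query, read="oracle", write="oracle")[0])
# SELECT * FROM TEST1 WHERE NVL(COL2, 1) <> COL1
```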
**Official Documentation**
Please include links to official SQL documentation related to your issue.
https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/NVL.html
"If expr1 is character data, then Oracle Database converts expr2 to the data type of expr1 before comparing them and returns VARCHAR2 in the character set of expr1."
| tobymao/sqlglot | diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 70b8595d..1d9fd99b 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -86,9 +86,9 @@ class TestOracle(Validator):
"SELECT DISTINCT col1, col2 FROM table",
)
self.validate_identity(
- "SELECT * FROM T ORDER BY I OFFSET nvl(:variable1, 10) ROWS FETCH NEXT nvl(:variable2, 10) ROWS ONLY",
- "SELECT * FROM T ORDER BY I OFFSET COALESCE(:variable1, 10) ROWS FETCH NEXT COALESCE(:variable2, 10) ROWS ONLY",
+ "SELECT * FROM T ORDER BY I OFFSET NVL(:variable1, 10) ROWS FETCH NEXT NVL(:variable2, 10) ROWS ONLY",
)
+ self.validate_identity("NVL(x, y)").assert_is(exp.Anonymous)
self.validate_identity(
"SELECT * FROM t SAMPLE (.25)",
"SELECT * FROM t SAMPLE (0.25)",
@@ -189,13 +189,6 @@ class TestOracle(Validator):
"spark": "SELECT CAST(NULL AS VARCHAR(2328)) AS COL1",
},
)
- self.validate_all(
- "NVL(NULL, 1)",
- write={
- "": "COALESCE(NULL, 1)",
- "oracle": "COALESCE(NULL, 1)",
- },
- )
self.validate_all(
"DATE '2022-01-01'",
write={
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index 80f8faaa..88b2148d 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -190,18 +190,6 @@ WHERE
"""SELECT PARSE_JSON('{"food":{"fruit":"banana"}}'):food.fruit::VARCHAR""",
"""SELECT CAST(GET_PATH(PARSE_JSON('{"food":{"fruit":"banana"}}'), 'food.fruit') AS VARCHAR)""",
)
- self.validate_identity(
- "SELECT * FROM foo at",
- "SELECT * FROM foo AS at",
- )
- self.validate_identity(
- "SELECT * FROM foo before",
- "SELECT * FROM foo AS before",
- )
- self.validate_identity(
- "SELECT * FROM foo at (col)",
- "SELECT * FROM foo AS at(col)",
- )
self.validate_identity(
"SELECT * FROM unnest(x) with ordinality",
"SELECT * FROM TABLE(FLATTEN(INPUT => x)) AS _u(seq, key, path, index, value, this)",
@@ -1204,6 +1192,17 @@ WHERE
"SELECT oldt.*, newt.* FROM my_table BEFORE (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726') AS oldt FULL OUTER JOIN my_table AT (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726') AS newt ON oldt.id = newt.id WHERE oldt.id IS NULL OR newt.id IS NULL",
)
+ # Make sure that the historical data keywords can still be used as aliases
+ for historical_data_prefix in ("AT", "BEFORE", "END", "CHANGES"):
+ for schema_suffix in ("", "(col)"):
+ with self.subTest(
+ f"Testing historical data prefix alias: {historical_data_prefix}{schema_suffix}"
+ ):
+ self.validate_identity(
+ f"SELECT * FROM foo {historical_data_prefix}{schema_suffix}",
+ f"SELECT * FROM foo AS {historical_data_prefix}{schema_suffix}",
+ )
+
def test_ddl(self):
for constraint_prefix in ("WITH ", ""):
with self.subTest(f"Constraint prefix: {constraint_prefix}"):
@@ -2033,3 +2032,15 @@ SINGLE = TRUE""",
self.validate_identity("ALTER TABLE foo UNSET TAG a, b, c")
self.validate_identity("ALTER TABLE foo UNSET DATA_RETENTION_TIME_IN_DAYS, CHANGE_TRACKING")
+
+ def test_from_changes(self):
+ self.validate_identity(
+ """SELECT C1 FROM t1 CHANGES (INFORMATION => APPEND_ONLY) AT (STREAM => 's1') END (TIMESTAMP => $ts2)"""
+ )
+ self.validate_identity(
+ """SELECT C1 FROM t1 CHANGES (INFORMATION => APPEND_ONLY) BEFORE (STATEMENT => 'STMT_ID') END (TIMESTAMP => $ts2)"""
+ )
+ self.validate_identity(
+ """SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 00:00:00+00:00')) END (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 14:28:59.999999+00:00'))""",
+ """SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => CAST('2024-07-01 00:00:00+00:00' AS TIMESTAMPTZ)) END (TIMESTAMP => CAST('2024-07-01 14:28:59.999999+00:00' AS TIMESTAMPTZ))""",
+ )
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 5
} | 25.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@1e07c4d29a43192fb57c120f3b9c1c2fa27d0fa6#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_oracle.py::TestOracle::test_oracle",
"tests/dialects/test_snowflake.py::TestSnowflake::test_from_changes"
] | [] | [
"tests/dialects/test_oracle.py::TestOracle::test_connect_by",
"tests/dialects/test_oracle.py::TestOracle::test_hints",
"tests/dialects/test_oracle.py::TestOracle::test_join_marker",
"tests/dialects/test_oracle.py::TestOracle::test_json_table",
"tests/dialects/test_oracle.py::TestOracle::test_match_recognize",
"tests/dialects/test_oracle.py::TestOracle::test_query_restrictions",
"tests/dialects/test_oracle.py::TestOracle::test_xml_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_alter_set_unset",
"tests/dialects/test_snowflake.py::TestSnowflake::test_copy",
"tests/dialects/test_snowflake.py::TestSnowflake::test_ddl",
"tests/dialects/test_snowflake.py::TestSnowflake::test_describe_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_flatten",
"tests/dialects/test_snowflake.py::TestSnowflake::test_historical_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_match_recognize",
"tests/dialects/test_snowflake.py::TestSnowflake::test_minus",
"tests/dialects/test_snowflake.py::TestSnowflake::test_null_treatment",
"tests/dialects/test_snowflake.py::TestSnowflake::test_parse_like_any",
"tests/dialects/test_snowflake.py::TestSnowflake::test_querying_semi_structured_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_replace",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_substr",
"tests/dialects/test_snowflake.py::TestSnowflake::test_sample",
"tests/dialects/test_snowflake.py::TestSnowflake::test_semi_structured_types",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_columns",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_imported_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_objects",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_primary_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_schemas",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_sequences",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_tables",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_unique_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_users",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_views",
"tests/dialects/test_snowflake.py::TestSnowflake::test_snowflake",
"tests/dialects/test_snowflake.py::TestSnowflake::test_staged_files",
"tests/dialects/test_snowflake.py::TestSnowflake::test_storage_integration",
"tests/dialects/test_snowflake.py::TestSnowflake::test_stored_procedures",
"tests/dialects/test_snowflake.py::TestSnowflake::test_swap",
"tests/dialects/test_snowflake.py::TestSnowflake::test_table_literal",
"tests/dialects/test_snowflake.py::TestSnowflake::test_timestamps",
"tests/dialects/test_snowflake.py::TestSnowflake::test_try_cast",
"tests/dialects/test_snowflake.py::TestSnowflake::test_user_defined_functions",
"tests/dialects/test_snowflake.py::TestSnowflake::test_values"
] | [] | MIT License | 18,872 | 1,503 | [
"sqlglot/dialects/oracle.py",
"sqlglot/dialects/snowflake.py",
"sqlglot/expressions.py",
"sqlglot/generator.py",
"sqlglot/parser.py"
] |
Sceptre__sceptre-1485 | c06b3b5816a6ecb29cd9d1c4cd34eee19f8be642 | 2024-07-05 06:33:18 | deef08e31911493ac18220bf9ed46218210cb94d | diff --git a/sceptre/stack.py b/sceptre/stack.py
index f085ced..7990e81 100644
--- a/sceptre/stack.py
+++ b/sceptre/stack.py
@@ -305,6 +305,11 @@ class Stack:
or isinstance(value, Resolver)
)
+ if not isinstance(parameters, dict):
+ raise InvalidConfigFileError(
+ f"{self.name}: parameters must be a dictionary of key-value pairs, got {parameters}"
+ )
+
casted_parameters = {k: cast_value(v) for k, v in parameters.items()}
if not all(is_valid(value) for value in casted_parameters.values()):
| Sceptre should handle invalid types passed to the parameters dict gracefully
### Subject of the issue
All commands raise exceptions if `parameters` is a list instead of a dict. This did not occur in v4.4.2 and relates to the cast parameters feature.
### Your environment
* version of sceptre (`sceptre --version`): master
* version of python (`python --version`): 3.10
* which OS/distro: Ubuntu
### Steps to reproduce
Tell us how to reproduce this issue. Please provide sceptre project files if possible;
you can use https://plnkr.co/edit/ANFHm61Ilt4mQVgF as a base.
### Expected behaviour
Should fail cleanly.
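A sketch of what failing cleanly could look like, mirroring the `Stack(...)` constructor arguments used in `tests/test_stack.py`; the `sceptre.exceptions` and `sceptre.stack` import paths are assumed from Sceptre's module layout:

```python
import pytest
from sceptre.exceptions import InvalidConfigFileError
from sceptre.stack import Stack

# Passing a list (or any non-dict) for `parameters` should raise a clear
# configuration error instead of an AttributeError deep inside _cast_parameters.
with pytest.raises(InvalidConfigFileError):
    Stack(
        name="stack_name",
        project_code="project_code",
        template_handler_config={"type": "file"},
        region="region",
        parameters=["subnet-08b49bb04f76dddf6", "subnet-032c8e50fb828d6f9"],
    )
```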
### Actual behaviour
In v4.4.2, Sceptre would generate invalid parameters and presumably blow up later, e.g.:
```
% sceptre dump-config
...
parameters:
- subnet-08b49bb04f76dddf6
- subnet-032c8e50fb828d6f9
- subnet-08e025d377a8a455e
```
On the current master branch:
```
Traceback (most recent call last):
File "/usr/local/bin/sceptre", line 8, in <module>
sys.exit(cli())
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1130, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1055, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1657, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1657, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1404, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.10/site-packages/click/core.py", line 760, in invoke
return __callback(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/click/decorators.py", line 26, in new_func
return f(get_current_context(), *args, **kwargs)
File "/app/sceptre/sceptre/cli/helpers.py", line 46, in decorated
return func(*args, **kwargs)
File "/app/sceptre/sceptre/cli/list.py", line 197, in list_stacks
plan = SceptrePlan(context)
File "/app/sceptre/sceptre/plan/plan.py", line 50, in __init__
all_stacks, command_stacks = self.config_reader.construct_stacks()
File "/app/sceptre/sceptre/config/reader.py", line 273, in construct_stacks
stack = self._construct_stack(rel_path, stack_group_config)
File "/app/sceptre/sceptre/config/reader.py", line 639, in _construct_stack
stack = Stack(
File "/app/sceptre/sceptre/stack.py", line 266, in __init__
self.parameters = self._cast_parameters(parameters or {})
File "/app/sceptre/sceptre/stack.py", line 308, in _cast_parameters
casted_parameters = {k: cast_value(v) for k, v in parameters.items()}
AttributeError: 'list' object has no attribute 'items'
```
| Sceptre/sceptre | diff --git a/tests/test_stack.py b/tests/test_stack.py
index eeddf74..e67edc4 100644
--- a/tests/test_stack.py
+++ b/tests/test_stack.py
@@ -202,6 +202,20 @@ class TestStack(object):
parameters=parameters,
)
+ @pytest.mark.parametrize(
+ "parameters",
+ [["this", "is", "a", "list"], "a_string"],
+ )
+ def test_init__invalid_parameters__parameters_a_list(self, parameters):
+ with pytest.raises(InvalidConfigFileError):
+ Stack(
+ name="stack_name",
+ project_code="project_code",
+ template_handler_config={"type": "file"},
+ region="region",
+ parameters=parameters,
+ )
+
@pytest.mark.parametrize(
"parameters",
[
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-sugar"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
cfn-flip==1.3.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.3
coverage==7.8.0
deepdiff==5.8.1
deprecation==2.1.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
jmespath==1.0.1
jsonschema==3.2.0
MarkupSafe==3.0.2
networkx==2.6.3
ordered-set==4.1.0
packaging==21.3
pluggy==1.5.0
pyparsing==3.2.3
pyrsistent==0.20.0
pytest==8.3.5
pytest-cov==6.0.0
pytest-sugar==1.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
-e git+https://github.com/Sceptre/sceptre.git@c06b3b5816a6ecb29cd9d1c4cd34eee19f8be642#egg=sceptre
sceptre-cmd-resolver==2.0.0
sceptre-file-resolver==1.0.6
six==1.17.0
termcolor==3.0.0
tomli==2.2.1
urllib3==1.26.20
| name: sceptre
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- cfn-flip==1.3.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.3
- coverage==7.8.0
- deepdiff==5.8.1
- deprecation==2.1.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- jmespath==1.0.1
- jsonschema==3.2.0
- markupsafe==3.0.2
- networkx==2.6.3
- ordered-set==4.1.0
- packaging==21.3
- pluggy==1.5.0
- pyparsing==3.2.3
- pyrsistent==0.20.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-sugar==1.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- sceptre==4.4.2
- sceptre-cmd-resolver==2.0.0
- sceptre-file-resolver==1.0.6
- six==1.17.0
- termcolor==3.0.0
- tomli==2.2.1
- urllib3==1.26.20
prefix: /opt/conda/envs/sceptre
| [
"tests/test_stack.py::TestStack::test_init__invalid_parameters__parameters_a_list[parameters0]",
"tests/test_stack.py::TestStack::test_init__invalid_parameters__parameters_a_list[a_string]"
] | [] | [
"tests/test_stack.py::TestStack::test_initialize_stack_with_template_path",
"tests/test_stack.py::TestStack::test_initialize_stack_with_template_handler",
"tests/test_stack.py::TestStack::test_raises_exception_if_path_and_handler_configured",
"tests/test_stack.py::TestStack::test_init__non_boolean_ignore_value__raises_invalid_config_file_error",
"tests/test_stack.py::TestStack::test_init__non_boolean_obsolete_value__raises_invalid_config_file_error",
"tests/test_stack.py::TestStack::test_init__invalid_parameters_raise_invalid_config_file_error[parameters0]",
"tests/test_stack.py::TestStack::test_init__invalid_parameters_raise_invalid_config_file_error[parameters1]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters0]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters1]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters2]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters3]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters4]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters5]",
"tests/test_stack.py::TestStack::test_init__valid_parameters_do_not_raise_invalid_config_file_error[parameters6]",
"tests/test_stack.py::TestStack::test_stack_repr",
"tests/test_stack.py::TestStack::test_configuration_manager__sceptre_role_raises_recursive_resolve__returns_connection_manager_with_no_role",
"tests/test_stack.py::TestStack::test_configuration_manager__sceptre_role_returns_value_second_access__returns_value_on_second_access",
"tests/test_stack.py::TestStack::test_configuration_manager__sceptre_role_returns_value__returns_connection_manager_with_that_role",
"tests/test_stack.py::TestStack::test_iam_role__is_removed_on_removal_version",
"tests/test_stack.py::TestStack::test_role_arn__is_removed_on_removal_version",
"tests/test_stack.py::TestStack::test_iam_role_session_duration__is_removed_on_removal_version",
"tests/test_stack.py::TestStack::test_init__iam_role_set_resolves_to_sceptre_role",
"tests/test_stack.py::TestStack::test_init__role_arn_set_resolves_to_cloudformation_service_role",
"tests/test_stack.py::TestStack::test_init__iam_role_session_duration_set_resolves_to_sceptre_role_session_duration",
"tests/test_stack.py::TestStackSceptreUserData::test_user_data_is_accessible",
"tests/test_stack.py::TestStackSceptreUserData::test_user_data_gets_resolved",
"tests/test_stack.py::TestStackSceptreUserData::test_recursive_user_data_gets_resolved"
] | [] | Apache License 2.0 | 18,886 | 165 | [
"sceptre/stack.py"
] |
|
python-wheel-build__fromager-142 | a1cdbae921a2c5300bcc409004729c7904c982cf | 2024-07-05 16:25:17 | a1cdbae921a2c5300bcc409004729c7904c982cf | dhellmann: The way the patch filenames are being parsed is not correct. | diff --git a/src/fromager/commands/__init__.py b/src/fromager/commands/__init__.py
index 05d92b8..aee7a37 100644
--- a/src/fromager/commands/__init__.py
+++ b/src/fromager/commands/__init__.py
@@ -1,10 +1,19 @@
-from . import bootstrap, build, build_order, canonicalize, download_sequence, step
+from . import (
+ bootstrap,
+ build,
+ build_order,
+ canonicalize,
+ download_sequence,
+ list_overrides,
+ step,
+)
commands = [
bootstrap.bootstrap,
build.build,
build.build_sequence,
build_order.build_order,
+ list_overrides.list_overrides,
step.step,
canonicalize.canonicalize,
download_sequence.download_sequence,
diff --git a/src/fromager/commands/list_overrides.py b/src/fromager/commands/list_overrides.py
new file mode 100644
index 0000000..1d4c78b
--- /dev/null
+++ b/src/fromager/commands/list_overrides.py
@@ -0,0 +1,13 @@
+import click
+
+from fromager import context, overrides
+
+
[email protected]()
[email protected]_obj
+def list_overrides(
+ wkctx: context.WorkContext,
+):
+ """List all of the packages with overrides in the current configuration."""
+ for name in overrides.list_all(wkctx.patches_dir, wkctx.envs_dir):
+ print(name)
diff --git a/src/fromager/overrides.py b/src/fromager/overrides.py
index b89bc53..4adaf79 100644
--- a/src/fromager/overrides.py
+++ b/src/fromager/overrides.py
@@ -5,7 +5,8 @@ import pathlib
import string
import typing
-from packaging.utils import canonicalize_name
+from packaging.utils import canonicalize_name, parse_sdist_filename
+from packaging.version import Version
from stevedore import extension
# An interface for reretrieving per-package information which influences
@@ -128,3 +129,54 @@ def find_override_method(distname: str, method: str) -> typing.Callable:
return None
logger.info("found %s override for %s", method, distname)
return getattr(mod, method)
+
+
+def list_all(patches_dir: pathlib.Path, envs_dir: pathlib.Path, test: bool = False):
+ exts = _get_extensions()
+
+ def patched_projects():
+ for item in patches_dir.glob("*"):
+ if not item.is_dir():
+ continue
+ fake_sdist = item.name + ".tar.gz"
+ name, _ = parse_sdist_filename(fake_sdist)
+ yield name
+
+ def patched_projects_legacy():
+ for item in patches_dir.glob("*.patch"):
+ parts = []
+ for p in item.stem.split("-"):
+ parts.append(p)
+ try:
+ Version(p)
+ # Stop when we get something we can parse as a version string.
+ break
+ except Exception:
+ pass
+ fake_sdist = ("-".join(parts)) + ".tar.gz"
+ try:
+ name, _ = parse_sdist_filename(fake_sdist)
+ except Exception as err:
+ logger.warning(f"could not extract package name from {item}: {err}")
+ continue
+ yield name
+
+ def env_projects():
+ for item in envs_dir.glob("*/*.env"):
+ yield item.stem
+
+ # Use canonicalize_name() to ensure we can correctly remove duplicate
+ # entries from the return list.
+ return sorted(
+ set(
+ canonicalize_name(n)
+ for n in itertools.chain(
+ exts.names(),
+ patched_projects(),
+ patched_projects_legacy(),
+ env_projects(),
+ )
+ if not test
+ or n != "fromager_test" # filter out test package except in test mode
+ )
+ )
| add a command to list all packages with overrides
When setting up a build environment with overrides, it is useful to have tests for each customized build. To do that, one needs to know exactly which packages have overrides, since that lets someone create a "linter" for their CI config to ensure that whenever a new override is added, a new test job is also added.
We should create a command to list the available overrides, including all of the plugins that can be loaded, any packages that have patches, and any packages that have env vars. The list should not include packages listed as pre-built.
The output should be just the list of packages, without versions, one name per line written to the console. We could optionally include versions, but since plugins don't have versions, that may be less useful; let's consider it an enhancement for later.
The function used to generate the list of names should be a stable API so that a linter can import it and invoke it to get an iterable of the same names that are printed by the new command. | python-wheel-build/fromager | diff --git a/tests/test_overrides.py b/tests/test_overrides.py
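As a rough sketch of how a CI linter might consume that API (the `overrides/patches` and `overrides/envs` paths are placeholders; the `overrides.list_all` name follows the function added in the patch above):

```python
import pathlib

from fromager import overrides

# Illustrative only: print every package that has an override (plugin, patch,
# or env file) so a CI check can compare the list against its test jobs.
patches_dir = pathlib.Path("overrides/patches")  # placeholder path
envs_dir = pathlib.Path("overrides/envs")        # placeholder path

for name in overrides.list_all(patches_dir, envs_dir):
    print(name)  # one canonicalized package name per line, like the CLI command
```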
index 08420a4..32a1164 100644
--- a/tests/test_overrides.py
+++ b/tests/test_overrides.py
@@ -96,3 +96,52 @@ def test_extra_environ_for_pkg_expansion(tmp_path: pathlib.Path):
with pytest.raises(ValueError):
extra_environ = overrides.extra_environ_for_pkg(tmp_path, pkg_name, variant)
+
+
+def test_list_all(tmp_path):
+ patches_dir = tmp_path / "patches"
+ patches_dir.mkdir()
+
+ project_patch_dir = patches_dir / "project-with-patch-1.2.3"
+ project_patch_dir.mkdir()
+
+ # legacy form
+ p1 = patches_dir / "legacy-project-1.2.3-001.patch"
+ np1 = patches_dir / "legacy-project-1.2.3.txt"
+ p2 = patches_dir / "fromager_test-1.2.3.patch" # duplicate
+
+ # new form with project dir
+ p3 = project_patch_dir / "003.patch"
+ p4 = project_patch_dir / "004.patch"
+ np2 = project_patch_dir / "not-a-patch.txt"
+
+ # Create all of the test files
+ for p in [p1, p2, p3, p4]:
+ p.write_text("this is a patch file")
+ for f in [np1, np2]:
+ f.write_text("this is not a patch file")
+
+ env_dir = tmp_path / "env"
+ env_dir.mkdir()
+ variant_dir = env_dir / "variant"
+ variant_dir.mkdir()
+ project_env = variant_dir / "project-with-env.env"
+ project_env.write_text("VAR1=VALUE1\nVAR2=VALUE2")
+ project_env2 = variant_dir / "fromager_test.env"
+ project_env2.write_text("VAR1=VALUE1\nVAR2=VALUE2") # duplicate
+
+ expected = [
+ "project-with-patch",
+ "legacy-project",
+ "project-with-env",
+ "fromager-test",
+ ]
+ expected.sort()
+
+ packages = overrides.list_all(
+ patches_dir=patches_dir,
+ envs_dir=env_dir,
+ test=True,
+ )
+
+ assert expected == packages
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"coverage",
"requests-mock",
"setuptools",
"setuptools_scm"
],
"pre_install": null,
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
Deprecated==1.2.18
distlib==0.3.9
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
-e git+https://github.com/python-wheel-build/fromager.git@a1cdbae921a2c5300bcc409004729c7904c982cf#egg=fromager
html5lib==1.1
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pbr==6.1.1
pkginfo==1.12.1.2
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pycparser==2.22
PyGithub==2.6.1
PyJWT==2.10.1
PyNaCl==1.5.0
pyproject_hooks==1.2.0
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
python-pypi-mirror==5.2.1
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
resolvelib==1.1.0
setuptools-scm==8.2.0
six==1.17.0
stevedore==5.4.1
toml==0.10.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
webencodings==0.5.1
wrapt==1.17.2
| name: fromager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py310h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py310h06a4308_0
- pip=25.0=py310h06a4308_0
- pluggy=1.5.0=py310h06a4308_0
- pytest=8.3.4=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py310h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- deprecated==1.2.18
- distlib==0.3.9
- filelock==3.18.0
- fromager==0.12.2.dev11+ga1cdbae
- html5lib==1.1
- idna==3.10
- pbr==6.1.1
- pkginfo==1.12.1.2
- platformdirs==4.3.7
- pycparser==2.22
- pygithub==2.6.1
- pyjwt==2.10.1
- pynacl==1.5.0
- pyproject-hooks==1.2.0
- pytest-cov==6.0.0
- python-pypi-mirror==5.2.1
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- resolvelib==1.1.0
- setuptools-scm==8.2.0
- six==1.17.0
- stevedore==5.4.1
- toml==0.10.2
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- webencodings==0.5.1
- wrapt==1.17.2
prefix: /opt/conda/envs/fromager
| [
"tests/test_overrides.py::test_list_all"
] | [] | [
"tests/test_overrides.py::test_patches_for_source_dir",
"tests/test_overrides.py::test_extra_environ_for_pkg",
"tests/test_overrides.py::test_extra_environ_for_pkg_expansion"
] | [] | Apache License 2.0 | 18,892 | 920 | [
"src/fromager/commands/__init__.py",
"src/fromager/overrides.py"
] |
lincc-frameworks__nested-pandas-121 | 307c904b8a148e7d5f95911bf69ef7ec974cede6 | 2024-07-09 18:13:37 | 975fbc88de84b6d8d9ae309f71ae3f0b96831818 | diff --git a/src/nested_pandas/utils/utils.py b/src/nested_pandas/utils/utils.py
index a1b0a9f..0f980d2 100644
--- a/src/nested_pandas/utils/utils.py
+++ b/src/nested_pandas/utils/utils.py
@@ -34,8 +34,9 @@ def count_nested(df, nested, by=None, join=True) -> NestedFrame:
)
else:
# this may be able to be sped up using tolists() as well
- counts = df[nested].apply(lambda x: x[by].value_counts())
+ counts = df[nested].apply(lambda x: x[by].value_counts(sort=False))
counts = counts.rename(columns={colname: f"n_{nested}_{colname}" for colname in counts.columns})
+ counts = counts.reindex(sorted(counts.columns), axis=1)
if join:
return df.join(counts)
# else just return the counts NestedFrame
| Count Nested: Sort output columns of `by` behavior alphabetically for nested-dask meta
**Feature request**
When `by` is set for count_nested, the output columns are ordered as per value_counts default behavior. This ordering is value-based and thus unpredictable for dask. We should have `by` output columns be sorted alphabetically to create a meta-friendly output for Nested-Dask. The PR should just involve implementing the behavior like this:
```
else:
# this may be able to be sped up using tolists() as well
counts = df[nested].apply(lambda x: x[by].value_counts(sort=False))
counts = counts.rename(columns={colname: f"n_{nested}_{colname}" for colname in counts.columns})
counts = counts.reindex(sorted(counts.columns), axis=1)
```
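With that change in place, a small usage sketch of the resulting ordering (the nested data mirrors the `count_nested` unit test; the base frame and the `nested_pandas` import paths are assumptions):

```python
import numpy as np
import pandas as pd
from nested_pandas import NestedFrame
from nested_pandas.utils import count_nested

base = NestedFrame(data={"a": [1, 2, 3]}, index=[0, 1, 2])
nested = pd.DataFrame(
    data={
        "c": [0, 2, 4, 1, np.nan, 3, 1, 4, 1],
        "label": ["b", "a", "b", "b", "a", "a", "b", "a", "b"],
    },
    index=[0, 0, 0, 1, 1, 1, 2, 2, 2],
)
base = base.add_nested(nested, "nested")

# With value_counts(sort=False) plus the alphabetical reindex, the "by" columns
# come out in a fixed order regardless of which label happens to be most common,
# giving nested-dask a predictable meta.
counts = count_nested(base, "nested", by="label", join=False)
print(list(counts.columns))  # ['n_nested_a', 'n_nested_b']
```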
**Before submitting**
Please check the following:
- [x] I have described the purpose of the suggested change, specifying what I need the enhancement to accomplish, i.e. what problem it solves.
- [x] I have included any relevant links, screenshots, environment information, and data relevant to implementing the requested feature, as well as pseudocode for how I want to access the new functionality.
- [x] If I have ideas for how the new feature could be implemented, I have provided explanations and/or pseudocode and/or task lists for the steps.
| lincc-frameworks/nested-pandas | diff --git a/tests/nested_pandas/utils/test_utils.py b/tests/nested_pandas/utils/test_utils.py
index e1403a8..5397d75 100644
--- a/tests/nested_pandas/utils/test_utils.py
+++ b/tests/nested_pandas/utils/test_utils.py
@@ -15,7 +15,7 @@ def test_count_nested(join):
data={
"c": [0, 2, 4, 1, np.nan, 3, 1, 4, 1],
"d": [5, 4, 7, 5, 3, 1, 9, 3, 4],
- "label": ["a", "a", "b", "b", "a", "a", "b", "a", "b"],
+ "label": ["b", "a", "b", "b", "a", "a", "b", "a", "b"],
},
index=[0, 0, 0, 1, 1, 1, 2, 2, 2],
)
@@ -27,8 +27,14 @@ def test_count_nested(join):
# Test count by
label_counts = count_nested(base, "nested", by="label", join=join)
- assert all(label_counts["n_nested_a"].values == [2, 2, 1])
- assert all(label_counts["n_nested_b"].values == [1, 1, 2])
+
+ assert all(label_counts["n_nested_a"].values == [1, 2, 1])
+ assert all(label_counts["n_nested_b"].values == [2, 1, 2])
+
+ # Make sure the ordering is alphabetical
+ # https://github.com/lincc-frameworks/nested-pandas/issues/109
+ assert label_counts.columns[-1] == "n_nested_b"
+ assert label_counts.columns[-2] == "n_nested_a"
# Test join behavior
if join:
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accessible-pygments==0.0.5
alabaster==0.7.16
anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
astroid==3.3.9
asttokens==3.0.0
asv==0.6.3
asv_runner==0.2.1
async-lru==2.0.5
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
bleach==6.2.0
build==1.2.2.post1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
comm==0.2.2
coverage==7.8.0
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
fqdn==1.5.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
identify==2.6.9
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
jupytext==1.16.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mdit-py-plugins==0.4.2
mdurl==0.1.2
mistune==3.1.3
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbsphinx==0.9.7
nest-asyncio==1.6.0
-e git+https://github.com/lincc-frameworks/nested-pandas.git@307c904b8a148e7d5f95911bf69ef7ec974cede6#egg=nested_pandas
nodeenv==1.9.1
notebook==7.3.3
notebook_shim==0.2.4
numpy==2.0.2
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==19.0.1
pycparser==2.22
pydata-sphinx-theme==0.15.4
Pygments==2.19.1
Pympler==1.1
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
ruff==0.11.2
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinx-autoapi==3.6.0
sphinx-book-theme==1.1.4
sphinx-copybutton==0.5.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
stdlib-list==0.11.1
tabulate==0.9.0
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: nested-pandas
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- accessible-pygments==0.0.5
- alabaster==0.7.16
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- astroid==3.3.9
- asttokens==3.0.0
- asv==0.6.3
- asv-runner==0.2.1
- async-lru==2.0.5
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- build==1.2.2.post1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- comm==0.2.2
- coverage==7.8.0
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- fqdn==1.5.1
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- identify==2.6.9
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- jupytext==1.16.7
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mdit-py-plugins==0.4.2
- mdurl==0.1.2
- mistune==3.1.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- nested-pandas==0.1.2.dev19+g307c904
- nodeenv==1.9.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==2.0.2
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pyarrow==19.0.1
- pycparser==2.22
- pydata-sphinx-theme==0.15.4
- pygments==2.19.1
- pympler==1.1
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- ruff==0.11.2
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinx-autoapi==3.6.0
- sphinx-book-theme==1.1.4
- sphinx-copybutton==0.5.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- stdlib-list==0.11.1
- tabulate==0.9.0
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/nested-pandas
| [
"tests/nested_pandas/utils/test_utils.py::test_count_nested[True]",
"tests/nested_pandas/utils/test_utils.py::test_count_nested[False]"
] | [] | [] | [] | MIT License | 18,920 | 222 | [
"src/nested_pandas/utils/utils.py"
] |
|
SpikeInterface__spikeinterface-3180 | e4fa25abf4a949a31bdfebe7c868e393c11cfed3 | 2024-07-10 22:23:22 | b3383575feade4bf70d623937d6ea2db9cd05b37 | diff --git a/src/spikeinterface/core/base.py b/src/spikeinterface/core/base.py
index 8b037ad10..f3e300fff 100644
--- a/src/spikeinterface/core/base.py
+++ b/src/spikeinterface/core/base.py
@@ -7,6 +7,7 @@ import warnings
import weakref
import json
import pickle
+import os
import random
import string
from packaging.version import parse
@@ -950,14 +951,13 @@ class BaseExtractor:
folder.mkdir(parents=True, exist_ok=False)
# dump provenance
+ provenance_file = folder / f"provenance.json"
if self.check_serializability("json"):
- provenance_file = folder / f"provenance.json"
- self.dump(provenance_file)
- elif self.check_serializability("pickle"):
- provenance_file = folder / f"provenance.pkl"
self.dump(provenance_file)
else:
- warnings.warn("The extractor is not serializable to file. The provenance will not be saved.")
+ provenance_file.write_text(
+ json.dumps({"warning": "the provenace is not json serializable!!!"}), encoding="utf8"
+ )
self.save_metadata_to_folder(folder)
@@ -1024,6 +1024,7 @@ class BaseExtractor:
cached: ZarrExtractor
Saved copy of the extractor.
"""
+ import zarr
from .zarrextractors import read_zarr
save_kwargs.pop("format", None)
diff --git a/src/spikeinterface/core/generate.py b/src/spikeinterface/core/generate.py
index 6ce94114c..f5312f9c4 100644
--- a/src/spikeinterface/core/generate.py
+++ b/src/spikeinterface/core/generate.py
@@ -1803,8 +1803,6 @@ class InjectTemplatesRecording(BaseRecording):
)
self.add_recording_segment(recording_segment)
- # to discuss: maybe we could set json serializability to False always
- # because templates could be large!
if not sorting.check_serializability("json"):
self._serializability["json"] = False
if parent_recording is not None:
diff --git a/src/spikeinterface/core/unitsaggregationsorting.py b/src/spikeinterface/core/unitsaggregationsorting.py
index ea019268f..9eb37e31e 100644
--- a/src/spikeinterface/core/unitsaggregationsorting.py
+++ b/src/spikeinterface/core/unitsaggregationsorting.py
@@ -34,7 +34,21 @@ class UnitsAggregationSorting(BaseSorting):
)
unit_ids = list(renamed_unit_ids)
else:
- unit_ids = list(np.arange(num_all_units))
+ unit_ids_dtypes = [sort.get_unit_ids().dtype for sort in sorting_list]
+ all_ids_are_same_type = np.unique(unit_ids_dtypes).size == 1
+ all_units_ids_are_unique = False
+ if all_ids_are_same_type:
+ combined_ids = np.concatenate([sort.get_unit_ids() for sort in sorting_list])
+ all_units_ids_are_unique = np.unique(combined_ids).size == num_all_units
+
+ if all_ids_are_same_type and all_units_ids_are_unique:
+ unit_ids = combined_ids
+ else:
+ default_unit_ids = [str(i) for i in range(num_all_units)]
+ if all_ids_are_same_type and np.issubdtype(unit_ids_dtypes[0], np.integer):
+ unit_ids = np.arange(num_all_units, dtype=np.uint64)
+ else:
+ unit_ids = default_unit_ids
# unit map maps unit ids that are used to get spike trains
u_id = 0
diff --git a/src/spikeinterface/generation/drift_tools.py b/src/spikeinterface/generation/drift_tools.py
index 70e13160f..cce2e08b5 100644
--- a/src/spikeinterface/generation/drift_tools.py
+++ b/src/spikeinterface/generation/drift_tools.py
@@ -458,9 +458,6 @@ class InjectDriftingTemplatesRecording(BaseRecording):
self.set_probe(drifting_templates.probe, in_place=True)
- # templates are too large, we don't serialize them to JSON
- self._serializability["json"] = False
-
self._kwargs = {
"sorting": sorting,
"drifting_templates": drifting_templates,
| Unit aggregation should preserve unit ids.
Like https://github.com/SpikeInterface/spikeinterface/issues/2789 but for UnitAggregation. Currently it does not preserve them:
```python
from spikeinterface.core import generate_sorting
sorting1 = generate_sorting(num_units=3)
sorting1 = sorting1.rename_units(new_unit_ids=["unit1", "unit2", "unit3"])
sorting2 = generate_sorting(num_units=3)
sorting2 = sorting2.rename_units(new_unit_ids=["unit4", "unit5", "unit6"])
from spikeinterface.core import aggregate_units
sorting = aggregate_units([sorting1, sorting2])
sorting.get_unit_ids()
array([0, 1, 2, 3, 4, 5])
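# Desired behaviour (a sketch; the expected ids below are an assumption based on
# the renames above): when the original unit ids are unique across the sortings,
# aggregation should keep them instead of renumbering, i.e.
#   list(sorting.get_unit_ids()) == ["unit1", "unit2", "unit3", "unit4", "unit5", "unit6"]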
``` | SpikeInterface/spikeinterface | diff --git a/src/spikeinterface/core/tests/test_unitsaggregationsorting.py b/src/spikeinterface/core/tests/test_unitsaggregationsorting.py
index b6cb479c7..09b8affe5 100644
--- a/src/spikeinterface/core/tests/test_unitsaggregationsorting.py
+++ b/src/spikeinterface/core/tests/test_unitsaggregationsorting.py
@@ -5,6 +5,7 @@ from spikeinterface.core import aggregate_units
from spikeinterface.core import NpzSortingExtractor
from spikeinterface.core import create_sorting_npz
+from spikeinterface.core import generate_sorting
def test_unitsaggregationsorting(create_cache_folder):
@@ -92,5 +93,42 @@ def test_unitsaggregationsorting(create_cache_folder):
print(sorting_agg_prop.get_property("brain_area"))
+def test_unit_aggregation_preserve_ids():
+
+ sorting1 = generate_sorting(num_units=3)
+ sorting1 = sorting1.rename_units(new_unit_ids=["unit1", "unit2", "unit3"])
+
+ sorting2 = generate_sorting(num_units=3)
+ sorting2 = sorting2.rename_units(new_unit_ids=["unit4", "unit5", "unit6"])
+
+ aggregated_sorting = aggregate_units([sorting1, sorting2])
+ assert aggregated_sorting.get_num_units() == 6
+ assert list(aggregated_sorting.get_unit_ids()) == ["unit1", "unit2", "unit3", "unit4", "unit5", "unit6"]
+
+
+def test_unit_aggregation_does_not_preserve_ids_if_not_unique():
+ sorting1 = generate_sorting(num_units=3)
+ sorting1 = sorting1.rename_units(new_unit_ids=["unit1", "unit2", "unit3"])
+
+ sorting2 = generate_sorting(num_units=3)
+ sorting2 = sorting2.rename_units(new_unit_ids=["unit1", "unit2", "unit3"])
+
+ aggregated_sorting = aggregate_units([sorting1, sorting2])
+ assert aggregated_sorting.get_num_units() == 6
+ assert list(aggregated_sorting.get_unit_ids()) == ["0", "1", "2", "3", "4", "5"]
+
+
+def test_unit_aggregation_does_not_preserve_ids_not_the_same_type():
+ sorting1 = generate_sorting(num_units=3)
+ sorting1 = sorting1.rename_units(new_unit_ids=["unit1", "unit2", "unit3"])
+
+ sorting2 = generate_sorting(num_units=2)
+ sorting2 = sorting2.rename_units(new_unit_ids=[1, 2])
+
+ aggregated_sorting = aggregate_units([sorting1, sorting2])
+ assert aggregated_sorting.get_num_units() == 5
+ assert list(aggregated_sorting.get_unit_ids()) == ["0", "1", "2", "3", "4"]
+
+
if __name__ == "__main__":
test_unitsaggregationsorting()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 4
} | 0.100 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[full,widgets]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-dependency",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | altair==5.5.0
asciitree==0.3.3
asttokens==3.0.0
attrs==25.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
comm==0.2.2
contourpy==1.3.0
coverage==7.8.0
cuda-bindings==12.8.0
cuda-python==12.8.0
cycler==0.12.1
decorator==5.2.1
distinctipy==1.3.4
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
executing==2.2.0
fasteners==0.19
figurl==0.3.0a1
fonttools==4.56.0
h5py==3.13.0
hdmf==4.0.0
idna==3.10
importlib_resources==6.5.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipympl==0.9.7
ipython==8.18.1
ipywidgets==8.1.5
jedi==0.19.2
Jinja2==3.1.6
joblib==1.4.2
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyterlab_widgets==3.0.13
kachery==2.0.2
kiwisolver==1.4.7
llvmlite==0.43.0
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
narwhals==1.32.0
neo==0.14.0
networkx==3.2.1
numba==0.60.0
numcodecs==0.12.1
numpy==1.26.4
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
parso==0.8.4
pexpect==4.9.0
pillow==11.1.0
pluggy @ file:///croot/pluggy_1733169602837/work
probeinterface==0.2.26
prompt_toolkit==3.0.50
ptyprocess==0.7.0
pure_eval==0.2.3
Pygments==2.19.1
pynwb==3.0.0
pyparsing==3.2.3
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
pytest-dependency==0.6.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
quantities==0.16.1
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
scikit-learn==1.6.1
scipy==1.13.1
simplejson==3.20.1
six==1.17.0
sortingview==0.14.1
-e git+https://github.com/SpikeInterface/spikeinterface.git@e4fa25abf4a949a31bdfebe7c868e393c11cfed3#egg=spikeinterface
stack-data==0.6.3
threadpoolctl==3.6.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
traitlets==5.14.3
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
wcwidth==0.2.13
widgetsnbextension==4.0.13
xarray==2024.7.0
zarr==2.17.2
zipp==3.21.0
| name: spikeinterface
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- altair==5.5.0
- asciitree==0.3.3
- asttokens==3.0.0
- attrs==25.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- comm==0.2.2
- contourpy==1.3.0
- coverage==7.8.0
- cuda-bindings==12.8.0
- cuda-python==12.8.0
- cycler==0.12.1
- decorator==5.2.1
- distinctipy==1.3.4
- executing==2.2.0
- fasteners==0.19
- figurl==0.3.0a1
- fonttools==4.56.0
- h5py==3.13.0
- hdmf==4.0.0
- idna==3.10
- importlib-resources==6.5.2
- ipympl==0.9.7
- ipython==8.18.1
- ipywidgets==8.1.5
- jedi==0.19.2
- jinja2==3.1.6
- joblib==1.4.2
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyterlab-widgets==3.0.13
- kachery==2.0.2
- kiwisolver==1.4.7
- llvmlite==0.43.0
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- narwhals==1.32.0
- neo==0.14.0
- networkx==3.2.1
- numba==0.60.0
- numcodecs==0.12.1
- numpy==1.26.4
- pandas==2.2.3
- parso==0.8.4
- pexpect==4.9.0
- pillow==11.1.0
- probeinterface==0.2.26
- prompt-toolkit==3.0.50
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pygments==2.19.1
- pynwb==3.0.0
- pyparsing==3.2.3
- pytest-cov==6.0.0
- pytest-dependency==0.6.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- quantities==0.16.1
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- scikit-learn==1.6.1
- scipy==1.13.1
- simplejson==3.20.1
- six==1.17.0
- sortingview==0.14.1
- spikeinterface==0.101.0rc0
- stack-data==0.6.3
- threadpoolctl==3.6.0
- tqdm==4.67.1
- traitlets==5.14.3
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- wcwidth==0.2.13
- widgetsnbextension==4.0.13
- xarray==2024.7.0
- zarr==2.17.2
- zipp==3.21.0
prefix: /opt/conda/envs/spikeinterface
| [
"src/spikeinterface/core/tests/test_unitsaggregationsorting.py::test_unit_aggregation_preserve_ids",
"src/spikeinterface/core/tests/test_unitsaggregationsorting.py::test_unit_aggregation_does_not_preserve_ids_if_not_unique",
"src/spikeinterface/core/tests/test_unitsaggregationsorting.py::test_unit_aggregation_does_not_preserve_ids_not_the_same_type"
] | [] | [
"src/spikeinterface/core/tests/test_unitsaggregationsorting.py::test_unitsaggregationsorting"
] | [] | MIT License | 18,933 | 1,050 | [
"src/spikeinterface/core/base.py",
"src/spikeinterface/core/generate.py",
"src/spikeinterface/core/unitsaggregationsorting.py",
"src/spikeinterface/generation/drift_tools.py"
] |
|
pyvista__pyvista-6371 | 3a6efbbc2bd6c0b3b882e1af153a86f9ed41855a | 2024-07-11 19:34:11 | 1402ec78e18e58c3f1f512739d5bce6a2ef58a09 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pyvista/pyvista/pull/6371?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 91.20%. Comparing base [(`9a88c38`)](https://app.codecov.io/gh/pyvista/pyvista/commit/9a88c38b87969899709289abc0b45c42cfc1e5a0?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) to head [(`cfa42ac`)](https://app.codecov.io/gh/pyvista/pyvista/commit/cfa42ac2157897e96bac14c3851228e0baee67e9?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista).
> :exclamation: There is a different number of reports uploaded between BASE (9a88c38) and HEAD (cfa42ac). Click for more details.
>
> <details><summary>HEAD has 11 uploads less than BASE</summary>
>
>| Flag | BASE (9a88c38) | HEAD (cfa42ac) |
>|------|------|------|
>||12|1|
></details>
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #6371 +/- ##
==========================================
- Coverage 97.23% 91.20% -6.03%
==========================================
Files 142 142
Lines 26507 26528 +21
==========================================
- Hits 25773 24196 -1577
- Misses 734 2332 +1598
```
</details> | diff --git a/pyvista/core/datasetattributes.py b/pyvista/core/datasetattributes.py
index feb57baf..c7b93841 100644
--- a/pyvista/core/datasetattributes.py
+++ b/pyvista/core/datasetattributes.py
@@ -310,7 +310,7 @@ class DataSetAttributes(_vtk.VTKObjectWrapper):
pyvista_ndarray([0, 1, 2, 3, 4, 5, 6, 7])
"""
- self._raise_field_data_no_scalars_vectors()
+ self._raise_field_data_no_scalars_vectors_normals()
if self.GetScalars() is not None:
array = pyvista_ndarray(
self.GetScalars(),
@@ -351,7 +351,7 @@ class DataSetAttributes(_vtk.VTKObjectWrapper):
(8, 3)
"""
- self._raise_field_data_no_scalars_vectors()
+ self._raise_field_data_no_scalars_vectors_normals()
vectors = self.GetVectors()
if vectors is not None:
return pyvista_ndarray(vectors, dataset=self.dataset, association=self.association)
@@ -1084,10 +1084,10 @@ class DataSetAttributes(_vtk.VTKObjectWrapper):
if not 0 <= index < max_index:
raise KeyError(f'Array index ({index}) out of range [0, {max_index - 1}]')
- def _raise_field_data_no_scalars_vectors(self):
+ def _raise_field_data_no_scalars_vectors_normals(self):
"""Raise a ``TypeError`` if FieldData."""
if self.association == FieldAssociation.NONE:
- raise TypeError('FieldData does not have active scalars or vectors.')
+ raise TypeError('FieldData does not have active scalars or vectors or normals.')
@property
def active_scalars_name(self) -> str | None:
@@ -1140,12 +1140,64 @@ class DataSetAttributes(_vtk.VTKObjectWrapper):
if name is None:
self.SetActiveScalars(None)
return
- self._raise_field_data_no_scalars_vectors()
+ self._raise_field_data_no_scalars_vectors_normals()
dtype = self[name].dtype
# only vtkDataArray subclasses can be set as active attributes
if np.issubdtype(dtype, np.number) or np.issubdtype(dtype, bool):
self.SetActiveScalars(name)
+ @property
+ def _active_normals_name(self) -> str | None:
+ """Return name of the active normals.
+
+ Returns
+ -------
+ Optional[str]
+ Name of the active normals.
+
+ Examples
+ --------
+ Create a mesh add a new point array with normals.
+
+ >>> import pyvista as pv
+ >>> import numpy as np
+ >>> mesh = pv.Sphere()
+ >>> normals = np.random.default_rng().random((mesh.n_points, 3))
+ >>> mesh.point_data['my-normals'] = normals
+
+ Set the active normals.
+
+ >>> mesh.point_data._active_normals_name = 'my-normals'
+ >>> mesh.point_data._active_normals_name
+ 'my-normals'
+ """
+ if self.GetNormals() is not None:
+ return str(self.GetNormals().GetName())
+ return None
+
+ @_active_normals_name.setter
+ def _active_normals_name(self, name: str) -> None: # numpydoc ignore=GL08
+ """Set name of the active normals.
+
+ Parameters
+ ----------
+ name : str
+ Name of the active normals.
+
+ """
+ # permit setting no active
+ if name is None:
+ self.SetActiveNormals(None)
+ return
+ self._raise_field_data_no_scalars_vectors_normals()
+ if name not in self:
+ raise KeyError(f'DataSetAttribute does not contain "{name}"')
+ # verify that the array has the correct number of components
+ n_comp = self.GetArray(name).GetNumberOfComponents()
+ if n_comp != 3:
+ raise ValueError(f'{name} needs 3 components, has ({n_comp})')
+ self.SetActiveNormals(name)
+
@property
def active_vectors_name(self) -> str | None:
"""Return name of the active vectors.
@@ -1186,7 +1238,7 @@ class DataSetAttributes(_vtk.VTKObjectWrapper):
if name is None:
self.SetActiveVectors(None)
return
- self._raise_field_data_no_scalars_vectors()
+ self._raise_field_data_no_scalars_vectors_normals()
if name not in self:
raise KeyError(f'DataSetAttribute does not contain "{name}"')
# verify that the array has the correct number of components
diff --git a/pyvista/core/filters/poly_data.py b/pyvista/core/filters/poly_data.py
index 7975bf2f..d0154e1b 100644
--- a/pyvista/core/filters/poly_data.py
+++ b/pyvista/core/filters/poly_data.py
@@ -537,18 +537,22 @@ class PolyDataFilters(DataSetFilters):
faces=merged.GetCells(),
deep=False,
)
- # Calling update() will modify the active scalars in this specific
- # case. Store values to restore after updating.
+ # Calling update() will modify the active scalars and normals in this
+ # specific case. Store values to restore after updating.
active_point_scalars_name = merged.point_data.active_scalars_name
active_cell_scalars_name = merged.cell_data.active_scalars_name
+ active_point_normals_name = merged.point_data._active_normals_name
+ active_cell_normals_name = merged.cell_data._active_normals_name
polydata_merged.point_data.update(merged.point_data)
polydata_merged.cell_data.update(merged.cell_data)
polydata_merged.field_data.update(merged.field_data)
- # restore active scalars
+ # restore active scalars and normals
polydata_merged.point_data.active_scalars_name = active_point_scalars_name
polydata_merged.cell_data.active_scalars_name = active_cell_scalars_name
+ polydata_merged.point_data._active_normals_name = active_point_normals_name
+ polydata_merged.cell_data._active_normals_name = active_cell_normals_name
merged = polydata_merged
| `merge` does not set active normals
### Describe the bug, what's wrong, and what you expected.
If I set custom normals and plot them, everything works as expected:
``` python
import pyvista as pv
plane = pv.Plane(i_resolution=1, j_resolution=1)
plane["Normals"][0] # [0, 0, 1]
plane["Normals"] *= -1
plane.plot_normals()
```
<img src="https://github.com/pyvista/pyvista/assets/89109579/924ae596-b667-48ba-ba02-1a998344c55d" width="300">
But if I merge the mesh, the normals are broken:
``` python
merged = pv.merge([plane])
merged.plot_normals()
```
<img src="https://github.com/pyvista/pyvista/assets/89109579/647ed33b-5934-4712-8f21-ce38ccc60f6b" width="300">
The expected result is that the normals should stay the same after merging. So, in the second image, the normals should be pointing down like the first.
The issue seems to be that although `merge` passes the `"Normals"` array through to the output, it is not set as the _active_ normals array.
``` python
merged["Normals"][0] # [0, 0, -1]
merged.active_normals # None
```
This means that when `point_normals` is accessed, it will compute new normals since none are active (and overwrite the custom normals).
``` python
merged.point_normals[0] # [0, 0, 1]
```
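A minimal workaround sketch until `merge` restores the active normals itself, assuming the array is still named `"Normals"` on the merged mesh, is to flag it active again through the wrapped VTK point data:

``` python
import pyvista as pv

plane = pv.Plane(i_resolution=1, j_resolution=1)
plane["Normals"] *= -1

merged = pv.merge([plane])
# The array survives the merge but is no longer marked as the active normals,
# so re-activate it via the underlying vtkPointData object.
merged.GetPointData().SetActiveNormals("Normals")
merged.active_normals[0]  # [0, 0, -1], matching the input again
```

This only changes which array is marked active; the data itself is untouched, so `point_normals` should stop recomputing and overwriting the custom values.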
### Steps to reproduce the bug.
See above
### System Information
```shell
OS : Darwin (macOS 14.4.1)
CPU(s) : 8
Machine : arm64
Architecture : 64bit
Environment : Python
GPU Vendor : Apple
GPU Renderer : Apple M2
GPU Version : 4.1 Metal - 88
MathText Support : True
Python 3.9.13 (v3.9.13:6de2ca5339, May 17 2022, 11:37:23) [Clang 13.0.0
(clang-1300.0.29.30)]
pyvista : 0.44.dev0
vtk : 9.3.0
numpy : 1.26.4
matplotlib : 3.9.0
scooby : 0.10.0
pooch : 1.8.2
pillow : 10.3.0
imageio : 2.34.1
IPython : 8.18.1
colorcet : 3.1.0
cmocean : 4.0.3
ipywidgets : 8.1.3
scipy : 1.13.1
tqdm : 4.66.4
meshio : 5.3.5
pytest_pyvista : 0.1.9
trame : 3.6.2
trame_client : 3.2.0
trame_server : 3.0.2
trame_vtk : 2.8.9
trame_vuetify : 2.6.0
--------------------------------------------------------------------------------
```
### Screenshots
_No response_ | pyvista/pyvista | diff --git a/tests/core/test_dataset_filters.py b/tests/core/test_dataset_filters.py
index 1f130739..cb1095a4 100644
--- a/tests/core/test_dataset_filters.py
+++ b/tests/core/test_dataset_filters.py
@@ -2862,6 +2862,29 @@ def test_merge_general(uniform):
assert isinstance(merged, pv.PolyData)
+def test_merge_active_normals():
+ plane = pv.Plane()
+
+ # Check default normals
+ default_normal = np.array([0, 0, 1])
+ assert np.array_equal(plane["Normals"][0], default_normal)
+ assert np.array_equal(plane.active_normals[0], default_normal)
+ assert np.array_equal(plane.point_normals[0], default_normal)
+
+ # Customize the normals
+ plane["Normals"] *= -1
+ negative_normal = -default_normal
+ assert np.array_equal(plane["Normals"][0], negative_normal)
+ assert np.array_equal(plane.active_normals[0], negative_normal)
+ assert np.array_equal(plane.point_normals[0], negative_normal)
+
+ # Now test merge
+ merged = pv.merge([plane])
+ assert np.array_equal(merged["Normals"][0], negative_normal)
+ assert np.array_equal(merged.active_normals[0], negative_normal)
+ assert np.array_equal(merged.point_normals[0], negative_normal)
+
+
def test_iadd_general(uniform, hexbeam, sphere):
unstructured = hexbeam
sphere_shifted = sphere.copy()
diff --git a/tests/core/test_datasetattributes.py b/tests/core/test_datasetattributes.py
index 2ae02f45..32f1ad95 100644
--- a/tests/core/test_datasetattributes.py
+++ b/tests/core/test_datasetattributes.py
@@ -2,6 +2,7 @@ from __future__ import annotations
import os
import platform
+import re
from string import ascii_letters
from string import digits
from string import whitespace
@@ -171,6 +172,29 @@ def test_active_scalars_name(sphere):
assert sphere.point_data.active_scalars_name is None
+def test_active_normals_name():
+ # Load dataset known to have active normals by default
+ sphere = pv.Sphere()
+ assert sphere.point_data._active_normals_name == 'Normals'
+ sphere.clear_data()
+ assert sphere.point_data._active_normals_name is None
+
+ # Set name of custom normals
+ key = 'data0'
+ normals = np.array([[0, 1, 0]] * sphere.n_points)
+ sphere.point_data[key] = normals
+ assert sphere.point_data._active_normals_name is None
+ sphere.point_data._active_normals_name = key
+ assert sphere.point_data._active_normals_name == 'data0'
+
+ # Test raises
+ sphere.point_data[key] = range(sphere.n_points)
+ with pytest.raises(ValueError, match=re.escape('data0 needs 3 components, has (1)')):
+ sphere.point_data._active_normals_name = key
+ with pytest.raises(KeyError, match='DataSetAttribute does not contain "foobar"'):
+ sphere.point_data._active_normals_name = 'foobar'
+
+
def test_set_scalars(sphere):
scalars = np.array(sphere.n_points)
key = 'scalars'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.44 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs @ file:///home/conda/feedstock_root/build_artifacts/aiohappyeyeballs_1741775197943/work
aiohttp @ file:///home/conda/feedstock_root/build_artifacts/aiohttp_1742268596946/work
aiosignal @ file:///home/conda/feedstock_root/build_artifacts/aiosignal_1734342155601/work
anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
argon2-cffi @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi_1733311059102/work
argon2-cffi-bindings @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi-bindings_1725356560642/work
arrow @ file:///home/conda/feedstock_root/build_artifacts/arrow_1733584251875/work
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
async-lru @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_async-lru_1742153708/work
async-timeout @ file:///home/conda/feedstock_root/build_artifacts/async-timeout_1733235340728/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
cached-property @ file:///home/conda/feedstock_root/build_artifacts/cached_property_1615209429212/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725571112467/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400455427/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
cmocean @ file:///home/conda/feedstock_root/build_artifacts/cmocean_1734343940570/work
codecov @ file:///home/conda/feedstock_root/build_artifacts/codecov_1734975286850/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
colorcet @ file:///home/conda/feedstock_root/build_artifacts/colorcet_1734007314889/work
colorspacious @ file:///home/conda/feedstock_root/build_artifacts/colorspacious_1734341944815/work
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1727293517607/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381224823/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1741148409996/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
execnet @ file:///home/conda/feedstock_root/build_artifacts/execnet_1733230988954/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1733569351617/work
fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fqdn @ file:///home/conda/feedstock_root/build_artifacts/fqdn_1733327382592/work/dist
frozenlist @ file:///home/conda/feedstock_root/build_artifacts/frozenlist_1737645236190/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
h5py @ file:///home/conda/feedstock_root/build_artifacts/h5py_1739952287730/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1731707562/work
httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
hypothesis @ file:///home/conda/feedstock_root/build_artifacts/hypothesis_1742900877539/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imageio @ file:///home/conda/feedstock_root/build_artifacts/imageio_1738273805233/work
imageio-ffmpeg @ file:///home/conda/feedstock_root/build_artifacts/imageio-ffmpeg_1737102630951/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1701831663892/work
isoduration @ file:///home/conda/feedstock_root/build_artifacts/isoduration_1733493628631/work/dist
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
json5 @ file:///home/conda/feedstock_root/build_artifacts/json5_1733272076743/work
jsonpointer @ file:///home/conda/feedstock_root/build_artifacts/jsonpointer_1725302957584/work
jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
jsonschema-specifications @ file:///tmp/tmpk0f344m9/src
jupyter-events @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyter_events_1738765986/work
jupyter-lsp @ file:///home/conda/feedstock_root/build_artifacts/jupyter-lsp-meta_1733492907176/work/jupyter-lsp
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
jupyter_server @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_1734702637701/work
jupyter_server_terminals @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_terminals_1733427956852/work
jupyterlab @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_1741964057182/work
jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
jupyterlab_server @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_server_1733599573484/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1725459266648/work
loguru @ file:///home/conda/feedstock_root/build_artifacts/loguru_1725349754278/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.9.4
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
meshio @ file:///home/conda/feedstock_root/build_artifacts/meshio_1706720595231/work
mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
more-itertools @ file:///home/conda/feedstock_root/build_artifacts/more-itertools_1736883817510/work
msgpack @ file:///home/conda/feedstock_root/build_artifacts/msgpack-python_1725975012026/work
multidict @ file:///home/conda/feedstock_root/build_artifacts/multidict_1742308123521/work
munkres==1.1.4
nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253338730/work
notebook_shim @ file:///home/conda/feedstock_root/build_artifacts/notebook-shim_1733408315203/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1707225342954/work/dist/numpy-1.26.4-cp39-cp39-linux_x86_64.whl#sha256=c799942b5898f6e6c60264d1663a6469a475290e758c654aeeb78e2596463abd
overrides @ file:///home/conda/feedstock_root/build_artifacts/overrides_1734587627321/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929703139/work
pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pooch @ file:///home/conda/feedstock_root/build_artifacts/pooch_1733421311631/work
prometheus_client @ file:///home/conda/feedstock_root/build_artifacts/prometheus_client_1733327310477/work
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1737453357274/work
propcache @ file:///home/conda/feedstock_root/build_artifacts/propcache_1737635528546/work
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663125313/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pyembree @ file:///home/conda/feedstock_root/build_artifacts/pyembree_1718702851992/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
PySide6==6.8.3
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
pytest-memprof==0.2.0
pytest-xdist @ file:///home/conda/feedstock_root/build_artifacts/pytest-xdist_1733240780199/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
python-json-logger @ file:///home/conda/feedstock_root/build_artifacts/python-json-logger_1677079630776/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1742920838005/work
-e git+https://github.com/pyvista/pyvista.git@3a6efbbc2bd6c0b3b882e1af153a86f9ed41855a#egg=pyvista
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1741805177758/work
referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
rfc3339_validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3339-validator_1733599910982/work
rfc3986-validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3986-validator_1598024191506/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037693/work
rtree @ file:///home/conda/feedstock_root/build_artifacts/rtree_1741378561624/work
scooby @ file:///home/conda/feedstock_root/build_artifacts/scooby_1734299019922/work
Send2Trash @ file:///home/conda/feedstock_root/build_artifacts/send2trash_1733322040660/work
shiboken6==6.8.3
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
sortedcontainers @ file:///home/conda/feedstock_root/build_artifacts/sortedcontainers_1738440353519/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
terminado @ file:///home/conda/feedstock_root/build_artifacts/terminado_1710262609923/work
tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615921868/work
tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
trame @ file:///home/conda/feedstock_root/build_artifacts/trame_1741413642518/work
trame-client @ file:///home/conda/feedstock_root/build_artifacts/trame-client_1742874331306/work
trame-server @ file:///home/conda/feedstock_root/build_artifacts/trame-server_1741638011360/work
trame-vtk @ file:///home/conda/feedstock_root/build_artifacts/trame-vtk_1739145199105/work
trame-vuetify @ file:///home/conda/feedstock_root/build_artifacts/trame-vuetify_1743263303319/work
trimesh @ file:///home/conda/feedstock_root/build_artifacts/trimesh_1743289679380/work
types-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/types-python-dateutil_1733612335562/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
typing_utils @ file:///home/conda/feedstock_root/build_artifacts/typing_utils_1733331286120/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692503055/work
uri-template @ file:///home/conda/feedstock_root/build_artifacts/uri-template_1733323593477/work/dist
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
vtk==9.3.1
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
webcolors @ file:///home/conda/feedstock_root/build_artifacts/webcolors_1733359735138/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
wslink @ file:///home/conda/feedstock_root/build_artifacts/wslink_1742768409749/work
yarl @ file:///home/conda/feedstock_root/build_artifacts/yarl_1737575777699/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: pyvista
channels:
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- aiohappyeyeballs=2.6.1=pyhd8ed1ab_0
- aiohttp=3.11.14=py39h9399b63_0
- aiosignal=1.3.2=pyhd8ed1ab_0
- alsa-lib=1.2.13=hb9d3cd8_0
- anyio=4.9.0=pyh29332c3_0
- aom=3.9.1=hac33072_0
- argon2-cffi=23.1.0=pyhd8ed1ab_1
- argon2-cffi-bindings=21.2.0=py39h8cd3c5a_5
- arrow=1.3.0=pyhd8ed1ab_1
- asttokens=3.0.0=pyhd8ed1ab_1
- async-lru=2.0.5=pyh29332c3_0
- async-timeout=5.0.1=pyhd8ed1ab_1
- attr=2.5.1=h166bdaf_1
- attrs=25.3.0=pyh71513ae_0
- babel=2.17.0=pyhd8ed1ab_0
- beautifulsoup4=4.13.3=pyha770c72_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=he440d0b_1
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py39hf88036b_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cairo=1.18.4=h3394656_0
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.17.1=py39h15c3d72_0
- cftime=1.6.4=py39hf3d9206_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- cmocean=4.0.3=pyhd8ed1ab_1
- codecov=2.1.13=pyhd8ed1ab_1
- colorama=0.4.6=pyhd8ed1ab_1
- colorcet=3.1.0=pyhd8ed1ab_1
- colorspacious=1.1.2=pyhecae5ae_1
- comm=0.2.2=pyhd8ed1ab_1
- contourpy=1.3.0=py39h74842e3_2
- coverage=7.8.0=py39h9399b63_0
- cycler=0.12.1=pyhd8ed1ab_1
- cyrus-sasl=2.1.27=h54b06d7_7
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- debugpy=1.8.13=py39hf88036b_0
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- double-conversion=3.3.1=h5888daf_0
- embree=2.17.7=ha770c72_3
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- execnet=2.1.1=pyhd8ed1ab_1
- executing=2.1.0=pyhd8ed1ab_1
- expat=2.6.4=h5888daf_0
- ffmpeg=7.1.1=gpl_h0b79d52_704
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py39h9399b63_0
- fqdn=1.5.1=pyhd8ed1ab_1
- freetype=2.13.3=h48d6fc4_0
- fribidi=1.0.10=h36c2ea0_0
- frozenlist=1.5.0=py39h9399b63_1
- gdk-pixbuf=2.42.12=hb9ae30d_0
- gettext=0.23.1=h5888daf_0
- gettext-tools=0.23.1=h5888daf_0
- gl2ps=1.4.2=hae5d5c5_1
- glew=2.1.0=h9c3ff4c_2
- gmp=6.3.0=hac33072_2
- graphite2=1.3.13=h59595ed_1003
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- h5py=3.13.0=nompi_py39h30a5a8d_100
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.3=nompi_h2d575fe_109
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.7=pyh29332c3_1
- httpx=0.28.1=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- hypothesis=6.130.4=pyha770c72_0
- icu=75.1=he02047a_0
- idna=3.10=pyhd8ed1ab_1
- imageio=2.37.0=pyhfb79c49_0
- imageio-ffmpeg=0.6.0=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib-resources=6.5.2=pyhd8ed1ab_0
- importlib_metadata=8.6.1=hd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- ipykernel=6.29.5=pyh3099207_0
- ipython=8.18.1=pyh707e725_3
- isoduration=20.11.0=pyhd8ed1ab_1
- jack=1.9.22=h7c63dc7_2
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- json5=0.10.0=pyhd8ed1ab_1
- jsoncpp=1.9.6=hf42df4d_1
- jsonpointer=3.0.0=py39hf3d152e_1
- jsonschema=4.23.0=pyhd8ed1ab_1
- jsonschema-specifications=2024.10.1=pyhd8ed1ab_1
- jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_1
- jupyter-lsp=2.2.5=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_core=5.7.2=pyh31011fe_1
- jupyter_events=0.12.0=pyh29332c3_0
- jupyter_server=2.15.0=pyhd8ed1ab_0
- jupyter_server_terminals=0.5.3=pyhd8ed1ab_1
- jupyterlab=4.3.6=pyhd8ed1ab_0
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- jupyterlab_server=2.27.3=pyhd8ed1ab_1
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.7=py39h74842e3_0
- krb5=1.21.3=h659f571_0
- lame=3.100=h166bdaf_1003
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- level-zero=1.21.6=h84d6215_0
- libabseil=20250127.1=cxx17_hbbce691_0
- libaec=1.1.3=h59595ed_0
- libasprintf=0.23.1=h8e693c7_0
- libasprintf-devel=0.23.1=h8e693c7_0
- libass=0.17.3=h52826cd_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcap=2.75=h39aace5_0
- libcblas=3.9.0=31_he106b2a_openblas
- libclang-cpp20.1=20.1.1=default_hb5137d0_0
- libclang13=20.1.1=default_h9c6a7e4_0
- libcups=2.3.3=h4637d8d_4
- libcurl=8.12.1=h332b0f4_0
- libdb=6.2.32=h9c3ff4c_0
- libdeflate=1.23=h4ddbbb0_0
- libdrm=2.4.124=hb9d3cd8_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libegl=1.7.0=ha4b6fd6_2
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libflac=1.4.3=h59595ed_0
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgcrypt-lib=1.11.0=hb9d3cd8_2
- libgettextpo=0.23.1=h5888daf_0
- libgettextpo-devel=0.23.1=h5888daf_0
- libgfortran=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgl=1.7.0=ha4b6fd6_2
- libglib=2.84.0=h2ff4ddf_0
- libglu=9.0.3=h03adeef_0
- libglvnd=1.7.0=ha4b6fd6_2
- libglx=1.7.0=ha4b6fd6_2
- libgomp=14.2.0=h767d61c_2
- libgpg-error=1.51=hbd13f7d_1
- libhwloc=2.11.2=default_h0d58e46_1001
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- liblapack=3.9.0=31_h7ac8fdf_openblas
- libllvm20=20.1.1=ha7bfdaf_0
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h00e09a9_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libntlm=1.8=hb9d3cd8_0
- libogg=1.3.5=h4ab18f5_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libopengl=1.7.0=ha4b6fd6_2
- libopenvino=2025.0.0=hdc3f47d_3
- libopenvino-auto-batch-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-auto-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-hetero-plugin=2025.0.0=h981d57b_3
- libopenvino-intel-cpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-gpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-npu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-ir-frontend=2025.0.0=h981d57b_3
- libopenvino-onnx-frontend=2025.0.0=h0e684df_3
- libopenvino-paddle-frontend=2025.0.0=h0e684df_3
- libopenvino-pytorch-frontend=2025.0.0=h5888daf_3
- libopenvino-tensorflow-frontend=2025.0.0=h684f15b_3
- libopenvino-tensorflow-lite-frontend=2025.0.0=h5888daf_3
- libopus=1.3.1=h7f98852_1
- libpciaccess=0.18=hd590300_0
- libpng=1.6.47=h943b412_0
- libpq=17.4=h27ae623_0
- libprotobuf=5.29.3=h501fc15_0
- librsvg=2.58.4=he92a37e_3
- libsndfile=1.2.2=hc60ed4a_1
- libsodium=1.0.20=h4ab18f5_0
- libspatialindex=2.1.0=he57a185_0
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libsystemd0=257.4=h4e0b6ca_1
- libtheora=1.1.1=h4ab18f5_1006
- libtiff=4.7.0=hd9ff511_3
- libudev1=257.4=hbe16f8c_1
- libunwind=1.6.2=h9c3ff4c_0
- liburing=2.9=h84d6215_0
- libusb=1.0.28=hb9d3cd8_0
- libuuid=2.38.1=h0b41bf4_0
- libva=2.22.0=h4f16b4b_2
- libvorbis=1.3.7=h9c3ff4c_0
- libvpx=1.14.1=hac33072_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- loguru=0.7.2=py39hf3d152e_2
- lz4-c=1.10.0=h5888daf_1
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py39h9399b63_1
- matplotlib=3.9.4=py39hf3d152e_0
- matplotlib-base=3.9.4=py39h16632d1_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- meshio=5.3.5=pyhd8ed1ab_0
- mistune=3.1.3=pyh29332c3_0
- more-itertools=10.6.0=pyhd8ed1ab_0
- mpg123=1.32.9=hc50e24c_0
- msgpack-python=1.1.0=py39h74842e3_0
- multidict=6.2.0=py39h9399b63_0
- munkres=1.1.4=pyh9f0ad1d_0
- mysql-common=9.0.1=h266115a_5
- mysql-libs=9.0.1=he0572af_5
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbformat=5.10.4=pyhd8ed1ab_1
- ncurses=6.5=h2d0b736_3
- nest-asyncio=1.6.0=pyhd8ed1ab_1
- netcdf4=1.7.2=nompi_py39h9d02bfe_101
- nlohmann_json=3.11.3=he02047a_1
- notebook-shim=0.2.4=pyhd8ed1ab_1
- numpy=1.26.4=py39h474f0d3_0
- ocl-icd=2.3.2=hb9d3cd8_2
- opencl-headers=2024.10.24=h5888daf_0
- openh264=2.6.0=hc22cd8d_0
- openjpeg=2.5.3=h5fbd93e_0
- openldap=2.6.9=he970967_0
- openssl=3.4.1=h7b32b05_0
- overrides=7.7.0=pyhd8ed1ab_1
- packaging=24.2=pyhd8ed1ab_2
- pandocfilters=1.5.0=pyhd8ed1ab_0
- pango=1.56.3=h9ac818e_1
- parso=0.8.4=pyhd8ed1ab_1
- pcre2=10.44=hba22ea6_2
- pexpect=4.9.0=pyhd8ed1ab_1
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.1.0=py39h15c0740_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pooch=1.8.2=pyhd8ed1ab_1
- proj=9.5.1=h0054346_0
- prometheus_client=0.21.1=pyhd8ed1ab_0
- prompt-toolkit=3.0.50=pyha770c72_0
- propcache=0.2.1=py39h9399b63_1
- psutil=7.0.0=py39h8cd3c5a_0
- pthread-stubs=0.4=hb9d3cd8_1002
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pugixml=1.15=h3f63f65_0
- pulseaudio-client=17.0=hac146a9_1
- pure_eval=0.2.3=pyhd8ed1ab_1
- pycparser=2.22=pyh29332c3_1
- pyembree=0.1.6=py39hddac248_4
- pygments=2.19.1=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyside6=6.8.3=py39h0383914_0
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- pytest-xdist=3.6.1=pyhd8ed1ab_1
- python=3.9.21=h9c0c6dc_1_cpython
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python_abi=3.9=5_cp39
- pytz=2025.2=pyhd8ed1ab_0
- pyyaml=6.0.2=py39h9399b63_2
- pyzmq=26.3.0=py39h4e4fb57_0
- qhull=2020.2=h434a139_5
- qt6-main=6.8.3=h6441bc3_1
- readline=8.2=h8c095d6_2
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.3=pyhd8ed1ab_1
- rfc3339-validator=0.1.4=pyhd8ed1ab_1
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rich=14.0.0=pyh29332c3_0
- rpds-py=0.24.0=py39h3506688_0
- rtree=1.4.0=pyh11ca60a_1
- scooby=0.10.0=pyhd8ed1ab_1
- sdl2=2.32.50=h9b8e6db_1
- sdl3=3.2.8=h3083f51_0
- send2trash=1.8.3=pyh0d859eb_1
- setuptools=75.8.2=pyhff2d567_0
- six=1.17.0=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- sniffio=1.3.1=pyhd8ed1ab_1
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- soupsieve=2.5=pyhd8ed1ab_1
- sqlite=3.49.1=h9eae976_2
- stack_data=0.6.3=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tbb=2022.0.0=hceb3a55_0
- terminado=0.18.1=pyh0d859eb_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tornado=6.4.2=py39h8cd3c5a_0
- tqdm=4.67.1=pyhd8ed1ab_1
- traitlets=5.14.3=pyhd8ed1ab_1
- trame=3.8.1=pyhd8ed1ab_0
- trame-client=3.6.1=pyhd8ed1ab_0
- trame-server=3.4.0=pyhd8ed1ab_0
- trame-vtk=2.8.15=pyhb419c8b_0
- trame-vuetify=2.9.0=pyhd8ed1ab_0
- trimesh=4.6.6=pyh7b2049a_0
- types-python-dateutil=2.9.0.20241206=pyhd8ed1ab_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- typing_utils=0.1.0=pyhd8ed1ab_1
- tzdata=2025b=h78e105d_0
- unicodedata2=16.0.0=py39h8cd3c5a_0
- uri-template=1.3.0=pyhd8ed1ab_1
- urllib3=2.3.0=pyhd8ed1ab_0
- utfcpp=4.0.6=h005c6e1_0
- vtk=9.3.1=qt_py39h71fb23e_216
- vtk-base=9.3.1=qt_py39habd23de_216
- vtk-io-ffmpeg=9.3.1=qt_py39h71fb23e_216
- wayland=1.23.1=h3e06ad9_0
- wayland-protocols=1.42=hd8ed1ab_0
- wcwidth=0.2.13=pyhd8ed1ab_1
- webcolors=24.11.1=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- wslink=2.3.3=pyhd8ed1ab_0
- x264=1!164.3095=h166bdaf_2
- x265=3.5=h924138e_3
- xcb-util=0.4.1=hb711507_2
- xcb-util-cursor=0.1.5=hb9d3cd8_0
- xcb-util-image=0.4.0=hb711507_2
- xcb-util-keysyms=0.4.1=hb711507_0
- xcb-util-renderutil=0.3.10=hb711507_0
- xcb-util-wm=0.4.2=hb711507_0
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxscrnsaver=1.2.4=hb9d3cd8_0
- xorg-libxt=1.3.1=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xorg-libxxf86vm=1.1.6=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- yarl=1.18.3=py39h9399b63_1
- zeromq=4.3.5=h3b0a872_7
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py39h8cd3c5a_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- pytest-memprof==0.2.0
- pyvista==0.45.dev0
prefix: /opt/conda/envs/pyvista
| [
"tests/core/test_dataset_filters.py::test_merge_active_normals",
"tests/core/test_datasetattributes.py::test_active_normals_name"
] | [] | [
"tests/core/test_dataset_filters.py::test_datasetfilters_init",
"tests/core/test_dataset_filters.py::test_clip_filter",
"tests/core/test_dataset_filters.py::test_clip_by_scalars_filter[False-False]",
"tests/core/test_dataset_filters.py::test_clip_by_scalars_filter[False-True]",
"tests/core/test_dataset_filters.py::test_clip_by_scalars_filter[True-False]",
"tests/core/test_dataset_filters.py::test_clip_by_scalars_filter[True-True]",
"tests/core/test_dataset_filters.py::test_clip_filter_no_active",
"tests/core/test_dataset_filters.py::test_clip_filter_scalar_multiple",
"tests/core/test_dataset_filters.py::test_clip_filter_composite",
"tests/core/test_dataset_filters.py::test_clip_box",
"tests/core/test_dataset_filters.py::test_clip_box_composite",
"tests/core/test_dataset_filters.py::test_clip_surface",
"tests/core/test_dataset_filters.py::test_clip_closed_surface",
"tests/core/test_dataset_filters.py::test_implicit_distance",
"tests/core/test_dataset_filters.py::test_slice_filter",
"tests/core/test_dataset_filters.py::test_slice_filter_composite",
"tests/core/test_dataset_filters.py::test_slice_orthogonal_filter",
"tests/core/test_dataset_filters.py::test_slice_orthogonal_filter_composite",
"tests/core/test_dataset_filters.py::test_slice_along_axis",
"tests/core/test_dataset_filters.py::test_slice_along_axis_composite",
"tests/core/test_dataset_filters.py::test_threshold",
"tests/core/test_dataset_filters.py::test_threshold_all_scalars",
"tests/core/test_dataset_filters.py::test_threshold_multicomponent",
"tests/core/test_dataset_filters.py::test_threshold_percent",
"tests/core/test_dataset_filters.py::test_threshold_paraview_consistency",
"tests/core/test_dataset_filters.py::test_outline",
"tests/core/test_dataset_filters.py::test_outline_composite",
"tests/core/test_dataset_filters.py::test_outline_corners",
"tests/core/test_dataset_filters.py::test_outline_corners_composite",
"tests/core/test_dataset_filters.py::test_extract_geometry",
"tests/core/test_dataset_filters.py::test_extract_geometry_extent",
"tests/core/test_dataset_filters.py::test_extract_all_edges",
"tests/core/test_dataset_filters.py::test_extract_all_edges_no_data",
"tests/core/test_dataset_filters.py::test_wireframe_composite",
"tests/core/test_dataset_filters.py::test_delaunay_2d_unstructured",
"tests/core/test_dataset_filters.py::test_contour[contour]",
"tests/core/test_dataset_filters.py::test_contour[marching_cubes]",
"tests/core/test_dataset_filters.py::test_contour[flying_edges]",
"tests/core/test_dataset_filters.py::test_contour_errors",
"tests/core/test_dataset_filters.py::test_elevation",
"tests/core/test_dataset_filters.py::test_elevation_composite",
"tests/core/test_dataset_filters.py::test_texture_map_to_plane",
"tests/core/test_dataset_filters.py::test_texture_map_to_sphere",
"tests/core/test_dataset_filters.py::test_compute_cell_sizes",
"tests/core/test_dataset_filters.py::test_compute_cell_sizes_composite",
"tests/core/test_dataset_filters.py::test_cell_centers",
"tests/core/test_dataset_filters.py::test_cell_centers_composite",
"tests/core/test_dataset_filters.py::test_glyph",
"tests/core/test_dataset_filters.py::test_glyph_cell_point_data",
"tests/core/test_dataset_filters.py::test_glyph_settings",
"tests/core/test_dataset_filters.py::test_glyph_orient_and_scale",
"tests/core/test_dataset_filters.py::test_glyph_color_mode[scale]",
"tests/core/test_dataset_filters.py::test_glyph_color_mode[scalar]",
"tests/core/test_dataset_filters.py::test_glyph_color_mode[vector]",
"tests/core/test_dataset_filters.py::test_glyph_raises",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-True-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[True-False-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-True-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_inplace_and_output_type[False-False-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_label_regions[closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_raises",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[cell-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-all-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-all-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-all-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-all-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-all-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-largest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-largest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-largest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-largest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-largest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-specified-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-specified-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-specified-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-specified-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-specified-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-cell_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-cell_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-cell_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-cell_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-cell_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-point_seed-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-point_seed-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-point_seed-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-point_seed-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-point_seed-4]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-closest-0]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-closest-1]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-closest-2]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-closest-3]",
"tests/core/test_dataset_filters.py::test_connectivity_scalar_range[point-closest-4]",
"tests/core/test_dataset_filters.py::test_connectivity_all",
"tests/core/test_dataset_filters.py::test_connectivity_largest",
"tests/core/test_dataset_filters.py::test_connectivity_specified",
"tests/core/test_dataset_filters.py::test_connectivity_specified_returns_empty[0]",
"tests/core/test_dataset_filters.py::test_connectivity_specified_returns_empty[1]",
"tests/core/test_dataset_filters.py::test_connectivity_specified_returns_empty[2]",
"tests/core/test_dataset_filters.py::test_connectivity_specified_returns_empty[3]",
"tests/core/test_dataset_filters.py::test_connectivity_specified_returns_empty[4]",
"tests/core/test_dataset_filters.py::test_connectivity_point_seed",
"tests/core/test_dataset_filters.py::test_connectivity_cell_seed",
"tests/core/test_dataset_filters.py::test_connectivity_closest_point",
"tests/core/test_dataset_filters.py::test_split_bodies",
"tests/core/test_dataset_filters.py::test_warp_by_scalar",
"tests/core/test_dataset_filters.py::test_warp_by_vector",
"tests/core/test_dataset_filters.py::test_invalid_warp_scalar",
"tests/core/test_dataset_filters.py::test_invalid_warp_scalar_inplace",
"tests/core/test_dataset_filters.py::test_invalid_warp_vector",
"tests/core/test_dataset_filters.py::test_cell_data_to_point_data",
"tests/core/test_dataset_filters.py::test_cell_data_to_point_data_composite",
"tests/core/test_dataset_filters.py::test_point_data_to_cell_data",
"tests/core/test_dataset_filters.py::test_point_data_to_cell_data_composite",
"tests/core/test_dataset_filters.py::test_triangulate",
"tests/core/test_dataset_filters.py::test_triangulate_composite",
"tests/core/test_dataset_filters.py::test_delaunay_3d",
"tests/core/test_dataset_filters.py::test_smooth",
"tests/core/test_dataset_filters.py::test_smooth_taubin",
"tests/core/test_dataset_filters.py::test_sample",
"tests/core/test_dataset_filters.py::test_sample_composite",
"tests/core/test_dataset_filters.py::test_streamlines_dir[forward]",
"tests/core/test_dataset_filters.py::test_streamlines_dir[backward]",
"tests/core/test_dataset_filters.py::test_streamlines_dir[both]",
"tests/core/test_dataset_filters.py::test_streamlines_type[2]",
"tests/core/test_dataset_filters.py::test_streamlines_type[4]",
"tests/core/test_dataset_filters.py::test_streamlines_type[45]",
"tests/core/test_dataset_filters.py::test_streamlines_cell_point[point]",
"tests/core/test_dataset_filters.py::test_streamlines_cell_point[cell]",
"tests/core/test_dataset_filters.py::test_streamlines_return_source",
"tests/core/test_dataset_filters.py::test_streamlines_start_position",
"tests/core/test_dataset_filters.py::test_streamlines_errors",
"tests/core/test_dataset_filters.py::test_streamlines_from_source",
"tests/core/test_dataset_filters.py::test_streamlines_from_source_structured_grids",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_sep_dist_ratio",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_start_position",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_vectors",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_integrator_type",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_interpolator_type",
"tests/core/test_dataset_filters.py::test_streamlines_evenly_spaced_2D_errors",
"tests/core/test_dataset_filters.py::test_sample_over_line",
"tests/core/test_dataset_filters.py::test_plot_over_line",
"tests/core/test_dataset_filters.py::test_sample_over_multiple_lines",
"tests/core/test_dataset_filters.py::test_sample_over_circular_arc",
"tests/core/test_dataset_filters.py::test_sample_over_circular_arc_normal",
"tests/core/test_dataset_filters.py::test_plot_over_circular_arc",
"tests/core/test_dataset_filters.py::test_plot_over_circular_arc_normal",
"tests/core/test_dataset_filters.py::test_slice_along_line",
"tests/core/test_dataset_filters.py::test_extract_points_adjacent_cells_True[extract_points]",
"tests/core/test_dataset_filters.py::test_extract_points_adjacent_cells_True[extract_values]",
"tests/core/test_dataset_filters.py::test_extract_points_adjacent_cells_False[extract_points]",
"tests/core/test_dataset_filters.py::test_extract_points_adjacent_cells_False[extract_values]",
"tests/core/test_dataset_filters.py::test_extract_points_include_cells_False[extract_points]",
"tests/core/test_dataset_filters.py::test_extract_points_include_cells_False[extract_values]",
"tests/core/test_dataset_filters.py::test_extract_points_default",
"tests/core/test_dataset_filters.py::test_extract_values_preference[True-point]",
"tests/core/test_dataset_filters.py::test_extract_values_preference[True-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_preference[False-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values0-True-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values0-True-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values0-False-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values0-False-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values1-True-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values1-True-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values1-False-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values1-False-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values2-True-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values2-True-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values2-False-point]",
"tests/core/test_dataset_filters.py::test_extract_values_input_values_and_invert[values2-False-cell]",
"tests/core/test_dataset_filters.py::test_extract_values_open_intervals",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-None-0-2-expected_volume0]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-None-any-2-expected_volume1]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-None-all-2-expected_volume2]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-0-0-2-expected_volume3]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-0-1-2-expected_volume4]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-0-any-2-expected_volume5]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0-0-all-2-expected_volume6]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0--0.5-0-2-expected_volume7]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0--0.5-1-2-expected_volume8]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0--0.5-any-4-expected_volume9]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[split_values-kwargs0--0.5-all-4-expected_volume10]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-None-0-2-expected_volume0]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-None-any-2-expected_volume1]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-None-all-2-expected_volume2]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-0-0-2-expected_volume3]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-0-1-2-expected_volume4]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-0-any-2-expected_volume5]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1-0-all-2-expected_volume6]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1--0.5-0-2-expected_volume7]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1--0.5-1-2-expected_volume8]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1--0.5-any-4-expected_volume9]",
"tests/core/test_dataset_filters.py::test_split_values_extract_values_component[extract_values-kwargs1--0.5-all-4-expected_volume10]",
"tests/core/test_dataset_filters.py::test_extract_values_split_ranges_values",
"tests/core/test_dataset_filters.py::test_extract_values_dict_input[dict_inputs0-block_names0]",
"tests/core/test_dataset_filters.py::test_extract_values_dict_input[dict_inputs1-block_names1]",
"tests/core/test_dataset_filters.py::test_extract_values_dict_input[dict_inputs2-block_names2]",
"tests/core/test_dataset_filters.py::test_extract_values_dict_input[dict_inputs3-block_names3]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-0-expected0-expected_invert0-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-1-expected1-expected_invert1-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-2-expected2-expected_invert2-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-any-expected3-expected_invert3-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-any-expected4-expected_invert4-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[0-all-expected5-expected_invert5-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-True-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-False-True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-False-False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[1-all-expected6-expected_invert6-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values7-multi-expected7-expected_invert7-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values7-multi-expected7-expected_invert7-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values7-multi-expected7-expected_invert7-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values8-multi-expected8-expected_invert8-True-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values8-multi-expected8-expected_invert8-False-True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_mode[values8-multi-expected8-expected_invert8-False-False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_component_values_split_unique[split_values-kwargs0]",
"tests/core/test_dataset_filters.py::test_extract_values_component_values_split_unique[extract_values-kwargs1]",
"tests/core/test_dataset_filters.py::test_extract_values_pass_ids[True-True]",
"tests/core/test_dataset_filters.py::test_extract_values_pass_ids[True-False]",
"tests/core/test_dataset_filters.py::test_extract_values_pass_ids[False-True]",
"tests/core/test_dataset_filters.py::test_extract_values_pass_ids[False-False]",
"tests/core/test_dataset_filters.py::test_extract_values_empty",
"tests/core/test_dataset_filters.py::test_extract_values_raises",
"tests/core/test_dataset_filters.py::test_slice_along_line_composite",
"tests/core/test_dataset_filters.py::test_interpolate",
"tests/core/test_dataset_filters.py::test_select_enclosed_points",
"tests/core/test_dataset_filters.py::test_decimate_boundary",
"tests/core/test_dataset_filters.py::test_extract_surface",
"tests/core/test_dataset_filters.py::test_merge_general",
"tests/core/test_dataset_filters.py::test_iadd_general",
"tests/core/test_dataset_filters.py::test_compute_cell_quality",
"tests/core/test_dataset_filters.py::test_compute_boundary_mesh_quality",
"tests/core/test_dataset_filters.py::test_compute_derivatives",
"tests/core/test_dataset_filters.py::test_extract_subset",
"tests/core/test_dataset_filters.py::test_gaussian_smooth_output_type",
"tests/core/test_dataset_filters.py::test_gaussian_smooth_constant_data",
"tests/core/test_dataset_filters.py::test_gaussian_smooth_outlier",
"tests/core/test_dataset_filters.py::test_gaussian_smooth_cell_data_specified",
"tests/core/test_dataset_filters.py::test_gaussian_smooth_cell_data_active",
"tests/core/test_dataset_filters.py::test_median_smooth_output_type",
"tests/core/test_dataset_filters.py::test_median_smooth_constant_data",
"tests/core/test_dataset_filters.py::test_median_smooth_outlier",
"tests/core/test_dataset_filters.py::test_image_dilate_erode_output_type",
"tests/core/test_dataset_filters.py::test_image_dilate_erode_dilation",
"tests/core/test_dataset_filters.py::test_image_dilate_erode_erosion",
"tests/core/test_dataset_filters.py::test_image_dilate_erode_cell_data_specified",
"tests/core/test_dataset_filters.py::test_image_dilate_erode_cell_data_active",
"tests/core/test_dataset_filters.py::test_image_threshold_output_type",
"tests/core/test_dataset_filters.py::test_image_threshold_raises",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[float-float]",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[float-int]",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[int-float]",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[int-int]",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[uint8-float]",
"tests/core/test_dataset_filters.py::test_image_threshold_dtype[uint8-int]",
"tests/core/test_dataset_filters.py::test_image_threshold_wrong_threshold_length",
"tests/core/test_dataset_filters.py::test_image_threshold_wrong_threshold_type",
"tests/core/test_dataset_filters.py::test_image_threshold_upper[0-1]",
"tests/core/test_dataset_filters.py::test_image_threshold_upper[0-None]",
"tests/core/test_dataset_filters.py::test_image_threshold_upper[None-1]",
"tests/core/test_dataset_filters.py::test_image_threshold_upper[None-None]",
"tests/core/test_dataset_filters.py::test_image_threshold_between[0-1]",
"tests/core/test_dataset_filters.py::test_image_threshold_between[0-None]",
"tests/core/test_dataset_filters.py::test_image_threshold_between[None-1]",
"tests/core/test_dataset_filters.py::test_image_threshold_between[None-None]",
"tests/core/test_dataset_filters.py::test_extract_subset_structured",
"tests/core/test_dataset_filters.py::test_concatenate_structured",
"tests/core/test_dataset_filters.py::test_concatenate_structured_bad_dimensions",
"tests/core/test_dataset_filters.py::test_concatenate_structured_bad_inputs",
"tests/core/test_dataset_filters.py::test_concatenate_structured_bad_point_data",
"tests/core/test_dataset_filters.py::test_concatenate_structured_disconnected",
"tests/core/test_dataset_filters.py::test_concatenate_structured_different_arrays",
"tests/core/test_dataset_filters.py::test_structured_add_non_grid",
"tests/core/test_dataset_filters.py::test_poly_data_strip",
"tests/core/test_dataset_filters.py::test_shrink",
"tests/core/test_dataset_filters.py::test_tessellate",
"tests/core/test_dataset_filters.py::test_transform_mesh[0-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh[0-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh[0-2]",
"tests/core/test_dataset_filters.py::test_transform_mesh[1-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh[1-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh[1-2]",
"tests/core/test_dataset_filters.py::test_transform_mesh[2-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh[2-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh[2-2]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[0-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[0-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[0-2]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[1-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[1-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[1-2]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[2-0]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[2-1]",
"tests/core/test_dataset_filters.py::test_transform_mesh_and_vectors[2-2]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[0-0]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[0-1]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[0-2]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[1-0]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[1-1]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[1-2]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[2-0]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[2-1]",
"tests/core/test_dataset_filters.py::test_transform_int_vectors_warning[2-2]",
"tests/core/test_dataset_filters.py::test_transform_inplace_bad_types[dataset0]",
"tests/core/test_dataset_filters.py::test_transform_inplace_bad_types[dataset1]",
"tests/core/test_dataset_filters.py::test_reflect_mesh_about_point",
"tests/core/test_dataset_filters.py::test_reflect_mesh_with_vectors",
"tests/core/test_dataset_filters.py::test_reflect_inplace[dataset0]",
"tests/core/test_dataset_filters.py::test_reflect_inplace[dataset1]",
"tests/core/test_dataset_filters.py::test_reflect_inplace[dataset2]",
"tests/core/test_dataset_filters.py::test_transform_inplace_bad_types_2[dataset0]",
"tests/core/test_dataset_filters.py::test_transform_inplace_bad_types_2[dataset1]",
"tests/core/test_dataset_filters.py::test_extrude_rotate",
"tests/core/test_dataset_filters.py::test_extrude_rotate_inplace",
"tests/core/test_dataset_filters.py::test_extrude_trim",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[intersection-boundary_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[intersection-all_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[minimum_distance-boundary_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[minimum_distance-all_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[maximum_distance-boundary_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[maximum_distance-all_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[average_distance-boundary_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_strategy[average_distance-all_edges]",
"tests/core/test_dataset_filters.py::test_extrude_trim_catch",
"tests/core/test_dataset_filters.py::test_extrude_trim_inplace",
"tests/core/test_dataset_filters.py::test_subdivide_adaptive[True]",
"tests/core/test_dataset_filters.py::test_subdivide_adaptive[False]",
"tests/core/test_dataset_filters.py::test_invalid_subdivide_adaptive",
"tests/core/test_dataset_filters.py::test_collision",
"tests/core/test_dataset_filters.py::test_collision_solid_non_triangle",
"tests/core/test_dataset_filters.py::test_reconstruct_surface_poly",
"tests/core/test_dataset_filters.py::test_is_manifold",
"tests/core/test_dataset_filters.py::test_reconstruct_surface_unstructured",
"tests/core/test_dataset_filters.py::test_integrate_data_datasets",
"tests/core/test_dataset_filters.py::test_integrate_data",
"tests/core/test_dataset_filters.py::test_align",
"tests/core/test_dataset_filters.py::test_subdivide_tetra",
"tests/core/test_dataset_filters.py::test_extract_cells_by_type",
"tests/core/test_dataset_filters.py::test_merge_points",
"tests/core/test_dataset_filters.py::test_sort_labels",
"tests/core/test_dataset_filters.py::test_pack_labels",
"tests/core/test_dataset_filters.py::test_pack_labels_inplace",
"tests/core/test_dataset_filters.py::test_pack_labels_output_scalars",
"tests/core/test_dataset_filters.py::test_pack_labels_preference",
"tests/core/test_datasetattributes.py::test_init",
"tests/core/test_datasetattributes.py::test_bool",
"tests/core/test_datasetattributes.py::test_getitem",
"tests/core/test_datasetattributes.py::test_setitem",
"tests/core/test_datasetattributes.py::test_repr",
"tests/core/test_datasetattributes.py::test_repr_field_attributes_with_string",
"tests/core/test_datasetattributes.py::test_empty_active_vectors",
"tests/core/test_datasetattributes.py::test_valid_array_len_points",
"tests/core/test_datasetattributes.py::test_valid_array_len_cells",
"tests/core/test_datasetattributes.py::test_valid_array_len_field",
"tests/core/test_datasetattributes.py::test_get",
"tests/core/test_datasetattributes.py::test_active_scalars_name",
"tests/core/test_datasetattributes.py::test_set_scalars",
"tests/core/test_datasetattributes.py::test_eq",
"tests/core/test_datasetattributes.py::test_add_matrix",
"tests/core/test_datasetattributes.py::test_set_fails_with_wrong_shape",
"tests/core/test_datasetattributes.py::test_set_active_scalars_fail",
"tests/core/test_datasetattributes.py::test_set_active_vectors",
"tests/core/test_datasetattributes.py::test_set_vectors",
"tests/core/test_datasetattributes.py::test_set_invalid_vectors",
"tests/core/test_datasetattributes.py::test_set_texture_coordinates_name",
"tests/core/test_datasetattributes.py::test_set_bitarray",
"tests/core/test_datasetattributes.py::test_get_array_should_fail_if_does_not_exist[invalid_array_name]",
"tests/core/test_datasetattributes.py::test_get_array_should_fail_if_does_not_exist[-1]",
"tests/core/test_datasetattributes.py::test_get_array_should_return_bool_array",
"tests/core/test_datasetattributes.py::test_get_array_bool_array_should_be_identical",
"tests/core/test_datasetattributes.py::test_add_should_not_add_none_array",
"tests/core/test_datasetattributes.py::test_add_should_contain_array_name",
"tests/core/test_datasetattributes.py::test_add_should_contain_exact_array",
"tests/core/test_datasetattributes.py::test_getters_should_return_same_result",
"tests/core/test_datasetattributes.py::test_contains_should_contain_when_added",
"tests/core/test_datasetattributes.py::test_set_array_catch",
"tests/core/test_datasetattributes.py::test_set_array_should_accept_scalar_value",
"tests/core/test_datasetattributes.py::test_set_array_scalar_value_should_give_array",
"tests/core/test_datasetattributes.py::test_set_array_string_lists_should_equal",
"tests/core/test_datasetattributes.py::test_set_array_string_array_should_equal",
"tests/core/test_datasetattributes.py::test_hexbeam_field_attributes_active_scalars",
"tests/core/test_datasetattributes.py::test_should_remove_array",
"tests/core/test_datasetattributes.py::test_should_del_array",
"tests/core/test_datasetattributes.py::test_should_pop_array",
"tests/core/test_datasetattributes.py::test_pop_should_return_arange_narray",
"tests/core/test_datasetattributes.py::test_pop_should_return_bool_array",
"tests/core/test_datasetattributes.py::test_pop_should_return_string_array",
"tests/core/test_datasetattributes.py::test_should_pop_array_invalid",
"tests/core/test_datasetattributes.py::test_remove_should_fail_on_bad_argument[None]",
"tests/core/test_datasetattributes.py::test_remove_should_fail_on_bad_argument[nonexistent_array_name]",
"tests/core/test_datasetattributes.py::test_remove_should_fail_on_bad_argument[]",
"tests/core/test_datasetattributes.py::test_remove_should_fail_on_bad_argument[-1]",
"tests/core/test_datasetattributes.py::test_del_should_fail_bad_argument[None]",
"tests/core/test_datasetattributes.py::test_del_should_fail_bad_argument[nonexistent_array_name]",
"tests/core/test_datasetattributes.py::test_del_should_fail_bad_argument[]",
"tests/core/test_datasetattributes.py::test_del_should_fail_bad_argument[-1]",
"tests/core/test_datasetattributes.py::test_pop_should_fail_bad_argument[None]",
"tests/core/test_datasetattributes.py::test_pop_should_fail_bad_argument[nonexistent_array_name]",
"tests/core/test_datasetattributes.py::test_pop_should_fail_bad_argument[]",
"tests/core/test_datasetattributes.py::test_pop_should_fail_bad_argument[-1]",
"tests/core/test_datasetattributes.py::test_length_should_increment_on_set_array",
"tests/core/test_datasetattributes.py::test_length_should_decrement_on_remove",
"tests/core/test_datasetattributes.py::test_length_should_decrement_on_pop",
"tests/core/test_datasetattributes.py::test_length_should_be_0_on_clear",
"tests/core/test_datasetattributes.py::test_keys_should_be_strings",
"tests/core/test_datasetattributes.py::test_key_should_exist",
"tests/core/test_datasetattributes.py::test_values_should_be_pyvista_ndarrays",
"tests/core/test_datasetattributes.py::test_value_should_exist",
"tests/core/test_datasetattributes.py::test_active_scalars_setter",
"tests/core/test_datasetattributes.py::test_active_scalars_setter_no_override",
"tests/core/test_datasetattributes.py::test_preserve_field_data_after_extract_cells",
"tests/core/test_datasetattributes.py::test_assign_labels_to_points",
"tests/core/test_datasetattributes.py::test_normals_get",
"tests/core/test_datasetattributes.py::test_normals_set",
"tests/core/test_datasetattributes.py::test_normals_name",
"tests/core/test_datasetattributes.py::test_normals_raise_field",
"tests/core/test_datasetattributes.py::test_add_two_vectors",
"tests/core/test_datasetattributes.py::test_active_vectors_name_setter",
"tests/core/test_datasetattributes.py::test_active_vectors_eq",
"tests/core/test_datasetattributes.py::test_active_texture_coordinates_name",
"tests/core/test_datasetattributes.py::test_complex_raises",
"tests/core/test_datasetattributes.py::test_complex[complex64]",
"tests/core/test_datasetattributes.py::test_complex[complex128]",
"tests/core/test_datasetattributes.py::test_active_t_coords_deprecated",
"tests/core/test_datasetattributes.py::test_active_t_coords_name_deprecated"
] | [] | MIT License | 18,943 | 1,443 | [
"pyvista/core/datasetattributes.py",
"pyvista/core/filters/poly_data.py"
] |
swansonk14__typed-argument-parser-145 | b569790081a9a85039a75bf810a08df8b5969fc3 | 2024-07-11 20:57:44 | 1a3af2a9e48fa2f0b2af25359f658cd55dd65a6b | diff --git a/src/tap/tap.py b/src/tap/tap.py
index 09a5134..17884b6 100644
--- a/src/tap/tap.py
+++ b/src/tap/tap.py
@@ -370,7 +370,7 @@ class Tap(ArgumentParser):
pass
@staticmethod
- def get_reproducibility_info(repo_path: Optional[PathLike] = None) -> Dict[str, str]:
+ def get_reproducibility_info(repo_path: Optional[PathLike] = None) -> Dict[str, Optional[str]]:
"""Gets a dictionary of reproducibility information.
Reproducibility information always includes:
@@ -380,7 +380,8 @@ class Tap(ArgumentParser):
If git is installed, reproducibility information also includes:
- git_root: The root of the git repo where the command is run.
- git_url: The url of the current hash of the git repo where the command is run.
- Ex. https://github.com/swansonk14/rationale-alignment/tree/<hash>
+ Ex. https://github.com/swansonk14/rationale-alignment/tree/<hash>.
+ If it is a local repo, the url is None.
- git_has_uncommitted_changes: Whether the current git repo has uncommitted changes.
:param repo_path: Path to the git repo to examine for reproducibility info.
diff --git a/src/tap/utils.py b/src/tap/utils.py
index 37e6e16..b1feeeb 100644
--- a/src/tap/utils.py
+++ b/src/tap/utils.py
@@ -70,7 +70,20 @@ class GitInfo:
"""
return check_output(["git", "rev-parse", "--show-toplevel"], cwd=self.repo_path)
- def get_git_url(self, commit_hash: bool = True) -> str:
+ def get_git_version(self) -> tuple:
+ """Gets the version of git.
+
+ :return: The version of git, as a tuple of strings.
+
+ Example:
+ >>> get_git_version()
+ (2, 17, 1) # for git version 2.17.1
+ """
+ raw = check_output(["git", "--version"])
+ number_start_index = next(i for i, c in enumerate(raw) if c.isdigit())
+ return tuple(int(num) for num in raw[number_start_index:].split("."))
+
+ def get_git_url(self, commit_hash: bool = True) -> Optional[str]:
"""Gets the https url of the git repo where the command is run.
:param commit_hash: If True, the url links to the latest local git commit hash.
@@ -78,11 +91,19 @@ class GitInfo:
:return: The https url of the current git repo.
"""
# Get git url (either https or ssh)
+ input_remote = (
+ ["git", "remote", "get-url", "origin"]
+ if self.get_git_version() >= (2, 0)
+ else ["git", "config", "--get", "remote.origin.url"]
+ )
try:
- url = check_output(["git", "remote", "get-url", "origin"], cwd=self.repo_path)
- except subprocess.CalledProcessError:
- # For git versions <2.0
- url = check_output(["git", "config", "--get", "remote.origin.url"], cwd=self.repo_path)
+ url = check_output(input_remote, cwd=self.repo_path)
+ except subprocess.CalledProcessError as e:
+ if e.returncode == 2:
+ # https://git-scm.com/docs/git-remote#_exit_status
+ # 2: The remote does not exist.
+ return None
+ raise e
# Remove .git at end
url = url[: -len(".git")]
| Reproducibility info fails for git repo with no remote
If I have a git repository that does not have a remote, `get_reproducibility_info` will fail with a `CalledProcessError` exception. It appears that if a git repo is present, Tap assumes it has an `origin` remote, yet this need not be the case. I often use local git repos for initial development before pushing them somewhere.
Reproducer:
```
$ mkdir test
$ cd test
# Write test.py appropriately...
$ cat test.py
from tap import Tap

class ArgumentParser(Tap):
    foo: bool = False

if __name__ == '__main__':
    args = ArgumentParser().parse_args()
    print(args.get_reproducibility_info())
# Run with no git repo and it works fine:
$ python test.py
{'command_line': 'python test.py', 'time': 'Tue Feb 28 13:03:00 2023'}
# Set up a git repo to break it:
$ git init
$ python test.py
Traceback (most recent call last):
File "/home/ndryden/envs/test/lib/python3.9/site-packages/tap/utils.py", line 82, in get_git_url
url = check_output(['git', 'remote', 'get-url', 'origin'], cwd=self.repo_path)
File "/home/ndryden/envs/test/lib/python3.9/site-packages/tap/utils.py", line 45, in check_output
output = subprocess.check_output(command, stderr=devnull, **kwargs).decode('utf-8').strip()
File "/home/ndryden/envs/test/lib/python3.9/subprocess.py", line 424, in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
File "/home/ndryden/envs/test/lib/python3.9/subprocess.py", line 528, in run
raise CalledProcessError(retcode, process.args,
subprocess.CalledProcessError: Command '['git', 'remote', 'get-url', 'origin']' returned non-zero exit status 2.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ndryden/test/test.py", line 8, in <module>
print(args.get_reproducibility_info())
File "/home/ndryden/envs/test/lib/python3.9/site-packages/tap/tap.py", line 392, in get_reproducibility_info
reproducibility['git_url'] = git_info.get_git_url(commit_hash=True)
File "/home/ndryden/envs/test/lib/python3.9/site-packages/tap/utils.py", line 85, in get_git_url
url = check_output(['git', 'config', '--get', 'remote.origin.url'], cwd=self.repo_path)
File "/home/ndryden/envs/test/lib/python3.9/site-packages/tap/utils.py", line 45, in check_output
output = subprocess.check_output(command, stderr=devnull, **kwargs).decode('utf-8').strip()
File "/home/ndryden/envs/test/lib/python3.9/subprocess.py", line 424, in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
File "/home/ndryden/envs/test/lib/python3.9/subprocess.py", line 528, in run
raise CalledProcessError(retcode, process.args,
subprocess.CalledProcessError: Command '['git', 'config', '--get', 'remote.origin.url']' returned non-zero exit status 1.
```
This is because the remote `origin` does not exist, and `git remote get-url origin` returns status 2 in this case:
```
$ git remote get-url origin
error: No such remote 'origin'
```
(Likewise, `git config --get remote.origin.url` returns status 1.)
You might be able to solve this by checking whether `origin` exists with `git remote -v` before executing this.
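For example, something along these lines (just a rough sketch using plain `subprocess`; `has_origin_remote` is an illustrative name, not an existing Tap helper):
```
import subprocess

def has_origin_remote(repo_path=None) -> bool:
    # `git remote` lists configured remote names (one per line) and exits 0 even when there are none.
    remotes = subprocess.check_output(["git", "remote"], cwd=repo_path).decode("utf-8").split()
    return "origin" in remotes

# Only ask for the origin URL when the remote actually exists; otherwise report e.g. None.
url = None
if has_origin_remote("."):
    url = subprocess.check_output(["git", "remote", "get-url", "origin"], cwd=".").decode("utf-8").strip()
```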
Incidentally, adding a fake origin remote reveals another bug, where `git rev-parse HEAD` fails when there are no commits, leading to a similar exception. Not sure if this deserves a separate issue (and seems like even more of an edge-case). | swansonk14/typed-argument-parser | diff --git a/tests/test_utils.py b/tests/test_utils.py
index a3fabcf..01528c8 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -108,6 +108,17 @@ class GitTests(TestCase):
url = f"{true_url}/tree/"
self.assertEqual(self.git_info.get_git_url(commit_hash=True)[: len(url)], url)
+ def test_get_git_url_no_remote(self) -> None:
+ subprocess.run(["git", "remote", "remove", "origin"])
+ self.assertIsNone(self.git_info.get_git_url())
+
+ def test_get_git_version(self) -> None:
+ git_version = self.git_info.get_git_version()
+ self.assertEqual(len(git_version), 3)
+ self.assertIsInstance(git_version, tuple)
+ for v in git_version:
+ self.assertIsInstance(v, int)
+
def test_has_uncommitted_changes_false(self) -> None:
self.assertFalse(self.git_info.has_uncommitted_changes())
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | annotated-types==0.7.0
coverage==7.8.0
docstring_parser==0.16
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
mypy-extensions==1.0.0
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.13.0
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/swansonk14/typed-argument-parser.git@b569790081a9a85039a75bf810a08df8b5969fc3#egg=typed_argument_parser
typing-inspect==0.9.0
typing-inspection==0.4.0
typing_extensions==4.13.0
| name: typed-argument-parser
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- annotated-types==0.7.0
- coverage==7.8.0
- docstring-parser==0.16
- flake8==7.2.0
- mccabe==0.7.0
- mypy-extensions==1.0.0
- pycodestyle==2.13.0
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pytest-cov==6.0.0
- typed-argument-parser==1.10.1
- typing-extensions==4.13.0
- typing-inspect==0.9.0
- typing-inspection==0.4.0
prefix: /opt/conda/envs/typed-argument-parser
| [
"tests/test_utils.py::GitTests::test_get_git_url_no_remote",
"tests/test_utils.py::GitTests::test_get_git_version"
] | [] | [
"tests/test_utils.py::GitTests::test_get_git_root",
"tests/test_utils.py::GitTests::test_get_git_root_subdir",
"tests/test_utils.py::GitTests::test_get_git_url_https",
"tests/test_utils.py::GitTests::test_get_git_url_https_enterprise",
"tests/test_utils.py::GitTests::test_get_git_url_https_hash",
"tests/test_utils.py::GitTests::test_get_git_url_https_hash_enterprise",
"tests/test_utils.py::GitTests::test_get_git_url_ssh",
"tests/test_utils.py::GitTests::test_get_git_url_ssh_enterprise",
"tests/test_utils.py::GitTests::test_get_git_url_ssh_hash",
"tests/test_utils.py::GitTests::test_get_git_url_ssh_hash_enterprise",
"tests/test_utils.py::GitTests::test_has_git_false",
"tests/test_utils.py::GitTests::test_has_git_true",
"tests/test_utils.py::GitTests::test_has_uncommited_changes_true",
"tests/test_utils.py::GitTests::test_has_uncommitted_changes_false",
"tests/test_utils.py::TypeToStrTests::test_type_to_str",
"tests/test_utils.py::ClassColumnTests::test_column_comment",
"tests/test_utils.py::ClassColumnTests::test_column_method",
"tests/test_utils.py::ClassColumnTests::test_column_simple",
"tests/test_utils.py::ClassColumnTests::test_column_space",
"tests/test_utils.py::ClassColumnTests::test_dataclass",
"tests/test_utils.py::ClassColumnTests::test_dataclass_method",
"tests/test_utils.py::ClassVariableTests::test_bad_spacing_multiline",
"tests/test_utils.py::ClassVariableTests::test_commented_variables",
"tests/test_utils.py::ClassVariableTests::test_dataclass",
"tests/test_utils.py::ClassVariableTests::test_functions_with_docs_multiline",
"tests/test_utils.py::ClassVariableTests::test_multiple_variable",
"tests/test_utils.py::ClassVariableTests::test_no_variables",
"tests/test_utils.py::ClassVariableTests::test_one_variable",
"tests/test_utils.py::ClassVariableTests::test_separated_variables",
"tests/test_utils.py::ClassVariableTests::test_single_quote_multiline",
"tests/test_utils.py::ClassVariableTests::test_triple_quote_multiline",
"tests/test_utils.py::ClassVariableTests::test_typed_variables",
"tests/test_utils.py::GetLiteralsTests::test_get_literals_empty",
"tests/test_utils.py::GetLiteralsTests::test_get_literals_primitives",
"tests/test_utils.py::GetLiteralsTests::test_get_literals_string",
"tests/test_utils.py::GetLiteralsTests::test_get_literals_uniqueness",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_infinite",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_multi_types_different",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_multi_types_same",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_one_type_bool",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_one_type_float",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_one_type_int",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_one_type_str",
"tests/test_utils.py::TupleTypeEnforcerTests::test_tuple_type_enforcer_zero_types",
"tests/test_utils.py::NestedReplaceTypeTests::test_nested_replace_type_nested",
"tests/test_utils.py::NestedReplaceTypeTests::test_nested_replace_type_notype",
"tests/test_utils.py::NestedReplaceTypeTests::test_nested_replace_type_unnested",
"tests/test_utils.py::PythonObjectEncoderTests::test_python_object_encoder_complex",
"tests/test_utils.py::PythonObjectEncoderTests::test_python_object_encoder_set",
"tests/test_utils.py::PythonObjectEncoderTests::test_python_object_encoder_simple_types",
"tests/test_utils.py::PythonObjectEncoderTests::test_python_object_encoder_tuple",
"tests/test_utils.py::PythonObjectEncoderTests::test_python_object_encoder_unpicklable",
"tests/test_utils.py::EnforceReproducibilityTests::test_git_url_not_in_current_reproducibility_data",
"tests/test_utils.py::EnforceReproducibilityTests::test_git_url_not_in_saved_reproducibility_data",
"tests/test_utils.py::EnforceReproducibilityTests::test_git_urls_disagree",
"tests/test_utils.py::EnforceReproducibilityTests::test_saved_reproducibility_data_is_none",
"tests/test_utils.py::EnforceReproducibilityTests::test_throw_error_for_saved_uncommitted_changes",
"tests/test_utils.py::EnforceReproducibilityTests::test_throw_error_for_uncommitted_changes"
] | [] | MIT License | 18,945 | 891 | [
"src/tap/tap.py",
"src/tap/utils.py"
] |
|
reata__sqllineage-637 | 10442e52aed3d937b252293d556ed1d0afa8b622 | 2024-07-12 22:52:11 | e9e3048afae1705a4c3f80e4ca56dcc0c6df998e | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/reata/sqllineage/pull/637?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=None) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 99.55%. Comparing base [(`10442e5`)](https://app.codecov.io/gh/reata/sqllineage/commit/10442e52aed3d937b252293d556ed1d0afa8b622?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=None) to head [(`c37c9ae`)](https://app.codecov.io/gh/reata/sqllineage/commit/c37c9ae5eeca3d30e431cec7f3e128295125d9ef?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=None).
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## master #637 +/- ##
=======================================
Coverage 99.55% 99.55%
=======================================
Files 41 41
Lines 2237 2245 +8
=======================================
+ Hits 2227 2235 +8
Misses 10 10
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/reata/sqllineage/pull/637?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=None).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=None).
| diff --git a/sqllineage/cli.py b/sqllineage/cli.py
index 857de5c..e4630a0 100644
--- a/sqllineage/cli.py
+++ b/sqllineage/cli.py
@@ -16,7 +16,7 @@ from sqllineage.core.metadata.sqlalchemy import SQLAlchemyMetaDataProvider
from sqllineage.drawing import draw_lineage_graph
from sqllineage.runner import LineageRunner
from sqllineage.utils.constant import LineageLevel
-from sqllineage.utils.helpers import extract_sql_from_args
+from sqllineage.utils.helpers import extract_file_path_from_args, extract_sql_from_args
logger = logging.getLogger(__name__)
@@ -108,8 +108,10 @@ def main(args=None) -> None:
warnings.warn("Both -e and -f options are specified. -e option will be ignored")
if args.f or args.e:
sql = extract_sql_from_args(args)
+ file_path = extract_file_path_from_args(args)
runner = LineageRunner(
sql,
+ file_path=file_path,
dialect=args.dialect,
metadata_provider=metadata_provider,
verbose=args.verbose,
diff --git a/sqllineage/core/parser/sqlfluff/analyzer.py b/sqllineage/core/parser/sqlfluff/analyzer.py
index 74e0ace..9fd3b10 100644
--- a/sqllineage/core/parser/sqlfluff/analyzer.py
+++ b/sqllineage/core/parser/sqlfluff/analyzer.py
@@ -27,8 +27,10 @@ class SqlFluffLineageAnalyzer(LineageAnalyzer):
PARSER_NAME = "sqlfluff"
SUPPORTED_DIALECTS = list(dialect.label for dialect in dialect_readout())
- def __init__(self, dialect: str, silent_mode: bool = False):
- self._dialect = dialect
+ def __init__(self, file_path: str, dialect: str, silent_mode: bool = False):
+ self._sqlfluff_config = FluffConfig.from_path(
+ path=file_path, overrides={"dialect": dialect}
+ )
self._silent_mode = silent_mode
self.tsql_split_cache: Dict[str, BaseSegment] = {}
@@ -57,7 +59,7 @@ class SqlFluffLineageAnalyzer(LineageAnalyzer):
else:
statement_segment = statement_segments[0]
for extractor in [
- extractor_cls(self._dialect, metadata_provider)
+ extractor_cls(self._sqlfluff_config.get("dialect"), metadata_provider)
for extractor_cls in BaseExtractor.__subclasses__()
]:
if extractor.can_extract(statement_segment.type):
@@ -79,9 +81,7 @@ class SqlFluffLineageAnalyzer(LineageAnalyzer):
)
def _list_specific_statement_segment(self, sql: str):
- parsed = Linter(
- config=FluffConfig.from_root(overrides={"dialect": self._dialect})
- ).parse_string(sql)
+ parsed = Linter(config=self._sqlfluff_config).parse_string(sql)
violations = [
str(e)
for e in parsed.violations
diff --git a/sqllineage/runner.py b/sqllineage/runner.py
index 9d666b5..113d036 100644
--- a/sqllineage/runner.py
+++ b/sqllineage/runner.py
@@ -42,6 +42,7 @@ class LineageRunner(object):
verbose: bool = False,
silent_mode: bool = False,
draw_options: Optional[Dict[str, Any]] = None,
+ file_path: str = ".",
):
"""
The entry point of SQLLineage after command line options are parsed.
@@ -51,6 +52,7 @@ class LineageRunner(object):
:param metadata_provider: metadata service object providing table schema
:param verbose: verbose flag indicating whether statement-wise lineage result will be shown
:param silent_mode: boolean flag indicating whether to skip lineage analysis for unknown statement types
+ :param file_path: path of the SQL file.
"""
if dialect == SQLPARSE_DIALECT:
warnings.warn(
@@ -60,6 +62,7 @@ class LineageRunner(object):
stacklevel=2,
)
self._sql = sql
+ self._file_path = file_path
self._verbose = verbose
self._draw_options = draw_options if draw_options else {}
self._evaluated = False
@@ -183,7 +186,9 @@ Target Tables:
analyzer = (
SqlParseLineageAnalyzer()
if self._dialect == SQLPARSE_DIALECT
- else SqlFluffLineageAnalyzer(self._dialect, self._silent_mode)
+ else SqlFluffLineageAnalyzer(
+ self._file_path, self._dialect, self._silent_mode
+ )
)
if SQLLineageConfig.TSQL_NO_SEMICOLON and self._dialect == "tsql":
self._stmt = analyzer.split_tsql(self._sql.strip())
diff --git a/sqllineage/utils/helpers.py b/sqllineage/utils/helpers.py
index df9c3ed..86fbbda 100644
--- a/sqllineage/utils/helpers.py
+++ b/sqllineage/utils/helpers.py
@@ -46,6 +46,13 @@ def extract_sql_from_args(args: Namespace) -> str:
return sql
+def extract_file_path_from_args(args: Namespace) -> str:
+ file_path = "."
+ if getattr(args, "f", None):
+ file_path = args.f
+ return file_path
+
+
def split(sql: str) -> List[str]:
# TODO: we need a parser independent split function
import sqlparse
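For reference, a standalone sketch of the sqlfluff config-loading difference the change above relies on: `FluffConfig.from_root` only picks up a `.sqlfluff` file in the current working directory, while `FluffConfig.from_path` also layers any configs found between the cwd and the given file. The paths and dialect below are simply the ones from the bug report; this is an illustration, not sqllineage code.

```python
from sqlfluff.core import FluffConfig, Linter

sql = "SELECT * FROM {{ source_table }}"

# Only sees ./.sqlfluff, so a Jinja value defined in a nested config is missing.
root_config = FluffConfig.from_root(overrides={"dialect": "athena"})

# Walks from the cwd down to the file, so folder1/.sqlfluff and
# folder1/folder2/folder3/.sqlfluff are both applied.
nested_config = FluffConfig.from_path(
    path="folder1/folder2/folder3/foo.sql", overrides={"dialect": "athena"}
)

parsed = Linter(config=nested_config).parse_string(sql)
print([str(violation) for violation in parsed.violations])
```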
| SQLfluff does not read nested configs.
**Describe the bug**
SQLfluff can read nested configs and override values from them. But sqllineage uses the wrong config-reading function [here](https://github.com/reata/sqllineage/blob/master/sqllineage/core/parser/sqlfluff/analyzer.py#L83).
`FluffConfig.from_root` can only read the local config from the cwd. Instead, I think we should use `FluffConfig.from_path` and pass the path of the file being validated. In that case, sqlfluff will read all configs from the cwd down to the file location.
**To Reproduce**
Create some directories like `folder1/folder2/folder3`
In `folder1` and `folder3` create `.sqlfluff` configs.
Add `foo.sql` to `folder3`
Use a SQL query with mocks like `SELECT * FROM {{ source_table }}`
Add `source_table` value to `/folder1/folder2/folder3/.sqlfluff` and leave `/folder1/.sqlfluff` empty.
Run
`sqllineage -d athena -f /folder1/folder2/folder3/foo.sql`
As a result we will get the error `Line 1, Position 10: Found unparsable section: 'FROM'`
**Expected behavior**
`Sqllineage` uses `sqlfluff` correctly to read nested configs.
**Additional context**
To fix that issue we have to change [this function](https://github.com/reata/sqllineage/blob/master/sqllineage/core/parser/sqlfluff/analyzer.py#L83) to `FluffConfig.from_path(path=file_path, overrides={"dialect": self._dialect})` and throw validating file path. | reata/sqllineage | diff --git a/tests/core/test_runner.py b/tests/core/test_runner.py
index 23210cf..aec0d52 100644
--- a/tests/core/test_runner.py
+++ b/tests/core/test_runner.py
@@ -1,6 +1,7 @@
import os
import tempfile
+from sqllineage.cli import main
from sqllineage.core.models import SubQuery
from sqllineage.runner import LineageRunner
from sqllineage.utils.constant import LineageLevel
@@ -51,3 +52,24 @@ tbl_name=my_table"""
assert_table_lineage_equal(sql, {"my_table"}, test_sqlparse=False)
finally:
os.chdir(cwd)
+
+
+def test_respect_nested_sqlfluff_configuration_file():
+ sqlfluff_config = """[sqlfluff:templater:jinja:context]
+num_things=456
+tbl_name=my_table"""
+ cwd = os.getcwd()
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ try:
+ os.chdir(tmpdirname)
+ nested_dir = os.path.join(tmpdirname, "nested_dir")
+ os.mkdir(nested_dir)
+ with open(os.path.join(nested_dir, ".sqlfluff"), "w") as f:
+ f.write(sqlfluff_config)
+ with open(os.path.join(nested_dir, "nested.sql"), "w") as f:
+ f.write(
+ "SELECT {{ num_things }} FROM {{ tbl_name }} WHERE id > 10 LIMIT 5"
+ )
+ main(["-f", nested_dir + "/nested.sql"])
+ finally:
+ os.chdir(cwd)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 4
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[ci]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"tox",
"codecov",
"black",
"flake8",
"mypy",
"bandit",
"Sphinx",
"sphinx_rtd_theme"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"curl -sL https://deb.nodesource.com/setup_18.x | bash -",
"apt-get install -y nodejs"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
backports.tarfile==1.2.0
bandit==1.8.3
black==25.1.0
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
codecov==2.1.13
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
diff_cover==9.2.4
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
flake8-blind-except==0.2.1
flake8-builtins==2.5.0
flake8-import-order==0.18.2
flake8-logging-format==2024.24.12
greenlet==3.1.1
id==1.5.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
Jinja2==3.1.6
keyring==25.6.0
markdown-it-py==3.0.0
MarkupSafe==3.0.2
mccabe==0.7.0
mdurl==0.1.2
more-itertools==10.6.0
mypy==1.15.0
mypy-extensions==1.0.0
networkx==3.2.1
nh3==0.2.21
packaging==24.2
pathspec==0.12.1
pbr==6.1.1
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pycparser==2.22
pyflakes==3.3.2
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytest-cov==6.1.0
PyYAML==6.0.2
readme_renderer==44.0
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
SecretStorage==3.3.3
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
sqlfluff==3.3.0
-e git+https://github.com/reata/sqllineage.git@10442e52aed3d937b252293d556ed1d0afa8b622#egg=sqllineage
sqlparse==0.5.0
stevedore==5.4.1
tblib==3.1.0
toml==0.10.2
tomli==2.2.1
tox==4.25.0
tqdm==4.67.1
twine==6.1.0
typing_extensions==4.13.1
urllib3==2.3.0
virtualenv==20.30.0
zipp==3.21.0
| name: sqllineage
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- backports-tarfile==1.2.0
- bandit==1.8.3
- black==25.1.0
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- codecov==2.1.13
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- diff-cover==9.2.4
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- flake8-blind-except==0.2.1
- flake8-builtins==2.5.0
- flake8-import-order==0.18.2
- flake8-logging-format==2024.24.12
- greenlet==3.1.1
- id==1.5.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- jinja2==3.1.6
- keyring==25.6.0
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- mccabe==0.7.0
- mdurl==0.1.2
- more-itertools==10.6.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- networkx==3.2.1
- nh3==0.2.21
- packaging==24.2
- pathspec==0.12.1
- pbr==6.1.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pycparser==2.22
- pyflakes==3.3.2
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cov==6.1.0
- pyyaml==6.0.2
- readme-renderer==44.0
- regex==2024.11.6
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- secretstorage==3.3.3
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- sqlfluff==3.3.0
- sqlparse==0.5.0
- stevedore==5.4.1
- tblib==3.1.0
- toml==0.10.2
- tomli==2.2.1
- tox==4.25.0
- tqdm==4.67.1
- twine==6.1.0
- typing-extensions==4.13.1
- urllib3==2.3.0
- virtualenv==20.30.0
- zipp==3.21.0
prefix: /opt/conda/envs/sqllineage
| [
"tests/core/test_runner.py::test_respect_nested_sqlfluff_configuration_file"
] | [] | [
"tests/core/test_runner.py::test_runner_dummy",
"tests/core/test_runner.py::test_statements_trim_comment",
"tests/core/test_runner.py::test_silent_mode",
"tests/core/test_runner.py::test_get_column_lineage_exclude_subquery_inpath",
"tests/core/test_runner.py::test_respect_sqlfluff_configuration_file"
] | [] | MIT License | 18,958 | 1,332 | [
"sqllineage/cli.py",
"sqllineage/core/parser/sqlfluff/analyzer.py",
"sqllineage/runner.py",
"sqllineage/utils/helpers.py"
] |
python-pillow__Pillow-8230 | 4721c31b19523e3c86f8d2fef62fdad25d2eb11d | 2024-07-13 02:41:18 | 7e6e08ea6aa8e05aa20425a6d9c07d4146241909 | diff --git a/src/PIL/Image.py b/src/PIL/Image.py
index fbeecef0e..e1cbf5335 100644
--- a/src/PIL/Image.py
+++ b/src/PIL/Image.py
@@ -4111,7 +4111,7 @@ class Exif(_ExifBase):
ifd = self._get_ifd_dict(tag_data, tag)
if ifd is not None:
self._ifds[tag] = ifd
- ifd = self._ifds.get(tag, {})
+ ifd = self._ifds.setdefault(tag, {})
if tag == ExifTags.IFD.Exif and self._hidden_data:
ifd = {
k: v
| Possible to add an IFD to an image's EXIF data?
(apologies if I'm using wrong/weird terminology)
I'm trying to add EXIF tags to an image that doesn't initially have any. Some of the tags I'm trying to set belong to `IFD.Exif` . When I run `img.getexif().get_ifd(IFD.Exif)` I get back an empty `dict` that doesn't persist anything I try to add to it.
However if the image already has data for `IFD.Exif` (so `img.getexif().get_ifd(IFD.Exif)` isn't `{}`) then I'm able to manipulate it as expected.
I'm not super familiar with how EXIF data works at a technical level, but I *think* I'm missing something that I need to do/set to
the image before I can set the tags I want to set. But I'm not 100% sure if that's the case, or whether I can do it with Pillow at all.
### What are your OS, Python and Pillow versions?
* OS: macOS 14.5
* Python: 3.12.4
* Pillow: 10.3.0
```text
--------------------------------------------------------------------
Pillow 10.3.0
Python 3.12.4 (main, Jun 27 2024, 18:40:44) [Clang 15.0.0 (clang-1500.3.9.4)]
--------------------------------------------------------------------
Python executable is /Users/tegan/Library/Caches/pypoetry/virtualenvs/exifmate-Hu18rg69-py3.12/bin/python3
Environment Python files loaded from /Users/tegan/Library/Caches/pypoetry/virtualenvs/exifmate-Hu18rg69-py3.12
System Python files loaded from /Users/tegan/.pyenv/versions/3.12.4
--------------------------------------------------------------------
Python Pillow modules loaded from /Users/tegan/Library/Caches/pypoetry/virtualenvs/exifmate-Hu18rg69-py3.12/lib/python3.12/site-packages/PIL
Binary Pillow modules loaded from /Users/tegan/Library/Caches/pypoetry/virtualenvs/exifmate-Hu18rg69-py3.12/lib/python3.12/site-packages/PIL
--------------------------------------------------------------------
--- PIL CORE support ok, compiled for 10.3.0
*** TKINTER support not installed
--- FREETYPE2 support ok, loaded 2.13.2
--- LITTLECMS2 support ok, loaded 2.16
--- WEBP support ok, loaded 1.3.2
--- WEBP Transparency support ok
--- WEBPMUX support ok
--- WEBP Animation support ok
--- JPEG support ok, compiled for libjpeg-turbo 3.0.2
--- OPENJPEG (JPEG2000) support ok, loaded 2.5.2
--- ZLIB (PNG/ZIP) support ok, loaded 1.3.1
--- LIBTIFF support ok, loaded 4.6.0
*** RAQM (Bidirectional Text) support not installed
*** LIBIMAGEQUANT (Quantization method) support not installed
--- XCB (X protocol) support ok
--------------------------------------------------------------------
```
### Sample Code
```py
from PIL.TiffImagePlugin import IFDRational
from PIL.ExifTags import IFD
from PIL import Image
tag_id = 33434 # exposure time tag
exposure_time = IFDRational(1/500)
# behavior with existing EXIF data
img_one = Image.open("has-exif.jpg")
exif_one = img_one.getexif()
exif_one.get_ifd(IFD.Exif)[tag_id] = exposure_time
assert exif_one.get_ifd(IFD.Exif)[tag_id] == exposure_time
# saving the image with the modified data also works
# img_one.save("has-exif.jpg", exif=exif_one)
# behavior with no existing EXIF data
img_two = Image.open("no-exif.jpg")
exif_two = img_two.getexif() # presumably this is basically the same as `Image.Exif()`
assert exif_two.get_ifd(IFD.Exif) == {}
exif_two.get_ifd(IFD.Exif)[tag_id] = exposure_time
# these assertions pass but I expect them both to fail
assert exif_two.get_ifd(IFD.Exif) == {}
assert exif_two.get_ifd(IFD.Exif).get(tag_id) == None
```
[sample pictures.zip](https://github.com/user-attachments/files/16199554/sample.pictures.zip)
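For comparison, a minimal sketch of the behaviour the fix in this change aims for, mirroring the regression test it adds: the dict returned by `get_ifd()` on an `Exif` object with no data is kept by that object, so entries written to it persist (tag `36864` is ExifVersion; any tag/value pair would do).

```python
from PIL import Image

exif = Image.Exif()            # no EXIF data yet
ifd = exif.get_ifd(0x8769)     # Exif IFD, initially an empty dict
ifd[36864] = b"0220"           # ExifVersion

# With the fix, the same dict is handed back on the next call.
assert exif.get_ifd(0x8769) == {36864: b"0220"}
```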
| python-pillow/Pillow | diff --git a/Tests/test_image.py b/Tests/test_image.py
index 5795f6c5c..2d35cd277 100644
--- a/Tests/test_image.py
+++ b/Tests/test_image.py
@@ -774,6 +774,14 @@ class TestImage:
exif.load(b"Exif\x00\x00")
assert not dict(exif)
+ def test_empty_get_ifd(self) -> None:
+ exif = Image.Exif()
+ ifd = exif.get_ifd(0x8769)
+ assert ifd == {}
+
+ ifd[36864] = b"0220"
+ assert exif.get_ifd(0x8769) == {36864: b"0220"}
+
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 10.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest pytest-cov pytest-timeout",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libjpeg-dev zlib1g-dev libtiff5-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev libharfbuzz-dev libfribidi-dev libxcb1-dev"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/python-pillow/Pillow.git@4721c31b19523e3c86f8d2fef62fdad25d2eb11d#egg=pillow
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
pytest-timeout==2.3.1
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: Pillow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- pillow==11.0.0.dev0
- pytest-cov==6.0.0
- pytest-timeout==2.3.1
prefix: /opt/conda/envs/Pillow
| [
"Tests/test_image.py::TestImage::test_empty_get_ifd"
] | [] | [
"Tests/test_image.py::TestImage::test_image_modes_success[1]",
"Tests/test_image.py::TestImage::test_image_modes_success[CMYK]",
"Tests/test_image.py::TestImage::test_image_modes_success[F]",
"Tests/test_image.py::TestImage::test_image_modes_success[HSV]",
"Tests/test_image.py::TestImage::test_image_modes_success[I]",
"Tests/test_image.py::TestImage::test_image_modes_success[I;16]",
"Tests/test_image.py::TestImage::test_image_modes_success[I;16B]",
"Tests/test_image.py::TestImage::test_image_modes_success[I;16L]",
"Tests/test_image.py::TestImage::test_image_modes_success[I;16N]",
"Tests/test_image.py::TestImage::test_image_modes_success[L]",
"Tests/test_image.py::TestImage::test_image_modes_success[LA]",
"Tests/test_image.py::TestImage::test_image_modes_success[La]",
"Tests/test_image.py::TestImage::test_image_modes_success[LAB]",
"Tests/test_image.py::TestImage::test_image_modes_success[P]",
"Tests/test_image.py::TestImage::test_image_modes_success[PA]",
"Tests/test_image.py::TestImage::test_image_modes_success[RGB]",
"Tests/test_image.py::TestImage::test_image_modes_success[RGBA]",
"Tests/test_image.py::TestImage::test_image_modes_success[RGBa]",
"Tests/test_image.py::TestImage::test_image_modes_success[RGBX]",
"Tests/test_image.py::TestImage::test_image_modes_success[YCbCr]",
"Tests/test_image.py::TestImage::test_image_modes_success[BGR;15]",
"Tests/test_image.py::TestImage::test_image_modes_success[BGR;16]",
"Tests/test_image.py::TestImage::test_image_modes_success[BGR;24]",
"Tests/test_image.py::TestImage::test_image_modes_fail[]",
"Tests/test_image.py::TestImage::test_image_modes_fail[bad]",
"Tests/test_image.py::TestImage::test_image_modes_fail[very",
"Tests/test_image.py::TestImage::test_exception_inheritance",
"Tests/test_image.py::TestImage::test_sanity",
"Tests/test_image.py::TestImage::test_open_formats",
"Tests/test_image.py::TestImage::test_open_verbose_failure",
"Tests/test_image.py::TestImage::test_width_height",
"Tests/test_image.py::TestImage::test_set_mode",
"Tests/test_image.py::TestImage::test_invalid_image",
"Tests/test_image.py::TestImage::test_bad_mode",
"Tests/test_image.py::TestImage::test_stringio",
"Tests/test_image.py::TestImage::test_fp_name",
"Tests/test_image.py::TestImage::test_tempfile",
"Tests/test_image.py::TestImage::test_unknown_extension",
"Tests/test_image.py::TestImage::test_internals",
"Tests/test_image.py::TestImage::test_readonly_save",
"Tests/test_image.py::TestImage::test_dump",
"Tests/test_image.py::TestImage::test_comparison_with_other_type",
"Tests/test_image.py::TestImage::test_expand_x",
"Tests/test_image.py::TestImage::test_expand_xy",
"Tests/test_image.py::TestImage::test_getbands",
"Tests/test_image.py::TestImage::test_getchannel_wrong_params",
"Tests/test_image.py::TestImage::test_getchannel",
"Tests/test_image.py::TestImage::test_getbbox",
"Tests/test_image.py::TestImage::test_ne",
"Tests/test_image.py::TestImage::test_alpha_composite",
"Tests/test_image.py::TestImage::test_alpha_inplace",
"Tests/test_image.py::TestImage::test_register_open_duplicates",
"Tests/test_image.py::TestImage::test_registered_extensions_uninitialized",
"Tests/test_image.py::TestImage::test_registered_extensions",
"Tests/test_image.py::TestImage::test_effect_mandelbrot",
"Tests/test_image.py::TestImage::test_effect_mandelbrot_bad_arguments",
"Tests/test_image.py::TestImage::test_effect_noise",
"Tests/test_image.py::TestImage::test_effect_spread",
"Tests/test_image.py::TestImage::test_effect_spread_zero",
"Tests/test_image.py::TestImage::test_check_size",
"Tests/test_image.py::TestImage::test_empty_image[size0]",
"Tests/test_image.py::TestImage::test_empty_image[size1]",
"Tests/test_image.py::TestImage::test_storage_neg",
"Tests/test_image.py::TestImage::test_one_item_tuple",
"Tests/test_image.py::TestImage::test_linear_gradient_wrong_mode",
"Tests/test_image.py::TestImage::test_linear_gradient[L]",
"Tests/test_image.py::TestImage::test_linear_gradient[P]",
"Tests/test_image.py::TestImage::test_linear_gradient[I]",
"Tests/test_image.py::TestImage::test_linear_gradient[F]",
"Tests/test_image.py::TestImage::test_radial_gradient_wrong_mode",
"Tests/test_image.py::TestImage::test_radial_gradient[L]",
"Tests/test_image.py::TestImage::test_radial_gradient[P]",
"Tests/test_image.py::TestImage::test_radial_gradient[I]",
"Tests/test_image.py::TestImage::test_radial_gradient[F]",
"Tests/test_image.py::TestImage::test_register_extensions",
"Tests/test_image.py::TestImage::test_remap_palette",
"Tests/test_image.py::TestImage::test_remap_palette_transparency",
"Tests/test_image.py::TestImage::test__new",
"Tests/test_image.py::TestImage::test_p_from_rgb_rgba[RGB-#DDEEFF]",
"Tests/test_image.py::TestImage::test_p_from_rgb_rgba[RGB-color1]",
"Tests/test_image.py::TestImage::test_p_from_rgb_rgba[RGBA-color2]",
"Tests/test_image.py::TestImage::test_no_resource_warning_on_save",
"Tests/test_image.py::TestImage::test_no_new_file_on_error",
"Tests/test_image.py::TestImage::test_load_on_nonexclusive_multiframe",
"Tests/test_image.py::TestImage::test_empty_exif",
"Tests/test_image.py::TestImage::test_exif_jpeg",
"Tests/test_image.py::TestImage::test_exif_webp",
"Tests/test_image.py::TestImage::test_exif_png",
"Tests/test_image.py::TestImage::test_exif_interop",
"Tests/test_image.py::TestImage::test_exif_ifd1",
"Tests/test_image.py::TestImage::test_exif_ifd",
"Tests/test_image.py::TestImage::test_exif_load_from_fp",
"Tests/test_image.py::TestImage::test_exif_hide_offsets",
"Tests/test_image.py::TestImage::test_empty_xmp",
"Tests/test_image.py::TestImage::test_getxmp_padded",
"Tests/test_image.py::TestImage::test_zero_tobytes[size0]",
"Tests/test_image.py::TestImage::test_zero_tobytes[size1]",
"Tests/test_image.py::TestImage::test_zero_tobytes[size2]",
"Tests/test_image.py::TestImage::test_zero_frombytes[size0]",
"Tests/test_image.py::TestImage::test_zero_frombytes[size1]",
"Tests/test_image.py::TestImage::test_zero_frombytes[size2]",
"Tests/test_image.py::TestImage::test_has_transparency_data",
"Tests/test_image.py::TestImage::test_apply_transparency",
"Tests/test_image.py::TestImage::test_constants",
"Tests/test_image.py::TestImage::test_overrun[fli_overrun.bin]",
"Tests/test_image.py::TestImage::test_overrun[sgi_overrun.bin]",
"Tests/test_image.py::TestImage::test_overrun[sgi_overrun_expandrow.bin]",
"Tests/test_image.py::TestImage::test_overrun[sgi_overrun_expandrow2.bin]",
"Tests/test_image.py::TestImage::test_overrun[pcx_overrun.bin]",
"Tests/test_image.py::TestImage::test_overrun[pcx_overrun2.bin]",
"Tests/test_image.py::TestImage::test_overrun[ossfuzz-4836216264589312.pcx]",
"Tests/test_image.py::TestImage::test_overrun[01r_00.pcx]",
"Tests/test_image.py::TestImage::test_fli_overrun2",
"Tests/test_image.py::TestImage::test_exit_fp",
"Tests/test_image.py::TestImage::test_close_graceful",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[1]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[CMYK]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[F]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[HSV]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[I]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[I;16]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[I;16B]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[I;16L]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[I;16N]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[L]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[LA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[La]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[LAB]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[P]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[PA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[RGB]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[RGBA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[RGBa]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[RGBX]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[YCbCr]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[BGR;15]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[BGR;16]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_constructor[BGR;24]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[1]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[CMYK]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[F]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[HSV]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[I]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[I;16]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[I;16B]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[I;16L]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[I;16N]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[L]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[LA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[La]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[LAB]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[P]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[PA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[RGB]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[RGBA]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[RGBa]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[RGBX]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[YCbCr]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[BGR;15]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[BGR;16]",
"Tests/test_image.py::TestImageBytes::test_roundtrip_bytes_method[BGR;24]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[1]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[CMYK]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[F]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[HSV]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[I]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[I;16]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[I;16B]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[I;16L]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[I;16N]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[L]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[LA]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[La]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[LAB]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[P]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[PA]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[RGB]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[RGBA]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[RGBa]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[RGBX]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[YCbCr]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[BGR;15]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[BGR;16]",
"Tests/test_image.py::TestImageBytes::test_getdata_putdata[BGR;24]",
"Tests/test_image.py::TestRegistry::test_encode_registry",
"Tests/test_image.py::TestRegistry::test_encode_registry_fail"
] | [] | MIT-CMU License | 18,962 | 166 | [
"src/PIL/Image.py"
] |
|
roskakori__pygount-165 | 8f4cb8afed65d4879090e333e495b915ed8c9489 | 2024-07-13 11:20:48 | 8f4cb8afed65d4879090e333e495b915ed8c9489 | diff --git a/pygount/analysis.py b/pygount/analysis.py
index 5246e4f..72b9cf3 100644
--- a/pygount/analysis.py
+++ b/pygount/analysis.py
@@ -27,6 +27,8 @@ import pygount.xmldialect
from pygount.common import deprecated, mapped_repr
from pygount.git_storage import GitStorage, git_remote_url_and_revision_if_any
+GIT_REPO_REGEX = re.compile(r"^(https?://|git@)")
+
# Attempt to import chardet.
try:
import chardet.universaldetector
@@ -624,6 +626,13 @@ class SourceScanner:
# TODO#113: Find a way to exclude the ugly temp folder from the source path.
result.extend(self._paths_and_group_to_analyze(git_storage.temp_folder))
else:
+ git_url_match = re.match(GIT_REPO_REGEX, source_pattern_to_analyze)
+ if git_url_match is not None:
+ raise pygount.Error(
+ 'URL to git repository must end with ".git", for example '
+ "[email protected]:roskakori/pygount.git or "
+ "https://github.com/roskakori/pygount.git."
+ )
result.extend(self._paths_and_group_to_analyze(source_pattern_to_analyze))
except OSError as error:
assert source_pattern_to_analyze is not None
| Fix silent error on git failing
Steps to reproduce:
1. Run:
```sh
pygount https://github.com/roskakori/pygount/
```
2. No output shows.
Expected behavior:
If the specified path starts with `http[s]://` or `git@` but the regex for a full repository path does not match, the following error message shows:
> URL to git repository must end with ".git", for example [email protected]:roskakori/pygount.git or https://github.com/roskakori/pygount.git. | roskakori/pygount | diff --git a/tests/test_analysis.py b/tests/test_analysis.py
index 5fe4af6..9a09a43 100644
--- a/tests/test_analysis.py
+++ b/tests/test_analysis.py
@@ -64,6 +64,12 @@ class SourceScannerTest(TempFolderTest):
scanned_names = [os.path.basename(source_path) for source_path, _ in scanner.source_paths()]
assert scanned_names == [name_to_include]
+ def test_fails_on_non_repo_url(self):
+ non_repo_urls = [["https://github.com/roskakori/pygount/"], ["[email protected]:roskakori/pygount"]]
+ for non_repo_url in non_repo_urls:
+ with analysis.SourceScanner(non_repo_url) as scanner, pytest.raises(PygountError):
+ next(scanner.source_paths())
+
def test_can_find_python_files_in_dot(self):
scanner = analysis.SourceScanner(["."], "py")
actual_paths = list(scanner.source_paths())
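Outside of pygount itself, the intent of the check exercised by the new test above can be sketched roughly as follows; the function name is made up for illustration, and pygount raises its own `pygount.Error` rather than `ValueError`.

```python
import re

GIT_URL_PREFIX = re.compile(r"^(https?://|git@)")

def check_repo_url(source_pattern: str) -> None:
    # Looks like a git remote but does not name a ".git" repository.
    if GIT_URL_PREFIX.match(source_pattern) and not source_pattern.endswith(".git"):
        raise ValueError(
            'URL to git repository must end with ".git", for example '
            "[email protected]:roskakori/pygount.git or "
            "https://github.com/roskakori/pygount.git."
        )

check_repo_url("https://github.com/roskakori/pygount.git")  # accepted
check_repo_url("https://github.com/roskakori/pygount/")     # raises ValueError
```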
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock",
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | chardet==5.2.0
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
gitdb==4.0.12
GitPython==3.1.44
iniconfig==2.1.0
markdown-it-py==3.0.0
mdurl==0.1.2
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
-e git+https://github.com/roskakori/pygount.git@8f4cb8afed65d4879090e333e495b915ed8c9489#egg=pygount
pytest==8.3.5
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
rich==13.9.4
smmap==5.0.2
tomli==2.2.1
typing_extensions==4.13.0
| name: pygount
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- chardet==5.2.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- gitdb==4.0.12
- gitpython==3.1.44
- iniconfig==2.1.0
- markdown-it-py==3.0.0
- mdurl==0.1.2
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pygount==1.8.1
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- rich==13.9.4
- smmap==5.0.2
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/pygount
| [
"tests/test_analysis.py::SourceScannerTest::test_fails_on_non_repo_url"
] | [] | [
"tests/test_analysis.py::SourceScannerTest::test_can_find_any_files",
"tests/test_analysis.py::SourceScannerTest::test_can_find_files_from_mixed_cloned_git_remote_url_and_local",
"tests/test_analysis.py::SourceScannerTest::test_can_find_no_files",
"tests/test_analysis.py::SourceScannerTest::test_can_find_python_files",
"tests/test_analysis.py::SourceScannerTest::test_can_find_python_files_in_dot",
"tests/test_analysis.py::SourceScannerTest::test_can_skip_dot_folder",
"tests/test_analysis.py::AnalysisTest::test_can_analyze_c",
"tests/test_analysis.py::AnalysisTest::test_can_analyze_python",
"tests/test_analysis.py::AnalysisTest::test_can_compute_python_line_parts",
"tests/test_analysis.py::AnalysisTest::test_can_convert_python_strings_to_comments",
"tests/test_analysis.py::AnalysisTest::test_can_deline_tokens",
"tests/test_analysis.py::AnalysisTest::test_can_detect_white_text",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_bytesio",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_embedded_language",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_encoding_error",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_oracle_sql",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_stringio",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_unknown_language",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_webfocus",
"tests/test_analysis.py::FileAnalysisTest::test_can_analyze_xml_dialect",
"tests/test_analysis.py::FileAnalysisTest::test_can_detect_binary_source_code",
"tests/test_analysis.py::FileAnalysisTest::test_can_detect_silent_dos_batch_remarks",
"tests/test_analysis.py::FileAnalysisTest::test_can_merge_embedded_language",
"tests/test_analysis.py::FileAnalysisTest::test_fails_on_non_seekable_file_handle_with_encoding_automatic",
"tests/test_analysis.py::FileAnalysisTest::test_fails_on_non_seekable_file_handle_with_encoding_chardet",
"tests/test_analysis.py::FileAnalysisTest::test_fails_on_unknown_magic_encoding_comment",
"tests/test_analysis.py::test_can_repr_source_analysis_from_file",
"tests/test_analysis.py::test_can_repr_empty_source_analysis_from_file",
"tests/test_analysis.py::test_can_repr_error_source_analysis_from_file",
"tests/test_analysis.py::test_can_guess_lexer_for_python",
"tests/test_analysis.py::test_can_guess_lexer_for_plain_text",
"tests/test_analysis.py::test_can_guess_lexer_for_cmakelists",
"tests/test_analysis.py::test_can_use_deprecated_counts",
"tests/test_analysis.py::EncodingTest::test_can_detect_automatic_encoding_for_empty_source",
"tests/test_analysis.py::EncodingTest::test_can_detect_binary_with_zero_byte",
"tests/test_analysis.py::EncodingTest::test_can_detect_bom_encodings",
"tests/test_analysis.py::EncodingTest::test_can_detect_chardet_encoding",
"tests/test_analysis.py::EncodingTest::test_can_detect_magic_comment",
"tests/test_analysis.py::EncodingTest::test_can_detect_plain_encoding",
"tests/test_analysis.py::EncodingTest::test_can_detect_utf16_as_non_binary",
"tests/test_analysis.py::EncodingTest::test_can_detect_utf8_when_cp1252_would_fail",
"tests/test_analysis.py::EncodingTest::test_can_detect_xml_prolog",
"tests/test_analysis.py::EncodingTest::test_can_use_hardcoded_ending",
"tests/test_analysis.py::GeneratedCodeTest::test_can_analyze_generated_code_with_own_pattern",
"tests/test_analysis.py::GeneratedCodeTest::test_can_detect_generated_code",
"tests/test_analysis.py::GeneratedCodeTest::test_can_detect_non_generated_code",
"tests/test_analysis.py::GeneratedCodeTest::test_can_not_detect_generated_code_with_late_comment",
"tests/test_analysis.py::SizeTest::test_can_detect_empty_source_code",
"tests/test_analysis.py::test_can_analyze_project_markdown_files",
"tests/test_analysis.py::test_has_no_duplicate_in_pygount_source",
"tests/test_analysis.py::test_can_match_deprecated_functions",
"tests/test_analysis.py::test_can_compute_base_language",
"tests/test_analysis.py::DuplicatePoolTest::test_can_detect_duplicate",
"tests/test_analysis.py::DuplicatePoolTest::test_can_distinguish_different_files"
] | [] | BSD 3-Clause "New" or "Revised" License | 18,965 | 334 | [
"pygount/analysis.py"
] |
|
python-control__python-control-1030 | bb82883cb4abf5171e82ae4c025c012f5218b6a7 | 2024-07-13 16:29:06 | 93c4c8d2827bad02e48234799f159f196acb2282 | diff --git a/control/timeplot.py b/control/timeplot.py
index 2eb7aec9..f257d2b3 100644
--- a/control/timeplot.py
+++ b/control/timeplot.py
@@ -738,7 +738,8 @@ def combine_time_responses(response_list, trace_labels=None, title=None):
if generate_trace_labels:
trace_labels.append(response.title)
trace_types.append(
- None if response.trace_types is None else response.types[0])
+ None if response.trace_types is None
+ else response.trace_types[0])
else:
# Save the data
diff --git a/control/timeresp.py b/control/timeresp.py
index f844b1df..244d90c2 100644
--- a/control/timeresp.py
+++ b/control/timeresp.py
@@ -1674,19 +1674,17 @@ def step_info(sysdata, T=None, T_num=None, yfinal=None, params=None,
if not np.isnan(InfValue) and not np.isinf(InfValue):
# RiseTime
- tr_lower_index = np.nonzero(
+ tr_lower_index = np.where(
sgnInf * (yout - RiseTimeLimits[0] * InfValue) >= 0
)[0][0]
- tr_upper_index = np.nonzero(
+ tr_upper_index = np.where(
sgnInf * (yout - RiseTimeLimits[1] * InfValue) >= 0
)[0][0]
rise_time = T[tr_upper_index] - T[tr_lower_index]
# SettlingTime
- outside_threshold = np.nonzero(
- np.abs(yout/InfValue - 1) >= SettlingTimeThreshold)[0]
- settled = 0 if outside_threshold.size == 0 \
- else outside_threshold[-1] + 1
+ settled = np.where(
+ np.abs(yout/InfValue-1) >= SettlingTimeThreshold)[0][-1]+1
# MIMO systems can have unsettled channels without infinite
# InfValue
if settled < len(T):
| `control.combine_time_responses` fails to combine time responses
Using `control.combine_time_responses` results in the following error:
```python
AttributeError: 'TimeResponseData' object has no attribute 'types'
```
This is because the code that adds new trace labels to the combined data has a bug where is trying to access a `types` attribute from the `TimeResponseData` objects instead of `trace_types`. | python-control/python-control | diff --git a/control/tests/timeplot_test.py b/control/tests/timeplot_test.py
index 0fcc159b..58ef062a 100644
--- a/control/tests/timeplot_test.py
+++ b/control/tests/timeplot_test.py
@@ -258,7 +258,7 @@ def test_combine_time_responses():
sys_mimo = ct.rss(4, 2, 2)
timepts = np.linspace(0, 10, 100)
- # Combine two response with ntrace = 0
+ # Combine two responses with ntrace = 0
U = np.vstack([np.sin(timepts), np.cos(2*timepts)])
resp1 = ct.input_output_response(sys_mimo, timepts, U)
@@ -293,6 +293,7 @@ def test_combine_time_responses():
combresp4 = ct.combine_time_responses(
[resp1, resp2, resp3], trace_labels=labels)
assert combresp4.trace_labels == labels
+ assert combresp4.trace_types == [None, None, 'step', 'step']
# Automatically generated trace label names and types
resp5 = ct.step_response(sys_mimo, timepts)
@@ -302,7 +303,13 @@ def test_combine_time_responses():
combresp5 = ct.combine_time_responses([resp1, resp5])
assert combresp5.trace_labels == [resp1.title] + \
["test, trace 0", "test, trace 1"]
- assert combresp4.trace_types == [None, None, 'step', 'step']
+ assert combresp5.trace_types == [None, None, None]
+
+ # ntraces = 0 with trace_types != None
+ # https://github.com/python-control/python-control/issues/1025
+ resp6 = ct.forced_response(sys_mimo, timepts, U)
+ combresp6 = ct.combine_time_responses([resp1, resp6])
+ assert combresp6.trace_types == [None, 'forced']
with pytest.raises(ValueError, match="must have the same number"):
resp = ct.step_response(ct.rss(4, 2, 3), timepts)
diff --git a/control/tests/timeresp_test.py b/control/tests/timeresp_test.py
index e2d93be0..73032c0a 100644
--- a/control/tests/timeresp_test.py
+++ b/control/tests/timeresp_test.py
@@ -1,7 +1,6 @@
"""timeresp_test.py - test time response functions"""
from copy import copy
-from math import isclose
import numpy as np
import pytest
@@ -9,11 +8,11 @@ import scipy as sp
import control as ct
from control import StateSpace, TransferFunction, c2d, isctime, ss2tf, tf2ss
-from control.exception import pandas_check, slycot_check
+from control.exception import slycot_check, pandas_check
from control.tests.conftest import slycotonly
-from control.timeresp import _default_time_vector, _ideal_tfinal_and_dt, \
- forced_response, impulse_response, initial_response, step_info, \
- step_response
+from control.timeresp import (_default_time_vector, _ideal_tfinal_and_dt,
+ forced_response, impulse_response,
+ initial_response, step_info, step_response)
class TSys:
@@ -1276,45 +1275,3 @@ def test_no_pandas():
# Convert to pandas
with pytest.raises(ImportError, match="pandas"):
df = resp.to_pandas()
-
-
-# https://github.com/python-control/python-control/issues/1014
-def test_step_info_nonstep():
- # Pass a constant input
- timepts = np.linspace(0, 10, endpoint=False)
- y_const = np.ones_like(timepts)
-
- # Constant value of 1
- step_info = ct.step_info(y_const, timepts)
- assert step_info['RiseTime'] == 0
- assert step_info['SettlingTime'] == 0
- assert step_info['SettlingMin'] == 1
- assert step_info['SettlingMax'] == 1
- assert step_info['Overshoot'] == 0
- assert step_info['Undershoot'] == 0
- assert step_info['Peak'] == 1
- assert step_info['PeakTime'] == 0
- assert step_info['SteadyStateValue'] == 1
-
- # Constant value of -1
- step_info = ct.step_info(-y_const, timepts)
- assert step_info['RiseTime'] == 0
- assert step_info['SettlingTime'] == 0
- assert step_info['SettlingMin'] == -1
- assert step_info['SettlingMax'] == -1
- assert step_info['Overshoot'] == 0
- assert step_info['Undershoot'] == 0
- assert step_info['Peak'] == 1
- assert step_info['PeakTime'] == 0
- assert step_info['SteadyStateValue'] == -1
-
- # Ramp from -1 to 1
- step_info = ct.step_info(-1 + 2 * timepts/10, timepts)
- assert step_info['RiseTime'] == 3.8
- assert step_info['SettlingTime'] == 9.8
- assert isclose(step_info['SettlingMin'], 0.88)
- assert isclose(step_info['SettlingMax'], 0.96)
- assert step_info['Overshoot'] == 0
- assert step_info['Peak'] == 1
- assert step_info['PeakTime'] == 0
- assert isclose(step_info['SteadyStateValue'], 0.96)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-timeout",
"ruff"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc gfortran"
],
"python": "3.10",
"reqs_path": [
"doc/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
bleach==6.2.0
certifi==2025.1.31
charset-normalizer==3.4.1
comm==0.2.2
contourpy==1.3.1
-e git+https://github.com/python-control/python-control.git@bb82883cb4abf5171e82ae4c025c012f5218b6a7#egg=control
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
docutils==0.16
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
fonttools==4.56.0
idna==3.10
imagesize==1.4.1
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.34.0
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterlab_pygments==0.3.0
kiwisolver==1.4.8
MarkupSafe==3.0.2
matplotlib==3.10.1
matplotlib-inline==0.1.7
mistune==3.1.3
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbsphinx==0.9.3
nest-asyncio==1.6.0
numpy==2.2.4
numpydoc==1.6.0
packaging==24.2
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
Pygments==2.19.1
pyparsing==3.2.3
pytest==8.3.5
pytest-timeout==2.3.1
python-dateutil==2.9.0.post0
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
ruff==0.11.2
scipy==1.15.2
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
tabulate==0.9.0
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
webencodings==0.5.1
| name: python-control
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- comm==0.2.2
- contourpy==1.3.1
- control==0.10.1.dev113+gbb82883c
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- docutils==0.16
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- fonttools==4.56.0
- idna==3.10
- imagesize==1.4.1
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.34.0
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyterlab-pygments==0.3.0
- kiwisolver==1.4.8
- markupsafe==3.0.2
- matplotlib==3.10.1
- matplotlib-inline==0.1.7
- mistune==3.1.3
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbsphinx==0.9.3
- nest-asyncio==1.6.0
- numpy==2.2.4
- numpydoc==1.6.0
- packaging==24.2
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pygments==2.19.1
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-timeout==2.3.1
- python-dateutil==2.9.0.post0
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- ruff==0.11.2
- scipy==1.15.2
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- tabulate==0.9.0
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- webencodings==0.5.1
prefix: /opt/conda/envs/python-control
| [
"control/tests/timeplot_test.py::test_combine_time_responses"
] | [] | [
"control/tests/timeplot_test.py::test_response_plots[step_response-False-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-False-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-None-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-None-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-True-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-True-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-True-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-True-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-True-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-True-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-False-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-True-False-False-False-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-False-False-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-False-False-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-None-False-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-None-False-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-False-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-overlay-False-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-False-False-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-False-False-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-False-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-False-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-True-False-True-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-True-False-True-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-True-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[step_response-True-True-False-True-True-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[impulse_response-False-True-True-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-None-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-None-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-False-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-False-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-True-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[initial_response-True-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-None-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-None-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-False-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-False-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-False-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-True-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-True-False-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-True-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-True-True-True-True-True-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-overlay-True-True-True-False-True-sys0]",
"control/tests/timeplot_test.py::test_response_plots[forced_response-overlay-True-True-True-False-True-sys1]",
"control/tests/timeplot_test.py::test_response_plots[input_output_response-True-True-False-False-False-False-sys0]",
"control/tests/timeplot_test.py::test_response_plots[input_output_response-True-True-False-False-False-False-sys1]",
"control/tests/timeplot_test.py::test_axes_setup",
"control/tests/timeplot_test.py::test_list_responses[step_response]",
"control/tests/timeplot_test.py::test_list_responses[initial_response]",
"control/tests/timeplot_test.py::test_list_responses[impulse_response]",
"control/tests/timeplot_test.py::test_list_responses[forced_response]",
"control/tests/timeplot_test.py::test_list_responses[input_output_response]",
"control/tests/timeplot_test.py::test_rcParams",
"control/tests/timeplot_test.py::test_timeplot_trace_labels[step_response]",
"control/tests/timeplot_test.py::test_timeplot_trace_labels[initial_response]",
"control/tests/timeplot_test.py::test_timeplot_trace_labels[impulse_response]",
"control/tests/timeplot_test.py::test_timeplot_trace_labels[forced_response]",
"control/tests/timeplot_test.py::test_timeplot_trace_labels[input_output_response]",
"control/tests/timeplot_test.py::test_full_label_override",
"control/tests/timeplot_test.py::test_relabel",
"control/tests/timeplot_test.py::test_errors",
"control/tests/timeplot_test.py::test_legend_customization",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_siso[siso_ss1-kwargs0]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_siso[siso_ss1-kwargs1]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_siso[siso_ss1-kwargs2]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_siso[siso_ss1-kwargs3]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_mimo[mimo_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_response_return[mimo_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_nostates[continuous]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_nostates[discrete]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_step_matlab-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_step_matlab-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_step_matlab-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_ss_step_matlab-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_ss_step_matlab-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_ss_step_matlab-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kpos-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kpos-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kpos-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kneg-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kneg-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_kneg-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_type1-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_type1-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_asymptotic_from_neg1-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_asymptotic_from_neg1-ltisys-no",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info[siso_tf_asymptotic_from_neg1-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info_mimo[mimo_ss_step_matlab-ltisys-yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info_mimo[mimo_ss_step_matlab-ltisys-no_yfinal]",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info_mimo[mimo_ss_step_matlab-time",
"control/tests/timeresp_test.py::TestTimeresp::test_step_info_invalid",
"control/tests/timeresp_test.py::TestTimeresp::test_step_pole_cancellation[tsystem0]",
"control/tests/timeresp_test.py::TestTimeresp::test_impulse_response_siso[siso_ss2-kwargs0]",
"control/tests/timeresp_test.py::TestTimeresp::test_impulse_response_siso[siso_ss2-kwargs1]",
"control/tests/timeresp_test.py::TestTimeresp::test_impulse_response_siso[siso_dtf0-kwargs2]",
"control/tests/timeresp_test.py::TestTimeresp::test_impulse_response_mimo[mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_discrete_time_impulse[siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_discrete_time_impulse_input",
"control/tests/timeresp_test.py::TestTimeresp::test_impulse_response_warnD[siso_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response[siso_ss1-kwargs0]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response[siso_ss1-kwargs1]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response[siso_ss1-kwargs2]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response[siso_ss1-kwargs3]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response[siso_ss1-kwargs4]",
"control/tests/timeresp_test.py::TestTimeresp::test_initial_response_mimo[mimo_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_step[siso_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_step[siso_tf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_initial[siso_ss1-u0]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_initial[siso_ss1-0]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_initial[siso_tf2-u0]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_initial[siso_tf2-0]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_mimo[mimo_ss1-True]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_mimo[mimo_dss2-True]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_mimo[mimo_dss2-False]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_legacy",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_T_U[ctime",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_T_U[dt=True,",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_T_U[dt=0.2,",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_T_U[dt=None,",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_T_U[dt",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_invalid_c[siso_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_forced_response_invalid_d[siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_lsim_double_integrator[zeros]",
"control/tests/timeresp_test.py::TestTimeresp::test_lsim_double_integrator[ones]",
"control/tests/timeresp_test.py::TestTimeresp::test_lsim_double_integrator[linear]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_tfinal[tfsys0-13.81551]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_tfinal[tfsys1-25]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_tfinal[tfsys2-25]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_dt_cont1[10-0]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_dt_cont1[100-0]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_dt_cont1[100-0.1]",
"control/tests/timeresp_test.py::TestTimeresp::test_auto_generated_time_vector_dt_cont2",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_long",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_c[step_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_c[impulse_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_c[initial_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.1-step_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.1-impulse_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.1-initial_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.112-step_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.112-impulse_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_default_timevector_functions_d[0.112-initial_response]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-step_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-impulse_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-initial_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[None-forced_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-step_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-impulse_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-initial_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[True-forced_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-step_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-impulse_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_tf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-initial_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_dtf1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_dss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-siso_ss2_dtnone]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-mimo_ss2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector[False-forced_response-mimo_dss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector_interpolation[siso_dtf2-None]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector_interpolation[siso_dtf2-True]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_vector_interpolation[siso_dtf2-False]",
"control/tests/timeresp_test.py::TestTimeresp::test_discrete_time_steps[siso_dtf2]",
"control/tests/timeresp_test.py::TestTimeresp::test_time_series_data_convention_2D[siso_ss1]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[1-1-1-None-shape10-shape20-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[1-1-1-None-shape10-shape20-tf]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[2-1-1-True-shape11-shape21-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[2-1-1-True-shape11-shape21-tf]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[3-1-1-False-shape12-shape22-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[3-1-1-False-shape12-shape22-tf]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[3-2-1-None-shape13-shape23-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[4-2-1-True-shape14-shape24-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[5-2-1-False-shape15-shape25-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[3-1-2-None-shape16-shape26-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[4-1-2-True-shape17-shape27-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[5-1-2-False-shape18-shape28-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[4-2-2-None-shape19-shape29-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[5-2-2-True-shape110-shape210-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze[6-2-2-False-shape111-shape211-ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_exception[ss]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_exception[tf]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[1-1-1-None-shape0]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[2-1-1-True-shape1]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[3-1-1-False-shape2]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[1-2-1-None-shape3]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[2-2-1-True-shape4]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[3-2-1-False-shape5]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[1-1-2-None-shape6]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[2-1-2-True-shape7]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[3-1-2-False-shape8]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[1-2-2-None-shape9]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[2-2-2-True-shape10]",
"control/tests/timeresp_test.py::TestTimeresp::test_squeeze_0_8_4[3-2-2-False-shape11]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-1-None-ysh_in0-ysh_no0-xsh_in0]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-1-True-ysh_in1-ysh_no1-xsh_in1]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-1-False-ysh_in2-ysh_no2-xsh_in2]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-1-None-ysh_in3-ysh_no3-xsh_in3]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-1-True-ysh_in4-ysh_no4-xsh_in4]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-1-False-ysh_in5-ysh_no5-xsh_in5]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-2-None-ysh_in6-ysh_no6-xsh_in6]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-2-True-ysh_in7-ysh_no7-xsh_in7]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-1-2-False-ysh_in8-ysh_no8-xsh_in8]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-2-None-ysh_in9-ysh_no9-xsh_in9]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-2-True-ysh_in10-ysh_no10-xsh_in10]",
"control/tests/timeresp_test.py::TestTimeresp::test_response_transpose[4-2-2-False-ysh_in11-ysh_no11-xsh_in11]",
"control/tests/timeresp_test.py::test_no_pandas"
] | [] | BSD 3-Clause "New" or "Revised" License | 18,968 | 496 | [
"control/timeplot.py",
"control/timeresp.py"
] |
|
lmfit__lmfit-py-961 | b72cfb28e7a9ea835dd287ceb7fda9d549fd924f | 2024-07-13 22:10:47 | 0c2bd14e5a6a9e0425d0684c9bcd89aaa6725c34 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/lmfit/lmfit-py/pull/961?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit) Report
Attention: Patch coverage is `57.14286%` with `3 lines` in your changes missing coverage. Please review.
> Project coverage is 93.15%. Comparing base [(`b72cfb2`)](https://app.codecov.io/gh/lmfit/lmfit-py/commit/b72cfb28e7a9ea835dd287ceb7fda9d549fd924f?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit) to head [(`0560b74`)](https://app.codecov.io/gh/lmfit/lmfit-py/commit/0560b7413a7fb9369528285aeeff5463f7727e24?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit).
| [Files](https://app.codecov.io/gh/lmfit/lmfit-py/pull/961?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit) | Patch % | Lines |
|---|---|---|
| [lmfit/model.py](https://app.codecov.io/gh/lmfit/lmfit-py/pull/961?src=pr&el=tree&filepath=lmfit%2Fmodel.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit#diff-bG1maXQvbW9kZWwucHk=) | 57.14% | [3 Missing :warning: ](https://app.codecov.io/gh/lmfit/lmfit-py/pull/961?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit) |
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## master #961 +/- ##
==========================================
- Coverage 93.20% 93.15% -0.05%
==========================================
Files 10 10
Lines 3765 3769 +4
==========================================
+ Hits 3509 3511 +2
- Misses 256 258 +2
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/lmfit/lmfit-py/pull/961?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=lmfit).
newville: @reneeotten If you agree, I think squash-merging this would allow tagging and pushing version 1.3.2.
newville: @reneeotten I'm going to (squash) merge this and tag and push as 1.3.2 | diff --git a/lmfit/model.py b/lmfit/model.py
index c19bcef1..34a78c78 100644
--- a/lmfit/model.py
+++ b/lmfit/model.py
@@ -1255,10 +1255,14 @@ class CompositeModel(Model):
if 'nan_policy' not in kws:
kws['nan_policy'] = self.left.nan_policy
+ # CompositeModel cannot have a prefix.
+ if 'prefix' in kws:
+ warnings.warn("CompositeModel ignores `prefix` argument")
+ kws['prefix'] = ''
+
def _tmp(self, *args, **kws):
pass
Model.__init__(self, _tmp, **kws)
-
for side in (left, right):
prefix = side.prefix
for basename, hint in side.param_hints.items():
@@ -1548,7 +1552,10 @@ class ModelResult(Minimizer):
if data is not None:
self.data = data
if params is not None:
- self.init_params = params
+ self.init_params = deepcopy(params)
+ else:
+ self.init_params = deepcopy(self.params)
+
if weights is not None:
self.weights = weights
if method is not None:
@@ -1559,8 +1566,8 @@ class ModelResult(Minimizer):
self.ci_out = None
self.userargs = (self.data, self.weights)
self.userkws.update(kwargs)
- self.init_fit = self.model.eval(params=self.params, **self.userkws)
- _ret = self.minimize(method=self.method)
+ self.init_fit = self.model.eval(params=self.init_params, **self.userkws)
+ _ret = self.minimize(method=self.method, params=self.init_params)
self.model.post_fit(_ret)
_ret.params.create_uvars(covar=_ret.covar)
| ModelResult.fit() does not use provided params
#### First Time Issue Code
Yes, I read the instructions and I am sure this is a GitHub Issue.
#### Description
When using ModelResult.fit(), the provided params are not used as the initial values.
In the example below, result_leastsq.params shows that result_brute.params was used as the initial values rather than the params provided.
result_leastsq.init_params does report the correct values.
###### A Minimal, Complete, and Verifiable example
```
import numpy as np
import lmfit
import copy
x = np.linspace(-10, 10, 100)
y = 3 * x**2 - 2 * x + 1 + np.random.normal(scale=10, size=x.size)
def quadratic(x, a, b, c):
    return a * x**2 + b * x + c
model = lmfit.Model(quadratic)
params = model.make_params(a=dict(value=1, min=-10, max=10),
                           b=dict(value=0, min=-10, max=10),
                           c=dict(value=0, min=-10, max=10),)
result_brute = model.fit(y, params, x=x, method='brute')
result_leastsq = copy.deepcopy(result_brute)
result_leastsq.fit(method='leastsq', params=result_brute.candidates[49].params)
```
###### Version information
Python: 3.11.9 (main, Apr 19 2024, 16:48:06) [GCC 11.2.0]
lmfit: 1.3.1, scipy: 1.14.0, numpy: 2.0.0, asteval: 1.0.0, uncertainties: 3.2.1
| lmfit/lmfit-py | diff --git a/tests/test_model.py b/tests/test_model.py
index 24dfbfcd..7f4bbdfb 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -13,7 +13,7 @@ import lmfit
from lmfit import Model, Parameters, models
from lmfit.lineshapes import gaussian, lorentzian, step, voigt
from lmfit.model import get_reducer, propagate_err
-from lmfit.models import GaussianModel, PseudoVoigtModel
+from lmfit.models import GaussianModel, PseudoVoigtModel, QuadraticModel
@pytest.fixture()
@@ -1648,3 +1648,52 @@ def test_custom_variadic_model():
assert result.nfev > 7
assert_allclose(result.values['c0'], 5.0, 0.02, 0.02, '', True)
assert_allclose(result.values['c1'], 3.3, 0.02, 0.02, '', True)
+
+
+def test_model_refitting():
+ """Github #960"""
+ np.random.seed(0)
+ x = np.linspace(0, 100, 5001)
+ y = gaussian(x, amplitude=90, center=60, sigma=4) + 30 + 0.3*x - 0.0030*x*x
+ y += np.random.normal(size=5001, scale=0.5)
+
+ model = GaussianModel(prefix='peak_') + QuadraticModel(prefix='bkg_')
+
+ params = model.make_params(bkg_a=0, bkg_b=0, bkg_c=20, peak_amplitude=200,
+ peak_center=55, peak_sigma=10)
+
+ result = model.fit(y, params, x=x, method='powell')
+ assert result.chisqr > 12000.0
+ assert result.nfev > 500
+ assert result.params['peak_amplitude'].value > 500
+ assert result.params['peak_amplitude'].value < 5000
+ assert result.params['peak_sigma'].value > 10
+ assert result.params['peak_sigma'].value < 100
+
+ # now re-fit with LM
+ result.fit(y, x=x, method='leastsq')
+
+ assert result.nfev > 25
+ assert result.nfev < 200
+ assert result.chisqr < 2000.0
+
+ assert result.params['peak_amplitude'].value > 85
+ assert result.params['peak_amplitude'].value < 95
+ assert result.params['peak_sigma'].value > 3
+ assert result.params['peak_sigma'].value < 5
+
+ # and assert that the initial value are from the Powell result
+ assert result.init_values['peak_amplitude'] > 1500
+ assert result.init_values['peak_sigma'] > 25
+
+ params = model.make_params(bkg_a=0, bkg_b=-.02, bkg_c=26, peak_amplitude=20,
+ peak_center=62, peak_sigma=3)
+
+ # now re-fit with LM and these new params
+ result.fit(y, params, x=x, method='leastsq')
+
+ # and assert that the initial value are from the Powell result
+ assert result.init_values['peak_amplitude'] > 19
+ assert result.init_values['peak_amplitude'] < 21
+ assert result.init_values['peak_sigma'] > 2
+ assert result.init_values['peak_sigma'] < 4
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flaky"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
annotated-types==0.7.0
asteval==1.0.6
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
backports.tarfile==1.2.0
beautifulsoup4==4.13.3
bleach==6.2.0
build==1.2.2.post1
cairocffi==1.7.1
CairoSVG==2.7.1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
check-wheel-contents==0.6.1
click==8.1.8
comm==0.2.2
contourpy==1.3.0
corner==2.2.3
coverage==7.8.0
cryptography==44.0.2
cssselect2==0.8.0
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
dill==0.3.9
distlib==0.3.9
docutils==0.21.2
emcee==3.1.6
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
flake8==7.2.0
Flake8-pyproject==1.2.3
flaky==3.8.1
fonttools==4.56.0
id==1.5.0
identify==2.6.9
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.2
jeepney==0.9.0
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-sphinx==0.5.3
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterlab_pygments==0.3.0
jupyterlab_widgets==3.0.13
keyring==25.6.0
kiwisolver==1.4.7
-e git+https://github.com/lmfit/lmfit-py.git@b72cfb28e7a9ea835dd287ceb7fda9d549fd924f#egg=lmfit
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mccabe==0.7.0
mdurl==0.1.2
mistune==3.1.3
more-itertools==10.6.0
mpmath==1.3.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
nh3==0.2.21
nodeenv==1.9.1
numdifftools==0.9.41
numexpr==2.10.2
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pyparsing==3.2.3
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
readme_renderer==44.0
referencing==0.36.2
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
rpds-py==0.24.0
scipy==1.13.1
SecretStorage==3.3.3
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinx-gallery==0.19.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
sphinxcontrib-svg2pdfconverter==1.3.0
stack-data==0.6.3
sympy==1.13.3
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
twine==6.1.0
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
uncertainties==3.2.2
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webencodings==0.5.1
wheel-filename==1.4.2
widgetsnbextension==4.0.13
zipp==3.21.0
| name: lmfit-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- annotated-types==0.7.0
- asteval==1.0.6
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- backports-tarfile==1.2.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- build==1.2.2.post1
- cairocffi==1.7.1
- cairosvg==2.7.1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- check-wheel-contents==0.6.1
- click==8.1.8
- comm==0.2.2
- contourpy==1.3.0
- corner==2.2.3
- coverage==7.8.0
- cryptography==44.0.2
- cssselect2==0.8.0
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- dill==0.3.9
- distlib==0.3.9
- docutils==0.21.2
- emcee==3.1.6
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- flake8==7.2.0
- flake8-pyproject==1.2.3
- flaky==3.8.1
- fonttools==4.56.0
- id==1.5.0
- identify==2.6.9
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jedi==0.19.2
- jeepney==0.9.0
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-sphinx==0.5.3
- jupyterlab-pygments==0.3.0
- jupyterlab-widgets==3.0.13
- keyring==25.6.0
- kiwisolver==1.4.7
- lmfit==1.3.1.post8+gb72cfb28
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mdurl==0.1.2
- mistune==3.1.3
- more-itertools==10.6.0
- mpmath==1.3.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- nh3==0.2.21
- nodeenv==1.9.1
- numdifftools==0.9.41
- numexpr==2.10.2
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pyparsing==3.2.3
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- readme-renderer==44.0
- referencing==0.36.2
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- rpds-py==0.24.0
- scipy==1.13.1
- secretstorage==3.3.3
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinx-gallery==0.19.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sphinxcontrib-svg2pdfconverter==1.3.0
- stack-data==0.6.3
- sympy==1.13.3
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- twine==6.1.0
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- uncertainties==3.2.2
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webencodings==0.5.1
- wheel-filename==1.4.2
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/lmfit-py
| [
"tests/test_model.py::test_model_refitting"
] | [] | [
"tests/test_model.py::test_get_reducer_invalid_option",
"tests/test_model.py::test_get_reducer[real-expected_array0]",
"tests/test_model.py::test_get_reducer[imag-expected_array1]",
"tests/test_model.py::test_get_reducer[abs-expected_array2]",
"tests/test_model.py::test_get_reducer[angle-expected_array3]",
"tests/test_model.py::test_propagate_err_invalid_option",
"tests/test_model.py::test_propagate_err_unequal_shape_z_dz",
"tests/test_model.py::test_propagate_err[real]",
"tests/test_model.py::test_propagate_err[imag]",
"tests/test_model.py::test_propagate_err[abs]",
"tests/test_model.py::test_propagate_err[angle]",
"tests/test_model.py::test_initialize_Model_class_default_arguments",
"tests/test_model.py::test_initialize_Model_class_independent_vars",
"tests/test_model.py::test_initialize_Model_class_param_names",
"tests/test_model.py::test_initialize_Model_class_nan_policy[raise]",
"tests/test_model.py::test_initialize_Model_class_nan_policy[omit]",
"tests/test_model.py::test_initialize_Model_class_nan_policy[propagate]",
"tests/test_model.py::test_initialize_Model_class_prefix",
"tests/test_model.py::test_initialize_Model_name",
"tests/test_model.py::test_initialize_Model_kws",
"tests/test_model.py::test_Model_reprstring[False-Model(gaussian)]",
"tests/test_model.py::test_Model_reprstring[True-Model(gaussian,",
"tests/test_model.py::test_Model_get_state",
"tests/test_model.py::test_Model_set_state",
"tests/test_model.py::test_Model_dumps_loads",
"tests/test_model.py::test_Model_getter_setter_name",
"tests/test_model.py::test_Model_getter_setter_prefix",
"tests/test_model.py::test_Model_getter_param_names",
"tests/test_model.py::test_Model__repr__",
"tests/test_model.py::test_Model_copy",
"tests/test_model.py::test__parse_params_func_None",
"tests/test_model.py::test__parse_params_asteval_functions",
"tests/test_model.py::test__parse_params_inspect_signature",
"tests/test_model.py::test_make_params_withprefixs",
"tests/test_model.py::test__parse_params_forbidden_variable_names",
"tests/test_model.py::test_coercion_of_input_data[int16]",
"tests/test_model.py::test_coercion_of_input_data[int32]",
"tests/test_model.py::test_coercion_of_input_data[float32]",
"tests/test_model.py::test_coercion_of_input_data[complex64]",
"tests/test_model.py::test_coercion_of_input_data[complex128]",
"tests/test_model.py::test_coercion_of_input_data[list]",
"tests/test_model.py::test_coercion_of_input_data[tuple]",
"tests/test_model.py::test_coercion_of_input_data[pandas-real]",
"tests/test_model.py::test_coercion_of_input_data[pandas-complex]",
"tests/test_model.py::test_figure_default_title",
"tests/test_model.py::test_figure_title_using_title_keyword_argument",
"tests/test_model.py::test_figure_title_using_title_to_ax_kws",
"tests/test_model.py::test_priority_setting_figure_title",
"tests/test_model.py::test_eval_with_kwargs",
"tests/test_model.py::test_guess_requires_x",
"tests/test_model.py::TestUserDefiniedModel::test_aic",
"tests/test_model.py::TestUserDefiniedModel::test_bic",
"tests/test_model.py::TestUserDefiniedModel::test_bounding",
"tests/test_model.py::TestUserDefiniedModel::test_change_prefix",
"tests/test_model.py::TestUserDefiniedModel::test_composite_has_bestvalues",
"tests/test_model.py::TestUserDefiniedModel::test_composite_model_with_expr_constrains",
"tests/test_model.py::TestUserDefiniedModel::test_composite_plotting",
"tests/test_model.py::TestUserDefiniedModel::test_data_alignment",
"tests/test_model.py::TestUserDefiniedModel::test_different_independent_vars_composite_modeld",
"tests/test_model.py::TestUserDefiniedModel::test_eval_components",
"tests/test_model.py::TestUserDefiniedModel::test_explicit_independent_vars",
"tests/test_model.py::TestUserDefiniedModel::test_extra_param_issues_warning",
"tests/test_model.py::TestUserDefiniedModel::test_fit",
"tests/test_model.py::TestUserDefiniedModel::test_fit_with_weights",
"tests/test_model.py::TestUserDefiniedModel::test_hints_for_peakmodels",
"tests/test_model.py::TestUserDefiniedModel::test_hints_in_composite_models",
"tests/test_model.py::TestUserDefiniedModel::test_independent_var_parsing",
"tests/test_model.py::TestUserDefiniedModel::test_lists_become_arrays",
"tests/test_model.py::TestUserDefiniedModel::test_missing_independent_variable_raises_error",
"tests/test_model.py::TestUserDefiniedModel::test_missing_param_raises_error",
"tests/test_model.py::TestUserDefiniedModel::test_model_name",
"tests/test_model.py::TestUserDefiniedModel::test_model_nan_policy",
"tests/test_model.py::TestUserDefiniedModel::test_model_nan_policy_NaNs_by_model",
"tests/test_model.py::TestUserDefiniedModel::test_model_with_prefix",
"tests/test_model.py::TestUserDefiniedModel::test_param_hint_explicit_value",
"tests/test_model.py::TestUserDefiniedModel::test_result_attributes",
"tests/test_model.py::TestUserDefiniedModel::test_result_eval",
"tests/test_model.py::TestUserDefiniedModel::test_result_eval_custom_x",
"tests/test_model.py::TestUserDefiniedModel::test_result_report",
"tests/test_model.py::TestUserDefiniedModel::test_sum_composite_models",
"tests/test_model.py::TestUserDefiniedModel::test_sum_of_two_gaussians",
"tests/test_model.py::TestUserDefiniedModel::test_symmetric_boundss",
"tests/test_model.py::TestUserDefiniedModel::test_unprefixed_name_collisions",
"tests/test_model.py::TestUserDefiniedModel::test_user_defined_gaussian_plus_constant",
"tests/test_model.py::TestUserDefiniedModel::test_vary_false",
"tests/test_model.py::TestUserDefiniedModel::test_weird_param_hints",
"tests/test_model.py::TestUserDefiniedModel::test_wrapped_model_func",
"tests/test_model.py::TestLinear::test_aic",
"tests/test_model.py::TestLinear::test_bic",
"tests/test_model.py::TestLinear::test_data_alignment",
"tests/test_model.py::TestLinear::test_explicit_independent_vars",
"tests/test_model.py::TestLinear::test_fit",
"tests/test_model.py::TestLinear::test_fit_with_weights",
"tests/test_model.py::TestLinear::test_result_attributes",
"tests/test_model.py::TestLinear::test_result_eval",
"tests/test_model.py::TestLinear::test_result_eval_custom_x",
"tests/test_model.py::TestLinear::test_result_report",
"tests/test_model.py::TestParabolic::test_aic",
"tests/test_model.py::TestParabolic::test_bic",
"tests/test_model.py::TestParabolic::test_data_alignment",
"tests/test_model.py::TestParabolic::test_explicit_independent_vars",
"tests/test_model.py::TestParabolic::test_fit",
"tests/test_model.py::TestParabolic::test_fit_with_weights",
"tests/test_model.py::TestParabolic::test_result_attributes",
"tests/test_model.py::TestParabolic::test_result_eval",
"tests/test_model.py::TestParabolic::test_result_eval_custom_x",
"tests/test_model.py::TestParabolic::test_result_report",
"tests/test_model.py::TestPolynomialOrder2::test_aic",
"tests/test_model.py::TestPolynomialOrder2::test_bic",
"tests/test_model.py::TestPolynomialOrder2::test_data_alignment",
"tests/test_model.py::TestPolynomialOrder2::test_explicit_independent_vars",
"tests/test_model.py::TestPolynomialOrder2::test_fit",
"tests/test_model.py::TestPolynomialOrder2::test_fit_with_weights",
"tests/test_model.py::TestPolynomialOrder2::test_result_attributes",
"tests/test_model.py::TestPolynomialOrder2::test_result_eval",
"tests/test_model.py::TestPolynomialOrder2::test_result_eval_custom_x",
"tests/test_model.py::TestPolynomialOrder2::test_result_report",
"tests/test_model.py::TestPolynomialOrder3::test_aic",
"tests/test_model.py::TestPolynomialOrder3::test_bic",
"tests/test_model.py::TestPolynomialOrder3::test_data_alignment",
"tests/test_model.py::TestPolynomialOrder3::test_explicit_independent_vars",
"tests/test_model.py::TestPolynomialOrder3::test_fit",
"tests/test_model.py::TestPolynomialOrder3::test_fit_with_weights",
"tests/test_model.py::TestPolynomialOrder3::test_result_attributes",
"tests/test_model.py::TestPolynomialOrder3::test_result_eval",
"tests/test_model.py::TestPolynomialOrder3::test_result_eval_custom_x",
"tests/test_model.py::TestPolynomialOrder3::test_result_report",
"tests/test_model.py::TestConstant::test_aic",
"tests/test_model.py::TestConstant::test_bic",
"tests/test_model.py::TestConstant::test_data_alignment",
"tests/test_model.py::TestConstant::test_fit",
"tests/test_model.py::TestConstant::test_fit_with_weights",
"tests/test_model.py::TestConstant::test_result_attributes",
"tests/test_model.py::TestConstant::test_result_eval",
"tests/test_model.py::TestConstant::test_result_report",
"tests/test_model.py::TestPowerlaw::test_aic",
"tests/test_model.py::TestPowerlaw::test_bic",
"tests/test_model.py::TestPowerlaw::test_data_alignment",
"tests/test_model.py::TestPowerlaw::test_explicit_independent_vars",
"tests/test_model.py::TestPowerlaw::test_fit",
"tests/test_model.py::TestPowerlaw::test_fit_with_weights",
"tests/test_model.py::TestPowerlaw::test_result_attributes",
"tests/test_model.py::TestPowerlaw::test_result_eval",
"tests/test_model.py::TestPowerlaw::test_result_eval_custom_x",
"tests/test_model.py::TestPowerlaw::test_result_report",
"tests/test_model.py::TestExponential::test_aic",
"tests/test_model.py::TestExponential::test_bic",
"tests/test_model.py::TestExponential::test_data_alignment",
"tests/test_model.py::TestExponential::test_explicit_independent_vars",
"tests/test_model.py::TestExponential::test_fit",
"tests/test_model.py::TestExponential::test_fit_with_weights",
"tests/test_model.py::TestExponential::test_result_attributes",
"tests/test_model.py::TestExponential::test_result_eval",
"tests/test_model.py::TestExponential::test_result_eval_custom_x",
"tests/test_model.py::TestExponential::test_result_report",
"tests/test_model.py::TestComplexConstant::test_aic",
"tests/test_model.py::TestComplexConstant::test_bic",
"tests/test_model.py::TestComplexConstant::test_data_alignment",
"tests/test_model.py::TestComplexConstant::test_explicit_independent_vars",
"tests/test_model.py::TestComplexConstant::test_fit",
"tests/test_model.py::TestComplexConstant::test_fit_with_weights",
"tests/test_model.py::TestComplexConstant::test_result_attributes",
"tests/test_model.py::TestComplexConstant::test_result_eval",
"tests/test_model.py::TestComplexConstant::test_result_eval_custom_x",
"tests/test_model.py::TestComplexConstant::test_result_report",
"tests/test_model.py::TestExpression::test_aic",
"tests/test_model.py::TestExpression::test_bic",
"tests/test_model.py::TestExpression::test_composite_with_expression",
"tests/test_model.py::TestExpression::test_data_alignment",
"tests/test_model.py::TestExpression::test_explicit_independent_vars",
"tests/test_model.py::TestExpression::test_fit",
"tests/test_model.py::TestExpression::test_fit_with_weights",
"tests/test_model.py::TestExpression::test_result_attributes",
"tests/test_model.py::TestExpression::test_result_eval",
"tests/test_model.py::TestExpression::test_result_eval_custom_x",
"tests/test_model.py::TestExpression::test_result_report",
"tests/test_model.py::test_make_params_valuetypes",
"tests/test_model.py::test_complex_model_eval_uncertainty",
"tests/test_model.py::test_compositemodel_returning_list",
"tests/test_model.py::test_rsquared_with_weights",
"tests/test_model.py::test_custom_variadic_model"
] | [] | BSD-3 | 18,971 | 443 | [
"lmfit/model.py"
] |
SALib__SALib-628 | da19e0901f9355de02f9691b394154354fbe4792 | 2024-07-14 02:09:15 | 8e9f28feea6566932d10938dbdc219c197aae60b | diff --git a/src/SALib/analyze/delta.py b/src/SALib/analyze/delta.py
index f80cad7..783b603 100644
--- a/src/SALib/analyze/delta.py
+++ b/src/SALib/analyze/delta.py
@@ -137,13 +137,13 @@ def calc_delta(Y, Ygrid, X, m):
# if not np.all(np.equal(Y_ix, Y_ix[0])):
Y_ix = Y[ix]
- if Y_ix.ptp() != 0.0:
+ if np.ptp(Y_ix) != 0.0:
fyc = gaussian_kde(Y_ix, bw_method="silverman")(Ygrid)
fy_ = np.abs(fy - fyc)
else:
fy_ = np.abs(fy)
- d_hat += (nm / (2 * N)) * np.trapz(fy_, Ygrid)
+ d_hat += (nm / (2 * N)) * np.trapezoid(fy_, Ygrid)
return d_hat
@@ -168,7 +168,7 @@ def bias_reduced_delta(Y, Ygrid, X, m, num_resamples, conf_level, y_resamples):
def sobol_first(Y, X, m):
# pre-process to catch constant array
# see: https://github.com/numpy/numpy/issues/9631
- if Y.ptp() == 0.0:
+ if np.ptp(Y) == 0.0:
# Catch constant results
# If Y does not change then it is not sensitive to anything...
return 0.0
diff --git a/src/SALib/analyze/morris.py b/src/SALib/analyze/morris.py
index 010ece4..c749a02 100644
--- a/src/SALib/analyze/morris.py
+++ b/src/SALib/analyze/morris.py
@@ -245,7 +245,7 @@ def _compute_grouped_sigma(
sigma = np.zeros(groups.shape[1], dtype=float)
np.copyto(sigma, sigma_agg, where=groups.sum(axis=0) == 1)
- np.copyto(sigma, np.NAN, where=groups.sum(axis=0) != 1)
+ np.copyto(sigma, np.nan, where=groups.sum(axis=0) != 1)
return sigma
diff --git a/src/SALib/analyze/sobol.py b/src/SALib/analyze/sobol.py
index 67c587e..d38101b 100644
--- a/src/SALib/analyze/sobol.py
+++ b/src/SALib/analyze/sobol.py
@@ -154,7 +154,7 @@ def analyze(
if keep_resamples:
S["S1_conf_all"][:, j] = S1_conf_j
- var_diff = np.r_[A[r], B[r]].ptp()
+ var_diff = np.ptp(np.r_[A[r], B[r]])
if var_diff != 0.0:
S["S1_conf"][j] = Z * S1_conf_j.std(ddof=1)
else:
@@ -212,7 +212,7 @@ def first_order(A, AB, B):
sample variance
"""
y = np.r_[A, B]
- if y.ptp() == 0:
+ if np.ptp(y) == 0:
warn(CONST_RESULT_MSG)
return np.array([0.0])
@@ -225,7 +225,7 @@ def total_order(A, AB, B):
sample variance
"""
y = np.r_[A, B]
- if y.ptp() == 0:
+ if np.ptp(y) == 0:
warn(CONST_RESULT_MSG)
return np.array([0.0])
@@ -235,7 +235,7 @@ def total_order(A, AB, B):
def second_order(A, ABj, ABk, BAj, B):
"""Second order estimator following Saltelli 2002"""
y = np.r_[A, B]
- if y.ptp() == 0:
+ if np.ptp(y) == 0:
warn(CONST_RESULT_MSG)
return np.array([0.0])
diff --git a/src/SALib/util/__init__.py b/src/SALib/util/__init__.py
index 335b858..72fbe0f 100644
--- a/src/SALib/util/__init__.py
+++ b/src/SALib/util/__init__.py
@@ -299,7 +299,7 @@ def compute_groups_matrix(groups: List):
groups and a list of unique group names
"""
num_vars = len(groups)
- unique_group_names = pd.unique(groups)
+ unique_group_names = pd.unique(np.array(groups))
number_of_groups = len(unique_group_names)
indices = dict([(x, i) for (i, x) in enumerate(unique_group_names)])
| Migrate to numpy>=2.0.0
NumPy 2.0.0 was released on June 16, and it seems that SALib is incompatible with the new NumPy.
E.g., take the quick start example: if you run this after `pip install SALib --force-reinstall`
```python
from SALib.sample import saltelli
from SALib.analyze import sobol
from SALib.test_functions import Ishigami
import numpy as np
problem = {
    'num_vars': 3,
    'names': ['x1', 'x2', 'x3'],
    'bounds': [[-np.pi, np.pi]]*3
}
# Generate samples
param_values = saltelli.sample(problem, 1024)
# Run model (example)
Y = Ishigami.evaluate(param_values)
# Perform analysis
Si = sobol.analyze(problem, Y, print_to_console=True)
# Returns a dictionary with keys 'S1', 'S1_conf', 'ST', and 'ST_conf'
# (first and total-order indices with bootstrap confidence intervals)
```
will yield the following error message,
```
Traceback (most recent call last):
  File "...", line 19, in <module>
    Si = sobol.analyze(problem, Y, print_to_console=True)
  File "python3.9/site-packages/SALib/analyze/sobol.py", line 151, in analyze
    S["S1"][j] = first_order(A, AB[:, j], B)
  File ".../python3.9/site-packages/SALib/analyze/sobol.py", line 215, in first_order
    if y.ptp() == 0:
AttributeError: `ptp` was removed from the ndarray class in NumPy 2.0. Use np.ptp(arr, ...) instead.
```
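For reference, the failing call maps directly onto a module-level function. A minimal, illustrative sketch of the renames involved (plain NumPy >= 2.0, not SALib code):

```python
import numpy as np

y = np.array([0.2, 0.7, 0.7, 0.1])

# NumPy 2.0 removed the ndarray method; the module-level function remains.
value_range = np.ptp(y)          # was: y.ptp()

# Related aliases touched by the same migration:
area = np.trapezoid(y, dx=0.5)   # was: np.trapz(y, dx=0.5)
missing = np.nan                 # was: np.NaN
total = np.prod(1 + y)           # was: np.product(1 + y)
```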
Any plan on migrating to numpy>=2.0.0? | SALib/SALib | diff --git a/src/SALib/test_functions/Sobol_G.py b/src/SALib/test_functions/Sobol_G.py
index 1c5194f..2aedd46 100644
--- a/src/SALib/test_functions/Sobol_G.py
+++ b/src/SALib/test_functions/Sobol_G.py
@@ -81,7 +81,7 @@ def evaluate(values, a=None, delta=None, alpha=None):
mod_x = shift_of_x - integral
temp_y = np.abs(2 * mod_x - 1) ** alpha
y_elements = ((1 + alpha) * temp_y + a) / (1 + a)
- Y[i] = np.prod(y_elements)
+ Y[i] = y_elements.prod()
return Y
@@ -103,7 +103,7 @@ def _total_variance(a=None, alpha=None):
alpha = np.ones_like(a)
a = np.array(a)
- return np.add(-1, np.product(1 + _partial_first_order_variance(a, alpha), axis=0))
+ return np.add(-1, np.prod(1 + _partial_first_order_variance(a, alpha), axis=0))
def sensitivity_index(a, alpha=None):
@@ -116,6 +116,6 @@ def total_sensitivity_index(a, alpha=None):
pv = _partial_first_order_variance(a, alpha)
tv = _total_variance(a, alpha)
- product_pv = np.product(1 + pv, axis=0)
+ product_pv = np.prod(1 + pv, axis=0)
return np.divide(pv * np.divide(product_pv, 1 + pv.T), tv)
diff --git a/tests/sample/test_latin.py b/tests/sample/test_latin.py
index f8e6e6b..a4b9e5f 100644
--- a/tests/sample/test_latin.py
+++ b/tests/sample/test_latin.py
@@ -137,8 +137,8 @@ class TestLatinSample:
# Group samples should have the same values
# Get (max - min) with the `ptp()` method, the result of which should be
# an array of zeros
- diff = samples[:, ::2].ptp(axis=1)
+ diff = np.ptp(samples[:, ::2], axis=1)
assert np.all(diff == 0), "Grouped samples do not have the same values"
- diff = samples[:, 1::2].ptp(axis=1)
+ diff = np.ptp(samples[:, 1::2], axis=1)
assert np.all(diff == 0), "Grouped samples do not have the same values"
diff --git a/tests/test_analyze_morris.py b/tests/test_analyze_morris.py
index e5b7760..eb059ee 100644
--- a/tests/test_analyze_morris.py
+++ b/tests/test_analyze_morris.py
@@ -931,7 +931,7 @@ def test_compute_grouped_sigma():
sigma = np.std(ee, axis=1, ddof=1)
actual = _compute_grouped_sigma(sigma, group_matrix)
- desired = np.array([1.79352911, np.NAN], dtype=float)
+ desired = np.array([1.79352911, np.nan], dtype=float)
assert_allclose(actual, desired, rtol=1e-1)
diff --git a/tests/test_regression.py b/tests/test_regression.py
index a987013..61229bf 100644
--- a/tests/test_regression.py
+++ b/tests/test_regression.py
@@ -118,7 +118,7 @@ class TestMorris:
assert_allclose(Si["mu"], [9.786986, -9.938717e-13], atol=0, rtol=1e-5)
- assert_allclose(Si["sigma"], [6.453729, np.NaN], atol=0, rtol=1e-5)
+ assert_allclose(Si["sigma"], [6.453729, np.nan], atol=0, rtol=1e-5)
assert_allclose(Si["mu_star"], [9.786986, 7.875], atol=0, rtol=1e-5)
diff --git a/tests/test_to_df.py b/tests/test_to_df.py
index b024cb8..e42755a 100644
--- a/tests/test_to_df.py
+++ b/tests/test_to_df.py
@@ -1,7 +1,7 @@
import numpy as np
import pandas as pd
from SALib.sample import (
- saltelli,
+ sobol as sobol_sample,
morris as morris_sample,
finite_diff,
fast_sampler,
@@ -60,7 +60,7 @@ def test_sobol_to_df():
params = ["x1", "x2", "x3"]
problem = {"num_vars": 3, "names": params, "bounds": [[-np.pi, np.pi]] * 3}
- X = saltelli.sample(problem, 512)
+ X = sobol_sample.sample(problem, 512)
Y = Ishigami.evaluate(X)
Si = sobol.analyze(problem, Y, print_to_console=False)
total, first, second = Si.to_df()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 4
} | 1.5 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": true,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accessible-pygments @ file:///home/conda/feedstock_root/build_artifacts/accessible-pygments_1734956106558/work
alabaster @ file:///home/conda/feedstock_root/build_artifacts/alabaster_1704848697227/work
anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
backports.tarfile @ file:///home/conda/feedstock_root/build_artifacts/backports.tarfile_1733325779670/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
Bottleneck @ file:///croot/bottleneck_1731058641041/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1648883617327/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1636046055389/work
cfgv @ file:///home/conda/feedstock_root/build_artifacts/cfgv_1734267080977/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
contourpy @ file:///croot/contourpy_1738160616259/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381221492/work
cryptography @ file:///croot/cryptography_1740577825284/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
dill @ file:///home/conda/feedstock_root/build_artifacts/dill_1733249551891/work
distlib @ file:///home/conda/feedstock_root/build_artifacts/distlib_1733238395481/work
docutils @ file:///home/conda/feedstock_root/build_artifacts/docutils_1733217766141/work
editables @ file:///home/conda/feedstock_root/build_artifacts/editables_1733208130372/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
filelock @ file:///home/conda/feedstock_root/build_artifacts/filelock_1741969488311/work
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1651017733844/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
hatch @ file:///home/conda/feedstock_root/build_artifacts/hatch_1714409935314/work
hatchling @ file:///home/conda/feedstock_root/build_artifacts/hatchling_1706161019666/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1731707562/work
httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
hyperlink @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_hyperlink_1733319972/work
identify @ file:///home/conda/feedstock_root/build_artifacts/identify_1741502659866/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imagesize @ file:///home/conda/feedstock_root/build_artifacts/imagesize_1656939531508/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
jaraco.classes @ file:///home/conda/feedstock_root/build_artifacts/jaraco.classes_1733325873251/work
jaraco.context @ file:///home/conda/feedstock_root/build_artifacts/jaraco.context_1733382590553/work
jaraco.functools @ file:///home/conda/feedstock_root/build_artifacts/jaraco.functools_1733746366381/work
jeepney @ file:///home/conda/feedstock_root/build_artifacts/jeepney_1740828240267/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
keyring @ file:///home/conda/feedstock_root/build_artifacts/keyring_1735210185992/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1648854392795/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1648737556467/work
matplotlib==3.9.2
mdit-py-plugins==0.4.2
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
more-itertools @ file:///home/conda/feedstock_root/build_artifacts/more-itertools_1736883817510/work
multiprocess==0.70.17
munkres==1.1.4
myst-parser==3.0.1
nodeenv @ file:///home/conda/feedstock_root/build_artifacts/nodeenv_1734112117269/work
numexpr @ file:///croot/numexpr_1730215937391/work
numpy @ file:///croot/numpy_and_numpy_base_1725470312869/work/dist/numpy-2.0.1-cp39-cp39-linux_x86_64.whl#sha256=b8c18bbfe185fbdff23024458e4b8ffbe2040e705abd5fb6cda1ef9d20b5974d
numpydoc @ file:///home/conda/feedstock_root/build_artifacts/numpydoc_1733650859674/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandas @ file:///croot/pandas_1732735089971/work/dist/pandas-2.2.3-cp39-cp39-linux_x86_64.whl#sha256=0a51ed2e81ab863e3d00ed6c5049192ce578ecb38fb467d2f9a6585d3c25f666
pathos @ file:///home/conda/feedstock_root/build_artifacts/pathos_1706521040387/work
pathspec @ file:///home/conda/feedstock_root/build_artifacts/pathspec_1733233363808/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pillow @ file:///croot/pillow_1738010226202/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pox @ file:///home/conda/feedstock_root/build_artifacts/pox_1734201753136/work
ppft @ file:///home/conda/feedstock_root/build_artifacts/ppft_1734201548824/work
pre_commit @ file:///home/conda/feedstock_root/build_artifacts/pre-commit_1742475552107/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pydata-sphinx-theme==0.16.1
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
PySide6==6.7.2
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1742920838005/work
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1648757097602/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
-e git+https://github.com/SALib/SALib.git@da19e0901f9355de02f9691b394154354fbe4792#egg=SALib
scipy @ file:///croot/scipy_1733756309941/work/dist/scipy-1.13.1-cp39-cp39-linux_x86_64.whl#sha256=3b247b926209f2d9f719ebae39faf3ff891b2596150ed8f8349adfc3eb19441c
SecretStorage @ file:///home/conda/feedstock_root/build_artifacts/secretstorage_1725915609225/work
shellingham @ file:///home/conda/feedstock_root/build_artifacts/shellingham_1733300899265/work
shiboken6==6.7.2
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
snowballstemmer @ file:///home/conda/feedstock_root/build_artifacts/snowballstemmer_1637143057757/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
Sphinx @ file:///home/conda/feedstock_root/build_artifacts/sphinx_1721487534232/work
sphinxcontrib-applehelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-applehelp_1733754101641/work
sphinxcontrib-devhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-devhelp_1733754113405/work
sphinxcontrib-htmlhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-htmlhelp_1733754280555/work
sphinxcontrib-jsmath @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-jsmath_1733753744933/work
sphinxcontrib-qthelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-qthelp_1733753408017/work
sphinxcontrib-serializinghtml @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-serializinghtml_1733750479108/work
tabulate @ file:///home/conda/feedstock_root/build_artifacts/tabulate_1733589744265/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tomli_w @ file:///home/conda/feedstock_root/build_artifacts/tomli-w_1736962227066/work
tomlkit @ file:///home/conda/feedstock_root/build_artifacts/tomlkit_1733230743009/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1648827245914/work
trove-classifiers @ file:///home/conda/feedstock_root/build_artifacts/trove-classifiers_1742485454731/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
tzdata @ file:///home/conda/feedstock_root/build_artifacts/python-tzdata_1742745135198/work
ukkonen @ file:///home/conda/feedstock_root/build_artifacts/ukkonen_1649407036294/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1649111919389/work
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
userpath @ file:///home/conda/feedstock_root/build_artifacts/userpath_1735924896483/work
virtualenv @ file:///home/conda/feedstock_root/build_artifacts/virtualenv_1713381909298/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard @ file:///croot/zstandard_1731356346222/work
| name: SALib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- accessible-pygments=0.0.5=pyhd8ed1ab_1
- alabaster=0.7.16=pyhd8ed1ab_0
- anyio=4.9.0=pyh29332c3_0
- babel=2.17.0=pyhd8ed1ab_0
- backports=1.0=pyhd8ed1ab_5
- backports.tarfile=1.2.0=pyhd8ed1ab_1
- beautifulsoup4=4.13.3=pyha770c72_0
- blas=1.1=openblas
- bottleneck=1.4.2=py39ha9d4c09_0
- brotli=1.0.9=h166bdaf_7
- brotli-bin=1.0.9=h166bdaf_7
- brotli-python=1.0.9=py39h5a03fae_7
- bzip2=1.0.8=h7f98852_4
- c-ares=1.19.1=h5eee18b_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.15.0=py39h4bc2ebd_0
- cfgv=3.3.1=pyhd8ed1ab_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- colorama=0.4.6=pyhd8ed1ab_1
- contourpy=1.2.1=py39hdb19cb5_1
- coverage=7.8.0=pyhe1237c8_0
- cryptography=44.0.1=py39h7825ff9_0
- cycler=0.12.1=pyhd8ed1ab_1
- cyrus-sasl=2.1.28=h52b45da_1
- dbus=1.13.18=hb2f20db_0
- dill=0.3.9=pyhd8ed1ab_1
- distlib=0.3.9=pyhd8ed1ab_1
- docutils=0.21.2=pyhd8ed1ab_1
- editables=0.5=pyhd8ed1ab_1
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- expat=2.6.4=h6a678d5_0
- filelock=3.18.0=pyhd8ed1ab_0
- fontconfig=2.14.1=h55d465d_3
- fonttools=4.33.3=py39hb9d737c_0
- freetype=2.12.1=h4a9f257_0
- glib=2.78.4=h6a678d5_0
- glib-tools=2.78.4=h6a678d5_0
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- hatch=1.9.7=pyhd8ed1ab_0
- hatchling=1.21.1=pyhd8ed1ab_0
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.7=pyh29332c3_1
- httpx=0.28.1=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- hyperlink=21.0.0=pyh29332c3_1
- icu=73.1=h6a678d5_0
- identify=2.6.9=pyhd8ed1ab_0
- idna=3.10=pyhd8ed1ab_1
- imagesize=1.4.1=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- jaraco.classes=3.4.0=pyhd8ed1ab_2
- jaraco.context=6.0.1=pyhd8ed1ab_0
- jaraco.functools=4.1.0=pyhd8ed1ab_0
- jeepney=0.9.0=pyhd8ed1ab_0
- jinja2=3.1.6=pyhd8ed1ab_0
- jpeg=9e=h166bdaf_1
- keyring=25.6.0=pyha804496_0
- kiwisolver=1.4.2=py39hf939315_1
- krb5=1.20.1=h143b758_1
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libabseil=20250127.0=cxx17_h6a678d5_0
- libbrotlicommon=1.0.9=h166bdaf_7
- libbrotlidec=1.0.9=h166bdaf_7
- libbrotlienc=1.0.9=h166bdaf_7
- libclang13=14.0.6=default_he11475f_2
- libcups=2.4.2=h2d74bed_1
- libcurl=8.12.1=hc9e6f67_0
- libdeflate=1.22=h5eee18b_0
- libedit=3.1.20230828=h5eee18b_0
- libev=4.33=h516909a_1
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgfortran=3.0.0=1
- libgfortran-ng=13.2.0=h69a702a_0
- libgfortran5=13.2.0=ha4646dd_0
- libglib=2.78.4=hdc74915_0
- libgomp=11.2.0=h1234567_1
- libiconv=1.17=h166bdaf_0
- libllvm14=14.0.6=hecde1de_4
- libnghttp2=1.57.0=h2d74bed_0
- libopenblas=0.3.21=h043d6bf_0
- libpng=1.6.39=h5eee18b_0
- libpq=17.4=hdbd6064_0
- libprotobuf=5.29.3=hc99497a_0
- libssh2=1.11.1=h251f7ec_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libuuid=1.41.5=h5eee18b_0
- libwebp-base=1.3.2=h5eee18b_1
- libxcb=1.15=h7f8727e_0
- libxkbcommon=1.0.3=he3ba5ed_0
- libxml2=2.13.5=hfdd30dd_0
- libxslt=1.1.41=h097e994_0
- lz4-c=1.9.4=h6a678d5_1
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=2.1.1=py39hb9d737c_1
- matplotlib=3.9.2=py39hf3d152e_2
- matplotlib-base=3.9.2=py39hbfdbfaf_1
- mdurl=0.1.2=pyhd8ed1ab_1
- minizip=4.0.3=hf59b114_0
- more-itertools=10.6.0=pyhd8ed1ab_0
- munkres=1.1.4=pyh9f0ad1d_0
- mysql=8.4.0=h721767e_2
- ncurses=6.4=h6a678d5_0
- nodeenv=1.9.1=pyhd8ed1ab_1
- nspr=4.35=h6a678d5_0
- nss=3.89.1=h6a678d5_0
- numexpr=2.10.1=py39hd28fd6d_0
- numpy=2.0.1=py39heeff2f4_1
- numpy-base=2.0.1=py39h8a23956_1
- numpydoc=1.8.0=pyhd8ed1ab_1
- openblas=0.3.4=ha44fe06_0
- openjpeg=2.5.2=he7f1fd0_0
- openldap=2.6.4=h42fbc30_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=pyhd8ed1ab_2
- pandas=2.2.3=py39h6a678d5_0
- pathos=0.3.2=pyhd8ed1ab_0
- pathspec=0.12.1=pyhd8ed1ab_1
- pcre2=10.42=hebb0a14_1
- pexpect=4.9.0=pyhd8ed1ab_1
- pillow=11.1.0=py39hcea889d_0
- pip=25.0.1=pyh8b19718_0
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pox=0.3.5=pyhd8ed1ab_1
- ppft=1.7.6.9=pyhd8ed1ab_1
- pre-commit=4.2.0=pyha770c72_0
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pybind11-abi=4=hd8ed1ab_3
- pycparser=2.22=pyh29332c3_1
- pydata-sphinx-theme=0.16.1=pyhd8ed1ab_0
- pygments=2.19.1=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyside6=6.7.2=py39ha2c6bb1_0
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- python=3.9.21=he870216_1
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-tzdata=2025.2=pyhd8ed1ab_0
- python_abi=3.9=2_cp39
- pytz=2025.2=pyhd8ed1ab_0
- pyyaml=6.0=py39hb9d737c_4
- qtbase=6.7.3=hdaa5aa8_0
- qtdeclarative=6.7.3=h6a678d5_0
- qtshadertools=6.7.3=h6a678d5_0
- qtsvg=6.7.3=he621ea3_0
- qttools=6.7.3=h80c7b02_0
- qtwebchannel=6.7.3=h6a678d5_0
- qtwebengine=6.7.2=hcbda680_0
- qtwebsockets=6.7.3=h6a678d5_0
- readline=8.2=h5eee18b_0
- requests=2.32.3=pyhd8ed1ab_1
- rich=14.0.0=pyh29332c3_0
- scipy=1.13.1=py39heeff2f4_1
- secretstorage=3.3.3=py39hf3d152e_3
- setuptools=75.8.0=py39h06a4308_0
- shellingham=1.5.4=pyhd8ed1ab_1
- six=1.17.0=pyhd8ed1ab_0
- sniffio=1.3.1=pyhd8ed1ab_1
- snowballstemmer=2.2.0=pyhd8ed1ab_0
- soupsieve=2.5=pyhd8ed1ab_1
- sphinx=7.4.7=pyhd8ed1ab_0
- sphinxcontrib-applehelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-devhelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-htmlhelp=2.1.0=pyhd8ed1ab_1
- sphinxcontrib-jsmath=1.0.1=pyhd8ed1ab_1
- sphinxcontrib-qthelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_1
- sqlite=3.45.3=h5eee18b_0
- tabulate=0.9.0=pyhd8ed1ab_2
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tomli-w=1.2.0=pyhd8ed1ab_0
- tomlkit=0.13.2=pyha770c72_1
- tornado=6.1=py39hb9d737c_3
- trove-classifiers=2025.3.19.19=pyhd8ed1ab_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- tzdata=2025a=h04d1e81_0
- ukkonen=1.0.1=py39hf939315_2
- unicodedata2=14.0.0=py39hb9d737c_1
- urllib3=2.3.0=pyhd8ed1ab_0
- userpath=1.9.2=pyhd8ed1ab_0
- virtualenv=20.25.3=pyhd8ed1ab_0
- wheel=0.45.1=py39h06a4308_0
- xcb-util-cursor=0.1.4=h5eee18b_0
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7f98852_2
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.2.13=h5eee18b_1
- zstandard=0.23.0=py39h2c38b39_1
- zstd=1.5.6=hc292b87_0
- pip:
- mdit-py-plugins==0.4.2
- multiprocess==0.70.17
- myst-parser==3.0.1
- salib==1.5.1.dev4+gda19e09
prefix: /opt/conda/envs/SALib
| [
"tests/test_analyze_morris.py::test_analysis_of_morris_results",
"tests/test_analyze_morris.py::test_analysis_of_morris_results_scaled",
"tests/test_analyze_morris.py::test_compute_grouped_sigma",
"tests/test_analyze_morris.py::test_doesnot_raise_error_if_floats",
"tests/test_regression.py::TestMorris::test_regression_morris_vanilla",
"tests/test_regression.py::TestMorris::test_regression_morris_scaled",
"tests/test_regression.py::TestMorris::test_regression_morris_groups",
"tests/test_regression.py::TestMorris::test_regression_morris_groups_brute_optim",
"tests/test_regression.py::TestMorris::test_regression_morris_groups_local_optim",
"tests/test_regression.py::TestMorris::test_regression_morris_optimal",
"tests/test_regression.py::test_regression_sobol",
"tests/test_regression.py::test_regression_sobol_parallel",
"tests/test_regression.py::test_regression_sobol_groups",
"tests/test_regression.py::test_regression_sobol_groups_dists",
"tests/test_regression.py::test_regression_delta",
"tests/test_regression.py::test_regression_delta_svm",
"tests/test_to_df.py::test_morris_to_df",
"tests/test_to_df.py::test_sobol_to_df"
] | [] | [
"tests/sample/test_latin.py::TestLatinSample::test_latin_sample_trivial",
"tests/sample/test_latin.py::TestLatinSample::test_latin_sample_trivial_group",
"tests/sample/test_latin.py::TestLatinSample::test_latin_sample_one_group",
"tests/sample/test_latin.py::TestLatinSample::test_latin_sample_no_groups",
"tests/sample/test_latin.py::TestLatinSample::test_latin_sample_two_groups",
"tests/sample/test_latin.py::TestLatinSample::test_latin_group_constant",
"tests/test_analyze_morris.py::test_compute_mu_star_confidence",
"tests/test_analyze_morris.py::test_conf_level_within_zero_one_bounds",
"tests/test_analyze_morris.py::test_compute_elementary_effects",
"tests/test_analyze_morris.py::test_compute_elementary_effects_scaled",
"tests/test_analyze_morris.py::test_compute_grouped_elementary_effects",
"tests/test_analyze_morris.py::test_compute_elementary_effects_small",
"tests/test_analyze_morris.py::test_reorganize_output_matrix_increased",
"tests/test_analyze_morris.py::test_reorganize_output_matrix_decreased",
"tests/test_analyze_morris.py::test_compute_grouped_metric",
"tests/test_analyze_morris.py::test_check_if_array_of_floats",
"tests/test_regression.py::test_regression_fast",
"tests/test_regression.py::test_regression_hdmr_ishigami",
"tests/test_regression.py::test_regression_hdmr_case1",
"tests/test_regression.py::test_regression_hdmr_case2",
"tests/test_regression.py::test_regression_hdmr_case3",
"tests/test_regression.py::test_regression_rbd_fast",
"tests/test_regression.py::test_regression_dgsm",
"tests/test_to_df.py::test_dgsm_to_df",
"tests/test_to_df.py::test_fast_to_df",
"tests/test_to_df.py::test_ff_to_df",
"tests/test_to_df.py::test_rbd_to_df"
] | [] | MIT License | 18,972 | 1,172 | [
"src/SALib/analyze/delta.py",
"src/SALib/analyze/morris.py",
"src/SALib/analyze/sobol.py",
"src/SALib/util/__init__.py"
] |
|
exxamalte__python-georss-qld-bushfire-alert-client-30 | 6974c5f7d27cdcacbcc1ac59e06e56e8787ac9a4 | 2024-07-14 07:14:52 | 6974c5f7d27cdcacbcc1ac59e06e56e8787ac9a4 | diff --git a/georss_qld_bushfire_alert_client/feed.py b/georss_qld_bushfire_alert_client/feed.py
index 6623a7d..3eb700f 100644
--- a/georss_qld_bushfire_alert_client/feed.py
+++ b/georss_qld_bushfire_alert_client/feed.py
@@ -8,7 +8,8 @@ from georss_client.feed import GeoRssFeed
from .feed_entry import QldBushfireAlertFeedEntry
-URL: Final = "https://www.qfes.qld.gov.au/data/alerts/bushfireAlert.xml"
+# URL as published here: https://www.fire.qld.gov.au/Current-Incidents
+URL: Final = "https://publiccontent-gis-psba-qld-gov-au.s3.amazonaws.com/content/Feeds/BushfireCurrentIncidents/bushfireAlert.xml"
class QldBushfireAlertFeed(GeoRssFeed):
| XML URL appears to have been changed.
The URL specified in feed.py ("https://www.qfes.qld.gov.au/data/alerts/bushfireAlert.xml") doesn't appear to be valid currently.
The Current bushfire warnings and incidents page on the Queensland Fire Department's website presently links to an XML file located at "https://publiccontent-gis-psba-qld-gov-au.s3.amazonaws.com/content/Feeds/BushfireCurrentIncidents/bushfireAlert.xml".
I've swapped the URL over in my feed.py file and everything seems to work fine; I'm not sure why they changed the location, or whether they'll change it back, though. | exxamalte/python-georss-qld-bushfire-alert-client | diff --git a/tests/test_feed.py b/tests/test_feed.py
index bf1c1f7..08b5192 100644
--- a/tests/test_feed.py
+++ b/tests/test_feed.py
@@ -23,8 +23,8 @@ def test_update_ok(mock_session, mock_request):
feed = QldBushfireAlertFeed(HOME_COORDINATES)
assert (
repr(feed) == "<QldBushfireAlertFeed(home=(-31.0, 151.0), "
- "url=https://www.qfes.qld.gov.au/data/alerts/"
- "bushfireAlert.xml, radius=None, categories="
+ "url=https://publiccontent-gis-psba-qld-gov-au.s3.amazonaws.com/content/"
+ "Feeds/BushfireCurrentIncidents/bushfireAlert.xml, radius=None, categories="
"None)>"
)
status, entries = feed.update()
diff --git a/tests/test_feed_manager.py b/tests/test_feed_manager.py
index 872afbb..d093007 100644
--- a/tests/test_feed_manager.py
+++ b/tests/test_feed_manager.py
@@ -40,9 +40,8 @@ def test_feed_manager(mock_session, mock_request):
assert (
repr(feed_manager) == "<QldBushfireAlertFeedManager("
"feed=<QldBushfireAlertFeed(home="
- "(-31.0, 151.0), url=https://www."
- "qfes.qld.gov.au/data/alerts/"
- "bushfireAlert.xml, "
+ "(-31.0, 151.0), url=https://publiccontent-gis-psba-qld-gov-au.s3.amazonaws.com/"
+ "content/Feeds/BushfireCurrentIncidents/bushfireAlert.xml, "
"radius=None, categories=None)>)>"
)
feed_manager.update()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
georss_client==0.18
-e git+https://github.com/exxamalte/python-georss-qld-bushfire-alert-client.git@6974c5f7d27cdcacbcc1ac59e06e56e8787ac9a4#egg=georss_qld_bushfire_alert_client
haversine==2.9.0
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
requests==2.32.3
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
xmltodict==0.14.2
| name: python-georss-qld-bushfire-alert-client
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- georss-client==0.18
- georss-qld-bushfire-alert-client==0.7
- haversine==2.9.0
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- xmltodict==0.14.2
prefix: /opt/conda/envs/python-georss-qld-bushfire-alert-client
| [
"tests/test_feed.py::test_update_ok",
"tests/test_feed_manager.py::test_feed_manager"
] | [] | [
"tests/test_feed.py::test_update_ok_with_category"
] | [] | Apache License 2.0 | 18,973 | 217 | [
"georss_qld_bushfire_alert_client/feed.py"
] |
|
ethz-spylab__agentdojo-5 | 34e9a686ac5ec83657ad0dad285d6d32c4d256da | 2024-07-15 08:34:04 | 263a16110246aa939c73b0c0a5a3c6af58034c3f | diff --git a/src/agentdojo/ast_utils.py b/src/agentdojo/ast_utils.py
index 54c793a2..b8f0c8e3 100644
--- a/src/agentdojo/ast_utils.py
+++ b/src/agentdojo/ast_utils.py
@@ -2,7 +2,7 @@ import ast
import random
import string
-from agentdojo.functions_runtime import FunctionCall
+from agentdojo.functions_runtime import FunctionCall, FunctionCallArgTypes
class ASTParsingError(Exception): ...
@@ -31,6 +31,20 @@ def create_python_function_from_tool_call(func_call_dict: FunctionCall) -> str:
raise ValueError(f"Error creating AST from dictionary: {e}")
+def parse_arg_value(arg_value: ast.expr | None) -> FunctionCallArgTypes:
+ match arg_value:
+ case ast.Constant(value=value):
+ return value
+ case ast.List(elts=elements):
+ return [parse_arg_value(element) for element in elements]
+ case ast.Dict(keys=keys, values=values):
+ return {parse_arg_value(key): parse_arg_value(value) for key, value in zip(keys, values)}
+ case None:
+ return None
+ case _:
+ raise ASTParsingError(f"The keyword values should be literals. Got: {arg_value}")
+
+
def parse_tool_call_from_ast(call: ast.expr) -> FunctionCall:
if not isinstance(call, ast.Call):
raise ASTParsingError("The function calls should be in the form of a call.")
@@ -41,9 +55,7 @@ def parse_tool_call_from_ast(call: ast.expr) -> FunctionCall:
for keyword in call.keywords:
if not isinstance(keyword.arg, str):
raise ASTParsingError(f"The keyword arguments should be strings. Got {keyword.arg}")
- if not isinstance(keyword.value, ast.Constant):
- raise ASTParsingError(f"The keyword values should be literals. Got: {keyword.value}")
- args[keyword.arg] = keyword.value.value
+ args[keyword.arg] = parse_arg_value(keyword.value)
random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=10))
| Problem parsing list argument in LLaMA model
The problem boils down to the following. If LLaMA outputs a list as an argument to a function
```
<function-thoughts>I can use the `send_email` function to send an email to the specified recipient with the given subject and body.</function-thoughts>
<function-call>[send_email(recipients=["[email protected]"], subject="Important message!", body="Hey, how is it going?")]</function-call>
```
this goes through the function [parse_tool_calls_from_python_function](https://github.com/ethz-spylab/agentdojo/blob/34e9a686ac5ec83657ad0dad285d6d32c4d256da/src/agentdojo/ast_utils.py#L53) and throws an error:
```
agentdojo.ast_utils.ASTParsingError: The keyword values should be literals. Got: <ast.List object at 0x7f4e78077df0>
```
because it fails to parse the list correctly [here](https://github.com/ethz-spylab/agentdojo/blob/34e9a686ac5ec83657ad0dad285d6d32c4d256da/src/agentdojo/ast_utils.py#L44).
A minimal reproducible example follows:
```
import ast


class ASTParsingError(Exception): ...


output = """[send_email(recipients=["[email protected]"], subject="Important message!", body="Hey, how is it going?")]"""
parsed_model_output = ast.parse(output)
if not isinstance(parsed_model_output.body[0], ast.Expr):
    raise ASTParsingError("Error parsing the model output.")
if not isinstance(parsed_model_output.body[0].value, ast.List):
    if not isinstance(parsed_model_output.body[0].value, ast.Call):
        raise ASTParsingError(f"The function calls should be given a list. Got {parsed_model_output.body[0].value}")
    calls_list = [parsed_model_output.body[0].value]
else:
    calls_list = parsed_model_output.body[0].value.elts
call = calls_list[0]
args = {}
for keyword in call.keywords:
    if not isinstance(keyword.arg, str):
        raise ASTParsingError(f"The keyword arguments should be strings. Got {keyword.arg}")
    if not isinstance(keyword.value, ast.Constant):
        # recipients=[...] parses to an ast.List, not an ast.Constant, so this raises
        raise ASTParsingError(f"The keyword values should be literals. Got: {keyword.value}")
    args[keyword.arg] = keyword.value.value
```
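A quick way to see why the check trips (an illustrative snippet, not repo code) is to print the node type of each parsed keyword value:

```python
import ast

model_output = '[send_email(recipients=["[email protected]"], subject="Hi")]'
# body[0] is an Expr, its value is an ast.List, and elts[0] is the Call node
call = ast.parse(model_output).body[0].value.elts[0]

for keyword in call.keywords:
    print(keyword.arg, type(keyword.value).__name__)
# recipients List      <- an ast.List node, not an ast.Constant
# subject Constant
```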
I am not sure if this is intended, but lists as arguments seem reasonable here. I implemented a workaround using:
```
def parse_tool_call_from_ast(call: ast.expr) -> FunctionCall:
    if not isinstance(call, ast.Call):
        raise ASTParsingError("The function calls should be in the form of a call.")
    if not isinstance(call.func, ast.Name):
        raise ASTParsingError("The function calls should be in the form of a function name.")
    function_name = call.func.id
    args = {}
    for keyword in call.keywords:
        if not isinstance(keyword.arg, str):
            raise ASTParsingError(f"The keyword arguments should be strings. Got {keyword.arg}")
        if not isinstance(keyword.value, ast.Constant) and not isinstance(keyword.value, ast.List):
            raise ASTParsingError(f"The keyword values should be literals. Got: {keyword.value}")
        if isinstance(keyword.value, ast.List):
            # TODO: workaround for lists. Get the value of each element in the list
            args[keyword.arg] = [element.value for element in keyword.value.elts]
        else:
            args[keyword.arg] = keyword.value.value
    random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=10))
    return FunctionCall(function=function_name, args=args, id=f"tool_call_{random_string}")
```
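For completeness, a sketch of a more general workaround (illustrative only, with deliberately minimal error handling): resolve keyword values recursively, so nested lists and dicts of literals are handled as well.

```python
import ast

def parse_arg_value(node):
    """Recursively convert literal AST nodes (constants, lists, dicts) to Python values."""
    if node is None:
        return None
    if isinstance(node, ast.Constant):
        return node.value
    if isinstance(node, ast.List):
        return [parse_arg_value(element) for element in node.elts]
    if isinstance(node, ast.Dict):
        return {parse_arg_value(key): parse_arg_value(value) for key, value in zip(node.keys, node.values)}
    raise ValueError(f"Unsupported keyword value: {ast.dump(node)}")

# usage inside the keyword loop:
# args = {kw.arg: parse_arg_value(kw.value) for kw in call.keywords}
```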
Not sure if this was intended or if there is a better way to handle it, but I leave it up to you to find the best solution! :) | ethz-spylab/agentdojo | diff --git a/tests/test_ast_utils.py b/tests/test_ast_utils.py
index 8dd5e3f8..84870e40 100644
--- a/tests/test_ast_utils.py
+++ b/tests/test_ast_utils.py
@@ -27,3 +27,16 @@ def test_parse_tool_calls_from_python_function():
assert tool_calls[1]["args"] == {"a": 1, "b": 2}
assert tool_calls[2]["function"] == "f"
assert tool_calls[2]["args"] == {}
+
+
+def test_parse_tool_calls_from_python_function_weird_args():
+ model_output = "[send_email(recipients=['[email protected]'], subject='Hello', body='Hello', attachments={'type': 'document', 'id': '12'})]"
+ tool_calls = parse_tool_calls_from_python_function(model_output)
+ assert len(tool_calls) == 1
+ assert tool_calls[0]["function"] == "send_email"
+ assert tool_calls[0]["args"] == {
+ "recipients": ["[email protected]"],
+ "subject": "Hello",
+ "body": "Hello",
+ "attachments": {"type": "document", "id": "12"},
+ }
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[transformers,docs]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.12.2",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ethz-spylab/agentdojo.git@34e9a686ac5ec83657ad0dad285d6d32c4d256da#egg=agentdojo
annotated-types==0.7.0
anthropic==0.49.0
anyio==4.9.0
babel==2.17.0
backrefs==5.8
black==25.1.0
cachetools==5.5.2
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
cohere==5.14.0
colorama==0.4.6
deepdiff==8.4.2
distro==1.9.0
dnspython==2.7.0
docstring_parser==0.16
email_validator==2.2.0
fastavro==1.10.0
filelock==3.18.0
fsspec==2025.3.1
ghp-import==2.1.0
google-api-core==2.24.2
google-auth==2.38.0
google-cloud-aiplatform==1.71.1
google-cloud-bigquery==3.31.0
google-cloud-core==2.4.3
google-cloud-resource-manager==1.14.2
google-cloud-storage==2.19.0
google-crc32c==1.7.1
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
greenlet==3.1.1
griffe==1.7.1
grpc-google-iam-v1==0.14.2
grpcio==1.71.0
grpcio-status==1.71.0
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
httpx-sse==0.4.0
huggingface-hub==0.30.0
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
jiter==0.9.0
jsonpatch==1.33
jsonpointer==3.0.0
jsonref==1.1.0
langchain==0.3.22
langchain-core==0.3.49
langchain-text-splitters==0.3.7
langsmith==0.3.19
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
mdurl==0.1.2
mergedeep==1.3.4
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-get-deps==0.2.0
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocstrings==0.29.1
mkdocstrings-python==1.16.8
mpmath==1.3.0
mypy-extensions==1.0.0
networkx==3.4.2
numpy==2.2.4
nvidia-cublas-cu12==12.4.5.8
nvidia-cuda-cupti-cu12==12.4.127
nvidia-cuda-nvrtc-cu12==12.4.127
nvidia-cuda-runtime-cu12==12.4.127
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.2.1.3
nvidia-curand-cu12==10.3.5.147
nvidia-cusolver-cu12==11.6.1.9
nvidia-cusparse-cu12==12.3.1.170
nvidia-cusparselt-cu12==0.6.2
nvidia-nccl-cu12==2.21.5
nvidia-nvjitlink-cu12==12.4.127
nvidia-nvtx-cu12==12.4.127
openai==1.69.0
openapi-pydantic==0.5.1
orderly-set==5.3.0
orjson==3.10.16
packaging==24.2
paginate==0.5.7
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
proto-plus==1.26.1
protobuf==5.29.4
pyasn1==0.6.1
pyasn1_modules==0.4.2
pydantic==2.11.1
pydantic_core==2.33.0
Pygments==2.19.1
pymdown-extensions==10.14.3
pytest==8.3.5
python-dateutil==2.9.0.post0
python-dotenv==1.1.0
PyYAML==6.0.2
pyyaml_env_tag==0.1
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
rich==14.0.0
rsa==4.9
safetensors==0.5.3
setuptools==75.8.0
shapely==2.1.0rc1
six==1.17.0
sniffio==1.3.1
SQLAlchemy==2.0.40
sympy==1.13.1
tenacity==9.0.0
tokenizers==0.21.1
torch==2.6.0
tqdm==4.67.1
transformers==4.50.3
triton==3.2.0
types-requests==2.32.0.20250328
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==2.3.0
vertexai==1.71.1
watchdog==6.0.0
wheel==0.45.1
zstandard==0.23.0
| name: agentdojo
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- expat=2.6.4=h6a678d5_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py312h06a4308_0
- python=3.12.2=h996f2a0_0
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py312h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py312h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- agentdojo==0.1.7
- annotated-types==0.7.0
- anthropic==0.49.0
- anyio==4.9.0
- babel==2.17.0
- backrefs==5.8
- black==25.1.0
- cachetools==5.5.2
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- cohere==5.14.0
- colorama==0.4.6
- deepdiff==8.4.2
- distro==1.9.0
- dnspython==2.7.0
- docstring-parser==0.16
- email-validator==2.2.0
- fastavro==1.10.0
- filelock==3.18.0
- fsspec==2025.3.1
- ghp-import==2.1.0
- google-api-core==2.24.2
- google-auth==2.38.0
- google-cloud-aiplatform==1.71.1
- google-cloud-bigquery==3.31.0
- google-cloud-core==2.4.3
- google-cloud-resource-manager==1.14.2
- google-cloud-storage==2.19.0
- google-crc32c==1.7.1
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- greenlet==3.1.1
- griffe==1.7.1
- grpc-google-iam-v1==0.14.2
- grpcio==1.71.0
- grpcio-status==1.71.0
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- httpx-sse==0.4.0
- huggingface-hub==0.30.0
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- jiter==0.9.0
- jsonpatch==1.33
- jsonpointer==3.0.0
- jsonref==1.1.0
- langchain==0.3.22
- langchain-core==0.3.49
- langchain-text-splitters==0.3.7
- langsmith==0.3.19
- markdown==3.7
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- mdurl==0.1.2
- mergedeep==1.3.4
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-get-deps==0.2.0
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocstrings==0.29.1
- mkdocstrings-python==1.16.8
- mpmath==1.3.0
- mypy-extensions==1.0.0
- networkx==3.4.2
- numpy==2.2.4
- nvidia-cublas-cu12==12.4.5.8
- nvidia-cuda-cupti-cu12==12.4.127
- nvidia-cuda-nvrtc-cu12==12.4.127
- nvidia-cuda-runtime-cu12==12.4.127
- nvidia-cudnn-cu12==9.1.0.70
- nvidia-cufft-cu12==11.2.1.3
- nvidia-curand-cu12==10.3.5.147
- nvidia-cusolver-cu12==11.6.1.9
- nvidia-cusparse-cu12==12.3.1.170
- nvidia-cusparselt-cu12==0.6.2
- nvidia-nccl-cu12==2.21.5
- nvidia-nvjitlink-cu12==12.4.127
- nvidia-nvtx-cu12==12.4.127
- openai==1.69.0
- openapi-pydantic==0.5.1
- orderly-set==5.3.0
- orjson==3.10.16
- packaging==24.2
- paginate==0.5.7
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- proto-plus==1.26.1
- protobuf==5.29.4
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pydantic==2.11.1
- pydantic-core==2.33.0
- pygments==2.19.1
- pymdown-extensions==10.14.3
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-dotenv==1.1.0
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- regex==2024.11.6
- requests==2.32.3
- requests-toolbelt==1.0.0
- rich==14.0.0
- rsa==4.9
- safetensors==0.5.3
- shapely==2.1.0rc1
- six==1.17.0
- sniffio==1.3.1
- sqlalchemy==2.0.40
- sympy==1.13.1
- tenacity==9.0.0
- tokenizers==0.21.1
- torch==2.6.0
- tqdm==4.67.1
- transformers==4.50.3
- triton==3.2.0
- types-requests==2.32.0.20250328
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==2.3.0
- vertexai==1.71.1
- watchdog==6.0.0
- zstandard==0.23.0
prefix: /opt/conda/envs/agentdojo
| [
"tests/test_ast_utils.py::test_parse_tool_calls_from_python_function_weird_args"
] | [] | [
"tests/test_ast_utils.py::test_create_python_function_from_tool_call",
"tests/test_ast_utils.py::test_parse_tool_calls_from_python_function"
] | [] | MIT License | 18,980 | 486 | [
"src/agentdojo/ast_utils.py"
] |
|
pybamm-team__PyBaMM-4267 | 668e563b01749bcbb55a017e39b0cf94d8256619 | 2024-07-15 18:37:33 | 668e563b01749bcbb55a017e39b0cf94d8256619 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pybamm-team/PyBaMM/pull/4267?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pybamm-team) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 99.56%. Comparing base [(`ab7348f`)](https://app.codecov.io/gh/pybamm-team/PyBaMM/commit/ab7348fe05a42510e3e0fe3be0a606eae7f5364e?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pybamm-team) to head [(`ded1844`)](https://app.codecov.io/gh/pybamm-team/PyBaMM/commit/ded1844af500ed8c092b6ce06f515b20056cf0d4?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pybamm-team).
> Report is 2 commits behind head on develop.
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## develop #4267 +/- ##
=========================================
Coverage 99.55% 99.56%
=========================================
Files 288 288
Lines 21897 22089 +192
=========================================
+ Hits 21800 21992 +192
Misses 97 97
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/pybamm-team/PyBaMM/pull/4267?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pybamm-team).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pybamm-team).
| diff --git a/pybamm/util.py b/pybamm/util.py
index 0fe02c835..130cb5ba4 100644
--- a/pybamm/util.py
+++ b/pybamm/util.py
@@ -57,14 +57,22 @@ class FuzzyDict(dict):
try:
return super().__getitem__(key)
except KeyError as error:
- if "particle diffusivity" in key:
- warn(
- f"The parameter '{key.replace('particle', 'electrode')}' "
- f"has been renamed to '{key}'",
- DeprecationWarning,
- stacklevel=2,
+ if "electrode diffusivity" in key or "particle diffusivity" in key:
+ old_term, new_term = (
+ ("electrode", "particle")
+ if "electrode diffusivity" in key
+ else ("particle", "electrode")
)
- return super().__getitem__(key.replace("particle", "electrode"))
+ alternative_key = key.replace(old_term, new_term)
+
+ if old_term == "electrode":
+ warn(
+ f"The parameter '{alternative_key}' has been renamed to '{key}' and will be removed in a future release. Using '{key}'",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return super().__getitem__(alternative_key)
if key in ["Negative electrode SOC", "Positive electrode SOC"]:
domain = key.split(" ")[0]
raise KeyError(
| [Bug]: "electrode diffusivity" -> "particle diffusivity" is still breaking change on `develop` when using pre-24.5 pybamm.ParameterValues
### PyBaMM Version
develop
### Python Version
3.9.13
### Describe the bug
Since #4072 re-inverted the logic fix in #3871, the release candidate on `develop` causes the rename of "electrode diffusivity" -> "particle diffusivity" to be a breaking change. Parameter sets using the old key names do not simulate in the 24.5 RC.
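A minimal sketch of the backwards-compatible lookup that would avoid the break (a toy stand-in, not pybamm's actual `FuzzyDict`): accept either spelling and warn only when the deprecated one is used.

```python
import warnings

class AliasDict(dict):
    """Toy stand-in for a fuzzy parameter dict: resolves old/new key spellings both ways."""

    _aliases = {"electrode diffusivity": "particle diffusivity"}  # old -> new

    def __getitem__(self, key):
        try:
            return super().__getitem__(key)
        except KeyError:
            for old, new in self._aliases.items():
                if old in key:  # deprecated spelling: warn and retry with the new name
                    warnings.warn(f"'{key}' was renamed; use '{key.replace(old, new)}'", DeprecationWarning)
                    return super().__getitem__(key.replace(old, new))
                if new in key:  # new spelling, but the stored data still uses the old name
                    return super().__getitem__(key.replace(new, old))
            raise

pv = AliasDict({"Negative particle diffusivity [m2.s-1]": 3.3e-14})
pv["Negative electrode diffusivity [m2.s-1]"]  # warns, still returns 3.3e-14
```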
### Steps to Reproduce
```python
import pybamm

parameter_values = pybamm.ParameterValues("Chen2020")

# Create a dummy pybamm.ParameterValues object that still uses the legacy parameter name
parameter_values.update(
    {
        "Negative electrode diffusivity [m2.s-1]": parameter_values["Negative particle diffusivity [m2.s-1]"],
    },
    check_already_exists=False,
)
del parameter_values["Negative particle diffusivity [m2.s-1]"]

# Try to run a simulation with the old-style parameter set
model = pybamm.lithium_ion.DFN()
experiment = pybamm.Experiment(
    [
        "Discharge at 1C until 3 V",
    ]
)
sim = pybamm.Simulation(
    model,
    parameter_values=parameter_values,
    experiment=experiment,
)

# KeyError provoked here on the call into pybamm.ParameterValues with the new key name
sol = sim.solve()
```
### Relevant log output
_No response_ | pybamm-team/PyBaMM | diff --git a/tests/unit/test_util.py b/tests/unit/test_util.py
index 21673a44f..abcdb4dca 100644
--- a/tests/unit/test_util.py
+++ b/tests/unit/test_util.py
@@ -34,6 +34,11 @@ class TestUtil:
"SEI current": 3,
"Lithium plating current": 4,
"A dimensional variable [m]": 5,
+ "Positive particle diffusivity [m2.s-1]": 6,
+ }
+ )
+ d2 = pybamm.FuzzyDict(
+ {
"Positive electrode diffusivity [m2.s-1]": 6,
}
)
@@ -56,6 +61,16 @@ class TestUtil:
with pytest.raises(KeyError, match="Upper voltage"):
d.__getitem__("Open-circuit voltage at 100% SOC [V]")
+ assert (
+ d2["Positive particle diffusivity [m2.s-1]"]
+ == d["Positive particle diffusivity [m2.s-1]"]
+ )
+
+ assert (
+ d2["Positive electrode diffusivity [m2.s-1]"]
+ == d["Positive electrode diffusivity [m2.s-1]"]
+ )
+
with pytest.warns(DeprecationWarning):
assert (
d["Positive electrode diffusivity [m2.s-1]"]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 24.52 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc gfortran graphviz"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anytree==2.12.1
argcomplete==3.6.1
asttokens==3.0.0
attrs==25.3.0
casadi==3.7.0
certifi==2025.1.31
cfgv==3.4.0
charset-normalizer==3.4.1
colorlog==6.9.0
comm==0.2.2
coverage==7.8.0
debugpy==1.8.13
decorator==5.2.1
dependency-groups==1.3.0
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
identify==2.6.9
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
jedi==0.19.2
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
matplotlib-inline==0.1.7
mpmath==1.3.0
nbclient==0.10.2
nbformat==5.10.4
nbmake==1.5.5
nest-asyncio==1.6.0
nodeenv==1.9.1
nox==2025.2.9
numpy==1.26.4
packaging==24.2
pandas==2.2.3
parameterized==0.9.0
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy==1.5.0
pooch==1.8.2
pre_commit==4.2.0
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
-e git+https://github.com/pybamm-team/PyBaMM.git@668e563b01749bcbb55a017e39b0cf94d8256619#egg=pybamm
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
pytest-doctestplus==1.4.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
ruff==0.11.2
scipy==1.13.1
six==1.17.0
stack-data==0.6.3
sympy==1.13.3
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
xarray==2024.7.0
zipp==3.21.0
| name: PyBaMM
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anytree==2.12.1
- argcomplete==3.6.1
- asttokens==3.0.0
- attrs==25.3.0
- casadi==3.7.0
- certifi==2025.1.31
- cfgv==3.4.0
- charset-normalizer==3.4.1
- colorlog==6.9.0
- comm==0.2.2
- coverage==7.8.0
- debugpy==1.8.13
- decorator==5.2.1
- dependency-groups==1.3.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- identify==2.6.9
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- jedi==0.19.2
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- matplotlib-inline==0.1.7
- mpmath==1.3.0
- nbclient==0.10.2
- nbformat==5.10.4
- nbmake==1.5.5
- nest-asyncio==1.6.0
- nodeenv==1.9.1
- nox==2025.2.9
- numpy==1.26.4
- packaging==24.2
- pandas==2.2.3
- parameterized==0.9.0
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pooch==1.8.2
- pre-commit==4.2.0
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pybamm==24.5rc2
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-doctestplus==1.4.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- ruff==0.11.2
- scipy==1.13.1
- six==1.17.0
- stack-data==0.6.3
- sympy==1.13.3
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- xarray==2024.7.0
- zipp==3.21.0
prefix: /opt/conda/envs/PyBaMM
| [
"tests/unit/test_util.py::TestUtil::test_fuzzy_dict"
] | [] | [
"tests/unit/test_util.py::TestUtil::test_get_parameters_filepath",
"tests/unit/test_util.py::TestUtil::test_is_constant_and_can_evaluate",
"tests/unit/test_util.py::TestSearch::test_url_gets_to_stdout",
"tests/unit/test_util.py::TestUtil::test_is_jax_compatible",
"tests/unit/test_util.py::TestUtil::test_git_commit_info",
"tests/unit/test_util.py::TestUtil::test_optional_dependencies",
"tests/unit/test_util.py::TestUtil::test_import_optional_dependency",
"tests/unit/test_util.py::TestUtil::test_pybamm_import"
] | [] | BSD 3-Clause "New" or "Revised" License | 18,987 | 355 | [
"pybamm/util.py"
] |
eyurtsev__kor-306 | 9a3afa3104b4d3422c7b0eb38bf24a7732e3be54 | 2024-07-16 03:38:11 | 9a3afa3104b4d3422c7b0eb38bf24a7732e3be54 | eyurtsev: Awesome, I'll confirm this works and make this a breaking release since .run interface won't be available anymore
Sachin-Bhat: Hey @eyurtsev,
Do let me know if any changes need to be made from my side.
Cheers,
Sachin | diff --git a/kor/extraction/api.py b/kor/extraction/api.py
index 5e196c5..8b35d7d 100644
--- a/kor/extraction/api.py
+++ b/kor/extraction/api.py
@@ -1,11 +1,13 @@
"""Kor API for extraction related functionality."""
+
import asyncio
from typing import Any, Callable, List, Optional, Sequence, Type, Union, cast
-from langchain.chains import LLMChain
from langchain_core.documents import Document
from langchain_core.language_models import BaseLanguageModel
+from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
+from langchain_core.runnables import Runnable
from kor.encoders import Encoder, InputFormatter, initialize_encoder
from kor.extraction.parser import KorParser
@@ -18,7 +20,7 @@ from kor.validators import Validator
async def _extract_from_document_with_semaphore(
semaphore: asyncio.Semaphore,
- chain: LLMChain,
+ chain: Runnable,
document: Document,
uid: str,
source_uid: str,
@@ -26,7 +28,7 @@ async def _extract_from_document_with_semaphore(
"""Extract from document with a semaphore to limit concurrency."""
async with semaphore:
extraction_result: Extraction = cast(
- Extraction, await chain.arun(document.page_content)
+ Extraction, await chain.ainvoke(document.page_content)
)
return {
"uid": uid,
@@ -52,7 +54,7 @@ def create_extraction_chain(
instruction_template: Optional[PromptTemplate] = None,
verbose: Optional[bool] = None,
**encoder_kwargs: Any,
-) -> LLMChain:
+) -> Runnable:
"""Create an extraction chain.
Args:
@@ -75,8 +77,10 @@ def create_extraction_chain(
* "type_description": type description of the node (from TypeDescriptor)
* "format_instructions": information on how to format the output
(from Encoder)
- verbose: if provided, sets the verbosity on the chain, otherwise default
- verbosity of the chain will be used
+ verbose: Deprecated, use langchain_core.globals.set_verbose and
+ langchain_core.globals.set_debug instead.
+ Please reference this guide for more information:
+ https://python.langchain.com/v0.2/docs/how_to/debugging
encoder_kwargs: Keyword arguments to pass to the encoder class
Returns:
@@ -93,32 +97,39 @@ def create_extraction_chain(
chain = create_extraction_chain(llm, node, encoder_or_encoder_class="JSON",
input_formatter="triple_quotes")
"""
+
+ if verbose is not None:
+ raise NotImplementedError(
+ "The verbose argument is no longer supported. Instead if you want to see "
+ "verbose output, please reference this guide for more information: "
+ "https://python.langchain.com/v0.2/docs/how_to/debugging "
+ )
+
if not isinstance(node, Object):
raise ValueError(f"node must be an Object got {type(node)}")
encoder = initialize_encoder(encoder_or_encoder_class, node, **encoder_kwargs)
type_descriptor_to_use = initialize_type_descriptors(type_descriptor)
- chain_kwargs = {}
- if verbose is not None:
- chain_kwargs["verbose"] = verbose
-
- return LLMChain(
- llm=llm,
- prompt=create_langchain_prompt(
- node,
- encoder,
- type_descriptor_to_use,
- validator=validator,
- instruction_template=instruction_template,
- input_formatter=input_formatter,
- ),
- output_parser=KorParser(encoder=encoder, validator=validator, schema_=node),
- **chain_kwargs,
+ prompt = create_langchain_prompt(
+ node,
+ encoder,
+ type_descriptor_to_use,
+ validator=validator,
+ instruction_template=instruction_template,
+ input_formatter=input_formatter,
+ )
+
+ chain = (
+ prompt
+ | llm
+ | StrOutputParser()
+ | KorParser(encoder=encoder, validator=validator, schema_=node)
)
+ return chain
async def extract_from_documents(
- chain: LLMChain,
+ chain: Runnable,
documents: Sequence[Document],
*,
max_concurrency: int = 1,
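For context on the change above: the patch drops the deprecated LLMChain wrapper and composes the prompt, model, and parsers into an LCEL Runnable, so callers use invoke/ainvoke instead of run/arun. Below is a minimal sketch of the same composition pattern using plain langchain_core pieces; the FakeListChatModel is only a stand-in so the snippet runs without credentials, not something kor itself uses.

```python
from langchain_core.language_models import FakeListChatModel
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Stand-in chat model (assumption for this sketch); any real chat model works here.
llm = FakeListChatModel(responses=["a short summary"])

prompt = ChatPromptTemplate.from_template("Summarize the text:\n{text}")

# The deprecated LLMChain(llm=..., prompt=..., output_parser=...) becomes a piped
# Runnable, and .run()/.arun() calls are replaced by .invoke()/.ainvoke().
chain = prompt | llm | StrOutputParser()

print(chain.invoke({"text": "LLMChain was deprecated in langchain 0.1.17."}))
```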
| LLMChain is deprecated
Hello,
It seems this project still uses LLMChain, which was deprecated in langchain 0.1.17 and will be removed in langchain 0.3. I have something that works, and I will open a PR for it. | eyurtsev/kor | diff --git a/tests/extraction/test_extraction_with_chain.py b/tests/extraction/test_extraction_with_chain.py
index 086ad76..68b3ab8 100644
--- a/tests/extraction/test_extraction_with_chain.py
+++ b/tests/extraction/test_extraction_with_chain.py
@@ -1,10 +1,8 @@
"""Test that the extraction chain works as expected."""
from typing import Any, Mapping, Optional
-import langchain
import pytest
-from langchain.chains import LLMChain
-from langchain_core.prompts import PromptTemplate
+from langchain_core.runnables import Runnable
from kor.encoders import CSVEncoder, JSONEncoder
from kor.extraction import create_extraction_chain
@@ -40,7 +38,7 @@ def test_create_extraction_chain(options: Mapping[str, Any]) -> None:
for schema in [SIMPLE_OBJECT_SCHEMA]:
chain = create_extraction_chain(chat_model, schema, **options)
- assert isinstance(chain, LLMChain)
+ assert isinstance(chain, Runnable)
# Try to run through predict and parse
chain.invoke("some string") # type: ignore
@@ -60,7 +58,7 @@ def test_create_extraction_chain_with_csv_encoder(options: Mapping[str, Any]) ->
chat_model = ToyChatModel(response="hello")
chain = create_extraction_chain(chat_model, **options)
- assert isinstance(chain, LLMChain)
+ assert isinstance(chain, Runnable)
# Try to run through predict and parse
chain.invoke("some string") # type: ignore
@@ -105,46 +103,14 @@ def test_not_implemented_assertion_raised_for_csv(options: Mapping[str, Any]) ->
create_extraction_chain(chat_model, **options)
[email protected]("verbose", [True, False, None])
[email protected]("verbose", [True, False])
def test_instantiation_with_verbose_flag(verbose: Optional[bool]) -> None:
"""Create an extraction chain."""
chat_model = ToyChatModel(response="hello")
- chain = create_extraction_chain(
- chat_model,
- SIMPLE_OBJECT_SCHEMA,
- encoder_or_encoder_class="json",
- verbose=verbose,
- )
- assert isinstance(chain, LLMChain)
- if verbose is None:
- expected_verbose = langchain.verbose
- else:
- expected_verbose = verbose
- assert chain.verbose == expected_verbose
-
-
-def test_using_custom_template() -> None:
- """Create an extraction chain with a custom template."""
- template = PromptTemplate(
- input_variables=["format_instructions", "type_description"],
- template=(
- "custom_prefix\n"
- "{type_description}\n\n"
- "{format_instructions}\n"
- "custom_suffix"
- ),
- )
- chain = create_extraction_chain(
- ToyChatModel(response="hello"),
- OBJECT_SCHEMA_WITH_MANY,
- instruction_template=template,
- encoder_or_encoder_class="json",
- )
- prompt_value = chain.prompt.format_prompt(text="hello")
- system_message = prompt_value.to_messages()[0]
- string_value = prompt_value.to_string()
-
- assert "custom_prefix" in string_value
- assert "custom_suffix" in string_value
- assert "custom_prefix" in system_message.content
- assert "custom_suffix" in system_message.content
+ with pytest.raises(NotImplementedError):
+ create_extraction_chain(
+ chat_model,
+ SIMPLE_OBJECT_SCHEMA,
+ encoder_or_encoder_class="json",
+ verbose=verbose,
+ )
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
annotated-types==0.7.0
anyio==4.9.0
async-timeout==4.0.3
attrs==25.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
frozenlist==1.5.0
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
idna==3.10
iniconfig==2.1.0
jsonpatch==1.33
jsonpointer==3.0.0
-e git+https://github.com/eyurtsev/kor.git@9a3afa3104b4d3422c7b0eb38bf24a7732e3be54#egg=kor
langchain==0.2.17
langchain-core==0.2.43
langchain-text-splitters==0.2.4
langsmith==0.1.147
multidict==6.2.0
numpy==1.26.4
orjson==3.10.16
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
propcache==0.3.1
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
requests==2.32.3
requests-toolbelt==1.0.0
six==1.17.0
sniffio==1.3.1
SQLAlchemy==2.0.40
tenacity==8.5.0
tomli==2.2.1
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
yarl==1.18.3
| name: kor
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- annotated-types==0.7.0
- anyio==4.9.0
- async-timeout==4.0.3
- attrs==25.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- frozenlist==1.5.0
- greenlet==3.1.1
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- idna==3.10
- iniconfig==2.1.0
- jsonpatch==1.33
- jsonpointer==3.0.0
- kor==1.0.2
- langchain==0.2.17
- langchain-core==0.2.43
- langchain-text-splitters==0.2.4
- langsmith==0.1.147
- multidict==6.2.0
- numpy==1.26.4
- orjson==3.10.16
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- propcache==0.3.1
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- requests==2.32.3
- requests-toolbelt==1.0.0
- six==1.17.0
- sniffio==1.3.1
- sqlalchemy==2.0.40
- tenacity==8.5.0
- tomli==2.2.1
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- urllib3==2.3.0
- yarl==1.18.3
prefix: /opt/conda/envs/kor
| [
"tests/extraction/test_extraction_with_chain.py::test_instantiation_with_verbose_flag[True]",
"tests/extraction/test_extraction_with_chain.py::test_instantiation_with_verbose_flag[False]"
] | [] | [
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options0]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options1]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options2]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options3]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options4]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options5]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options6]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options7]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain[options8]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain_with_csv_encoder[options0]",
"tests/extraction/test_extraction_with_chain.py::test_create_extraction_chain_with_csv_encoder[options1]",
"tests/extraction/test_extraction_with_chain.py::test_not_implemented_assertion_raised_for_csv[options0]",
"tests/extraction/test_extraction_with_chain.py::test_not_implemented_assertion_raised_for_csv[options1]"
] | [] | MIT License | 18,992 | 974 | [
"kor/extraction/api.py"
] |
lincc-frameworks__nested-pandas-123 | 3cd3baaa4c87973af20da3aed87c40f05106cfe1 | 2024-07-16 21:58:47 | 975fbc88de84b6d8d9ae309f71ae3f0b96831818 | diff --git a/src/nested_pandas/nestedframe/core.py b/src/nested_pandas/nestedframe/core.py
index f0169b5..285c598 100644
--- a/src/nested_pandas/nestedframe/core.py
+++ b/src/nested_pandas/nestedframe/core.py
@@ -103,6 +103,62 @@ class NestedFrame(pd.DataFrame):
new_df[label] = packed
return new_df
+ @classmethod
+ def from_flat(cls, df, base_columns, nested_columns=None, index=None, name="nested"):
+ """Creates a NestedFrame with base and nested columns from a flat
+ dataframe.
+
+ Parameters
+ ----------
+ df: pd.DataFrame or NestedFrame
+ A flat dataframe.
+ base_columns: list-like
+ The columns that should be used as base (flat) columns in the
+ output dataframe.
+ nested_columns: list-like, or None
+ The columns that should be packed into a nested column. All columns
+ in the list will attempt to be packed into a single nested column
+ with the name provided in `nested_name`. If None, is defined as all
+ columns not in `base_columns`.
+ index: str, or None
+ The name of a column to use as the new index. Typically, the index
+ should have a unique value per row for base columns, and should
+ repeat for nested columns. For example, a dataframe with two
+ columns; a=[1,1,1,2,2,2] and b=[5,10,15,20,25,30] would want an
+ index like [0,0,0,1,1,1] if a is chosen as a base column. If not
+ provided the current index will be used.
+ name:
+ The name of the output column the `nested_columns` are packed into.
+
+ Returns
+ -------
+ NestedFrame
+ A NestedFrame with the specified nesting structure.
+
+ Examples
+ --------
+
+ >>> nf = NestedFrame({"a":[1,1,1,2,2], "b":[2,2,2,4,4],
+ ... "c":[1,2,3,4,5], "d":[2,4,6,8,10]},
+ ... index=[0,0,0,1,1])
+
+ >>> NestedFrame.from_flat(nf, base_columns=["a","b"])
+ """
+
+ # Resolve new index
+ if index is not None:
+ # if a base column is chosen remove it
+ if index in base_columns:
+ base_columns = [col for col in base_columns if col != index]
+ df = df.set_index(index)
+
+ # drop duplicates on index
+ out_df = df[base_columns][~df.index.duplicated(keep="first")]
+
+ # add nested
+ nested_columns = [col for col in df.columns if col not in base_columns]
+ return out_df.add_nested(df[nested_columns], name=name)
+
def _split_query(self, expr) -> dict:
"""Splits a pandas query into multiple subqueries for nested and base layers"""
# Ensure query has needed spacing for upcoming split
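A small usage sketch of the new classmethod, mirroring the docstring added above; the top-level import path is assumed from the package name, and the expected outputs follow the docstring and the accompanying tests.

```python
from nested_pandas import NestedFrame  # import path assumed from the package name

flat = NestedFrame(
    {"a": [1, 1, 1, 2, 2], "b": [2, 2, 2, 4, 4],
     "c": [1, 2, 3, 4, 5], "d": [2, 4, 6, 8, 10]},
    index=[0, 0, 0, 1, 1],
)

# "a" and "b" stay as flat base columns; the remaining columns ("c", "d") are
# packed into a single nested column, named "nested" by default.
nf = NestedFrame.from_flat(flat, base_columns=["a", "b"])

print(list(nf.columns))                 # expected: ['a', 'b', 'nested']
print(list(nf["nested"].nest.fields))   # expected: ['c', 'd']
```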
| Create NestedPandas from a flat source table assuming some columns to be object columns
**Feature request**
It would be really nice if we could generate a NestedFrame with both "base" and "nested" columns from a single flat dataframe.
```python
NestedFrame.from_flat(df, base_columns=["a", "b"])
```
It would be super great if the user could specify `base_columns=None` and we just guessed which columns are the base columns.
**Before submitting**
Please check the following:
- [x] I have described the purpose of the suggested change, specifying what I need the enhancement to accomplish, i.e. what problem it solves.
- [x] I have included any relevant links, screenshots, environment information, and data relevant to implementing the requested feature, as well as pseudocode for how I want to access the new functionality.
- [ ] If I have ideas for how the new feature could be implemented, I have provided explanations and/or pseudocode and/or task lists for the steps.
| lincc-frameworks/nested-pandas | diff --git a/tests/nested_pandas/nestedframe/test_nestedframe.py b/tests/nested_pandas/nestedframe/test_nestedframe.py
index 7590fce..019f5f9 100644
--- a/tests/nested_pandas/nestedframe/test_nestedframe.py
+++ b/tests/nested_pandas/nestedframe/test_nestedframe.py
@@ -145,6 +145,41 @@ def test_add_nested_for_empty_df():
assert_frame_equal(new_base.nested.nest.to_flat(), nested.astype(pd.ArrowDtype(pa.float64())))
[email protected]("index", [None, "a", "c"])
+def test_from_flat(index):
+ """Test the NestedFrame.from_flat functionality"""
+ nf = NestedFrame(
+ {"a": [1, 1, 1, 2, 2], "b": [2, 2, 2, 4, 4], "c": [1, 2, 3, 4, 5], "d": [2, 4, 6, 8, 10]},
+ index=[0, 0, 0, 1, 1],
+ )
+
+ out_nf = NestedFrame.from_flat(nf, base_columns=["a", "b"], index=index, name="new_nested")
+
+ if index is None:
+ assert list(out_nf.columns) == ["a", "b", "new_nested"]
+ assert list(out_nf.new_nested.nest.fields) == ["c", "d"]
+ assert len(out_nf) == 2
+ elif index == "a":
+ assert list(out_nf.columns) == ["b", "new_nested"]
+ assert list(out_nf.new_nested.nest.fields) == ["c", "d"]
+ assert len(out_nf) == 2
+ elif index == "c": # not what a user likely wants, but should still work
+ assert list(out_nf.columns) == ["a", "b", "new_nested"]
+ assert list(out_nf.new_nested.nest.fields) == ["d"]
+ assert len(out_nf) == 5
+
+
+def test_recover_from_flat():
+ """test that going to_flat and then from_flat recovers the same df"""
+ nf = generate_data(5, 10, seed=1)
+
+ flat = nf["nested"].nest.to_flat()
+
+ nf2 = NestedFrame.from_flat(nf[["a", "b"]].join(flat), base_columns=["a", "b"], name="nested")
+
+ assert nf2.equals(nf)
+
+
def test_query():
"""Test that NestedFrame.query handles nested queries correctly"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "python -m pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==3.0.0
asv==0.6.3
asv_runner==0.2.1
async-lru==2.0.5
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
bleach==6.2.0
build==1.2.2.post1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
comm==0.2.2
coverage==7.8.0
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
fqdn==1.5.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
identify==2.6.9
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mistune==3.1.3
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
-e git+https://github.com/lincc-frameworks/nested-pandas.git@3cd3baaa4c87973af20da3aed87c40f05106cfe1#egg=nested_pandas
nodeenv==1.9.1
notebook==7.3.3
notebook_shim==0.2.4
numpy==2.0.2
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==19.0.1
pycparser==2.22
Pygments==2.19.1
Pympler==1.1
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
ruff==0.11.2
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
soupsieve==2.6
stack-data==0.6.3
tabulate==0.9.0
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: nested-pandas
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asttokens==3.0.0
- asv==0.6.3
- asv-runner==0.2.1
- async-lru==2.0.5
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- build==1.2.2.post1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- comm==0.2.2
- coverage==7.8.0
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- fqdn==1.5.1
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- identify==2.6.9
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mistune==3.1.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- nested-pandas==0.1.2
- nodeenv==1.9.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==2.0.2
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pyarrow==19.0.1
- pycparser==2.22
- pygments==2.19.1
- pympler==1.1
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- ruff==0.11.2
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- soupsieve==2.6
- stack-data==0.6.3
- tabulate==0.9.0
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/nested-pandas
| [
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[None]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[a]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[c]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_recover_from_flat"
] | [] | [
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_nestedframe_construction",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_all_columns",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_nested_columns",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_is_known_hierarchical_column",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_flat_df",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_flat_df_and_mismatched_index",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_series",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_series_and_mismatched_index",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_for_empty_df",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_query",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_layer_as_base_column",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_inplace_base",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_inplace_nested",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_errors",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_reduce",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_reduce_duplicated_cols"
] | [] | MIT License | 18,999 | 751 | [
"src/nested_pandas/nestedframe/core.py"
] |
|
dask__dask-11233 | 6a04b4ed70934b548d25a793eed90f033a690531 | 2024-07-17 09:29:31 | 2e9e247771a30611eb133d03892135cd72cf4ba4 | github-actions[bot]: ## Unit Test Results
_See [test report](https://dask.github.io/dask/test_report.html) for an extended history of previous test failures. This is useful for diagnosing flaky tests._
- 15 files ±0, 15 suites ±0, duration 3h 31m 30s :stopwatch: (+5m 32s)
- 13 128 tests +1: 12 013 :white_check_mark: +1, 1 068 :zzz: ±0, 47 :x: ±0
- 162 783 runs +12: 139 947 :white_check_mark: +10, 22 789 :zzz: +2, 47 :x: ±0
For more details on these failures, see [this check](https://github.com/dask/dask/runs/27554928860).
Results for commit 2de6c0ed. ± Comparison against base commit 6a04b4ed.
| diff --git a/dask/dataframe/backends.py b/dask/dataframe/backends.py
index b2b4b63d9..afdf46522 100644
--- a/dask/dataframe/backends.py
+++ b/dask/dataframe/backends.py
@@ -436,7 +436,7 @@ def _nonempty_series(s, idx=None):
data = [s.iloc[0]] * 2
elif isinstance(dtype, pd.DatetimeTZDtype):
entry = pd.Timestamp("1970-01-01", tz=dtype.tz)
- data = [entry, entry]
+ data = pd.array([entry, entry], dtype=dtype)
elif isinstance(dtype, pd.CategoricalDtype):
if len(s.cat.categories):
data = [s.cat.categories[0]] * 2
diff --git a/dask/delayed.py b/dask/delayed.py
index 350e89fbf..1ff6a2dba 100644
--- a/dask/delayed.py
+++ b/dask/delayed.py
@@ -95,6 +95,10 @@ def unpack_collections(expr):
return expr._key, (expr,)
if is_dask_collection(expr):
+ if hasattr(expr, "optimize"):
+ # Optimize dask-expr collections
+ expr = expr.optimize()
+
finalized = finalize(expr)
return finalized._key, (finalized,)
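The dask/dataframe/backends.py hunk above is the part that addresses the resolution bug: building the non-empty meta from a bare list of Timestamp objects lets pandas re-infer the resolution, while routing the values through pd.array with the original extension dtype keeps it. A pandas-only sketch of the preserved path (the values are illustrative):

```python
import pandas as pd

dtype = pd.DatetimeTZDtype(unit="us", tz="UTC")  # timezone-aware, microsecond unit
entry = pd.Timestamp("1970-01-01", tz=dtype.tz)

# As in the patched _nonempty_series: hand pandas the exact dtype rather than
# letting it re-infer one from bare Timestamp objects.
data = pd.array([entry, entry], dtype=dtype)
print(pd.Series(data).dtype)  # datetime64[us, UTC] -- the unit survives
```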
| Roundtripping timezone-aware DataFrame through parquet doesn't preserve timestamp resolution
While diagnosing some of the failures we're seeing over in https://github.com/coiled/dask-bigquery/pull/81, I stumbled across an issue with roundtripping timezone-aware timeseries data through parquet with Dask. Here's a minimal reproducer:
```python
import random
import pandas as pd
import dask.dataframe as dd
# Generate some random synthetic data
records = [
{
"number": random.randint(0, 100),
"timestamp": pd.Timestamp.utcnow(),
"idx": i,
}
for i in range(10)
]
df = pd.DataFrame(records)
# Change timestamp resolution to us (this is important)
df["timestamp"] = df["timestamp"].astype("datetime64[us, UTC]")
# Roundtrip through parquet with Dask
ddf = dd.from_pandas(df, npartitions=2)
outdir = "test.parquet"
ddf.to_parquet(outdir)
ddf2 = dd.read_parquet(outdir)
dd.utils.assert_eq(ddf, ddf2, check_divisions=False)
```
which raises this error:
```
Traceback (most recent call last):
File "/Users/james/projects/dask/dask/test.py", line 24, in <module>
dd.utils.assert_eq(ddf, ddf2, check_divisions=False)
File "/Users/james/projects/dask/dask/dask/dataframe/utils.py", line 603, in assert_eq
tm.assert_frame_equal(
File "/Users/james/mambaforge/envs/dask-py312/lib/python3.12/site-packages/pandas/_testing/asserters.py", line 1279, in assert_frame_equal
assert_series_equal(
File "/Users/james/mambaforge/envs/dask-py312/lib/python3.12/site-packages/pandas/_testing/asserters.py", line 975, in assert_series_equal
assert_attr_equal("dtype", left, right, obj=f"Attributes of {obj}")
File "/Users/james/mambaforge/envs/dask-py312/lib/python3.12/site-packages/pandas/_testing/asserters.py", line 421, in assert_attr_equal
raise_assert_detail(obj, msg, left_attr, right_attr)
File "/Users/james/mambaforge/envs/dask-py312/lib/python3.12/site-packages/pandas/_testing/asserters.py", line 614, in raise_assert_detail
raise AssertionError(msg)
AssertionError: Attributes of DataFrame.iloc[:, 1] (column name="timestamp") are different
Attribute "dtype" are different
[left]: datetime64[us, UTC]
[right]: datetime64[ns, UTC]
```
Note the initial `ddf` DataFrame has `us` resolution, but after roundtripping through parquet, the `ddf2` DataFrame has `ns` resolution.
A couple of additional observations:
1. The equivalent `pandas` code (i.e. removing `dd.from_pandas`) doesn't raise an error.
2. If I remove timezone information altogether (e.g. use `pd.Timestamp.now()` instead of `pd.Timestamp.utcnow()`), then this also doesn't raise an error.
cc @phofl @fjetter | dask/dask | diff --git a/dask/dataframe/tests/test_dataframe.py b/dask/dataframe/tests/test_dataframe.py
index a9a77bdf1..d1fb8524e 100644
--- a/dask/dataframe/tests/test_dataframe.py
+++ b/dask/dataframe/tests/test_dataframe.py
@@ -36,6 +36,7 @@ from dask.dataframe.core import (
repartition_divisions,
total_mem_usage,
)
+from dask.dataframe.dispatch import meta_nonempty
from dask.dataframe.utils import (
assert_eq,
assert_eq_dtypes,
@@ -6308,6 +6309,20 @@ def test_enforce_runtime_divisions():
ddf.enforce_runtime_divisions().compute()
+def test_preserve_ts_unit_in_meta_creation():
+ pdf = pd.DataFrame(
+ {
+ "a": [1],
+ "timestamp": pd.Series(
+ [pd.Timestamp.utcnow()], dtype="datetime64[us, UTC]"
+ ),
+ }
+ )
+ df = dd.from_pandas(pdf, npartitions=1)
+ assert_eq(meta_nonempty(df._meta).dtypes, pdf.dtypes)
+ assert_eq(df, pdf)
+
+
def test_query_planning_config_warns():
# Make sure dd._dask_expr_enabled() warns if the current
# "dataframe.query-planning" config conflicts with the
@@ -6316,3 +6331,23 @@ def test_query_planning_config_warns():
expect = "enabled" if dd.DASK_EXPR_ENABLED else "disabled"
with pytest.warns(match=f"query planning is already {expect}"):
dd._dask_expr_enabled()
+
+
+def test_dataframe_into_delayed():
+ if not DASK_EXPR_ENABLED:
+ pytest.skip("Only relevant for dask.expr")
+
+ pdf = pd.DataFrame({"a": [1, 2, 3], "b": 1})
+ df = dd.from_pandas(pdf, npartitions=2)
+
+ def test_func(df):
+ return df.sum().sum()
+
+ def delayed_func(i):
+ # sanity check
+ assert i.sum() == 6
+
+ df = df[["a"]].map_partitions(test_func, meta=(None, int))
+ result = delayed(delayed_func)(df)
+ assert sum(map(len, result.dask.layers.values())) == 6
+ result.compute()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-rerunfailures",
"pytest-timeout"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bokeh==3.7.2
click==8.1.8
cloudpickle==3.1.1
contourpy==1.3.1
coverage==7.8.0
-e git+https://github.com/dask/dask.git@6a04b4ed70934b548d25a793eed90f033a690531#egg=dask
dask-expr==1.1.8
distributed==2024.7.0
exceptiongroup==1.2.2
fsspec==2025.3.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
locket==1.0.0
lz4==4.4.3
MarkupSafe==3.0.2
msgpack==1.1.0
narwhals==1.32.0
numpy==2.2.4
packaging==24.2
pandas==2.2.3
partd==1.4.2
pillow==11.1.0
pluggy==1.5.0
psutil==7.0.0
pyarrow==19.0.1
pyarrow-hotfix==0.6
pytest==8.3.5
pytest-cov==6.0.0
pytest-rerunfailures==15.0
pytest-timeout==2.3.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
six==1.17.0
sortedcontainers==2.4.0
tblib==3.1.0
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
tzdata==2025.2
urllib3==2.3.0
xyzservices==2025.1.0
zict==3.0.0
zipp==3.21.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bokeh==3.7.2
- click==8.1.8
- cloudpickle==3.1.1
- contourpy==1.3.1
- coverage==7.8.0
- dask==2024.7.0+4.g6a04b4ed7
- dask-expr==1.1.8
- distributed==2024.7.0
- exceptiongroup==1.2.2
- fsspec==2025.3.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- locket==1.0.0
- lz4==4.4.3
- markupsafe==3.0.2
- msgpack==1.1.0
- narwhals==1.32.0
- numpy==2.2.4
- packaging==24.2
- pandas==2.2.3
- partd==1.4.2
- pillow==11.1.0
- pluggy==1.5.0
- psutil==7.0.0
- pyarrow==19.0.1
- pyarrow-hotfix==0.6
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-rerunfailures==15.0
- pytest-timeout==2.3.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- six==1.17.0
- sortedcontainers==2.4.0
- tblib==3.1.0
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- tzdata==2025.2
- urllib3==2.3.0
- xyzservices==2025.1.0
- zict==3.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/dask
| [
"dask/dataframe/tests/test_dataframe.py::test_preserve_ts_unit_in_meta_creation",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_into_delayed"
] | [] | [
"dask/dataframe/tests/test_dataframe.py::test_Dataframe",
"dask/dataframe/tests/test_dataframe.py::test_head_tail",
"dask/dataframe/tests/test_dataframe.py::test_head_npartitions",
"dask/dataframe/tests/test_dataframe.py::test_head_npartitions_warn",
"dask/dataframe/tests/test_dataframe.py::test_index_head",
"dask/dataframe/tests/test_dataframe.py::test_Series",
"dask/dataframe/tests/test_dataframe.py::test_Index",
"dask/dataframe/tests/test_dataframe.py::test_axes",
"dask/dataframe/tests/test_dataframe.py::test_series_axes",
"dask/dataframe/tests/test_dataframe.py::test_Scalar",
"dask/dataframe/tests/test_dataframe.py::test_scalar_raises",
"dask/dataframe/tests/test_dataframe.py::test_attributes",
"dask/dataframe/tests/test_dataframe.py::test_column_names",
"dask/dataframe/tests/test_dataframe.py::test_columns_named_divisions_and_meta",
"dask/dataframe/tests/test_dataframe.py::test_index_names",
"dask/dataframe/tests/test_dataframe.py::test_rename_columns",
"dask/dataframe/tests/test_dataframe.py::test_rename_series",
"dask/dataframe/tests/test_dataframe.py::test_rename_series_method",
"dask/dataframe/tests/test_dataframe.py::test_rename_series_method_2",
"dask/dataframe/tests/test_dataframe.py::test_describe_numeric[dask-test_values1]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-None-None-subset0]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-None-None-subset1]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-None-None-subset2]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-None-None-subset3]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-None-None-subset4]",
"dask/dataframe/tests/test_dataframe.py::test_describe[all-None-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe[include6-None-percentiles6-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe[include7-None-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe[include8-None-percentiles8-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe[None-exclude9-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe[include10-None-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_describe_empty",
"dask/dataframe/tests/test_dataframe.py::test_describe_for_possibly_unsorted_q",
"dask/dataframe/tests/test_dataframe.py::test_cumulative",
"dask/dataframe/tests/test_dataframe.py::test_cumulative_with_nans",
"dask/dataframe/tests/test_dataframe.py::test_cumulative_with_duplicate_columns",
"dask/dataframe/tests/test_dataframe.py::test_cumulative_empty_partitions[func0]",
"dask/dataframe/tests/test_dataframe.py::test_cumulative_empty_partitions[func1]",
"dask/dataframe/tests/test_dataframe.py::test_dropna",
"dask/dataframe/tests/test_dataframe.py::test_clip[2-5]",
"dask/dataframe/tests/test_dataframe.py::test_clip[2.5-3.5]",
"dask/dataframe/tests/test_dataframe.py::test_clip_axis_0",
"dask/dataframe/tests/test_dataframe.py::test_clip_axis_1",
"dask/dataframe/tests/test_dataframe.py::test_squeeze",
"dask/dataframe/tests/test_dataframe.py::test_where_mask",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_multi_argument",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_type",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_partition_info",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_names",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_column_info",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_method_names",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_propagates_index_metadata",
"dask/dataframe/tests/test_dataframe.py::test_map_partitions_with_delayed_collection",
"dask/dataframe/tests/test_dataframe.py::test_metadata_inference_single_partition_aligned_args",
"dask/dataframe/tests/test_dataframe.py::test_align_dataframes",
"dask/dataframe/tests/test_dataframe.py::test_drop_duplicates[None]",
"dask/dataframe/tests/test_dataframe.py::test_drop_duplicates_subset",
"dask/dataframe/tests/test_dataframe.py::test_get_partition",
"dask/dataframe/tests/test_dataframe.py::test_ndim",
"dask/dataframe/tests/test_dataframe.py::test_dtype",
"dask/dataframe/tests/test_dataframe.py::test_value_counts",
"dask/dataframe/tests/test_dataframe.py::test_value_counts_not_sorted",
"dask/dataframe/tests/test_dataframe.py::test_value_counts_with_dropna",
"dask/dataframe/tests/test_dataframe.py::test_value_counts_with_normalize",
"dask/dataframe/tests/test_dataframe.py::test_value_counts_with_normalize_and_dropna[True]",
"dask/dataframe/tests/test_dataframe.py::test_value_counts_with_normalize_and_dropna[False]",
"dask/dataframe/tests/test_dataframe.py::test_unique",
"dask/dataframe/tests/test_dataframe.py::test_isin",
"dask/dataframe/tests/test_dataframe.py::test_contains_frame",
"dask/dataframe/tests/test_dataframe.py::test_len",
"dask/dataframe/tests/test_dataframe.py::test_size",
"dask/dataframe/tests/test_dataframe.py::test_shape",
"dask/dataframe/tests/test_dataframe.py::test_nbytes",
"dask/dataframe/tests/test_dataframe.py::test_quantile[0.3-dask]",
"dask/dataframe/tests/test_dataframe.py::test_quantile[0.5-dask]",
"dask/dataframe/tests/test_dataframe.py::test_quantile[0.9-dask]",
"dask/dataframe/tests/test_dataframe.py::test_quantile_missing[dask]",
"dask/dataframe/tests/test_dataframe.py::test_empty_quantile[dask]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_quantile[None-dask-expected1]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_quantile[True-dask-expected1]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_quantile[False-dask-expected1]",
"dask/dataframe/tests/test_dataframe.py::test_quantile_datetime_numeric_only_false",
"dask/dataframe/tests/test_dataframe.py::test_quantile_for_possibly_unsorted_q",
"dask/dataframe/tests/test_dataframe.py::test_quantile_tiny_partitions",
"dask/dataframe/tests/test_dataframe.py::test_quantile_trivial_partitions",
"dask/dataframe/tests/test_dataframe.py::test_index",
"dask/dataframe/tests/test_dataframe.py::test_assign",
"dask/dataframe/tests/test_dataframe.py::test_assign_callable",
"dask/dataframe/tests/test_dataframe.py::test_assign_dtypes",
"dask/dataframe/tests/test_dataframe.py::test_assign_pandas_series",
"dask/dataframe/tests/test_dataframe.py::test_map",
"dask/dataframe/tests/test_dataframe.py::test_concat",
"dask/dataframe/tests/test_dataframe.py::test_known_divisions",
"dask/dataframe/tests/test_dataframe.py::test_unknown_divisions",
"dask/dataframe/tests/test_dataframe.py::test_with_min_count",
"dask/dataframe/tests/test_dataframe.py::test_align[inner]",
"dask/dataframe/tests/test_dataframe.py::test_align[outer]",
"dask/dataframe/tests/test_dataframe.py::test_align[left]",
"dask/dataframe/tests/test_dataframe.py::test_align[right]",
"dask/dataframe/tests/test_dataframe.py::test_align_axis[inner]",
"dask/dataframe/tests/test_dataframe.py::test_align_axis[outer]",
"dask/dataframe/tests/test_dataframe.py::test_align_axis[left]",
"dask/dataframe/tests/test_dataframe.py::test_align_axis[right]",
"dask/dataframe/tests/test_dataframe.py::test_combine",
"dask/dataframe/tests/test_dataframe.py::test_combine_first",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_picklable",
"dask/dataframe/tests/test_dataframe.py::test_random_partitions",
"dask/dataframe/tests/test_dataframe.py::test_series_round",
"dask/dataframe/tests/test_dataframe.py::test_repartition",
"dask/dataframe/tests/test_dataframe.py::test_repartition_divisions",
"dask/dataframe/tests/test_dataframe.py::test_repartition_on_pandas_dataframe",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-1-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-2-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-4-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-float-5-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-1-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-2-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-4-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>0-M8[ns]-5-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-1-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-2-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-4-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-float-5-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-1-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-2-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-4-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-1-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-1-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-4-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-4-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions[<lambda>1-M8[ns]-5-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-1kiB-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-1kiB-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-1kiB-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-1kiB-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-379-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-379-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-379-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>0-379-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-1kiB-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-1kiB-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-1kiB-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-1kiB-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-379-2-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-379-2-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-379-5-True]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size[<lambda>1-379-5-False]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_partition_size_arg",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions_same_limits",
"dask/dataframe/tests/test_dataframe.py::test_repartition_npartitions_numeric_edge_case",
"dask/dataframe/tests/test_dataframe.py::test_repartition_object_index",
"dask/dataframe/tests/test_dataframe.py::test_repartition_datetime_tz_index",
"dask/dataframe/tests/test_dataframe.py::test_repartition_freq_divisions",
"dask/dataframe/tests/test_dataframe.py::test_repartition_freq_errors",
"dask/dataframe/tests/test_dataframe.py::test_repartition_freq_month",
"dask/dataframe/tests/test_dataframe.py::test_repartition_freq_day",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[M-MS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[ME-MS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[MS-MS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2M-2MS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[Q-QS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[Q-FEB-QS-FEB]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2Q-2QS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2Q-FEB-2QS-FEB]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2QS-FEB-2QS-FEB]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[BQ-BQS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2BQ-2BQS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[SM-SMS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[A-YS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[Y-YS0]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[A-JUN-YS-JUN]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[Y-JUN-YS-JUN]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[BA-BYS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[2BA-2BYS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[BY-BYS]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[Y-YS1]",
"dask/dataframe/tests/test_dataframe.py::test_map_freq_to_period_start[freq20-expected_freq20]",
"dask/dataframe/tests/test_dataframe.py::test_repartition_input_errors",
"dask/dataframe/tests/test_dataframe.py::test_embarrassingly_parallel_operations",
"dask/dataframe/tests/test_dataframe.py::test_fillna",
"dask/dataframe/tests/test_dataframe.py::test_ffill",
"dask/dataframe/tests/test_dataframe.py::test_bfill",
"dask/dataframe/tests/test_dataframe.py::test_delayed_roundtrip[True]",
"dask/dataframe/tests/test_dataframe.py::test_delayed_roundtrip[False]",
"dask/dataframe/tests/test_dataframe.py::test_from_delayed_lazy_if_meta_provided",
"dask/dataframe/tests/test_dataframe.py::test_fillna_duplicate_index",
"dask/dataframe/tests/test_dataframe.py::test_fillna_multi_dataframe",
"dask/dataframe/tests/test_dataframe.py::test_fillna_dask_dataframe_input",
"dask/dataframe/tests/test_dataframe.py::test_ffill_bfill",
"dask/dataframe/tests/test_dataframe.py::test_fillna_series_types",
"dask/dataframe/tests/test_dataframe.py::test_sample",
"dask/dataframe/tests/test_dataframe.py::test_sample_without_replacement",
"dask/dataframe/tests/test_dataframe.py::test_sample_raises",
"dask/dataframe/tests/test_dataframe.py::test_select_dtypes[include0-None]",
"dask/dataframe/tests/test_dataframe.py::test_select_dtypes[None-exclude1]",
"dask/dataframe/tests/test_dataframe.py::test_select_dtypes[include2-exclude2]",
"dask/dataframe/tests/test_dataframe.py::test_select_dtypes[include3-None]",
"dask/dataframe/tests/test_dataframe.py::test_deterministic_apply_concat_apply_names",
"dask/dataframe/tests/test_dataframe.py::test_pipe",
"dask/dataframe/tests/test_dataframe.py::test_gh_517",
"dask/dataframe/tests/test_dataframe.py::test_drop_axis_1",
"dask/dataframe/tests/test_dataframe.py::test_drop_columns[columns0]",
"dask/dataframe/tests/test_dataframe.py::test_drop_columns[columns1]",
"dask/dataframe/tests/test_dataframe.py::test_gh580",
"dask/dataframe/tests/test_dataframe.py::test_gh6305",
"dask/dataframe/tests/test_dataframe.py::test_rename_dict",
"dask/dataframe/tests/test_dataframe.py::test_rename_function",
"dask/dataframe/tests/test_dataframe.py::test_rename_index",
"dask/dataframe/tests/test_dataframe.py::test_to_timestamp",
"dask/dataframe/tests/test_dataframe.py::test_to_frame",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array_raises[False0]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array_raises[False1]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array_unknown[False]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array_unknown[True]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array[lengths0-False-None]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array[True-False-None]",
"dask/dataframe/tests/test_dataframe.py::test_to_dask_array[True-False-meta2]",
"dask/dataframe/tests/test_dataframe.py::test_apply",
"dask/dataframe/tests/test_dataframe.py::test_apply_warns",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_map[None]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_map[ignore]",
"dask/dataframe/tests/test_dataframe.py::test_add_prefix",
"dask/dataframe/tests/test_dataframe.py::test_add_suffix",
"dask/dataframe/tests/test_dataframe.py::test_abs",
"dask/dataframe/tests/test_dataframe.py::test_round",
"dask/dataframe/tests/test_dataframe.py::test_cov_dataframe[None]",
"dask/dataframe/tests/test_dataframe.py::test_cov_dataframe[True]",
"dask/dataframe/tests/test_dataframe.py::test_cov_dataframe[False]",
"dask/dataframe/tests/test_dataframe.py::test_cov_series",
"dask/dataframe/tests/test_dataframe.py::test_corr",
"dask/dataframe/tests/test_dataframe.py::test_corr_same_name",
"dask/dataframe/tests/test_dataframe.py::test_cov_corr_meta[1]",
"dask/dataframe/tests/test_dataframe.py::test_cov_corr_meta[2]",
"dask/dataframe/tests/test_dataframe.py::test_cov_corr_mixed[True]",
"dask/dataframe/tests/test_dataframe.py::test_autocorr",
"dask/dataframe/tests/test_dataframe.py::test_apply_infer_columns",
"dask/dataframe/tests/test_dataframe.py::test_index_time_properties",
"dask/dataframe/tests/test_dataframe.py::test_nlargest_nsmallest",
"dask/dataframe/tests/test_dataframe.py::test_nlargest_nsmallest_raises",
"dask/dataframe/tests/test_dataframe.py::test_reset_index",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_compute_forward_kwargs",
"dask/dataframe/tests/test_dataframe.py::test_series_iter",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_iterrows",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_itertuples",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_items[columns0]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_itertuples_with_index_false",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_itertuples_with_name_none",
"dask/dataframe/tests/test_dataframe.py::test_astype",
"dask/dataframe/tests/test_dataframe.py::test_astype_categoricals",
"dask/dataframe/tests/test_dataframe.py::test_astype_categoricals_known",
"dask/dataframe/tests/test_dataframe.py::test_groupby_callable",
"dask/dataframe/tests/test_dataframe.py::test_methods_tokenize_differently",
"dask/dataframe/tests/test_dataframe.py::test_gh_1301",
"dask/dataframe/tests/test_dataframe.py::test_timeseries_sorted",
"dask/dataframe/tests/test_dataframe.py::test_index_errors",
"dask/dataframe/tests/test_dataframe.py::test_index_nulls[None]",
"dask/dataframe/tests/test_dataframe.py::test_index_nulls[null_value1]",
"dask/dataframe/tests/test_dataframe.py::test_index_nulls[null_value2]",
"dask/dataframe/tests/test_dataframe.py::test_set_index_with_index",
"dask/dataframe/tests/test_dataframe.py::test_column_assignment",
"dask/dataframe/tests/test_dataframe.py::test_array_assignment",
"dask/dataframe/tests/test_dataframe.py::test_columns_assignment",
"dask/dataframe/tests/test_dataframe.py::test_attribute_assignment",
"dask/dataframe/tests/test_dataframe.py::test_setitem_triggering_realign",
"dask/dataframe/tests/test_dataframe.py::test_inplace_operators",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx0-True]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx0-False]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx1-True]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx1-False]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx2-True]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin[idx2-False]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin_numeric_only[idxmin]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin_numeric_only[idxmax]",
"dask/dataframe/tests/test_dataframe.py::test_idxmaxmin_empty_partitions",
"dask/dataframe/tests/test_dataframe.py::test_mode_numeric_only",
"dask/dataframe/tests/test_dataframe.py::test_getitem_meta",
"dask/dataframe/tests/test_dataframe.py::test_getitem_string_subclass",
"dask/dataframe/tests/test_dataframe.py::test_getitem_column_types[list]",
"dask/dataframe/tests/test_dataframe.py::test_getitem_column_types[array]",
"dask/dataframe/tests/test_dataframe.py::test_getitem_column_types[Series]",
"dask/dataframe/tests/test_dataframe.py::test_getitem_column_types[Index]",
"dask/dataframe/tests/test_dataframe.py::test_getitem_with_bool_dataframe_as_key",
"dask/dataframe/tests/test_dataframe.py::test_getitem_with_non_series",
"dask/dataframe/tests/test_dataframe.py::test_ipython_completion",
"dask/dataframe/tests/test_dataframe.py::test_diff",
"dask/dataframe/tests/test_dataframe.py::test_shift",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_DatetimeIndex[B-False]",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_DatetimeIndex[D-True]",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_DatetimeIndex[h-True]",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_PeriodIndex[D-True]",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_PeriodIndex[h-True]",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_TimedeltaIndex",
"dask/dataframe/tests/test_dataframe.py::test_shift_with_freq_errors",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-2-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-2-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-2-20]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-5-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-5-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[1-5-20]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-2-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-2-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-2-20]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-5-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-5-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[5-5-20]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-2-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-2-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-2-20]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-5-1]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-5-4]",
"dask/dataframe/tests/test_dataframe.py::test_hash_split_unique[20-5-20]",
"dask/dataframe/tests/test_dataframe.py::test_split_out_drop_duplicates[None]",
"dask/dataframe/tests/test_dataframe.py::test_split_out_drop_duplicates[2]",
"dask/dataframe/tests/test_dataframe.py::test_split_out_value_counts[None]",
"dask/dataframe/tests/test_dataframe.py::test_split_out_value_counts[2]",
"dask/dataframe/tests/test_dataframe.py::test_values",
"dask/dataframe/tests/test_dataframe.py::test_values_extension_dtypes",
"dask/dataframe/tests/test_dataframe.py::test_copy",
"dask/dataframe/tests/test_dataframe.py::test_del",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_dataframe[True-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_dataframe[True-False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_dataframe[False-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_dataframe[False-False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_series[True-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_series[True-False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_series[False-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_series[False-False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_index[True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_index[False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_per_partition[True-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_per_partition[True-False]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_per_partition[False-True]",
"dask/dataframe/tests/test_dataframe.py::test_memory_usage_per_partition[False-False]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[sum]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[mean]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[std]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[var]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[count]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[min]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[max]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[idxmin]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[idxmax]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[prod]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[all]",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_reductions_arithmetic[sem]",
"dask/dataframe/tests/test_dataframe.py::test_median",
"dask/dataframe/tests/test_dataframe.py::test_median_approximate[dask]",
"dask/dataframe/tests/test_dataframe.py::test_datetime_loc_open_slicing",
"dask/dataframe/tests/test_dataframe.py::test_to_datetime[False]",
"dask/dataframe/tests/test_dataframe.py::test_to_timedelta",
"dask/dataframe/tests/test_dataframe.py::test_isna[values0]",
"dask/dataframe/tests/test_dataframe.py::test_isna[values1]",
"dask/dataframe/tests/test_dataframe.py::test_slice_on_filtered_boundary[0]",
"dask/dataframe/tests/test_dataframe.py::test_slice_on_filtered_boundary[9]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_nonmonotonic",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_empty",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[-1-None-False-False-drop0]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[-1-None-False-True-drop1]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[None-3-False-False-drop2]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[None-3-True-False-drop3]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[-0.5-None-False-False-drop4]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[-0.5-None-False-True-drop5]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[-1.5-None-False-True-drop6]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[None-3.5-False-False-drop7]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[None-3.5-True-False-drop8]",
"dask/dataframe/tests/test_dataframe.py::test_with_boundary[None-2.5-False-False-drop9]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index0-0-9]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index1--1-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index2-None-10]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index3-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index4--1-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index5-None-2]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index6--2-3]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index7-None-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index8-left8-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index9-None-right9]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index10-left10-None]",
"dask/dataframe/tests/test_dataframe.py::test_boundary_slice_same[index11-None-right11]",
"dask/dataframe/tests/test_dataframe.py::test_better_errors_object_reductions",
"dask/dataframe/tests/test_dataframe.py::test_sample_empty_partitions",
"dask/dataframe/tests/test_dataframe.py::test_coerce",
"dask/dataframe/tests/test_dataframe.py::test_bool",
"dask/dataframe/tests/test_dataframe.py::test_map_partition_array[asarray]",
"dask/dataframe/tests/test_dataframe.py::test_map_partition_array[func1]",
"dask/dataframe/tests/test_dataframe.py::test_mixed_dask_array_operations",
"dask/dataframe/tests/test_dataframe.py::test_mixed_dask_array_operations_errors",
"dask/dataframe/tests/test_dataframe.py::test_mixed_dask_array_multi_dimensional",
"dask/dataframe/tests/test_dataframe.py::test_meta_raises",
"dask/dataframe/tests/test_dataframe.py::test_partitions_indexer",
"dask/dataframe/tests/test_dataframe.py::test_mod_eq",
"dask/dataframe/tests/test_dataframe.py::test_setitem",
"dask/dataframe/tests/test_dataframe.py::test_setitem_with_bool_dataframe_as_key",
"dask/dataframe/tests/test_dataframe.py::test_setitem_with_bool_series_as_key",
"dask/dataframe/tests/test_dataframe.py::test_setitem_with_numeric_column_name_raises_not_implemented",
"dask/dataframe/tests/test_dataframe.py::test_broadcast",
"dask/dataframe/tests/test_dataframe.py::test_has_parallel_type",
"dask/dataframe/tests/test_dataframe.py::test_map_index",
"dask/dataframe/tests/test_dataframe.py::test_assign_index",
"dask/dataframe/tests/test_dataframe.py::test_index_divisions",
"dask/dataframe/tests/test_dataframe.py::test_replace",
"dask/dataframe/tests/test_dataframe.py::test_dtype_cast",
"dask/dataframe/tests/test_dataframe.py::test_series_map[False-False-1-1]",
"dask/dataframe/tests/test_dataframe.py::test_series_map[False-True-1-1]",
"dask/dataframe/tests/test_dataframe.py::test_series_map[True-False-1-1]",
"dask/dataframe/tests/test_dataframe.py::test_series_map[True-True-1-1]",
"dask/dataframe/tests/test_dataframe.py::test_pop",
"dask/dataframe/tests/test_dataframe.py::test_nunique[0-True]",
"dask/dataframe/tests/test_dataframe.py::test_nunique[0-False]",
"dask/dataframe/tests/test_dataframe.py::test_nunique[1-True]",
"dask/dataframe/tests/test_dataframe.py::test_nunique[1-False]",
"dask/dataframe/tests/test_dataframe.py::test_iter",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_groupby_cumsum_agg_empty_partitions",
"dask/dataframe/tests/test_dataframe.py::test_dataframe_groupby_cumprod_agg_empty_partitions",
"dask/dataframe/tests/test_dataframe.py::test_join_series",
"dask/dataframe/tests/test_dataframe.py::test_assign_na_float_columns",
"dask/dataframe/tests/test_dataframe.py::test_assign_no_warning_fragmented",
"dask/dataframe/tests/test_dataframe.py::test_dot",
"dask/dataframe/tests/test_dataframe.py::test_dot_nan",
"dask/dataframe/tests/test_dataframe.py::test_use_of_weakref_proxy",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-False-series0]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-False-series1]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-False-series2]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-False-series3]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-False-series4]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-True-series0]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-True-series1]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-True-series2]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-True-series3]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Series-True-series4]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-False-series0]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-False-series1]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-False-series2]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-False-series3]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-False-series4]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-True-series0]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-True-series1]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-True-series2]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-True-series3]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_numeric[Index-True-series4]",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_dt64",
"dask/dataframe/tests/test_dataframe.py::test_index_is_monotonic_dt64",
"dask/dataframe/tests/test_dataframe.py::test_is_monotonic_empty_partitions",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[2-columns-int]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[2-columns-float]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[2-index-int]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[2-index-float]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[5-columns-int]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[5-columns-float]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[5-index-int]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict[5-index-float]",
"dask/dataframe/tests/test_dataframe.py::test_from_dict_raises",
"dask/dataframe/tests/test_dataframe.py::test_empty",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_extension_dtype[int64[pyarrow]]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_extension_dtype[int32[pyarrow]]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_extension_dtype[float64[pyarrow]]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_extension_dtype[float32[pyarrow]]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_extension_dtype[uint8[pyarrow]]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_decimal_extension_dtype",
"dask/dataframe/tests/test_dataframe.py::test_to_backend",
"dask/dataframe/tests/test_dataframe.py::test_transform_getitem_works[max]",
"dask/dataframe/tests/test_dataframe.py::test_transform_getitem_works[sum]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_array_like[df0-cond0]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_array_like[df1-cond1]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_array_like[df2-cond2]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_array_like[df3-cond3]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_array_like[df4-cond4]",
"dask/dataframe/tests/test_dataframe.py::test_mask_where_callable",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_schema_dispatch",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_schema_dispatch_preserves_index[True]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_schema_dispatch_preserves_index[False]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_conversion_dispatch[True]",
"dask/dataframe/tests/test_dataframe.py::test_pyarrow_conversion_dispatch[False]",
"dask/dataframe/tests/test_dataframe.py::test_enforce_runtime_divisions",
"dask/dataframe/tests/test_dataframe.py::test_query_planning_config_warns"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,000 | 336 | [
"dask/dataframe/backends.py",
"dask/delayed.py"
] |
python-babel__babel-1100 | 2f87363410f3c904e107e85ca10b9f84902db93f | 2024-07-17 15:20:43 | d3346ee33b13f50af582e31ae5c337aa409dda11 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/python-babel/babel/pull/1100?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 90.34%. Comparing base [(`2f87363`)](https://app.codecov.io/gh/python-babel/babel/commit/2f87363410f3c904e107e85ca10b9f84902db93f?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) to head [(`ff4c01c`)](https://app.codecov.io/gh/python-babel/babel/commit/ff4c01cd90bdcb36f7a6d7ccc413c48257554979?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel).
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## master #1100 +/- ##
==========================================
- Coverage 91.09% 90.34% -0.75%
==========================================
Files 26 26
Lines 4470 4476 +6
==========================================
- Hits 4072 4044 -28
- Misses 398 432 +34
```
| [Flag](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | Coverage Δ | |
|---|---|---|
| [macos-12-3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [macos-12-3.11](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [macos-12-3.12](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [macos-12-3.8](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [macos-12-3.9](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [macos-12-pypy3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [ubuntu-22.04-3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `90.05% <66.66%> (+0.12%)` | :arrow_up: |
| [ubuntu-22.04-3.11](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `90.05% <66.66%> (+0.12%)` | :arrow_up: |
| [ubuntu-22.04-3.12](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [ubuntu-22.04-3.8](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `90.05% <50.00%> (+0.19%)` | :arrow_up: |
| [ubuntu-22.04-3.9](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `90.05% <66.66%> (+0.12%)` | :arrow_up: |
| [ubuntu-22.04-pypy3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-3.11](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-3.12](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-3.8](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-3.9](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
| [windows-2022-pypy3.10](https://app.codecov.io/gh/python-babel/babel/pull/1100/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel) | `?` | |
Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel#carryforward-flags-in-the-pull-request-comment) to find out more.
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/python-babel/babel/pull/1100?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=python-babel).
matthiashuschle: Looks good to me. | diff --git a/babel/localtime/_helpers.py b/babel/localtime/_helpers.py
index f27b315..e7e6705 100644
--- a/babel/localtime/_helpers.py
+++ b/babel/localtime/_helpers.py
@@ -2,7 +2,11 @@ try:
import pytz
except ModuleNotFoundError:
pytz = None
+
+try:
import zoneinfo
+except ModuleNotFoundError:
+ zoneinfo = None
def _get_tzinfo(tzenv: str):
@@ -19,6 +23,16 @@ def _get_tzinfo(tzenv: str):
else:
try:
return zoneinfo.ZoneInfo(tzenv)
+ except ValueError as ve:
+ # This is somewhat hacky, but since _validate_tzfile_path() doesn't
+ # raise a specific error type, we'll need to check the message to be
+ # one we know to be from that function.
+ # If so, we pretend it meant that the TZ didn't exist, for the benefit
+ # of `babel.localtime` catching the `LookupError` raised by
+ # `_get_tzinfo_or_raise()`.
+ # See https://github.com/python-babel/babel/issues/1092
+ if str(ve).startswith("ZoneInfo keys "):
+ return None
except zoneinfo.ZoneInfoNotFoundError:
pass
| ValueError from failed import of localtime submodule
## Overview Description
When calling routines like `babel.numbers.get_decimal_symbol`, an internal import of `babel.localtime` fails under certain circumstances with a ValueError that may easily escape notice.
## Steps to Reproduce
1. Have a Unix system
2. Set TZ=/UTC
3. In Python 3.9+ without `pytz`, run `babel.numbers.parse_decimal("5.2")`
## Actual Results
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.10/dist-packages/babel/numbers.py", line 1103, in parse_decimal
group_symbol = get_group_symbol(locale, numbering_system=numbering_system)
File "/usr/local/lib/python3.10/dist-packages/babel/numbers.py", line 452, in get_group_symbol
return _get_number_symbols(locale, numbering_system=numbering_system).get('group', ',')
File "/usr/local/lib/python3.10/dist-packages/babel/numbers.py", line 335, in _get_number_symbols
return parsed_locale.number_symbols[numbering_system]
File "/usr/local/lib/python3.10/dist-packages/babel/core.py", line 640, in number_symbols
return self._data['number_symbols']
File "/usr/local/lib/python3.10/dist-packages/babel/core.py", line 439, in _data
self.__data = localedata.LocaleDataDict(localedata.load(str(self)))
File "/usr/local/lib/python3.10/dist-packages/babel/localedata.py", line 137, in load
data = load(parent).copy()
File "/usr/local/lib/python3.10/dist-packages/babel/localedata.py", line 137, in load
data = load(parent).copy()
File "/usr/local/lib/python3.10/dist-packages/babel/localedata.py", line 137, in load
data = load(parent).copy()
File "/usr/local/lib/python3.10/dist-packages/babel/localedata.py", line 143, in load
data = pickle.load(fileobj)
File "/usr/local/lib/python3.10/dist-packages/babel/dates.py", line 34, in <module>
from babel import localtime
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/__init__.py", line 41, in <module>
LOCALTZ = get_localzone()
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/__init__.py", line 37, in get_localzone
return _get_localzone()
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/_unix.py", line 36, in _get_localzone
return _tz_from_env(tzenv)
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/_unix.py", line 21, in _tz_from_env
return _get_tzinfo_or_raise(tzenv)
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/_helpers.py", line 29, in _get_tzinfo_or_raise
tzinfo = _get_tzinfo(tzenv)
File "/usr/local/lib/python3.10/dist-packages/babel/localtime/_helpers.py", line 21, in _get_tzinfo
return zoneinfo.ZoneInfo(tzenv)
File "/usr/lib/python3.10/zoneinfo/_tzpath.py", line 67, in find_tzfile
_validate_tzfile_path(key)
File "/usr/lib/python3.10/zoneinfo/_tzpath.py", line 81, in _validate_tzfile_path
raise ValueError(
ValueError: ZoneInfo keys may not be absolute paths, got: /UTC
```
## Expected Results
`Decimal('5.2')`
## Reproducibility
Works repeatedly with many calls, as long as they rely on `localtime`.
## Additional Information
AFAICT the `babel.localtime` submodule is loaded lazily in the course of processing calls like `babel.numbers.parse_decimal` or `babel.numbers.get_decimal_symbol`. On systems where `pytz` is not present, `zoneinfo` is loaded in `babel.localtime._unix.py` via `babel.localtime.__init__.py`, where `get_localzone` is also called. On systems with `TZ` set (and not pointing to a local file), this triggers a call to `zoneinfo.ZoneInfo` with the value of `TZ`. That value may be invalid, which raises an uncaught ValueError, and the error recurs on every subsequent call because the import keeps failing.
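A minimal sketch of the failing call, added here for illustration (it assumes only Python 3.9+ with the stdlib `zoneinfo`; the `/UTC` key mirrors the `TZ` value above):
```python
# Illustration only: zoneinfo rejects absolute-path keys with a plain
# ValueError rather than ZoneInfoNotFoundError, so the except clause in
# babel.localtime._helpers._get_tzinfo() (before the fix) never sees it.
import zoneinfo

try:
    zoneinfo.ZoneInfo("/UTC")  # the value taken from TZ=/UTC
except zoneinfo.ZoneInfoNotFoundError:
    print("unknown timezone")  # the branch babel handled before the fix
except ValueError as exc:
    print(f"escapes babel's handling: {exc}")
```
Running this prints the same "ZoneInfo keys may not be absolute paths" message that ends the traceback above.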
While the workaround is pretty easy (install `pytz`), the problem here is awareness:
- I don't think many people check the full env list for potential pitfalls. It may even be out of the user's control - especially on cloud services.
- I can't think of a test that catches this accidentally.
- Many of these calls are specifically wrapped to catch `ValueError` under the assumption that it means the parsing failed due to the input value.
So this is a potential case of "fails only in production silently". | python-babel/babel | diff --git a/tests/test_localtime.py b/tests/test_localtime.py
new file mode 100644
index 0000000..723ffa0
--- /dev/null
+++ b/tests/test_localtime.py
@@ -0,0 +1,29 @@
+import sys
+
+import pytest
+
+from babel.localtime import _helpers, get_localzone
+
+
[email protected](
+ sys.platform == "win32",
+ reason="Issue 1092 is not applicable on Windows",
+)
+def test_issue_1092_without_pytz(monkeypatch):
+ pytest.importorskip("zoneinfo", reason="zoneinfo is not available")
+ monkeypatch.setenv("TZ", "/UTC") # Malformed timezone name.
+ # In case pytz _is_ also installed, we want to pretend it's not, so patch it out...
+ monkeypatch.setattr(_helpers, "pytz", None)
+ with pytest.raises(LookupError):
+ get_localzone()
+
+
[email protected](
+ sys.platform == "win32",
+ reason="Issue 1092 is not applicable on Windows",
+)
+def test_issue_1092_with_pytz(monkeypatch):
+ pytest.importorskip("pytz", reason="pytz is not installed")
+ monkeypatch.setenv("TZ", "/UTC") # Malformed timezone name.
+ with pytest.raises(LookupError):
+ get_localzone()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 2.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
-e git+https://github.com/python-babel/babel.git@2f87363410f3c904e107e85ca10b9f84902db93f#egg=Babel
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docutils==0.19
exceptiongroup==1.2.2
freezegun==1.5.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
urllib3==2.3.0
zipp==3.21.0
| name: babel
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.15.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docutils==0.19
- exceptiongroup==1.2.2
- freezegun==1.5.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/babel
| [
"tests/test_localtime.py::test_issue_1092_without_pytz"
] | [] | [] | [] | BSD 3-Clause "New" or "Revised" License | 19,007 | 328 | [
"babel/localtime/_helpers.py"
] |
unionai-oss__pandera-1754 | bf5ef4c2efda8c908e5f0f1e3c60aae81c5690b0 | 2024-07-18 20:27:23 | ea4538d2f71795bba09e602d568d673798c92b35 | diff --git a/pandera/backends/pandas/components.py b/pandera/backends/pandas/components.py
index 592f471..c70b95c 100644
--- a/pandera/backends/pandas/components.py
+++ b/pandera/backends/pandas/components.py
@@ -3,7 +3,7 @@
# pylint: disable=too-many-locals
import traceback
-from copy import copy, deepcopy
+from copy import deepcopy
from typing import Iterable, List, Optional, Union
import numpy as np
@@ -71,7 +71,7 @@ class ColumnBackend(ArraySchemaBackend):
# pylint: disable=super-with-arguments
validated_check_obj = super(ColumnBackend, self).validate(
check_obj,
- copy(schema).set_name(column_name),
+ deepcopy(schema).set_name(column_name),
head=head,
tail=tail,
sample=sample,
| Wrong Column name in SchemaErrors using regex Columns
**Describe the bug**
When using a regex column to validate columns in a data frame, the failure cases for any column matched by the regex get listed under a single column name.
- [x] I have checked that this issue has not already been reported.
- [x] I have confirmed this bug exists on the latest version of pandera.
- [ ] (optional) I have confirmed this bug exists on the main branch of pandera.
**Note**: Please read [this guide](https://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) detailing how to provide the necessary information for us to reproduce your bug.
#### Code Sample, a copy-pastable example
```python
import pandas as pd
import pandera as pa
def fn(data) -> bool:
return False
df = pd.DataFrame([{"a": 0, "b": 1}, {"a": 2, "b":3}])
schema = {}
schema[r"a|b"] = pa.Column(dtype=int, regex=True, checks=[pa.Check(element_wise=True, check_fn=fn)])
#schema[r"[a|b]"] = pa.Column(dtype=int, regex=True, checks=[pa.Check(element_wise=True, check_fn=fn)])
#individual columns are reported accurately
#schema["a"] = pa.Column(dtype=int, checks=[pa.Check(element_wise=True, check_fn=fn)])
#schema["b"] = pa.Column(dtype=int, checks=[pa.Check(element_wise=True, check_fn=fn)])
schema = pa.DataFrameSchema(schema)
try:
schema.validate(df, lazy=True)
except pa.errors.SchemaErrors as err:
print(err.failure_cases)
```
Output:
```logs
schema_context column check check_number failure_case index
0 Column b fn 0 0 0
1 Column b fn 0 2 1
2 Column b fn 0 1 0
3 Column b fn 0 3 1
```
#### Expected behavior
I would expect this output.
```logs
schema_context column check check_number failure_case index
0 Column a fn 0 0 0
1 Column a fn 0 2 1
2 Column b fn 0 1 0
3 Column b fn 0 3 1
```
#### Desktop (please complete the following information):
- OS: macOS 14.5 (23F79)
- Version: 0.20.3
| unionai-oss/pandera | diff --git a/tests/core/test_schema_components.py b/tests/core/test_schema_components.py
index 9467fbe..a7819ca 100644
--- a/tests/core/test_schema_components.py
+++ b/tests/core/test_schema_components.py
@@ -476,6 +476,38 @@ def test_column_regex_matching(
assert expected_matches == matched_columns.tolist()
+def test_column_regex_error_failure_cases():
+
+ data = pd.DataFrame({"a": [0, 2], "b": [1, 3]})
+
+ column_schema = Column(
+ name=r"a|b",
+ dtype=int,
+ regex=True,
+ checks=Check(
+ element_wise=True,
+ name="custom_check",
+ check_fn=lambda *args, **kwargs: False,
+ ),
+ )
+
+ expected_error = pd.DataFrame(
+ {
+ "schema_context": ["Column"] * 4,
+ "column": ["a", "a", "b", "b"],
+ "check": ["custom_check"] * 4,
+ "check_number": [0] * 4,
+ "failure_case": [0, 2, 1, 3],
+ "index": [0, 1, 0, 1],
+ }
+ )
+
+ try:
+ column_schema.validate(data, lazy=True)
+ except errors.SchemaErrors as err:
+ pd.testing.assert_frame_equal(err.failure_cases, expected_error)
+
+
INT_REGEX = r"-?\d+$"
FLOAT_REGEX = r"-?\d+\.\d+$"
DATETIME_REGEX = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.20 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"dev/requirements-3.9.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiosignal==1.3.1
alabaster==0.7.16
annotated-types==0.7.0
anyio==4.4.0
appnope==0.1.4
argcomplete==3.4.0
astroid==2.15.8
asttokens==2.4.1
asv==0.6.3
asv_runner==0.2.1
attrs==23.2.0
Babel==2.15.0
backports.tarfile==1.2.0
beautifulsoup4==4.12.3
black==24.4.2
build==1.2.1
certifi==2024.7.4
cffi==1.17.1
cfgv==3.4.0
chardet==5.2.0
charset-normalizer==3.3.2
click==8.1.7
cloudpickle==3.0.0
colorlog==6.8.2
comm==0.2.2
commonmark==0.9.1
coverage==7.6.0
cryptography==44.0.2
dask==2024.7.0
dask-expr==1.1.7
debugpy==1.8.2
decorator==5.1.1
dill==0.3.8
distlib==0.3.8
distributed==2024.7.0
dnspython==2.6.1
docutils==0.21.2
email_validator==2.2.0
exceptiongroup==1.2.2
execnet==2.1.1
executing==2.0.1
fastapi==0.111.0
fastapi-cli==0.0.4
fastjsonschema==2.20.0
filelock==3.15.4
frictionless==4.40.8
frozenlist==1.4.1
fsspec==2024.6.1
furo==2024.5.6
geopandas==1.0.1
greenlet==3.0.3
grpcio==1.64.1
h11==0.14.0
httpcore==1.0.5
httptools==0.6.1
httpx==0.27.0
hypothesis==6.108.0
identify==2.6.0
idna==3.7
imagesize==1.4.1
importlib_metadata==8.0.0
iniconfig==2.0.0
ipykernel==6.29.5
ipython==8.18.1
isodate==0.6.1
isort==5.13.2
jaraco.classes==3.4.0
jaraco.context==5.3.0
jaraco.functools==4.0.1
jedi==0.19.1
jeepney==0.9.0
Jinja2==3.1.4
joblib==1.4.2
json5==0.9.25
jsonschema==4.23.0
jsonschema-specifications==2023.12.1
jupyter-cache==1.0.0
jupyter_client==8.6.2
jupyter_core==5.7.2
keyring==25.2.1
lazy-object-proxy==1.10.0
locket==1.0.0
markdown-it-py==3.0.0
marko==2.1.2
MarkupSafe==2.1.5
matplotlib-inline==0.1.7
mccabe==0.7.0
mdit-py-plugins==0.4.1
mdurl==0.1.2
modin==0.31.0
more-itertools==10.3.0
msgpack==1.0.8
multimethod==1.10
mypy==1.10.0
mypy-extensions==1.0.0
myst-nb==1.1.1
myst-parser==3.0.1
nbclient==0.10.0
nbformat==5.10.4
nest-asyncio==1.6.0
nh3==0.2.18
nodeenv==1.9.1
nox==2024.4.15
numpy==2.0.0
orjson==3.10.6
packaging==24.1
pandas==2.2.2
pandas-stubs==2.2.2.240603
-e git+https://github.com/unionai-oss/pandera.git@bf5ef4c2efda8c908e5f0f1e3c60aae81c5690b0#egg=pandera
parso==0.8.4
partd==1.4.2
pathspec==0.12.1
petl==1.7.15
pexpect==4.9.0
pkginfo==1.10.0
platformdirs==4.2.2
pluggy==1.5.0
polars==1.1.0
pre-commit==3.7.1
prompt_toolkit==3.0.47
protobuf==5.27.2
psutil==6.0.0
ptyprocess==0.7.0
pure-eval==0.2.2
py4j==0.10.9.7
pyarrow==16.1.0
pycparser==2.22
pydantic==2.8.2
pydantic_core==2.20.1
Pygments==2.18.0
pylint==2.17.3
Pympler==1.1
pyogrio==0.9.0
pyproj==3.6.1
pyproject_hooks==1.1.0
pyspark==3.5.1
pytest==8.2.2
pytest-asyncio==0.23.7
pytest-cov==5.0.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-multipart==0.0.9
python-slugify==8.0.4
pytz==2024.1
PyYAML==6.0.1
pyzmq==26.0.3
ray==2.32.0
readme_renderer==44.0
recommonmark==0.7.1
referencing==0.35.1
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==13.7.1
rpds-py==0.19.0
scipy==1.13.1
SecretStorage==3.3.3
shapely==2.0.5
shellingham==1.5.4
simpleeval==0.9.13
six==1.16.0
sniffio==1.3.1
snowballstemmer==2.2.0
sortedcontainers==2.4.0
soupsieve==2.5
Sphinx==7.3.7
sphinx-autodoc-typehints==1.14.1
sphinx-basic-ng==1.0.0b2
sphinx-copybutton==0.5.2
sphinx-docsearch==0.0.7
sphinx_design==0.6.0
sphinxcontrib-applehelp==1.0.8
sphinxcontrib-devhelp==1.0.6
sphinxcontrib-htmlhelp==2.0.5
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
SQLAlchemy==2.0.31
stack-data==0.6.3
starlette==0.37.2
stringcase==1.2.0
tabulate==0.9.0
tblib==3.0.0
text-unidecode==1.3
tomli==2.0.1
tomlkit==0.13.0
toolz==0.12.1
tornado==6.4.1
traitlets==5.14.3
twine==5.1.1
typeguard==4.3.0
typer==0.12.3
types-click==7.1.8
types-pkg-resources==0.1.3
types-pytz==2024.1.0.20240417
types-PyYAML==6.0.12.20240311
types-requests==2.32.0.20240712
typing-inspect==0.9.0
typing_extensions==4.12.2
tzdata==2024.1
ujson==5.10.0
urllib3==2.2.2
uvicorn==0.30.1
uvloop==0.19.0
validators==0.32.0
virtualenv==20.26.3
watchfiles==0.22.0
wcwidth==0.2.13
websockets==12.0
wrapt==1.16.0
xdoctest==1.1.5
zict==3.0.0
zipp==3.19.2
| name: pandera
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiosignal==1.3.1
- alabaster==0.7.16
- annotated-types==0.7.0
- anyio==4.4.0
- appnope==0.1.4
- argcomplete==3.4.0
- astroid==2.15.8
- asttokens==2.4.1
- asv==0.6.3
- asv-runner==0.2.1
- attrs==23.2.0
- babel==2.15.0
- backports-tarfile==1.2.0
- beautifulsoup4==4.12.3
- black==24.4.2
- build==1.2.1
- certifi==2024.7.4
- cffi==1.17.1
- cfgv==3.4.0
- chardet==5.2.0
- charset-normalizer==3.3.2
- click==8.1.7
- cloudpickle==3.0.0
- colorlog==6.8.2
- comm==0.2.2
- commonmark==0.9.1
- coverage==7.6.0
- cryptography==44.0.2
- dask==2024.7.0
- dask-expr==1.1.7
- debugpy==1.8.2
- decorator==5.1.1
- dill==0.3.8
- distlib==0.3.8
- distributed==2024.7.0
- dnspython==2.6.1
- docutils==0.21.2
- email-validator==2.2.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- executing==2.0.1
- fastapi==0.111.0
- fastapi-cli==0.0.4
- fastjsonschema==2.20.0
- filelock==3.15.4
- frictionless==4.40.8
- frozenlist==1.4.1
- fsspec==2024.6.1
- furo==2024.5.6
- geopandas==1.0.1
- greenlet==3.0.3
- grpcio==1.64.1
- h11==0.14.0
- httpcore==1.0.5
- httptools==0.6.1
- httpx==0.27.0
- hypothesis==6.108.0
- identify==2.6.0
- idna==3.7
- imagesize==1.4.1
- importlib-metadata==8.0.0
- iniconfig==2.0.0
- ipykernel==6.29.5
- ipython==8.18.1
- isodate==0.6.1
- isort==5.13.2
- jaraco-classes==3.4.0
- jaraco-context==5.3.0
- jaraco-functools==4.0.1
- jedi==0.19.1
- jeepney==0.9.0
- jinja2==3.1.4
- joblib==1.4.2
- json5==0.9.25
- jsonschema==4.23.0
- jsonschema-specifications==2023.12.1
- jupyter-cache==1.0.0
- jupyter-client==8.6.2
- jupyter-core==5.7.2
- keyring==25.2.1
- lazy-object-proxy==1.10.0
- locket==1.0.0
- markdown-it-py==3.0.0
- marko==2.1.2
- markupsafe==2.1.5
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mdit-py-plugins==0.4.1
- mdurl==0.1.2
- modin==0.31.0
- more-itertools==10.3.0
- msgpack==1.0.8
- multimethod==1.10
- mypy==1.10.0
- mypy-extensions==1.0.0
- myst-nb==1.1.1
- myst-parser==3.0.1
- nbclient==0.10.0
- nbformat==5.10.4
- nest-asyncio==1.6.0
- nh3==0.2.18
- nodeenv==1.9.1
- nox==2024.4.15
- numpy==2.0.0
- orjson==3.10.6
- packaging==24.1
- pandas==2.2.2
- pandas-stubs==2.2.2.240603
- pandera==0.0.0+dev0
- parso==0.8.4
- partd==1.4.2
- pathspec==0.12.1
- petl==1.7.15
- pexpect==4.9.0
- pip==24.1.2
- pkginfo==1.10.0
- platformdirs==4.2.2
- pluggy==1.5.0
- polars==1.1.0
- pre-commit==3.7.1
- prompt-toolkit==3.0.47
- protobuf==5.27.2
- psutil==6.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.2
- py4j==0.10.9.7
- pyarrow==16.1.0
- pycparser==2.22
- pydantic==2.8.2
- pydantic-core==2.20.1
- pygments==2.18.0
- pylint==2.17.3
- pympler==1.1
- pyogrio==0.9.0
- pyproj==3.6.1
- pyproject-hooks==1.1.0
- pyspark==3.5.1
- pytest==8.2.2
- pytest-asyncio==0.23.7
- pytest-cov==5.0.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- python-dotenv==1.0.1
- python-multipart==0.0.9
- python-slugify==8.0.4
- pytz==2024.1
- pyyaml==6.0.1
- pyzmq==26.0.3
- ray==2.32.0
- readme-renderer==44.0
- recommonmark==0.7.1
- referencing==0.35.1
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==13.7.1
- rpds-py==0.19.0
- scipy==1.13.1
- secretstorage==3.3.3
- shapely==2.0.5
- shellingham==1.5.4
- simpleeval==0.9.13
- six==1.16.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- soupsieve==2.5
- sphinx==7.3.7
- sphinx-autodoc-typehints==1.14.1
- sphinx-basic-ng==1.0.0b2
- sphinx-copybutton==0.5.2
- sphinx-design==0.6.0
- sphinx-docsearch==0.0.7
- sphinxcontrib-applehelp==1.0.8
- sphinxcontrib-devhelp==1.0.6
- sphinxcontrib-htmlhelp==2.0.5
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.7
- sphinxcontrib-serializinghtml==1.1.10
- sqlalchemy==2.0.31
- stack-data==0.6.3
- starlette==0.37.2
- stringcase==1.2.0
- tabulate==0.9.0
- tblib==3.0.0
- text-unidecode==1.3
- tomli==2.0.1
- tomlkit==0.13.0
- toolz==0.12.1
- tornado==6.4.1
- traitlets==5.14.3
- twine==5.1.1
- typeguard==4.3.0
- typer==0.12.3
- types-click==7.1.8
- types-pkg-resources==0.1.3
- types-pytz==2024.1.0.20240417
- types-pyyaml==6.0.12.20240311
- types-requests==2.32.0.20240712
- typing-extensions==4.12.2
- typing-inspect==0.9.0
- tzdata==2024.1
- ujson==5.10.0
- urllib3==2.2.2
- uvicorn==0.30.1
- uvloop==0.19.0
- validators==0.32.0
- virtualenv==20.26.3
- watchfiles==0.22.0
- wcwidth==0.2.13
- websockets==12.0
- wrapt==1.16.0
- xdoctest==1.1.5
- zict==3.0.0
- zipp==3.19.2
prefix: /opt/conda/envs/pandera
| [
"tests/core/test_schema_components.py::test_column_regex_error_failure_cases"
] | [] | [
"tests/core/test_schema_components.py::test_column",
"tests/core/test_schema_components.py::test_column_coerce",
"tests/core/test_schema_components.py::test_column_in_dataframe_schema",
"tests/core/test_schema_components.py::test_index_schema",
"tests/core/test_schema_components.py::test_index_schema_coerce[Float]",
"tests/core/test_schema_components.py::test_index_schema_coerce[Int]",
"tests/core/test_schema_components.py::test_index_schema_coerce[String]",
"tests/core/test_schema_components.py::test_index_schema_coerce_when_coerce_specified_at_schema_level[Float]",
"tests/core/test_schema_components.py::test_index_schema_coerce_when_coerce_specified_at_schema_level[Int]",
"tests/core/test_schema_components.py::test_index_schema_coerce_when_coerce_specified_at_schema_level[String]",
"tests/core/test_schema_components.py::test_multi_index_columns",
"tests/core/test_schema_components.py::test_multi_index_column_errors[schema0-df0]",
"tests/core/test_schema_components.py::test_multi_index_column_errors[schema1-df1]",
"tests/core/test_schema_components.py::test_multi_index_index",
"tests/core/test_schema_components.py::test_single_index_multi_index_mismatch",
"tests/core/test_schema_components.py::test_multi_index_schema_coerce",
"tests/core/test_schema_components.py::tests_multi_index_subindex_coerce",
"tests/core/test_schema_components.py::tests_multi_index_subindex_coerce_with_empty_subindex[True]",
"tests/core/test_schema_components.py::tests_multi_index_subindex_coerce_with_empty_subindex[False]",
"tests/core/test_schema_components.py::test_schema_component_equality_operators",
"tests/core/test_schema_components.py::test_column_regex",
"tests/core/test_schema_components.py::test_column_regex_multiindex",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex0-expected_matches0-None]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex1-expected_matches1-None]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex2-expected_matches2-None]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex3-expected_matches3-None]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex4-expected_matches4-None]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex5-None-SchemaError]",
"tests/core/test_schema_components.py::test_column_regex_matching[foo_1-None-IndexError]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex7-None-IndexError]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex8-None-IndexError]",
"tests/core/test_schema_components.py::test_column_regex_matching[column_name_regex9-None-IndexError]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types[.+-expected_matches0]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types[-?\\\\d+$-expected_matches1]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types[-?\\\\d+\\\\.\\\\d+$-expected_matches2]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types[\\\\d{4}-\\\\d{2}-\\\\d{2}",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types_multiindex[column_name_regex0-expected_matches0]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types_multiindex[column_name_regex1-expected_matches1]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types_multiindex[column_name_regex2-expected_matches2]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types_multiindex[column_name_regex3-expected_matches3]",
"tests/core/test_schema_components.py::test_column_regex_matching_non_str_types_multiindex[column_name_regex4-expected_matches4]",
"tests/core/test_schema_components.py::test_column_regex_strict",
"tests/core/test_schema_components.py::test_column_regex_non_str_types",
"tests/core/test_schema_components.py::test_column_type_can_be_set",
"tests/core/test_schema_components.py::test_multiindex_duplicate_index_names[schema0-multiindex0-False]",
"tests/core/test_schema_components.py::test_multiindex_duplicate_index_names[schema0-multiindex1-True]",
"tests/core/test_schema_components.py::test_multiindex_duplicate_index_names[schema1-multiindex0-False]",
"tests/core/test_schema_components.py::test_multiindex_duplicate_index_names[schema1-multiindex1-True]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex0-schema0-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex1-schema1-True]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex2-schema2-True]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex3-schema3-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex4-schema4-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex5-schema5-True]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex6-schema6-True]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex7-schema7-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex8-schema8-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex9-schema9-column",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex10-schema10-False]",
"tests/core/test_schema_components.py::test_multiindex_ordered[multiindex11-schema11-False]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex0-schema0-True]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex1-schema1-True]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex2-schema2-False]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex3-schema3-False]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex4-schema4-False]",
"tests/core/test_schema_components.py::test_multiindex_unordered[multiindex5-schema5-False]",
"tests/core/test_schema_components.py::test_multiindex_unordered_init_exception[indexes0]",
"tests/core/test_schema_components.py::test_multiindex_unordered_init_exception[indexes1]",
"tests/core/test_schema_components.py::test_multiindex_unordered_init_exception[indexes2]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[indexes0]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[indexes1]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[indexes2]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[indexes3]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[1]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[1.0]",
"tests/core/test_schema_components.py::test_multiindex_incorrect_input[foo]",
"tests/core/test_schema_components.py::test_index_validation_pandas_string_dtype",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-None-str-a",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-None-bool-True]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-None-bool-False]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-None-float-42.0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-None-Int64-0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-nan-str-a",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-nan-bool-True]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-nan-bool-False]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-nan-float-42.0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[True-nan-Int64-0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-None-str-a",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-None-bool-True]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-None-bool-False]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-None-float-42.0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-None-Int64-0]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-nan-str-a",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-nan-bool-True]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-nan-bool-False]",
"tests/core/test_schema_components.py::test_column_default_works_when_dtype_match[False-nan-float-42.0]",
"tests/core/test_schema_components.py::test_column_default_errors_on_dtype_mismatch[str-1]",
"tests/core/test_schema_components.py::test_column_default_errors_on_dtype_mismatch[bool-42.0]",
"tests/core/test_schema_components.py::test_column_default_errors_on_dtype_mismatch[float-True]",
"tests/core/test_schema_components.py::test_column_default_errors_on_dtype_mismatch[Int64-a"
] | [] | MIT License | 19,020 | 206 | [
"pandera/backends/pandas/components.py"
] |
|
canonical__charmcraft-1745 | 51a55f7320370179bb48b4e5f6cd02a9de46cc13 | 2024-07-19 22:53:24 | 534c028fb418409d62608ea3efe2c8ad1dad4d0d | diff --git a/charmcraft/utils/file.py b/charmcraft/utils/file.py
index cb4d5be3..a88c7393 100644
--- a/charmcraft/utils/file.py
+++ b/charmcraft/utils/file.py
@@ -63,8 +63,9 @@ def build_zip(zip_path: PathOrString, prime_dir: PathOrString) -> None:
"""
zip_path = pathlib.Path(zip_path).resolve()
prime_dir = pathlib.Path(prime_dir).resolve()
- with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as file:
- for file_path in prime_dir.rglob("*"):
- if not file_path.is_file():
- continue
- file.write(file_path, file_path.relative_to(prime_dir))
+ with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_file:
+ # Using os.walk() because Path.walk() is only added in 3.12
+ for dir_path_str, _, filenames in os.walk(prime_dir, followlinks=True):
+ for filename in filenames:
+ file_path = pathlib.Path(dir_path_str, filename)
+ zip_file.write(file_path, file_path.relative_to(prime_dir))
| Charmcraft 3.x not including symlinked directories in resulting charm
### Bug Description
Building classical charms with version 3 is not adding the submodules like charmhelpers in the hooks folder.
So running a charm build with version 3 is breaking saying that it's not possible to find the module charmhelpers
### To Reproduce
install charmcraft latest/edge
build a classic charm like [charm-nrpe](https://github.com/canonical/charm-nrpe) and deploy.
### Environment
Ubuntu 22.04 LTS
### charmcraft.yaml
```shell
type: charm
parts:
charm:
plugin: dump
source: .
prime:
- actions/*
- files/*
- hooks/*
- hooks/charmhelpers/*
- templates/*
- actions.yaml
- config.yaml
- copyright
- LICENSE
- metadata.yaml
- README.md
bases:
- build-on:
- name: ubuntu
channel: "20.04"
architectures: ["amd64"]
run-on:
- name: ubuntu
channel: "22.04"
architectures:
- amd64
- name: ubuntu
channel: "20.04"
architectures:
- amd64
- name: ubuntu
channel: "18.04"
architectures:
- amd64
```
### Relevant log output
```shell
unit-nrpe-0: 12:44:22 INFO juju.worker.meterstatus skipped "meter-status-changed" hook (missing)
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install Traceback (most recent call last):
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install File "/var/lib/juju/agents/unit-nrpe-0/charm/hooks/install", line 4, in <module>
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install import services
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install File "/var/lib/juju/agents/unit-nrpe-0/charm/hooks/services.py", line 11, in <module>
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install from charmhelpers.core import hookenv
unit-nrpe-0: 12:44:22 WARNING unit.nrpe/0.install ModuleNotFoundError: No module named 'charmhelpers'
unit-nrpe-0: 12:44:22 ERROR juju.worker.uniter.operation hook "install" (via explicit, bespoke hook script) failed: exit status 1
unit-nrpe-0: 12:44:22 INFO juju.worker.uniter awaiting error resolution for "install" hook
unit-nrpe-0: 12:45:49 INFO juju.worker.uniter awaiting error resolution for "install" hook
unit-nrpe-0: 12:48:43 INFO juju.worker.uniter awaiting error resolution for "install" hook
```
| canonical/charmcraft | diff --git a/tests/unit/utils/test_file.py b/tests/unit/utils/test_file.py
index e13c2de0..04efda2c 100644
--- a/tests/unit/utils/test_file.py
+++ b/tests/unit/utils/test_file.py
@@ -105,41 +105,46 @@ def test_zipbuild_simple(tmp_path):
@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported")
-def test_zipbuild_symlink_simple(tmp_path):
+def test_zipbuild_symlinks(tmp_path: pathlib.Path):
"""Symlinks are supported."""
build_dir = tmp_path / "somedir"
build_dir.mkdir()
- testfile1 = build_dir / "real.txt"
- testfile1.write_bytes(b"123\x00456")
- testfile2 = build_dir / "link.txt"
- testfile2.symlink_to(testfile1)
+ outside_dir = tmp_path / "another_dir"
+ outside_dir.mkdir()
+ outside_file = outside_dir / "some_file"
+ outside_file.write_bytes(b"123\x00456")
- zip_filepath = tmp_path / "testresult.zip"
- build_zip(zip_filepath, build_dir)
+ internal_dir = build_dir / "subdirectory"
+ internal_dir.mkdir()
+ real_file = internal_dir / "real.txt"
+ real_file.write_bytes(b"123\x00456")
- zf = zipfile.ZipFile(zip_filepath)
- assert sorted(x.filename for x in zf.infolist()) == ["link.txt", "real.txt"]
- assert zf.read("real.txt") == b"123\x00456"
- assert zf.read("link.txt") == b"123\x00456"
+ internal_file_link = build_dir / "link.txt"
+ internal_file_link.symlink_to(real_file)
+ internal_dir_link = build_dir / "link_dir"
+ internal_dir_link.symlink_to(internal_dir)
[email protected](sys.platform == "win32", reason="Windows not [yet] supported")
-def test_zipbuild_symlink_outside(tmp_path):
- """No matter where the symlink points to."""
- # outside the build dir
- testfile1 = tmp_path / "real.txt"
- testfile1.write_bytes(b"123\x00456")
+ external_file_link = build_dir / "external_link.txt"
+ external_file_link.symlink_to(outside_file)
- # inside the build dir
- build_dir = tmp_path / "somedir"
- build_dir.mkdir()
- testfile2 = build_dir / "link.txt"
- testfile2.symlink_to(testfile1)
+ external_dir_link = build_dir / "external_link_dir"
+ external_dir_link.symlink_to(outside_dir)
zip_filepath = tmp_path / "testresult.zip"
build_zip(zip_filepath, build_dir)
zf = zipfile.ZipFile(zip_filepath)
- assert sorted(x.filename for x in zf.infolist()) == ["link.txt"]
- assert zf.read("link.txt") == b"123\x00456"
+
+ expected_files = [
+ "external_link.txt",
+ "external_link_dir/some_file",
+ "link.txt",
+ "link_dir/real.txt",
+ "subdirectory/real.txt",
+ ]
+
+ assert sorted(x.filename for x in zf.infolist()) == expected_files
+ for file_name in expected_files:
+ assert zf.read(file_name) == b"123\x00456"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"ruff",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-pip python3-setuptools python3-wheel python3-venv libapt-pkg-dev"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==23.2.0
certifi==2024.7.4
cffi==1.16.0
-e git+https://github.com/canonical/charmcraft.git@51a55f7320370179bb48b4e5f6cd02a9de46cc13#egg=charmcraft
charset-normalizer==3.3.2
coverage==7.8.0
craft-application==3.2.0
craft-archives==1.2.0
craft-cli==2.6.0
craft-grammar==1.2.0
craft-parts==1.33.0
craft-providers==1.24.1
craft-store==2.6.2
cryptography==42.0.8
Deprecated==1.2.14
distro==1.9.0
docker==7.1.0
exceptiongroup==1.2.2
flake8==7.2.0
freezegun==1.5.1
httplib2==0.22.0
humanize==4.9.0
hypothesis==6.130.5
idna==3.7
importlib_metadata==7.1.0
iniconfig==2.1.0
jaraco.classes==3.4.0
jeepney==0.8.0
Jinja2==3.1.4
jsonschema==4.22.0
jsonschema-specifications==2023.12.1
keyring==24.3.1
launchpadlib==1.11.0
lazr.restfulclient==0.14.6
lazr.uri==1.0.6
macaroonbakery==1.3.4
MarkupSafe==2.1.5
mccabe==0.7.0
more-itertools==10.2.0
oauthlib==3.2.2
overrides==7.7.0
packaging==24.0
platformdirs==4.2.2
pluggy==1.5.0
protobuf==5.26.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==1.10.15
pydantic-yaml==0.11.2
pydocstyle==6.3.0
pyfakefs==5.8.0
pyflakes==3.3.2
pygit2==1.14.1
pymacaroons==0.13.0
PyNaCl==1.5.0
pyparsing==3.1.2
pyRFC3339==1.1
pytest==8.3.5
pytest-check==2.5.2
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-subprocess==1.5.3
python-dateutil==2.9.0.post0
pytz==2024.1
pyxdg==0.28
PyYAML==6.0.1
referencing==0.35.1
requests==2.31.0
requests-toolbelt==1.0.0
requests-unixsocket==0.3.0
responses==0.25.7
rpds-py==0.18.1
ruff==0.11.2
SecretStorage==3.3.3
six==1.16.0
snap-helpers==0.4.2
snowballstemmer==2.2.0
sortedcontainers==2.4.0
tabulate==0.9.0
tomli==2.2.1
types-Deprecated==1.2.9.20240311
types-PyYAML==6.0.12.20240311
typing_extensions==4.13.0
urllib3==1.26.18
wadllib==1.3.6
wrapt==1.16.0
zipp==3.19.1
| name: charmcraft
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==23.2.0
- certifi==2024.7.4
- cffi==1.16.0
- charmcraft==3.0.0.post93+g51a55f73
- charset-normalizer==3.3.2
- coverage==7.8.0
- craft-application==3.2.0
- craft-archives==1.2.0
- craft-cli==2.6.0
- craft-grammar==1.2.0
- craft-parts==1.33.0
- craft-providers==1.24.1
- craft-store==2.6.2
- cryptography==42.0.8
- deprecated==1.2.14
- distro==1.9.0
- docker==7.1.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- freezegun==1.5.1
- httplib2==0.22.0
- humanize==4.9.0
- hypothesis==6.130.5
- idna==3.7
- importlib-metadata==7.1.0
- iniconfig==2.1.0
- jaraco-classes==3.4.0
- jeepney==0.8.0
- jinja2==3.1.4
- jsonschema==4.22.0
- jsonschema-specifications==2023.12.1
- keyring==24.3.1
- launchpadlib==1.11.0
- lazr-restfulclient==0.14.6
- lazr-uri==1.0.6
- macaroonbakery==1.3.4
- markupsafe==2.1.5
- mccabe==0.7.0
- more-itertools==10.2.0
- oauthlib==3.2.2
- overrides==7.7.0
- packaging==24.0
- platformdirs==4.2.2
- pluggy==1.5.0
- protobuf==5.26.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==1.10.15
- pydantic-yaml==0.11.2
- pydocstyle==6.3.0
- pyfakefs==5.8.0
- pyflakes==3.3.2
- pygit2==1.14.1
- pymacaroons==0.13.0
- pynacl==1.5.0
- pyparsing==3.1.2
- pyrfc3339==1.1
- pytest==8.3.5
- pytest-check==2.5.2
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-subprocess==1.5.3
- python-dateutil==2.9.0.post0
- pytz==2024.1
- pyxdg==0.28
- pyyaml==6.0.1
- referencing==0.35.1
- requests==2.31.0
- requests-toolbelt==1.0.0
- requests-unixsocket==0.3.0
- responses==0.25.7
- rpds-py==0.18.1
- ruff==0.11.2
- secretstorage==3.3.3
- setuptools==70.0.0
- six==1.16.0
- snap-helpers==0.4.2
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- tabulate==0.9.0
- tomli==2.2.1
- types-deprecated==1.2.9.20240311
- types-pyyaml==6.0.12.20240311
- typing-extensions==4.13.0
- urllib3==1.26.18
- wadllib==1.3.6
- wrapt==1.16.0
- zipp==3.19.1
prefix: /opt/conda/envs/charmcraft
| [
"tests/unit/utils/test_file.py::test_zipbuild_symlinks"
] | [
"tests/unit/utils/test_file.py::test_usefulfilepath_inaccessible"
] | [
"tests/unit/utils/test_file.py::test_make_executable_read_bits",
"tests/unit/utils/test_file.py::test_usefulfilepath_pathlib",
"tests/unit/utils/test_file.py::test_usefulfilepath_home_expanded",
"tests/unit/utils/test_file.py::test_usefulfilepath_missing",
"tests/unit/utils/test_file.py::test_usefulfilepath_not_a_file",
"tests/unit/utils/test_file.py::test_zipbuild_simple"
] | [] | Apache License 2.0 | 19,030 | 279 | [
"charmcraft/utils/file.py"
] |
|
canonical__charmcraft-1746 | ac08e98e5dd6d0764beba24275193004bdc3df2d | 2024-07-19 23:20:32 | 534c028fb418409d62608ea3efe2c8ad1dad4d0d | diff --git a/charmcraft/extensions/gunicorn.py b/charmcraft/extensions/gunicorn.py
index 23f78a9e..56ae3ce7 100644
--- a/charmcraft/extensions/gunicorn.py
+++ b/charmcraft/extensions/gunicorn.py
@@ -158,7 +158,13 @@ class _GunicornBase(Extension):
"grafana-dashboard": {"interface": "grafana_dashboard"},
},
"config": {"options": {**self._WEBSERVER_OPTIONS, **self.options}},
- "parts": {"charm": {"plugin": "charm", "source": "."}},
+ "parts": {
+ "charm": {
+ "plugin": "charm",
+ "source": ".",
+ "build-snaps": ["rustup"], # Needed to build pydantic.
+ }
+ },
}
@override
@@ -225,12 +231,6 @@ class FlaskFramework(_GunicornBase):
"""Check if the extension is in an experimental state."""
return False
- @override
- def get_parts_snippet(self) -> dict[str, Any]:
- """Return the parts to add to parts."""
- # rust is needed to build pydantic-core, a dependency of flask.
- return {"flask-framework/rust-deps": {"plugin": "nil", "build-packages": ["cargo"]}}
-
class DjangoFramework(_GunicornBase):
"""Extension for 12-factor Django applications."""
diff --git a/charmcraft/linters.py b/charmcraft/linters.py
index aef45b3a..87e73a91 100644
--- a/charmcraft/linters.py
+++ b/charmcraft/linters.py
@@ -88,8 +88,10 @@ class BaseChecker(metaclass=abc.ABCMeta):
"""Get the result of a single checker."""
try:
result = self.run(base_dir)
- except Exception:
+ except Exception as exc:
result = self.exception_result
+ if not self.text:
+ self.text = str(exc)
return CheckResult(
check_type=self.check_type,
name=self.name,
@@ -189,14 +191,21 @@ class Framework(AttributeChecker):
def __init__(self):
self.result = None
+ self.__text = None
@property
- def text(self):
+ def text(self) -> str:
"""Return a text in function of the result state."""
+ if self.__text:
+ return self.__text
if self.result is None:
- return None
+ return ""
return self.result_texts[self.result]
+ @text.setter
+ def text(self, value: str) -> None:
+ self.__text = value
+
def _get_imports(self, filepath: pathlib.Path) -> Generator[list[str], None, None]:
"""Parse a Python filepath and yield its imports.
@@ -255,14 +264,12 @@ class Framework(AttributeChecker):
def run(self, basedir: pathlib.Path) -> str:
"""Run the proper verifications."""
+ self.result = self.Result.UNKNOWN
if self._check_operator(basedir):
- result = self.Result.OPERATOR
+ self.result = self.Result.OPERATOR
elif self._check_reactive(basedir):
- result = self.Result.REACTIVE
- else:
- result = self.Result.UNKNOWN
- self.result = result
- return result
+ self.result = self.Result.REACTIVE
+ return self.result
class JujuMetadata(Linter):
| FTBFS with reactive plugin, getting charmcraft internal error
### Bug Description
Buliding a charm with a reactive plugin results in an internal error on charmcraft 3
```
charmcraft internal error: ValidationError(model='CheckResult', errors=[{'loc': ('text',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}])
```
This seems similar to this issue: https://github.com/canonical/charmcraft/issues/1439
This issue is marked as resolved though. -- I'm openening a new one but if you feel this is really the above issue please feel free to close as duplicate.
### To Reproduce
```
git clone [email protected]:sabaini/charm-ceph-fs.git
cd charm-ceph-fs
git checkout ch-reactive
charmcraft -v pack
```
### Environment
Running on Ubuntu 24.04 with lxd
charmcraft 3.0.0 3246 3.x/beta canonical✓ classic
lxd 5.21.1-d46c406 28460 5.21/stable canonical✓ -
### charmcraft.yaml
```shell
type: charm
parts:
charm:
plugin: reactive
reactive-charm-build-arguments:
- --binary-wheels-from-source
build-packages:
- tox
- git
- python3-dev
- libffi-dev
source: src/
build-snaps:
- charm/latest/edge
base: [email protected]
build-base: [email protected]
platforms:
amd64:
build-on: amd64
build-for: amd64
arm64:
build-on: arm64
build-for: arm64
s390x:
build-on: s390x
build-for: s390x
ppc64el:
build-on: ppc64el
build-for: ppc64el
```
### Relevant log output
```shell
...
2024-07-01 18:02:04.661 Executing on host: lxc --project charmcraft file pull local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058/etc/craft-instance.conf /home/peter/tmpgiz3en9c.tmp-craft/tmpmf3qpa84
2024-07-01 18:02:04.712 Instance is compatible with compatibility tag 'charmcraft-buildd-base-v7'
2024-07-01 18:02:04.712 No cache path set, not mounting cache directories.
2024-07-01 18:02:04.712 Waiting for environment to be ready...
2024-07-01 18:02:04.712 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical systemctl is-system-running
2024-07-01 18:02:04.804 Waiting for networking to be ready...
2024-07-01 18:02:04.804 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical getent hosts snapcraft.io
2024-07-01 18:02:04.985 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical snap unset system proxy.http
2024-07-01 18:02:05.774 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical snap unset system proxy.https
2024-07-01 18:02:05.961 Installing snap 'charmcraft' with channel=None and classic=True
2024-07-01 18:02:05.961 Installing snap 'charmcraft' from host (classic=True)
2024-07-01 18:02:05.964 Installing base snap 'core22' for 'charmcraft' from host
2024-07-01 18:02:05.964 Installing snap 'core22' from host (classic=False)
2024-07-01 18:02:05.966 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical test -f /etc/craft-instance.conf
2024-07-01 18:02:06.080 Executing on host: lxc --project charmcraft file pull local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058/etc/craft-instance.conf /home/peter/tmpbepts9y2.tmp-craft/tmpzkj98phj
2024-07-01 18:02:06.133 Revisions found: host='1380', target='1380'
2024-07-01 18:02:06.133 Skipping snap injection: target is already up-to-date with revision on host
2024-07-01 18:02:06.134 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical test -f /etc/craft-instance.conf
2024-07-01 18:02:06.248 Executing on host: lxc --project charmcraft file pull local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058/etc/craft-instance.conf /home/peter/tmp5571hs1d.tmp-craft/tmpi18918bg
2024-07-01 18:02:06.301 Revisions found: host='3246', target='3246'
2024-07-01 18:02:06.301 Skipping snap injection: target is already up-to-date with revision on host
2024-07-01 18:02:06.383 Setting instance timezone to match host timezone 'Europe/Vienna'.
2024-07-01 18:02:06.383 Executing on host: lxc --project charmcraft config set local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 environment.TZ Europe/Vienna
2024-07-01 18:02:06.448 Executing on host: lxc --project charmcraft config device show local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058
2024-07-01 18:02:06.504 Executing on host: lxc --project charmcraft config device add local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 disk-/root/project disk source=/home/peter/src/openstack/charms/charm-ceph-fs path=/root/project
2024-07-01 18:02:06.591 Instance launched and working directory mounted
2024-07-01 18:02:06.591 Pushing bashrc to instance
2024-07-01 18:02:06.591 Executing on host: lxc --project charmcraft file push /tmp/tmp2jlqrmkg local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058/root/.bashrc --mode=644
2024-07-01 18:02:06.640 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical chown root:root /root/.bashrc
2024-07-01 18:02:06.717 Emitter: Pausing control of the terminal
2024-07-01 18:05:49.980 Emitter: Resuming control of the terminal
2024-07-01 18:05:49.980 Executing in container: lxc --project charmcraft exec local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 -- env CRAFT_MANAGED_MODE=1 CHARMCRAFT_MANAGED_MODE=1 DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true DEBIAN_PRIORITY=critical test -f /tmp/charmcraft.log
2024-07-01 18:05:50.094 Executing on host: lxc --project charmcraft file pull local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058/tmp/charmcraft.log /home/peter/tmpyx_fr43q.tmp-craft/tmpwrtmt5ck
2024-07-01 18:05:50.163 Logs retrieved from managed instance:
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.769 Starting charmcraft, version 3.0.0
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.769 Configuring application...
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.769 Preparing application...
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.770 Build plan: platform=amd64, build_for=None
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.772 Loading project file '/root/project/charmcraft.yaml'
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.774 Setting target machine to x86_64
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.775 Processing grammar (on amd64 for amd64)
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.777 Initialising lifecycle manager in /root
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.777 Project vars: {'version': 'unversioned'}
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.777 Adopting part: None
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.777 Using parallel build count of 16 from CPU count
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.777 is_snap: True, SNAP_NAME set to charmcraft
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.778 process charm:Step.PULL
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.778 add action charm:Step.PULL(ActionType.RUN)
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.778 process charm:Step.BUILD
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.778 add action charm:Step.BUILD(ActionType.RUN)
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.778 process charm:Step.STAGE
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 add action charm:Step.STAGE(ActionType.RUN)
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 process charm:Step.PRIME
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 add action charm:Step.PRIME(ActionType.RUN)
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 Initialising lifecycle
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 ignore patterns: []
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 part build packages: ['tox', 'git', 'python3-dev', 'libffi-dev']
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 part build snaps: ['charm/latest/edge']
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.779 Installing build-packages
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:08.780 Requested build-packages: ['git', 'libffi-dev', 'python3-dev', 'tox']
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.836 Marking tox (and its dependencies) to be fetched
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.837 package: tox
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.857 Marking git (and its dependencies) to be fetched
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.857 package: git
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.876 Marking libffi-dev (and its dependencies) to be fetched
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.876 package: libffi-dev
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.895 Marking python3-dev (and its dependencies) to be fetched
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.896 package: python3-dev
2024-07-01 18:05:50.163 :: 2024-07-01 18:02:09.936 Executing: ['apt-get', 'update']
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.174 :: Get:1 http://security.ubuntu.com/ubuntu noble-security InRelease [126 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.432 :: Hit:2 http://archive.ubuntu.com/ubuntu noble InRelease
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.465 :: Get:3 http://security.ubuntu.com/ubuntu noble-security/main amd64 Packages [186 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.566 :: Get:4 http://archive.ubuntu.com/ubuntu noble-updates InRelease [126 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.606 :: Get:5 http://security.ubuntu.com/ubuntu noble-security/main Translation-en [47.6 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.613 :: Get:6 http://security.ubuntu.com/ubuntu noble-security/universe amd64 Packages [62.3 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.627 :: Get:7 http://security.ubuntu.com/ubuntu noble-security/universe Translation-en [21.7 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.632 :: Get:8 http://security.ubuntu.com/ubuntu noble-security/restricted amd64 Packages [135 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:10.666 :: Get:9 http://security.ubuntu.com/ubuntu noble-security/restricted Translation-en [25.9 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.134 :: Get:10 http://archive.ubuntu.com/ubuntu noble-backports InRelease [126 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.284 :: Get:11 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 Packages [215 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.399 :: Get:12 http://archive.ubuntu.com/ubuntu noble-updates/main Translation-en [58.6 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.427 :: Get:13 http://archive.ubuntu.com/ubuntu noble-updates/universe amd64 Packages [112 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.483 :: Get:14 http://archive.ubuntu.com/ubuntu noble-updates/universe Translation-en [40.8 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.504 :: Get:15 http://archive.ubuntu.com/ubuntu noble-updates/restricted amd64 Packages [139 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.533 :: Get:16 http://archive.ubuntu.com/ubuntu noble-updates/restricted Translation-en [27.0 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.538 :: Get:17 http://archive.ubuntu.com/ubuntu noble-backports/universe amd64 Packages [8728 B]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.539 :: Get:18 http://archive.ubuntu.com/ubuntu noble-backports/universe Translation-en [10.0 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:11.589 :: Fetched 1467 kB in 2s (915 kB/s)
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.180 :: Reading package lists...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.196 Installing packages: git libffi-dev python3-dev tox
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.210 :: Reading package lists...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.375 :: Building dependency tree...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.376 :: Reading state information...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.552 :: python3-dev is already the newest version (3.12.3-0ubuntu1).
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.552 :: The following additional packages will be installed:
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.553 :: git-man libcurl3t64-gnutls liberror-perl python3-cachetools python3-chardet
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.553 :: python3-colorama python3-distlib python3-filelock python3-packaging
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.553 :: python3-pip-whl python3-platformdirs python3-pluggy python3-pyproject-api
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.553 :: python3-setuptools-whl python3-virtualenv python3-wheel-whl
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.554 :: Suggested packages:
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.554 :: gettext-base git-daemon-run | git-daemon-sysvinit git-doc git-email git-gui
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.554 :: gitk gitweb git-cvs git-mediawiki git-svn
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.554 :: Recommended packages:
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.554 :: less python3-distutils
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: The following NEW packages will be installed:
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: git git-man libcurl3t64-gnutls liberror-perl libffi-dev python3-cachetools
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: python3-chardet python3-colorama python3-distlib python3-filelock
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: python3-packaging python3-pip-whl python3-platformdirs python3-pluggy
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: python3-pyproject-api python3-setuptools-whl python3-virtualenv
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.574 :: python3-wheel-whl tox
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.992 :: 0 upgraded, 19 newly installed, 0 to remove and 14 not upgraded.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.992 :: Need to get 8675 kB of archives.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.992 :: After this operation, 34.8 MB of additional disk space will be used.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:12.992 :: Get:1 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 libcurl3t64-gnutls amd64 8.5.0-2ubuntu10.1 [333 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:13.900 :: Get:2 http://archive.ubuntu.com/ubuntu noble/main amd64 liberror-perl all 0.17029-2 [25.6 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:13.909 :: Get:3 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 git-man all 1:2.43.0-1ubuntu7.1 [1100 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:14.196 :: Get:4 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 git amd64 1:2.43.0-1ubuntu7.1 [3679 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.286 :: Get:5 http://archive.ubuntu.com/ubuntu noble/main amd64 python3-cachetools all 5.3.0-2 [10.6 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.286 :: Get:6 http://archive.ubuntu.com/ubuntu noble/main amd64 python3-chardet all 5.2.0+dfsg-1 [117 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.291 :: Get:7 http://archive.ubuntu.com/ubuntu noble/main amd64 python3-colorama all 0.4.6-4 [32.1 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.303 :: Get:8 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-distlib all 0.3.8-1 [318 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.327 :: Get:9 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-filelock all 3.13.1-1 [10.8 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.331 :: Get:10 http://archive.ubuntu.com/ubuntu noble/main amd64 python3-packaging all 24.0-1 [41.1 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.332 :: Get:11 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-pip-whl all 24.0+dfsg-1ubuntu1 [1702 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.622 :: Get:12 http://archive.ubuntu.com/ubuntu noble/main amd64 python3-platformdirs all 4.2.0-1 [16.1 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.625 :: Get:13 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-pluggy all 1.4.0-1 [20.4 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.627 :: Get:14 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-pyproject-api all 1.6.1-1 [50.9 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.635 :: Get:15 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-setuptools-whl all 68.1.2-2ubuntu1 [715 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.779 :: Get:16 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-wheel-whl all 0.42.0-2 [67.8 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.791 :: Get:17 http://archive.ubuntu.com/ubuntu noble/universe amd64 python3-virtualenv all 20.25.0+ds-2 [70.8 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.802 :: Get:18 http://archive.ubuntu.com/ubuntu noble/universe amd64 tox all 4.13.0-1 [302 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:15.864 :: Get:19 http://archive.ubuntu.com/ubuntu noble/main amd64 libffi-dev amd64 3.4.6-1build1 [62.8 kB]
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.068 :: Fetched 8675 kB in 3s (2645 kB/s)
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.085 :: Selecting previously unselected package libcurl3t64-gnutls:amd64.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.102 :: (Reading database ... 16780 files and directories currently installed.)
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.102 :: Preparing to unpack .../00-libcurl3t64-gnutls_8.5.0-2ubuntu10.1_amd64.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.105 :: Unpacking libcurl3t64-gnutls:amd64 (8.5.0-2ubuntu10.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.127 :: Selecting previously unselected package liberror-perl.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.130 :: Preparing to unpack .../01-liberror-perl_0.17029-2_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.132 :: Unpacking liberror-perl (0.17029-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.150 :: Selecting previously unselected package git-man.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.153 :: Preparing to unpack .../02-git-man_1%3a2.43.0-1ubuntu7.1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.154 :: Unpacking git-man (1:2.43.0-1ubuntu7.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.197 :: Selecting previously unselected package git.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.200 :: Preparing to unpack .../03-git_1%3a2.43.0-1ubuntu7.1_amd64.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.205 :: Unpacking git (1:2.43.0-1ubuntu7.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.379 :: Selecting previously unselected package python3-cachetools.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.381 :: Preparing to unpack .../04-python3-cachetools_5.3.0-2_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.383 :: Unpacking python3-cachetools (5.3.0-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.400 :: Selecting previously unselected package python3-chardet.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.403 :: Preparing to unpack .../05-python3-chardet_5.2.0+dfsg-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.404 :: Unpacking python3-chardet (5.2.0+dfsg-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.430 :: Selecting previously unselected package python3-colorama.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.433 :: Preparing to unpack .../06-python3-colorama_0.4.6-4_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.434 :: Unpacking python3-colorama (0.4.6-4) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.453 :: Selecting previously unselected package python3-distlib.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.456 :: Preparing to unpack .../07-python3-distlib_0.3.8-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.457 :: Unpacking python3-distlib (0.3.8-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.480 :: Selecting previously unselected package python3-filelock.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.483 :: Preparing to unpack .../08-python3-filelock_3.13.1-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.484 :: Unpacking python3-filelock (3.13.1-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.502 :: Selecting previously unselected package python3-packaging.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.505 :: Preparing to unpack .../09-python3-packaging_24.0-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.506 :: Unpacking python3-packaging (24.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.523 :: Selecting previously unselected package python3-pip-whl.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.526 :: Preparing to unpack .../10-python3-pip-whl_24.0+dfsg-1ubuntu1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.528 :: Unpacking python3-pip-whl (24.0+dfsg-1ubuntu1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.550 :: Selecting previously unselected package python3-platformdirs.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.553 :: Preparing to unpack .../11-python3-platformdirs_4.2.0-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.555 :: Unpacking python3-platformdirs (4.2.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.573 :: Selecting previously unselected package python3-pluggy.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.575 :: Preparing to unpack .../12-python3-pluggy_1.4.0-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.577 :: Unpacking python3-pluggy (1.4.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.596 :: Selecting previously unselected package python3-pyproject-api.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.598 :: Preparing to unpack .../13-python3-pyproject-api_1.6.1-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.600 :: Unpacking python3-pyproject-api (1.6.1-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.620 :: Selecting previously unselected package python3-setuptools-whl.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.623 :: Preparing to unpack .../14-python3-setuptools-whl_68.1.2-2ubuntu1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.624 :: Unpacking python3-setuptools-whl (68.1.2-2ubuntu1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.642 :: Selecting previously unselected package python3-wheel-whl.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.645 :: Preparing to unpack .../15-python3-wheel-whl_0.42.0-2_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.646 :: Unpacking python3-wheel-whl (0.42.0-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.663 :: Selecting previously unselected package python3-virtualenv.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.666 :: Preparing to unpack .../16-python3-virtualenv_20.25.0+ds-2_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.668 :: Unpacking python3-virtualenv (20.25.0+ds-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.695 :: Selecting previously unselected package tox.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.698 :: Preparing to unpack .../17-tox_4.13.0-1_all.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.699 :: Unpacking tox (4.13.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.737 :: Selecting previously unselected package libffi-dev:amd64.
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.739 :: Preparing to unpack .../18-libffi-dev_3.4.6-1build1_amd64.deb ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.741 :: Unpacking libffi-dev:amd64 (3.4.6-1build1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.798 :: Setting up python3-setuptools-whl (68.1.2-2ubuntu1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.802 :: Setting up python3-filelock (3.13.1-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.889 :: Setting up python3-cachetools (5.3.0-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:16.980 :: Setting up python3-colorama (0.4.6-4) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.081 :: Setting up python3-pip-whl (24.0+dfsg-1ubuntu1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.085 :: Setting up libcurl3t64-gnutls:amd64 (8.5.0-2ubuntu10.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.090 :: Setting up python3-distlib (0.3.8-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.243 :: Setting up libffi-dev:amd64 (3.4.6-1build1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.248 :: Setting up python3-platformdirs (4.2.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.340 :: Setting up liberror-perl (0.17029-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.345 :: Setting up python3-packaging (24.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.451 :: Setting up python3-chardet (5.2.0+dfsg-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.721 :: Setting up python3-pluggy (1.4.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.820 :: Setting up git-man (1:2.43.0-1ubuntu7.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.825 :: Setting up python3-wheel-whl (0.42.0-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.829 :: Setting up python3-pyproject-api (1.6.1-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:17.924 :: Setting up python3-virtualenv (20.25.0+ds-2) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:18.095 :: Setting up git (1:2.43.0-1ubuntu7.1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:18.105 :: Setting up tox (4.13.0-1) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:18.319 :: Processing triggers for libc-bin (2.39-0ubuntu8) ...
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 1:2.43.0-1ubuntu7.1 for package git
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 1:2.43.0-1ubuntu7.1 for package git-man
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 8.5.0-2ubuntu10.1 for package libcurl3t64-gnutls
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 0.17029-2 for package liberror-perl
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 3.4.6-1build1 for package libffi-dev
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.445 Found installed version 5.3.0-2 for package python3-cachetools
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 5.2.0+dfsg-1 for package python3-chardet
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 0.4.6-4 for package python3-colorama
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 0.3.8-1 for package python3-distlib
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 3.13.1-1 for package python3-filelock
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 24.0-1 for package python3-packaging
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 24.0+dfsg-1ubuntu1 for package python3-pip-whl
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 4.2.0-1 for package python3-platformdirs
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 1.4.0-1 for package python3-pluggy
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 1.6.1-1 for package python3-pyproject-api
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 68.1.2-2ubuntu1 for package python3-setuptools-whl
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 20.25.0+ds-2 for package python3-virtualenv
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 0.42.0-2 for package python3-wheel-whl
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.446 Found installed version 4.13.0-1 for package tox
2024-07-01 18:05:50.164 :: 2024-07-01 18:02:19.449 Installing build-snaps
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:19.847 Installing snap: charm
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.001 verify plugin environment for part 'charm'
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.001 plugin validation environment: # Environment
2024-07-01 18:05:50.165 :: ## Application environment
2024-07-01 18:05:50.165 :: ## Part environment
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_TARGET_ARCH="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_BUILD_ON="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_BUILD_FOR="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET_BUILD_ON="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET_BUILD_FOR="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_PARALLEL_BUILD_COUNT="16"
2024-07-01 18:05:50.165 :: export CRAFT_PROJECT_DIR="/root/project"
2024-07-01 18:05:50.165 :: export CRAFT_STAGE="/root/stage"
2024-07-01 18:05:50.165 :: export CRAFT_PRIME="/root/prime"
2024-07-01 18:05:50.165 :: export CRAFT_PROJECT_NAME="ceph-fs"
2024-07-01 18:05:50.165 :: export CRAFT_PART_NAME="charm"
2024-07-01 18:05:50.165 :: export CRAFT_STEP_NAME="BUILD"
2024-07-01 18:05:50.165 :: export CRAFT_PART_SRC="/root/parts/charm/src"
2024-07-01 18:05:50.165 :: export CRAFT_PART_SRC_WORK="/root/parts/charm/src"
2024-07-01 18:05:50.165 :: export CRAFT_PART_BUILD="/root/parts/charm/build"
2024-07-01 18:05:50.165 :: export CRAFT_PART_BUILD_WORK="/root/parts/charm/build"
2024-07-01 18:05:50.165 :: export CRAFT_PART_INSTALL="/root/parts/charm/install"
2024-07-01 18:05:50.165 :: ## Plugin environment
2024-07-01 18:05:50.165 :: export CRYPTOGRAPHY_OPENSSL_NO_LEGACY="true"
2024-07-01 18:05:50.165 :: ## User environment
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.001 plugin validation command: 'charm version --format json'
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.849 Pulling charm
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.849 execute action charm:Action(part_name='charm', step=Step.PULL, action_type=ActionType.RUN, reason=None, project_vars=None, properties=ActionProperties(changed_files=None, changed_dirs=None))
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.862 Building charm
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.862 execute action charm:Action(part_name='charm', step=Step.BUILD, action_type=ActionType.RUN, reason=None, project_vars=None, properties=ActionProperties(changed_files=None, changed_dirs=None))
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.862 load state file: /root/parts/charm/state/pull
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.868 remove directory /root/parts/charm/build
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.872 plugin validation environment: # Environment
2024-07-01 18:05:50.165 :: ## Application environment
2024-07-01 18:05:50.165 :: ## Part environment
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_TARGET_ARCH="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_BUILD_ON="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_BUILD_FOR="amd64"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET_BUILD_ON="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_ARCH_TRIPLET_BUILD_FOR="x86_64-linux-gnu"
2024-07-01 18:05:50.165 :: export CRAFT_PARALLEL_BUILD_COUNT="16"
2024-07-01 18:05:50.165 :: export CRAFT_PROJECT_DIR="/root/project"
2024-07-01 18:05:50.165 :: export CRAFT_STAGE="/root/stage"
2024-07-01 18:05:50.165 :: export CRAFT_PRIME="/root/prime"
2024-07-01 18:05:50.165 :: export CRAFT_PROJECT_NAME="ceph-fs"
2024-07-01 18:05:50.165 :: export CRAFT_PART_NAME="charm"
2024-07-01 18:05:50.165 :: export CRAFT_STEP_NAME="BUILD"
2024-07-01 18:05:50.165 :: export CRAFT_PART_SRC="/root/parts/charm/src"
2024-07-01 18:05:50.165 :: export CRAFT_PART_SRC_WORK="/root/parts/charm/src"
2024-07-01 18:05:50.165 :: export CRAFT_PART_BUILD="/root/parts/charm/build"
2024-07-01 18:05:50.165 :: export CRAFT_PART_BUILD_WORK="/root/parts/charm/build"
2024-07-01 18:05:50.165 :: export CRAFT_PART_INSTALL="/root/parts/charm/install"
2024-07-01 18:05:50.165 :: ## Plugin environment
2024-07-01 18:05:50.165 :: export CRYPTOGRAPHY_OPENSSL_NO_LEGACY="true"
2024-07-01 18:05:50.165 :: ## User environment
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:29.872 plugin validation command: 'charm version --format json'
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:30.513 Executing PosixPath('/root/parts/charm/run/build.sh')
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:30.514 :: + /snap/charmcraft/3246/bin/python3 -I /snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/parts/reactive.py ceph-fs /root/parts/charm/build /root/parts/charm/install --binary-wheels-from-source
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.789 :: I: metadata name (ceph-fs) must match directory name (build) exactly for local deployment.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: `display-name` not provided, add for custom naming in the UI
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: DEPRECATED: series parameter is ignored by charmcraft,use bases in charmcraft.yaml
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: no hooks directory
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: all charms should provide at least one thing
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: relation ceph-mds has no hooks
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: missing recommended hook install
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: missing recommended hook start
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: missing recommended hook stop
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: missing recommended hook config-changed
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option key has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option ceph-public-network has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option rbd-pool-name has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option metadata-pool has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option ec-profile-name has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option ec-profile-locality has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option ec-profile-crush-locality has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.790 :: I: config.yaml: option ec-profile-durability-estimator has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option ec-profile-helper-chunks has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option ec-profile-scalar-mds has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option ec-profile-technique has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option ec-profile-device-class has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-algorithm has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-mode has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-required-ratio has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-min-blob-size has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-min-blob-size-hdd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-min-blob-size-ssd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-max-blob-size has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-max-blob-size-hdd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:31.791 :: I: config.yaml: option bluestore-compression-max-blob-size-ssd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:32.874 :: build: The lockfile /root/parts/charm/build/build.lock was not found; building using latest versions.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:32.874 :: build: Probably running as root in charmcraft, proactively installing the `git` and `virtualenv` packages.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:32.877 ::
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:32.877 :: WARNING: apt does not have a stable CLI interface. Use with caution in scripts.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:32.878 ::
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.558 :: Reading package lists...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.745 :: Building dependency tree...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.745 :: Reading state information...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.945 :: git is already the newest version (1:2.43.0-1ubuntu7.1).
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.977 :: The following NEW packages will be installed:
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:33.977 :: virtualenv
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.002 :: 0 upgraded, 1 newly installed, 0 to remove and 14 not upgraded.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.289 :: Need to get 1978 B of archives.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.289 :: After this operation, 13.3 kB of additional disk space will be used.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.290 :: Get:1 http://archive.ubuntu.com/ubuntu noble/universe amd64 virtualenv all 20.25.0+ds-2 [1978 B]
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.475 :: Fetched 1978 B in 0s (7405 B/s)
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.490 :: Selecting previously unselected package virtualenv.
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.498 :: (Reading database ... 18550 files and directories currently installed.)
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.498 :: Preparing to unpack .../virtualenv_20.25.0+ds-2_all.deb ...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.500 :: Unpacking virtualenv (20.25.0+ds-2) ...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.521 :: Setting up virtualenv (20.25.0+ds-2) ...
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:34.633 :: build: Destination charm directory: /root/parts/charm/build/ceph-fs
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.498 :: build: Processing layer: layer:options
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.538 :: build: Processing layer: layer:basic
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.613 :: build: Processing layer: layer:openstack
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.700 :: build: Processing layer: layer:ceph
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.744 :: build: Processing layer: ceph-fs (from .)
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.852 :: build: Processing interface: tls-certificates
2024-07-01 18:05:50.165 :: 2024-07-01 18:02:43.852 :: build: Processing interface: ceph-mds
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.565 :: build:
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.565 :: build: ---------------------------------------
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.565 :: build: Build Report
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.565 :: build: ---------------------------------------
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.566 :: build: New build; all files were modified.
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: `display-name` not provided, add for custom naming in the UI
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: DEPRECATED: series parameter is ignored by charmcraft,use bases in charmcraft.yaml
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: all charms should provide at least one thing
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: config.yaml: option key has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: config.yaml: option ceph-public-network has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: config.yaml: option rbd-pool-name has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.602 :: proof: I: config.yaml: option metadata-pool has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-name has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-locality has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-crush-locality has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-durability-estimator has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-helper-chunks has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-scalar-mds has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-technique has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option ec-profile-device-class has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-algorithm has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-mode has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-required-ratio has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-min-blob-size has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-min-blob-size-hdd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-min-blob-size-ssd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-max-blob-size has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-max-blob-size-hdd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.603 :: proof: I: config.yaml: option bluestore-compression-max-blob-size-ssd has no default value
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.630 :: charm tool execution command=['charm', 'proof']
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.630 :: charm tool execution SUCCESS: returncode=0
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.630 :: charm tool execution command=['charm', 'build', '--binary-wheels-from-source', '-o', '/root/parts/charm/build']
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:48.630 :: charm tool execution SUCCESS: returncode=0
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.755 Staging charm
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.756 execute action charm:Action(part_name='charm', step=Step.STAGE, action_type=ActionType.RUN, reason=None, project_vars=None, properties=ActionProperties(changed_files=None, changed_dirs=None))
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.805 Priming charm
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.805 execute action charm:Action(part_name='charm', step=Step.PRIME, action_type=ActionType.RUN, reason=None, project_vars=None, properties=ActionProperties(changed_files=None, changed_dirs=None))
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.836 Setting up PackageService
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.836 Update project variables: {'version': 'unversioned'}
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.837 Setting up AnalysisService
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.854 Reading '/root/prime/metadata.yaml'
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.872 Reading '/root/prime/metadata.yaml'
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.873 Validating metadata keys
2024-07-01 18:05:50.165 :: 2024-07-01 18:05:49.874 charmcraft internal error: ValidationError(model='CheckResult', errors=[{'loc': ('text',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}])
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 Traceback (most recent call last):
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/application.py", line 483, in run
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 return_code = dispatcher.run() or 0
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_cli/dispatcher.py", line 487, in run
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 return self._loaded_command.run(self._parsed_command_args)
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/commands/base.py", line 182, in run
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 result = self._run(parsed_args, **kwargs) or result
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/commands/lifecycle.py", line 337, in _run
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 super()._run(parsed_args, step_name="prime")
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/commands/lifecycle.py", line 301, in _run
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 self._services.package.write_metadata(self._services.lifecycle.prime_dir)
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/services/package.py", line 226, in write_metadata
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 manifest = self.get_manifest(lint_results)
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/services/package.py", line 170, in get_manifest
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 attributes = [
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/services/package.py", line 170, in <listcomp>
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 attributes = [
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.878 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/services/analysis.py", line 47, in lint_directory
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 yield checker.get_result(path)
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/linters.py", line 93, in get_result
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 return CheckResult(
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 File "/snap/charmcraft/3246/lib/python3.10/site-packages/pydantic/dataclasses.py", line 332, in new_init
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 self.__pydantic_validate_values__()
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 File "/snap/charmcraft/3246/lib/python3.10/site-packages/pydantic/dataclasses.py", line 447, in _dataclass_validate_values
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 raise validation_error
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 pydantic.error_wrappers.ValidationError: 1 validation error for CheckResult
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 text
2024-07-01 18:05:50.166 :: 2024-07-01 18:05:49.879 none is not an allowed value (type=type_error.none.not_allowed)
2024-07-01 18:05:50.166 Executing on host: lxc --project charmcraft config device show local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058
2024-07-01 18:05:50.214 Executing on host: lxc --project charmcraft config device remove local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058 disk-/root/project
2024-07-01 18:05:50.286 Executing on host: lxc --project charmcraft stop local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058
2024-07-01 18:05:54.304 Failed to execute charmcraft in instance.
2024-07-01 18:05:54.305 Traceback (most recent call last):
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/application.py", line 333, in run_managed
2024-07-01 18:05:54.305 instance.execute_run( # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType]
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_providers/lxd/lxd_instance.py", line 293, in execute_run
2024-07-01 18:05:54.305 return self.lxc.exec(
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_providers/lxd/lxc.py", line 387, in exec
2024-07-01 18:05:54.305 return runner(final_cmd, timeout=timeout, check=check, **kwargs)
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/usr/lib/python3.10/subprocess.py", line 526, in run
2024-07-01 18:05:54.305 raise CalledProcessError(retcode, process.args,
2024-07-01 18:05:54.305 subprocess.CalledProcessError: Command '['lxc', '--project', 'charmcraft', 'exec', 'local:charmcraft-ceph-fs-on-amd64-for-amd64-59656058', '--cwd', '/root/project', '--', 'env', 'CRAFT_MANAGED_MODE=1', 'CHARMCRAFT_MANAGED_MODE=1', 'DEBIAN_FRONTEND=noninteractive', 'DEBCONF_NONINTERACTIVE_SEEN=true', 'DEBIAN_PRIORITY=critical', 'CRAFT_PLATFORM=amd64', 'charmcraft', '-v', 'pack']' returned non-zero exit status 70.
2024-07-01 18:05:54.305
2024-07-01 18:05:54.305 The above exception was the direct cause of the following exception:
2024-07-01 18:05:54.305 Traceback (most recent call last):
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/application.py", line 479, in run
2024-07-01 18:05:54.305 self.run_managed(platform, build_for)
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/charmcraft/application/main.py", line 143, in run_managed
2024-07-01 18:05:54.305 super().run_managed(platform, build_for)
2024-07-01 18:05:54.305 File "/snap/charmcraft/3246/lib/python3.10/site-packages/craft_application/application.py", line 340, in run_managed
2024-07-01 18:05:54.305 raise craft_providers.ProviderError(
2024-07-01 18:05:54.305 craft_providers.errors.ProviderError: Failed to execute charmcraft in instance.
2024-07-01 18:05:54.305 Full execution log: '/home/peter/.local/state/charmcraft/log/charmcraft-20240701-180202.747695.log'
```
| canonical/charmcraft | diff --git a/tests/extensions/test_gunicorn.py b/tests/extensions/test_gunicorn.py
index e2f8d55c..1151fde3 100644
--- a/tests/extensions/test_gunicorn.py
+++ b/tests/extensions/test_gunicorn.py
@@ -66,8 +66,7 @@ def flask_input_yaml_fixture():
"options": {**FlaskFramework.options, **FlaskFramework._WEBSERVER_OPTIONS}
},
"parts": {
- "charm": {"plugin": "charm", "source": "."},
- "flask-framework/rust-deps": {"plugin": "nil", "build-packages": ["cargo"]},
+ "charm": {"plugin": "charm", "source": ".", "build-snaps": ["rustup"]},
},
"peers": {"secret-storage": {"interface": "secret-storage"}},
"provides": {
@@ -121,7 +120,7 @@ def flask_input_yaml_fixture():
"config": {
"options": {**DjangoFramework.options, **DjangoFramework._WEBSERVER_OPTIONS}
},
- "parts": {"charm": {"plugin": "charm", "source": "."}},
+ "parts": {"charm": {"plugin": "charm", "source": ".", "build-snaps": ["rustup"]}},
"peers": {"secret-storage": {"interface": "secret-storage"}},
"provides": {
"metrics-endpoint": {"interface": "prometheus_scrape"},
@@ -255,4 +254,8 @@ def test_handle_charm_part(flask_input_yaml, tmp_path):
apply_extensions(tmp_path, flask_input_yaml)
del flask_input_yaml["parts"]
applied = apply_extensions(tmp_path, flask_input_yaml)
- assert applied["parts"]["charm"] == {"plugin": "charm", "source": "."}
+ assert applied["parts"]["charm"] == {
+ "plugin": "charm",
+ "source": ".",
+ "build-snaps": ["rustup"],
+ }
diff --git a/tests/spread/commands/init-flask-framework/task.yaml b/tests/spread/commands/init-flask-framework/task.yaml
index 65ad3973..fd58ecbf 100644
--- a/tests/spread/commands/init-flask-framework/task.yaml
+++ b/tests/spread/commands/init-flask-framework/task.yaml
@@ -1,9 +1,11 @@
summary: test charmcraft init with flask-framework profile
+priority: 500 # This builds pydantic, so do it early
+kill-timeout: 75m # Because it builds pydantic, it takes a long time.
+systems:
+ # We only need to run this test once, and it takes a long time.
+ - ubuntu-22.04-64
execute: |
- unset CHARMCRAFT_STORE_API_URL
- unset CHARMCRAFT_UPLOAD_URL
- unset CHARMCRAFT_REGISTRY_URL
mkdir -p test-init
cd test-init
charmcraft init --profile flask-framework
diff --git a/tests/spread/smoketests/basic/charmcraft-23.10.yaml b/tests/spread/smoketests/basic/charmcraft-23.10.yaml
deleted file mode 100644
index 984eb861..00000000
--- a/tests/spread/smoketests/basic/charmcraft-23.10.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: mantic
-type: charm
-title: A charm built and running on mantic.
-summary: A charm built and running on mantic.
-description: A charm built and running on mantic.
-bases:
- - build-on:
- - name: ubuntu
- channel: "23.10"
- run-on:
- - name: ubuntu
- channel: "23.10"
diff --git a/tests/spread/smoketests/basic/charmcraft-bases-23.10.yaml b/tests/spread/smoketests/basic/charmcraft-bases-23.10.yaml
deleted file mode 100644
index 984eb861..00000000
--- a/tests/spread/smoketests/basic/charmcraft-bases-23.10.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: mantic
-type: charm
-title: A charm built and running on mantic.
-summary: A charm built and running on mantic.
-description: A charm built and running on mantic.
-bases:
- - build-on:
- - name: ubuntu
- channel: "23.10"
- run-on:
- - name: ubuntu
- channel: "23.10"
diff --git a/tests/spread/smoketests/basic/task.yaml b/tests/spread/smoketests/basic/task.yaml
index 79ba8ac8..8bf7a3b5 100644
--- a/tests/spread/smoketests/basic/task.yaml
+++ b/tests/spread/smoketests/basic/task.yaml
@@ -5,9 +5,8 @@ priority: 50 # Because these can take a while, run them early.
environment:
BASE_CHANNEL/focal_bases: 20.04
BASE_CHANNEL/jammy_bases,jammy_platforms: 22.04
- BASE_CHANNEL/mantic_bases: 23.10 # Non-LTS
BASE_CHANNEL/noble_platforms: 24.04
- CHARM_TYPE/focal_bases,jammy_bases,mantic_bases: bases
+ CHARM_TYPE/focal_bases,jammy_bases: bases
CHARM_TYPE/jammy_platforms,noble_platforms: platforms
# Alma Linux is disabled temporarily: https://github.com/canonical/charmcraft/issues/1496
# BASE_CHANNEL/alma: alma9
diff --git a/tests/spread/smoketests/remote-build/task.yaml b/tests/spread/smoketests/remote-build/task.yaml
index 9bcf821c..4cb44247 100644
--- a/tests/spread/smoketests/remote-build/task.yaml
+++ b/tests/spread/smoketests/remote-build/task.yaml
@@ -9,7 +9,6 @@ environment:
LAUNCHPAD_TOKEN: "$(HOST: echo ${LAUNCHPAD_TOKEN})"
BASE_NAME: ubuntu
BASE_CHANNEL/jammy: 22.04
- BASE_CHANNEL/mantic: 23.10 # Non-LTS
include:
- tests/
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 2.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -r requirements-dev.txt -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"ruff",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==23.2.0
certifi==2024.7.4
cffi==1.16.0
-e git+https://github.com/canonical/charmcraft.git@ac08e98e5dd6d0764beba24275193004bdc3df2d#egg=charmcraft
charset-normalizer==3.3.2
coverage==7.5.3
craft-application==3.2.0
craft-archives==1.2.0
craft-cli==2.6.0
craft-grammar==1.2.0
craft-parts==1.33.0
craft-providers==1.24.1
craft-store==2.6.2
cryptography==42.0.8
Deprecated==1.2.14
distro==1.9.0
docker==7.1.0
exceptiongroup==1.2.2
flake8==7.0.0
freezegun==1.5.1
httplib2==0.22.0
humanize==4.9.0
hypothesis==6.100.5
idna==3.7
importlib_metadata==7.1.0
iniconfig==2.0.0
jaraco.classes==3.4.0
jeepney==0.8.0
Jinja2==3.1.4
jsonschema==4.22.0
jsonschema-specifications==2023.12.1
keyring==24.3.1
launchpadlib==1.11.0
lazr.restfulclient==0.14.6
lazr.uri==1.0.6
macaroonbakery==1.3.4
MarkupSafe==2.1.5
mccabe==0.7.0
more-itertools==10.2.0
oauthlib==3.2.2
overrides==7.7.0
packaging==24.0
platformdirs==4.2.2
pluggy==1.5.0
protobuf==5.26.1
pycodestyle==2.11.1
pycparser==2.22
pydantic==1.10.15
pydantic-yaml==0.11.2
pydocstyle==6.3.0
pyfakefs==5.4.1
pyflakes==3.2.0
pygit2==1.14.1
pymacaroons==0.13.0
PyNaCl==1.5.0
pyparsing==3.1.2
pyRFC3339==1.1
pytest==8.2.0
pytest-check==2.3.1
pytest-cov==5.0.0
pytest-mock==3.14.0
pytest-subprocess==1.5.0
python-dateutil==2.9.0.post0
pytz==2024.1
pyxdg==0.28
PyYAML==6.0.1
referencing==0.35.1
requests==2.31.0
requests-toolbelt==1.0.0
requests-unixsocket==0.3.0
responses==0.25.0
rpds-py==0.18.1
ruff==0.11.2
SecretStorage==3.3.3
six==1.16.0
snap-helpers==0.4.2
snowballstemmer==2.2.0
sortedcontainers==2.4.0
tabulate==0.9.0
tomli==2.2.1
types-Deprecated==1.2.9.20240311
types-PyYAML==6.0.12.20240311
typing_extensions==4.11.0
urllib3==1.26.18
wadllib==1.3.6
wrapt==1.16.0
zipp==3.19.1
| name: charmcraft
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==23.2.0
- certifi==2024.7.4
- cffi==1.16.0
- charmcraft==3.0.0.post94+gac08e98e
- charset-normalizer==3.3.2
- coverage==7.5.3
- craft-application==3.2.0
- craft-archives==1.2.0
- craft-cli==2.6.0
- craft-grammar==1.2.0
- craft-parts==1.33.0
- craft-providers==1.24.1
- craft-store==2.6.2
- cryptography==42.0.8
- deprecated==1.2.14
- distro==1.9.0
- docker==7.1.0
- exceptiongroup==1.2.2
- flake8==7.0.0
- freezegun==1.5.1
- httplib2==0.22.0
- humanize==4.9.0
- hypothesis==6.100.5
- idna==3.7
- importlib-metadata==7.1.0
- iniconfig==2.0.0
- jaraco-classes==3.4.0
- jeepney==0.8.0
- jinja2==3.1.4
- jsonschema==4.22.0
- jsonschema-specifications==2023.12.1
- keyring==24.3.1
- launchpadlib==1.11.0
- lazr-restfulclient==0.14.6
- lazr-uri==1.0.6
- macaroonbakery==1.3.4
- markupsafe==2.1.5
- mccabe==0.7.0
- more-itertools==10.2.0
- oauthlib==3.2.2
- overrides==7.7.0
- packaging==24.0
- platformdirs==4.2.2
- pluggy==1.5.0
- protobuf==5.26.1
- pycodestyle==2.11.1
- pycparser==2.22
- pydantic==1.10.15
- pydantic-yaml==0.11.2
- pydocstyle==6.3.0
- pyfakefs==5.4.1
- pyflakes==3.2.0
- pygit2==1.14.1
- pymacaroons==0.13.0
- pynacl==1.5.0
- pyparsing==3.1.2
- pyrfc3339==1.1
- pytest==8.2.0
- pytest-check==2.3.1
- pytest-cov==5.0.0
- pytest-mock==3.14.0
- pytest-subprocess==1.5.0
- python-dateutil==2.9.0.post0
- pytz==2024.1
- pyxdg==0.28
- pyyaml==6.0.1
- referencing==0.35.1
- requests==2.31.0
- requests-toolbelt==1.0.0
- requests-unixsocket==0.3.0
- responses==0.25.0
- rpds-py==0.18.1
- ruff==0.11.2
- secretstorage==3.3.3
- setuptools==70.0.0
- six==1.16.0
- snap-helpers==0.4.2
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- tabulate==0.9.0
- tomli==2.2.1
- types-deprecated==1.2.9.20240311
- types-pyyaml==6.0.12.20240311
- typing-extensions==4.11.0
- urllib3==1.26.18
- wadllib==1.3.6
- wrapt==1.16.0
- zipp==3.19.1
prefix: /opt/conda/envs/charmcraft
| [
"tests/extensions/test_gunicorn.py::test_apply_extensions_correct[input_yaml0-False-expected0]",
"tests/extensions/test_gunicorn.py::test_apply_extensions_correct[input_yaml1-True-expected1]",
"tests/extensions/test_gunicorn.py::test_handle_charm_part"
] | [] | [
"tests/extensions/test_gunicorn.py::test_flask_protected_fields[type]",
"tests/extensions/test_gunicorn.py::test_flask_protected_fields[containers]",
"tests/extensions/test_gunicorn.py::test_flask_protected_fields[peers]",
"tests/extensions/test_gunicorn.py::test_flask_protected_fields[resources]",
"tests/extensions/test_gunicorn.py::test_flask_merge_options",
"tests/extensions/test_gunicorn.py::test_flask_merge_action",
"tests/extensions/test_gunicorn.py::test_flask_merge_relation",
"tests/extensions/test_gunicorn.py::test_flask_merge_charm_libs",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[devices]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[extra-bindings]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[storage]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[duplicate-options]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[duplicate-requires]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[duplicate-provides]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[reserved-config-prefix-webserver]",
"tests/extensions/test_gunicorn.py::test_flask_incompatible_fields[reserved-config-prefix-flask]"
] | [] | Apache License 2.0 | 19,031 | 823 | [
"charmcraft/extensions/gunicorn.py",
"charmcraft/linters.py"
] |
|
nipy__nipype-3658 | e03ab6f99f85fb54bc5f1ed2d9222af8f5bd66e0 | 2024-07-20 15:17:26 | 4d1352ade7171fd5f55eff62cee4c99a4f9cfed1 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/nipy/nipype/pull/3658?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nipy) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 39.50%. Comparing base [(`4d1352a`)](https://app.codecov.io/gh/nipy/nipype/commit/4d1352ade7171fd5f55eff62cee4c99a4f9cfed1?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nipy) to head [(`e1da594`)](https://app.codecov.io/gh/nipy/nipype/commit/e1da594a7aba5bebc53d419bf42451761e1c3c8e?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nipy).
> :exclamation: There is a different number of reports uploaded between BASE (4d1352a) and HEAD (e1da594). Click for more details.
>
> <details><summary>HEAD has 34 uploads less than BASE</summary>
>
>| Flag | BASE (4d1352a) | HEAD (e1da594) |
>|------|------|------|
>||37|3|
></details>
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## master #3658 +/- ##
===========================================
- Coverage 70.83% 39.50% -31.33%
===========================================
Files 1276 1276
Lines 59314 59323 +9
Branches 9824 9824
===========================================
- Hits 42013 23436 -18577
- Misses 16125 35744 +19619
+ Partials 1176 143 -1033
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/nipy/nipype/pull/3658?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nipy).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nipy).
| diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py
index 7e25288d1..8a69232ec 100644
--- a/nipype/interfaces/mrtrix3/utils.py
+++ b/nipype/interfaces/mrtrix3/utils.py
@@ -241,6 +241,49 @@ class Generate5ttInputSpec(MRTrix3BaseInputSpec):
desc="input image / directory",
)
out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image")
+ t2_image = File(
+ exists=True,
+ argstr="-t2 %s",
+ desc="Provide a T2-weighted image in addition to the default T1-weighted image. (Only for 'fsl' algorithm)",
+ )
+ mask_file = File(
+ exists=True,
+ argstr="-mask %s",
+ desc="Provide a brain mask image. (Only for 'fsl' algorithm)",
+ )
+ premasked = traits.Bool(
+ argstr="-premasked",
+ desc="Assume that the input image is already brain-masked. (Only for 'fsl' algorithm)",
+ )
+ nocrop = traits.Bool(
+ argstr="-nocrop",
+ desc="Do not crop the image to the region of interest.",
+ )
+ sgm_amyg_hipp = traits.Bool(
+ argstr="-sgm_amyg_hipp",
+ desc="Include the amygdala and hippocampus in the subcortical grey matter segment.",
+ )
+ template = File(
+ exists=True,
+ argstr="-template %s",
+ desc="Provide an image that will form the template for the generated 5TT image. (Only for 'hsvs' algorithm)",
+ )
+ hippocampi = traits.Enum(
+ "subfields",
+ "first",
+ "aseg",
+ argstr="-hippocampi %s",
+ desc="Choose the method used to segment the hippocampi. (Only for 'freesurfer' algorithm)",
+ )
+ white_stem = traits.Bool(
+ argstr="-white_stem",
+ desc="Classify the brainstem as white matter. (Only for 'hsvs' algorithm)",
+ )
+ lut_file = File(
+ exists=True,
+ argstr="-lut %s",
+ desc="Manually provide path to the lookup table on which the input parcellation image is based. (Only for 'freesurfer' algorithm)",
+ )
class Generate5ttOutputSpec(TraitedSpec):
| [ENH] Enhance implementation of 5ttgen
### Summary
Current implementation lacks some inputs that are available through the original command ([MRTrix3's `5ttgen`](https://mrtrix.readthedocs.io/en/dev/reference/commands/5ttgen.html))
### Actual behavior
Currently only `in_file`, `algorithm` and `out_file` are available as inputs.
### Expected behavior
It'd be great to pass some other available inputs through the original algorithm...
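For illustration, a hypothetical sketch of the desired usage once the extra options are exposed as interface traits (the `mask_file`, `premasked` and `nocrop` names below are assumptions mirroring `5ttgen`'s own flags; they do not exist in the current interface, so this snippet would fail against the current release):
```python
from nipype.interfaces.mrtrix3 import Generate5tt

gen = Generate5tt()
gen.inputs.algorithm = "fsl"       # existing input
gen.inputs.in_file = "T1.mif"      # existing input
gen.inputs.out_file = "5tt.mif"    # existing input

# Proposed additions, mirroring the command's own options:
gen.inputs.mask_file = "mask.mif"  # would render as "-mask mask.mif"
gen.inputs.premasked = True        # would render as "-premasked"
gen.inputs.nocrop = True           # would render as "-nocrop"

print(gen.cmdline)
# e.g. "5ttgen ... fsl T1.mif 5tt.mif" with the -mask/-premasked/-nocrop flags included
```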
### Platform details:
```
{'commit_hash': '4d1352ade',
'commit_source': 'repository',
'networkx_version': '3.3',
'nibabel_version': '5.2.1',
'nipype_version': '1.8.7.dev0',
'numpy_version': '2.0.0',
'pkg_path': '/home/galkepler/Projects/nipype/nipype',
'scipy_version': '1.14.0',
'sys_executable': '/home/galkepler/Projects/nipype/venv/bin/python',
'sys_platform': 'linux',
'sys_version': '3.11.8 | packaged by conda-forge | (main, Feb 16 2024, '
'20:53:32) [GCC 12.3.0]',
'traits_version': '6.3.2'}
```
### Execution environment
- My python environment outside container
| nipy/nipype | diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py
index 949fa2628..d8f6e5336 100644
--- a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py
+++ b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py
@@ -28,6 +28,9 @@ def test_Generate5tt_inputs():
argstr="-fslgrad %s %s",
xor=["grad_file"],
),
+ hippocampi=dict(
+ argstr="-hippocampi %s",
+ ),
in_bval=dict(
extensions=None,
),
@@ -40,6 +43,17 @@ def test_Generate5tt_inputs():
mandatory=True,
position=-2,
),
+ lut_file=dict(
+ argstr="-lut %s",
+ extensions=None,
+ ),
+ mask_file=dict(
+ argstr="-mask %s",
+ extensions=None,
+ ),
+ nocrop=dict(
+ argstr="-nocrop",
+ ),
nthreads=dict(
argstr="-nthreads %d",
nohash=True,
@@ -57,6 +71,23 @@ def test_Generate5tt_inputs():
mandatory=True,
position=-1,
),
+ premasked=dict(
+ argstr="-premasked",
+ ),
+ sgm_amyg_hipp=dict(
+ argstr="-sgm_amyg_hipp",
+ ),
+ t2_image=dict(
+ argstr="-t2 %s",
+ extensions=None,
+ ),
+ template=dict(
+ argstr="-template %s",
+ extensions=None,
+ ),
+ white_stem=dict(
+ argstr="-white_stem",
+ ),
)
inputs = Generate5tt.input_spec()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
black==25.1.0
bleach==6.2.0
certifi==2025.1.31
charset-normalizer==3.4.1
ci-info==0.3.0
click==8.1.8
codecov==2.1.13
contourpy==1.3.0
coverage==7.8.0
cycler==0.12.1
decorator==5.2.1
deepdiff==8.4.2
defusedxml==0.7.1
dipy==1.10.0
docutils==0.21.2
etelemetry==0.3.1
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
fonttools==4.56.0
h5py==3.13.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipython==8.18.1
isodate==0.6.1
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterlab_pygments==0.3.0
kiwisolver==1.4.7
looseversion==1.3.0
lxml==5.3.1
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mistune==3.1.3
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbsphinx==0.9.7
networkx==3.2.1
nibabel==5.3.2
-e git+https://github.com/nipy/nipype.git@e03ab6f99f85fb54bc5f1ed2d9222af8f5bd66e0#egg=nipype
numpy==1.26.4
orderly-set==5.3.0
packaging==24.2
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.12.1
pbr==6.1.1
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
prompt_toolkit==3.0.50
prov==2.0.1
ptyprocess==0.7.0
pure_eval==0.2.3
pydot==3.0.4
Pygments==2.19.1
pyparsing==3.2.3
pytest==8.3.5
pytest-cov==6.0.0
pytest-doctestplus==1.4.0
pytest-env==1.1.5
pytest-timeout==2.3.1
python-dateutil==2.9.0.post0
pyzmq==26.3.0
rdflib==6.3.2
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
scipy==1.13.1
setuptools-scm==8.2.0
simplejson==3.20.1
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinx-argparse==0.4.0
sphinxcontrib-apidoc==0.5.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
tqdm==4.67.1
traitlets==5.14.3
traits==6.3.2
trx-python==0.3
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.21.0
| name: nipype
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- black==25.1.0
- bleach==6.2.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- ci-info==0.3.0
- click==8.1.8
- codecov==2.1.13
- contourpy==1.3.0
- coverage==7.8.0
- cycler==0.12.1
- decorator==5.2.1
- deepdiff==8.4.2
- defusedxml==0.7.1
- dipy==1.10.0
- docutils==0.21.2
- etelemetry==0.3.1
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- fonttools==4.56.0
- h5py==3.13.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipython==8.18.1
- isodate==0.6.1
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyterlab-pygments==0.3.0
- kiwisolver==1.4.7
- looseversion==1.3.0
- lxml==5.3.1
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mistune==3.1.3
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbsphinx==0.9.7
- networkx==3.2.1
- nibabel==5.3.2
- nipype==1.8.7.dev0
- numpy==1.26.4
- orderly-set==5.3.0
- packaging==24.2
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.12.1
- pbr==6.1.1
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- prov==2.0.1
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pydot==3.0.4
- pygments==2.19.1
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-doctestplus==1.4.0
- pytest-env==1.1.5
- pytest-timeout==2.3.1
- python-dateutil==2.9.0.post0
- pyzmq==26.3.0
- rdflib==6.3.2
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- scipy==1.13.1
- setuptools-scm==8.2.0
- simplejson==3.20.1
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinx-argparse==0.4.0
- sphinxcontrib-apidoc==0.5.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- tqdm==4.67.1
- traitlets==5.14.3
- traits==6.3.2
- trx-python==0.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.21.0
prefix: /opt/conda/envs/nipype
| [
"nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py::test_Generate5tt_inputs"
] | [] | [
"nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py::test_Generate5tt_outputs"
] | [] | Apache License 2.0 | 19,033 | 608 | [
"nipype/interfaces/mrtrix3/utils.py"
] |
yt-dlp__yt-dlp-10516 | a3bab4752a2b3d56e5a59b4e0411bb8f695c010b | 2024-07-21 04:15:54 | 4b69e1b53ea21e631cd5dd68ff531e2f1671ec17 | diff --git a/yt_dlp/extractor/tiktok.py b/yt_dlp/extractor/tiktok.py
index aa1dcecf6..9d823a315 100644
--- a/yt_dlp/extractor/tiktok.py
+++ b/yt_dlp/extractor/tiktok.py
@@ -23,7 +23,6 @@
mimetype2ext,
parse_qs,
qualities,
- remove_start,
srt_subtitles_timecode,
str_or_none,
traverse_obj,
@@ -254,7 +253,16 @@ def _extract_web_data_and_status(self, url, video_id, fatal=True):
def _get_subtitles(self, aweme_detail, aweme_id, user_name):
# TODO: Extract text positioning info
+
+ EXT_MAP = { # From lowest to highest preference
+ 'creator_caption': 'json',
+ 'srt': 'srt',
+ 'webvtt': 'vtt',
+ }
+ preference = qualities(tuple(EXT_MAP.values()))
+
subtitles = {}
+
# aweme/detail endpoint subs
captions_info = traverse_obj(
aweme_detail, ('interaction_stickers', ..., 'auto_video_caption_info', 'auto_captions', ...), expected_type=dict)
@@ -278,8 +286,8 @@ def _get_subtitles(self, aweme_detail, aweme_id, user_name):
if not caption.get('url'):
continue
subtitles.setdefault(caption.get('lang') or 'en', []).append({
- 'ext': remove_start(caption.get('caption_format'), 'web'),
'url': caption['url'],
+ 'ext': EXT_MAP.get(caption.get('Format')),
})
# webpage subs
if not subtitles:
@@ -288,9 +296,14 @@ def _get_subtitles(self, aweme_detail, aweme_id, user_name):
self._create_url(user_name, aweme_id), aweme_id, fatal=False)
for caption in traverse_obj(aweme_detail, ('video', 'subtitleInfos', lambda _, v: v['Url'])):
subtitles.setdefault(caption.get('LanguageCodeName') or 'en', []).append({
- 'ext': remove_start(caption.get('Format'), 'web'),
'url': caption['Url'],
+ 'ext': EXT_MAP.get(caption.get('Format')),
})
+
+ # Deprioritize creator_caption json since it can't be embedded or used by media players
+ for lang, subs_list in subtitles.items():
+ subtitles[lang] = sorted(subs_list, key=lambda x: preference(x['ext']))
+
return subtitles
def _parse_url_key(self, url_key):
diff --git a/yt_dlp/extractor/youtube.py b/yt_dlp/extractor/youtube.py
index 53aca3816..7364e8a2e 100644
--- a/yt_dlp/extractor/youtube.py
+++ b/yt_dlp/extractor/youtube.py
@@ -3131,7 +3131,14 @@ def _decrypt_nsig(self, s, video_id, player_url):
def _extract_n_function_name(self, jscode):
funcname, idx = self._search_regex(
- r'''(?x)(?:\.get\("n"\)\)&&\(b=|b=String\.fromCharCode\(110\),c=a\.get\(b\)\)&&\(c=)
+ r'''(?x)
+ (?:
+ \.get\("n"\)\)&&\(b=|
+ (?:
+ b=String\.fromCharCode\(110\)|
+ ([a-zA-Z0-9$.]+)&&\(b="nn"\[\+\1\]
+ ),c=a\.get\(b\)\)&&\(c=
+ )
(?P<nfunc>[a-zA-Z0-9$]+)(?:\[(?P<idx>\d+)\])?\([a-zA-Z0-9]\)''',
jscode, 'Initial JS player n function name', group=('nfunc', 'idx'))
if not idx:
| [tiktok] ERROR: The extracted extension ('eng-US.creator_caption') is unusual and will be skipped for safety reasons.
### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
- [X] I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
### Checklist
- [X] I'm reporting that yt-dlp is broken on a **supported** site
- [X] I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details
- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- [X] I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- [ ] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
### Region
Earth
### Provide a description that is worded well enough to be understood
Some subtitles from TikTok have an unusual extension, which causes the video not to be downloaded since an error is raised.
I used `--compat-opt allow-unsafe-ext` to force the download and it contained the following:
`{"utterances":[{"text":"Casey Casey Casey","start_time":853,"end_time":2286,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.5533,"source_height":0.0568},{"text":"Alien vs. Predator","start_time":20721,"end_time":22080,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.5047,"source_height":0.0568},{"text":" a Mexican arguing with a Catholic priest","start_time":15656,"end_time":19938,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.7199,"source_height":0.1047},{"text":"no...","start_time":15133,"end_time":15655,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.1547,"source_height":0.0568},{"text":"what do you call a Mexican arguing with a Catholic priest?","start_time":4533,"end_time":9486,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.8303,"source_height":0.1047},{"text":"oh my god it's hot","start_time":22081,"end_time":23218,"words":null,"text_size":22,"text_color":"#ffffffff","bg_color":"#00000000","alignment":0,"source_width":0.5065,"source_height":0.0568}]}`
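One way to avoid the unsafe extension is to map TikTok's caption format names onto well-known extensions instead of letting the raw format string leak into the filename, which is roughly what the `EXT_MAP` in the patch above does. A minimal standalone sketch (the helper name is assumed, not yt-dlp's exact code):

```python
# Map TikTok caption format names to safe, well-known extensions.
EXT_MAP = {
    'creator_caption': 'json',  # styled/positioned captions, a JSON blob
    'srt': 'srt',
    'webvtt': 'vtt',
}

def safe_caption_ext(caption_format):
    # Unknown formats return None so callers can skip them rather than
    # produce something like "eng-US.creator_caption".
    return EXT_MAP.get(caption_format)

print(safe_caption_ext('creator_caption'))  # -> 'json'
```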
### Provide verbose output that clearly demonstrates the problem
- [X] Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
- [ ] If using API, add `'verbose': True` to `YoutubeDL` params instead
- [X] Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
### Complete Verbose Output
```shell
[debug] Command-line config: ['-vU', '--write-subs', 'https://www.tiktok.com/@gliceron/video/7236544964514123050']
[debug] Encodings: locale UTF-8, fs utf-8, pref UTF-8, out utf-8, error utf-8, screen utf-8
[debug] yt-dlp version [email protected] from yt-dlp/yt-dlp-nightly-builds [a3bab4752] (zip)
[debug] Python 3.9.19 (CPython amd64 64bit) - FreeBSD-13.2-RELEASE-p1-amd64-64bit-ELF (OpenSSL 1.1.1w-freebsd 11 Sep 2023, libc 7)
[debug] exe versions: ffmpeg 6.1.1 (setts), ffprobe 6.1.1, rtmpdump 2.4
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.12.07, mutagen-1.46.0, requests-2.32.3, sqlite3-3.46.0, urllib3-2.1.0, websockets-12.0
[debug] Proxy map: {}
[debug] Request Handlers: urllib, requests, websockets
[debug] Plugin directories: ['/home/weird/.local/lib/python3.9/site-packages/yt_dlp_plugins']
[debug] Loaded 1829 extractors
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
Latest version: [email protected] from yt-dlp/yt-dlp-nightly-builds
yt-dlp is up to date ([email protected] from yt-dlp/yt-dlp-nightly-builds)
[TikTok] Extracting URL: https://www.tiktok.com/@gliceron/video/7236544964514123050
[TikTok] 7236544964514123050: Downloading webpage
[debug] [TikTok] Found universal data for rehydration
[info] 7236544964514123050: Downloading subtitles: eng-US
[debug] Formats sorted by: hasvid, ie_pref, lang, quality, res, fps, hdr:12(7), vcodec:vp9.2(10), channels, acodec, size, br, asr, proto, vext, aext, hasaud, source, id
[debug] Default format spec: bestvideo*+bestaudio/best
[info] 7236544964514123050: Downloading 1 format(s): bytevc1_1080p_1516867-1
ERROR: The extracted extension ('eng-US.creator_caption') is unusual and will be skipped for safety reasons. If you believe this is an error, please report this issue on https://github.com/yt-dlp/yt-dlp/issues?q= , filling out the appropriate issue template. Confirm you are on the latest version using yt-dlp -U
Traceback (most recent call last):
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/YoutubeDL.py", line 179, in wrapper
return func(self, *args, **kwargs)
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/YoutubeDL.py", line 3271, in process_info
sub_files = self._write_subtitles(info_dict, temp_filename)
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/YoutubeDL.py", line 4328, in _write_subtitles
sub_filename = subtitles_filename(filename, sub_lang, sub_format, info_dict.get('ext'))
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/utils/_utils.py", line 1262, in subtitles_filename
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/utils/_utils.py", line 2097, in _change_extension
File "/home/weird/bin/yt-dlp-nightly/yt_dlp/utils/_utils.py", line 5167, in sanitize_extension
yt_dlp.utils._UnsafeExtensionError: unsafe file extension: 'eng-US.creator_caption'
```
| yt-dlp/yt-dlp | diff --git a/test/test_youtube_signature.py b/test/test_youtube_signature.py
index a14bef511..ae167d16d 100644
--- a/test/test_youtube_signature.py
+++ b/test/test_youtube_signature.py
@@ -171,6 +171,10 @@
'https://www.youtube.com/s/player/b22ef6e7/player_ias.vflset/en_US/base.js',
'b6HcntHGkvBLk_FRf', 'kNPW6A7FyP2l8A',
),
+ (
+ 'https://www.youtube.com/s/player/3400486c/player_ias.vflset/en_US/base.js',
+ 'lL46g3XifCKUZn1Xfw', 'z767lhet6V2Skl',
+ ),
]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 2024.07 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"ffmpeg",
"ffprobe"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | autopep8==2.3.2
Brotli==1.1.0
certifi==2025.1.31
cfgv==3.4.0
charset-normalizer==3.4.1
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
ffmpeg==1.4
ffprobe==0.5
filelock==3.18.0
identify==2.6.9
idna==3.10
iniconfig==2.1.0
mutagen==1.47.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
pycodestyle==2.13.0
pycryptodomex==3.22.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
PyYAML==6.0.2
requests==2.32.3
ruff==0.5.7
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
websockets==15.0.1
-e git+https://github.com/yt-dlp/yt-dlp.git@a3bab4752a2b3d56e5a59b4e0411bb8f695c010b#egg=yt_dlp
| name: yt-dlp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- autopep8==2.3.2
- brotli==1.1.0
- certifi==2025.1.31
- cfgv==3.4.0
- charset-normalizer==3.4.1
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- ffmpeg==1.4
- ffprobe==0.5
- filelock==3.18.0
- identify==2.6.9
- idna==3.10
- iniconfig==2.1.0
- mutagen==1.47.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pycodestyle==2.13.0
- pycryptodomex==3.22.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- requests==2.32.3
- ruff==0.5.7
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- websockets==15.0.1
- yt-dlp==2024.7.16
prefix: /opt/conda/envs/yt-dlp
| [
"test/test_youtube_signature.py::TestSignature::test_nsig_js_3400486c"
] | [] | [
"test/test_youtube_signature.py::TestPlayerInfo::test_youtube_extract_player_info",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_009f1d77",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_113ca41c",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_1f7d5369",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_2dfe380c",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_324f67b9",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_4c3f79c5",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_590f65a6",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_5a3b6271",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_5dd88d1d",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_6f20102c",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_7862ca1f",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_7a062b77",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_8040e515",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_8c7583ff",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_9216d1f7",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_b22ef6e7",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_b7910ca8",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_c57c113c",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_c81bbb4a",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_cfa9e7cb",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_dac945fd",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_dc0c6770",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_e06dea74",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_f1ca6900",
"test/test_youtube_signature.py::TestSignature::test_nsig_js_f8cb7a3b",
"test/test_youtube_signature.py::TestSignature::test_signature_js_6ed0d907",
"test/test_youtube_signature.py::TestSignature::test_signature_js_mVwz",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vfl0Cbn9e",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vfl9FYC6l",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vflBb0OQx",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vflCGk6yw",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vflHOr_nV",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vflKjOTVq",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vflXGBaUN",
"test/test_youtube_signature.py::TestSignature::test_signature_js_vfldJ8xgI"
] | [] | The Unlicense | 19,037 | 941 | [
"yt_dlp/extractor/tiktok.py",
"yt_dlp/extractor/youtube.py"
] |
|
transientskp__pyse-57 | b33e6b3a2b2838c268852101f17e50b19832b622 | 2024-07-23 11:54:18 | 8bd77c9ae181ade28a0f78ceeed02fe25e90ccd6 | diff --git a/sourcefinder/extract.py b/sourcefinder/extract.py
index 1fd5bbf..cb74d86 100644
--- a/sourcefinder/extract.py
+++ b/sourcefinder/extract.py
@@ -241,7 +241,7 @@ class ParamSet(MutableMapping):
self.alpha_maj3 = alpha_maj3
self.alpha_min3 = alpha_min3
- self.values = {
+ self.measurements = {
'peak': Uncertain(),
'flux': Uncertain(),
'xbar': Uncertain(),
@@ -268,31 +268,29 @@ class ParamSet(MutableMapping):
self.reduced_chisq = None
def __getitem__(self, item):
- return self.values[item]
+ return self.measurements[item]
def __setitem__(self, item, value):
- if item in self.values:
+ if item in self.measurements:
if isinstance(value, Uncertain):
- self.values[item] = value
+ self.measurements[item] = value
else:
- self.values[item].value = value
- elif item[:3] == 'err' and item[3:] in self.values:
- self.values[item[3:]].error = value
+ self.measurements[item].value = value
else:
raise AttributeError("Invalid parameter")
def __delitem__(self, key):
- raise Exception
+ del self.measurements[key]
def __iter__(self):
- raise Exception
+ return iter(self.measurements)
def __len__(self):
- raise Exception
+ return len(self.measurements)
def keys(self):
""" """
- return list(self.values.keys())
+ return list(self.measurements.keys())
def calculate_errors(self, noise, max_pix_variance_factor, correlation_lengths, threshold):
"""Calculate positional errors
| Implementation of `ParamSet` is inconsistent
### Description
Output from mypy
```
sourcefinder/extract.py:244: error: Cannot assign to a method [method-assign]
sourcefinder/extract.py:244: error: Incompatible types in assignment (expression has type "dict[str, Uncertain]", variable has type "Callable[[], ValuesView[Any]]") [assignment]
sourcefinder/extract.py:271: error: Value of type "Callable[[], ValuesView[Any]]" is not indexable [index]
sourcefinder/extract.py:274: error: Unsupported right operand type for in ("Callable[[], ValuesView[Any]]") [operator]
sourcefinder/extract.py:276: error: Unsupported target for indexed assignment ("Callable[[], ValuesView[Any]]") [index]
sourcefinder/extract.py:278: error: Value of type "Callable[[], ValuesView[Any]]" is not indexable [index]
sourcefinder/extract.py:279: error: Unsupported right operand type for in ("Callable[[], ValuesView[Any]]") [operator]
sourcefinder/extract.py:280: error: Value of type "Callable[[], ValuesView[Any]]" is not indexable [index]
sourcefinder/extract.py:295: error: "Callable[[], ValuesView[Any]]" has no attribute "keys" [attr-defined]
```
The implementation is inconsistent. While it inherits from `collections.abc.MutableMapping`, it doesn't respect the [mapping API](https://docs.python.org/3/glossary.html#term-mapping)
Problem is here (the first error above):
https://github.com/transientskp/pyse/blob/1de96f08469410abccd9e7a973f89d963faa4c2a/sourcefinder/extract.py#L244-L255
For example, in the line mentioned in the error above, it should not be overriding the `values()` method. AFAIU, the implementation stores its data as a regular `dict` in `ParamSet.values`, and all downstream use looks like this: `instance.values.<dict_method_call>` instead of `instance.<dict_method_call>`.
### Resolution
1. Either `MutableMapping` should be removed.
```diff
@@ -220,7 +220,7 @@ class Island(object):
return measurement, gauss_residual
-class ParamSet(MutableMapping):
+class ParamSet:
"""
All the source fitting methods should go to produce a ParamSet, which
gives all the information necessary to make a Detection.
```
2. Or the implementation should be fixed to reflect the Python mapping API.
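A minimal sketch of option 2, assuming the backing dict is renamed (to `measurements`, as in the patch above) so it no longer shadows `dict.values()`, and the full mapping protocol is implemented; the key set and value types here are simplified stand-ins for the real class:

```python
from collections.abc import MutableMapping

class ParamSet(MutableMapping):
    # Sketch only: the real class stores Uncertain() values and more keys.
    def __init__(self):
        self.measurements = {'peak': None, 'flux': None}

    def __getitem__(self, key):
        return self.measurements[key]

    def __setitem__(self, key, value):
        if key not in self.measurements:
            raise AttributeError("Invalid parameter")
        self.measurements[key] = value

    def __delitem__(self, key):
        del self.measurements[key]

    def __iter__(self):
        return iter(self.measurements)

    def __len__(self):
        return len(self.measurements)

# With the full protocol in place, the inherited mixins (keys(), items(),
# update(), ...) work as expected and mypy stops flagging the class:
p = ParamSet()
p['peak'] = 10
assert list(p.keys()) == ['peak', 'flux']
assert len(p) == 2
```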
| transientskp/pyse | diff --git a/test/test_errors.py b/test/test_errors.py
index 1446037..19ed95a 100644
--- a/test/test_errors.py
+++ b/test/test_errors.py
@@ -15,7 +15,7 @@ class DummyImage(object):
def get_paramset():
paramset = ParamSet()
paramset.sig = 1
- paramset.values = {
+ paramset.measurements = {
'peak': Uncertain(10, 0),
'flux': Uncertain(10, 0),
'semimajor': Uncertain(10, 1),
@@ -100,13 +100,13 @@ class TestPositionErrors(unittest.TestCase):
# See HipChat discussion of 2013-08-28.
# Values of all parameters are dummies except for the pixel position.
# First, construct a source at the NCP
- self.p.values['xbar'] = Uncertain(1025, 1)
- self.p.values['ybar'] = Uncertain(1025, 1)
+ self.p.measurements['xbar'] = Uncertain(1025, 1)
+ self.p.measurements['ybar'] = Uncertain(1025, 1)
d_ncp = Detection(self.p, self.ncp_image)
# Then construct a source somewhere away from the NCP
- self.p.values['xbar'] = Uncertain(125, 1)
- self.p.values['ybar'] = Uncertain(125, 1)
+ self.p.measurements['xbar'] = Uncertain(125, 1)
+ self.p.measurements['ybar'] = Uncertain(125, 1)
d_not_ncp = Detection(self.p, self.ncp_image)
# One source is at higher declination
@@ -118,24 +118,24 @@ class TestPositionErrors(unittest.TestCase):
def test_error_radius_value(self):
# Demonstrate that we calculate the expected value for the error
# radius
- self.p.values['xbar'] = Uncertain(1025, 1)
- self.p.values['ybar'] = Uncertain(1025, 1)
+ self.p.measurements['xbar'] = Uncertain(1025, 1)
+ self.p.measurements['ybar'] = Uncertain(1025, 1)
d_ncp = Detection(self.p, self.ncp_image)
# cdelt gives the per-pixel increment along the axis in degrees
# Detection.error_radius is in arcsec
expected_error_radius = math.sqrt(
- (self.p.values['xbar'].error * self.ncp_image.wcs.cdelt[
+ (self.p.measurements['xbar'].error * self.ncp_image.wcs.cdelt[
0] * 3600) ** 2 +
- (self.p.values['ybar'].error * self.ncp_image.wcs.cdelt[
+ (self.p.measurements['ybar'].error * self.ncp_image.wcs.cdelt[
1] * 3600) ** 2
)
self.assertAlmostEqual(d_ncp.error_radius, expected_error_radius, 6)
def test_error_radius_with_dec(self):
- self.p.values['xbar'] = Uncertain(1025, 1)
- self.p.values['ybar'] = Uncertain(1025, 1)
+ self.p.measurements['xbar'] = Uncertain(1025, 1)
+ self.p.measurements['ybar'] = Uncertain(1025, 1)
d_ncp = Detection(self.p, self.ncp_image)
d_equator = Detection(self.p, self.equator_image)
self.assertEqual(d_ncp.error_radius, d_equator.error_radius)
diff --git a/test/test_image.py b/test/test_image.py
index e271a90..04b9abc 100644
--- a/test/test_image.py
+++ b/test/test_image.py
@@ -205,7 +205,7 @@ class TestFitFixedPositions(unittest.TestCase):
img = self.image
fake_params = sourcefinder.extract.ParamSet()
- fake_params.values.update({
+ fake_params.measurements.update({
'peak': Uncertain(0.0, 0.5),
'flux': Uncertain(0.0, 0.5),
'xbar': Uncertain(5.5, 10000.5), # Danger Will Robinson
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astropy==6.1.7
astropy-iers-data==0.2025.3.31.0.36.18
click==8.1.8
cloudpickle==3.1.1
coverage==7.8.0
dask==2025.3.0
exceptiongroup==1.2.2
fsspec==2025.3.1
importlib_metadata==8.6.1
iniconfig==2.1.0
locket==1.0.0
numpy==1.26.4
packaging==24.2
partd==1.4.2
pluggy==1.5.0
psutil==7.0.0
pyerfa==2.0.1.5
pytest==8.3.5
pytest-cov==6.0.0
python-casacore==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
-e git+https://github.com/transientskp/pyse.git@b33e6b3a2b2838c268852101f17e50b19832b622#egg=radio_pyse
scipy==1.15.2
six==1.17.0
tomli==2.2.1
toolz==1.0.0
zipp==3.21.0
| name: pyse
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astropy==6.1.7
- astropy-iers-data==0.2025.3.31.0.36.18
- click==8.1.8
- cloudpickle==3.1.1
- coverage==7.8.0
- dask==2025.3.0
- exceptiongroup==1.2.2
- fsspec==2025.3.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- locket==1.0.0
- numpy==1.26.4
- packaging==24.2
- partd==1.4.2
- pluggy==1.5.0
- psutil==7.0.0
- pyerfa==2.0.1.5
- pytest==8.3.5
- pytest-cov==6.0.0
- python-casacore==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- radio-pyse==0.3.1
- scipy==1.15.2
- six==1.17.0
- tomli==2.2.1
- toolz==1.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/pyse
| [
"test/test_errors.py::TestFluxErrors::test_negative_flux_condon",
"test/test_errors.py::TestFluxErrors::test_negative_flux_moments",
"test/test_errors.py::TestFluxErrors::test_positive_flux_condon",
"test/test_errors.py::TestFluxErrors::test_positive_flux_moments",
"test/test_errors.py::TestPositionErrors::test_error_radius_value",
"test/test_errors.py::TestPositionErrors::test_error_radius_with_dec",
"test/test_errors.py::TestPositionErrors::test_ra_error_scaling",
"test/test_image.py::TestFitFixedPositions::testErrorBoxOverlapsEdge"
] | [] | [
"test/test_image.py::TestNumpySubroutines::testBoxSlicing",
"test/test_image.py::TestMapsType::testmaps_array_type",
"test/test_image.py::TestFitFixedPositions::testBackgroundAtGivenPosition",
"test/test_image.py::TestFitFixedPositions::testForcedFitAtNans",
"test/test_image.py::TestFitFixedPositions::testGivenPositionOutsideImage",
"test/test_image.py::TestFitFixedPositions::testHighFitThreshold",
"test/test_image.py::TestFitFixedPositions::testLowFitThreshold",
"test/test_image.py::TestFitFixedPositions::testSourceAtGivenPosition",
"test/test_image.py::TestFitFixedPositions::testTooCloseToEdgePosition",
"test/test_image.py::TestSimpleImageSourceFind::testForceSourceShape",
"test/test_image.py::TestSimpleImageSourceFind::testNoLabelledIslandsCase",
"test/test_image.py::TestSimpleImageSourceFind::testSingleSourceExtraction",
"test/test_image.py::TestSimpleImageSourceFind::testWcsConversionConsistency",
"test/test_image.py::TestMaskedSource::testWholeSourceMasked",
"test/test_image.py::TestMaskedBackground::testMaskedBackgroundBlind",
"test/test_image.py::TestMaskedBackground::testMaskedBackgroundForcedFit",
"test/test_image.py::TestFailureModes::testFlatImage"
] | [] | BSD 2-Clause "Simplified" License | 19,060 | 430 | [
"sourcefinder/extract.py"
] |
|
neuroinformatics-unit__movement-243 | 9ba430212c5b9faa0e1dc72ada493666e54c4c1b | 2024-07-23 14:52:43 | fc30b5a982bd39fb2750090f989d4025d35e0db4 | sonarcloud[bot]: ## [](https://sonarcloud.io/dashboard?id=neuroinformatics-unit_movement&pullRequest=243) **Quality Gate passed**
Issues
 [0 New issues](https://sonarcloud.io/project/issues?id=neuroinformatics-unit_movement&pullRequest=243&issueStatuses=OPEN,CONFIRMED&sinceLeakPeriod=true)
 [0 Accepted issues](https://sonarcloud.io/project/issues?id=neuroinformatics-unit_movement&pullRequest=243&issueStatuses=ACCEPTED)
Measures
 [0 Security Hotspots](https://sonarcloud.io/project/security_hotspots?id=neuroinformatics-unit_movement&pullRequest=243&issueStatuses=OPEN,CONFIRMED&sinceLeakPeriod=true)
 [0.0% Coverage on New Code](https://sonarcloud.io/component_measures?id=neuroinformatics-unit_movement&pullRequest=243&metric=new_coverage&view=list)
 [0.0% Duplication on New Code](https://sonarcloud.io/component_measures?id=neuroinformatics-unit_movement&pullRequest=243&metric=new_duplicated_lines_density&view=list)
[See analysis details on SonarCloud](https://sonarcloud.io/dashboard?id=neuroinformatics-unit_movement&pullRequest=243)
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/neuroinformatics-unit/movement/pull/243?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=neuroinformatics-unit) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 99.74%. Comparing base [(`242f532`)](https://app.codecov.io/gh/neuroinformatics-unit/movement/commit/242f5328c11f21ae605169a8dc780b6d855ff478?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=neuroinformatics-unit) to head [(`3c078a3`)](https://app.codecov.io/gh/neuroinformatics-unit/movement/commit/3c078a395b70349ae922692a998523326d31015d?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=neuroinformatics-unit).
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #243 +/- ##
=======================================
Coverage 99.74% 99.74%
=======================================
Files 13 13
Lines 771 782 +11
=======================================
+ Hits 769 780 +11
Misses 2 2
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/neuroinformatics-unit/movement/pull/243?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=neuroinformatics-unit).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=neuroinformatics-unit).
niksirbi: @sfmig I'll give you a chance to comment on this before merging, because we've discussed this together. No rush
niksirbi: Thanks for the thorough review @sfmig. I believe I've addressed all your points by basically adopting all your suggestions (see edited part of the PR description). Re-requesting your review.
| diff --git a/examples/compute_kinematics.py b/examples/compute_kinematics.py
index 1ab2731..b3fefd4 100644
--- a/examples/compute_kinematics.py
+++ b/examples/compute_kinematics.py
@@ -10,14 +10,13 @@ visualise the results.
# Imports
# -------
-import numpy as np
-
# For interactive plots: install ipympl with `pip install ipympl` and uncomment
# the following line in your notebook
# %matplotlib widget
from matplotlib import pyplot as plt
from movement import sample_data
+from movement.utils.vector import compute_norm
# %%
# Load sample dataset
@@ -255,13 +254,12 @@ fig.colorbar(sc, ax=ax, label="time (s)")
# mouse along its trajectory.
# length of each displacement vector
-displacement_vectors_lengths = np.linalg.norm(
- displacement.sel(individuals=mouse_name, space=["x", "y"]).squeeze(),
- axis=1,
+displacement_vectors_lengths = compute_norm(
+ displacement.sel(individuals=mouse_name)
)
-# sum of all displacement vectors
-total_displacement = np.sum(displacement_vectors_lengths, axis=0) # in pixels
+# sum the lengths of all displacement vectors (in pixels)
+total_displacement = displacement_vectors_lengths.sum(dim="time").values[0]
print(
f"The mouse {mouse_name}'s trajectory is {total_displacement:.2f} "
@@ -299,14 +297,12 @@ plt.gcf().show()
# uses second order central differences.
# %%
-# We can also visualise the speed, as the norm of the velocity vector:
+# We can also visualise the speed, as the magnitude (norm)
+# of the velocity vector:
fig, axes = plt.subplots(3, 1, sharex=True, sharey=True)
for mouse_name, ax in zip(velocity.individuals.values, axes, strict=False):
- # compute the norm of the velocity vector for one mouse
- speed_one_mouse = np.linalg.norm(
- velocity.sel(individuals=mouse_name, space=["x", "y"]).squeeze(),
- axis=1,
- )
+ # compute the magnitude of the velocity vector for one mouse
+ speed_one_mouse = compute_norm(velocity.sel(individuals=mouse_name))
# plot speed against time
ax.plot(speed_one_mouse)
ax.set_title(mouse_name)
@@ -379,16 +375,12 @@ for mouse_name, ax in zip(accel.individuals.values, axes, strict=False):
fig.tight_layout()
# %%
-# The norm of the acceleration vector is the magnitude of the
-# acceleration.
-# We can also represent this for each individual.
+# We can also represent the magnitude (norm) of the acceleration vector
+# for each individual:
fig, axes = plt.subplots(3, 1, sharex=True, sharey=True)
for mouse_name, ax in zip(accel.individuals.values, axes, strict=False):
- # compute norm of the acceleration vector for one mouse
- accel_one_mouse = np.linalg.norm(
- accel.sel(individuals=mouse_name, space=["x", "y"]).squeeze(),
- axis=1,
- )
+ # compute magnitude of the acceleration vector for one mouse
+ accel_one_mouse = compute_norm(accel.sel(individuals=mouse_name))
# plot acceleration against time
ax.plot(accel_one_mouse)
diff --git a/movement/utils/vector.py b/movement/utils/vector.py
index c35990e..0d5d88c 100644
--- a/movement/utils/vector.py
+++ b/movement/utils/vector.py
@@ -6,6 +6,93 @@ import xarray as xr
from movement.utils.logging import log_error
+def compute_norm(data: xr.DataArray) -> xr.DataArray:
+ """Compute the norm of the vectors along the spatial dimension.
+
+ The norm of a vector is its magnitude, also called Euclidean norm, 2-norm
+ or Euclidean length. Note that if the input data is expressed in polar
+ coordinates, the magnitude of a vector is the same as its radial coordinate
+ ``rho``.
+
+ Parameters
+ ----------
+ data : xarray.DataArray
+ The input data array containing either ``space`` or ``space_pol``
+ as a dimension.
+
+ Returns
+ -------
+ xarray.DataArray
+ A data array holding the norm of the input vectors.
+ Note that this output array has no spatial dimension but preserves
+ all other dimensions of the input data array (see Notes).
+
+ Notes
+ -----
+ If the input data array is a ``position`` array, this function will compute
+ the magnitude of the position vectors, for every individual and keypoint,
+ at every timestep. If the input data array is a ``shape`` array of a
+ bounding boxes dataset, it will compute the magnitude of the shape
+ vectors (i.e., the diagonal of the bounding box),
+ for every individual and at every timestep.
+
+
+ """
+ if "space" in data.dims:
+ _validate_dimension_coordinates(data, {"space": ["x", "y"]})
+ return xr.apply_ufunc(
+ np.linalg.norm,
+ data,
+ input_core_dims=[["space"]],
+ kwargs={"axis": -1},
+ )
+ elif "space_pol" in data.dims:
+ _validate_dimension_coordinates(data, {"space_pol": ["rho", "phi"]})
+ return data.sel(space_pol="rho", drop=True)
+ else:
+ _raise_error_for_missing_spatial_dim()
+
+
+def convert_to_unit(data: xr.DataArray) -> xr.DataArray:
+ """Convert the vectors along the spatial dimension into unit vectors.
+
+ A unit vector is a vector pointing in the same direction as the original
+ vector but with norm = 1.
+
+ Parameters
+ ----------
+ data : xarray.DataArray
+ The input data array containing either ``space`` or ``space_pol``
+ as a dimension.
+
+ Returns
+ -------
+ xarray.DataArray
+ A data array holding the unit vectors of the input data array
+ (all input dimensions are preserved).
+
+ Notes
+ -----
+ Note that the unit vector for the null vector is undefined, since the null
+ vector has 0 norm and no direction associated with it.
+
+ """
+ if "space" in data.dims:
+ _validate_dimension_coordinates(data, {"space": ["x", "y"]})
+ return data / compute_norm(data)
+ elif "space_pol" in data.dims:
+ _validate_dimension_coordinates(data, {"space_pol": ["rho", "phi"]})
+ # Set both rho and phi values to NaN at null vectors (where rho = 0)
+ new_data = xr.where(data.sel(space_pol="rho") == 0, np.nan, data)
+ # Set the rho values to 1 for non-null vectors (phi is preserved)
+ new_data.loc[{"space_pol": "rho"}] = xr.where(
+ new_data.sel(space_pol="rho").isnull(), np.nan, 1
+ )
+ return new_data
+ else:
+ _raise_error_for_missing_spatial_dim()
+
+
def cart2pol(data: xr.DataArray) -> xr.DataArray:
"""Transform Cartesian coordinates to polar.
@@ -25,12 +112,7 @@ def cart2pol(data: xr.DataArray) -> xr.DataArray:
"""
_validate_dimension_coordinates(data, {"space": ["x", "y"]})
- rho = xr.apply_ufunc(
- np.linalg.norm,
- data,
- input_core_dims=[["space"]],
- kwargs={"axis": -1},
- )
+ rho = compute_norm(data)
phi = xr.apply_ufunc(
np.arctan2,
data.sel(space="y"),
@@ -122,3 +204,11 @@ def _validate_dimension_coordinates(
)
if error_message:
raise log_error(ValueError, error_message)
+
+
+def _raise_error_for_missing_spatial_dim() -> None:
+ raise log_error(
+ ValueError,
+ "Input data array must contain either 'space' or 'space_pol' "
+ "as dimensions.",
+ )
| Add norm function to the vector utils
**Is your feature request related to a problem? Please describe.**
We often need to get the magnitude of a vector.
**Describe the solution you'd like**
A `norm()` utility function that would work similarly to the existing `cart2pol` and `pol2cart` utilities.
This can facilitate #147
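A minimal sketch of what such a utility could look like, reducing over the `space` dimension with `xarray.apply_ufunc` in the same way `cart2pol` already computes `rho`; the exact name and validation details here are assumptions:
```python
import numpy as np
import xarray as xr


def compute_norm(data: xr.DataArray) -> xr.DataArray:
    """Euclidean norm of the vectors along the ``space`` dimension."""
    # the space dimension is reduced away; all other dimensions are preserved
    return xr.apply_ufunc(
        np.linalg.norm,
        data,
        input_core_dims=[["space"]],
        kwargs={"axis": -1},
    )


# e.g. speed as the magnitude of the velocity vector of one individual
# speed = compute_norm(velocity.sel(individuals=mouse_name))
```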
| neuroinformatics-unit/movement | diff --git a/tests/test_unit/test_vector.py b/tests/test_unit/test_vector.py
index 88dd85b..8787a46 100644
--- a/tests/test_unit/test_vector.py
+++ b/tests/test_unit/test_vector.py
@@ -121,3 +121,70 @@ class TestVector:
with expected_exception:
result = vector.pol2cart(ds.pol)
xr.testing.assert_allclose(result, ds.cart)
+
+ @pytest.mark.parametrize(
+ "ds, expected_exception",
+ [
+ ("cart_pol_dataset", does_not_raise()),
+ ("cart_pol_dataset_with_nan", does_not_raise()),
+ ("cart_pol_dataset_missing_cart_dim", pytest.raises(ValueError)),
+ (
+ "cart_pol_dataset_missing_cart_coords",
+ pytest.raises(ValueError),
+ ),
+ ],
+ )
+ def test_compute_norm(self, ds, expected_exception, request):
+ """Test vector norm computation with known values."""
+ ds = request.getfixturevalue(ds)
+ with expected_exception:
+ # validate the norm computation
+ result = vector.compute_norm(ds.cart)
+ expected = np.sqrt(
+ ds.cart.sel(space="x") ** 2 + ds.cart.sel(space="y") ** 2
+ )
+ xr.testing.assert_allclose(result, expected)
+
+ # result should be the same from Cartesian and polar coordinates
+ xr.testing.assert_allclose(result, vector.compute_norm(ds.pol))
+
+ # The result should only contain the time dimension.
+ assert result.dims == ("time",)
+
+ @pytest.mark.parametrize(
+ "ds, expected_exception",
+ [
+ ("cart_pol_dataset", does_not_raise()),
+ ("cart_pol_dataset_with_nan", does_not_raise()),
+ ("cart_pol_dataset_missing_cart_dim", pytest.raises(ValueError)),
+ ],
+ )
+ def test_convert_to_unit(self, ds, expected_exception, request):
+ """Test conversion to unit vectors (normalisation)."""
+ ds = request.getfixturevalue(ds)
+ with expected_exception:
+ # normalise both the Cartesian and the polar data to unit vectors
+ unit_cart = vector.convert_to_unit(ds.cart)
+ unit_pol = vector.convert_to_unit(ds.pol)
+ # they should yield the same result, just in different coordinates
+ xr.testing.assert_allclose(unit_cart, vector.pol2cart(unit_pol))
+ xr.testing.assert_allclose(unit_pol, vector.cart2pol(unit_cart))
+
+ # since we established that polar vs Cartesian unit vectors are
+ # equivalent, it's enough to do other assertions on either one
+
+ # the normalised data should have the same dimensions as the input
+ assert unit_cart.dims == ds.cart.dims
+
+ # unit vector should be NaN if the input vector was null or NaN
+ is_null_vec = (ds.cart == 0).all("space") # null vec: x=0, y=0
+ is_nan_vec = ds.cart.isnull().any("space") # any NaN in x or y
+ expected_nan_idxs = is_null_vec | is_nan_vec
+ assert unit_cart.where(expected_nan_idxs).isnull().all()
+
+ # For non-NaN unit vectors in polar coordinates, the rho values
+ # should be 1 and the phi values should be the same as the input
+ expected_unit_pol = ds.pol.copy()
+ expected_unit_pol.loc[{"space_pol": "rho"}] = 1
+ expected_unit_pol = expected_unit_pol.where(~expected_nan_idxs)
+ xr.testing.assert_allclose(unit_pol, expected_unit_pol)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pip",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.11",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Bottleneck==1.4.2
build==1.2.2.post1
cachetools==5.5.2
Cartopy==0.24.1
certifi==2025.1.31
cfgv==3.4.0
cftime==1.6.4.post1
chardet==5.2.0
charset-normalizer==3.4.1
check-manifest==0.50
codespell==2.4.1
colorama==0.4.6
contourpy==1.3.1
coverage==7.8.0
cycler==0.12.1
distlib==0.3.9
execnet==2.1.1
filelock==3.18.0
flox==0.10.1
fonttools==4.56.0
h5py==3.13.0
hdmf==3.14.6
identify==2.6.9
idna==3.10
imageio==2.37.0
imageio-ffmpeg==0.6.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
kiwisolver==1.4.8
llvmlite==0.44.0
matplotlib==3.10.1
-e git+https://github.com/neuroinformatics-unit/movement.git@9ba430212c5b9faa0e1dc72ada493666e54c4c1b#egg=movement
mypy==1.15.0
mypy-extensions==1.0.0
nc-time-axis==1.4.1
ndx-pose==0.1.1
nodeenv==1.9.1
numba==0.61.0
numbagg==0.9.0
numpy==2.1.3
numpy-groupies==0.11.2
opt_einsum==3.4.0
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.3.250308
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
pooch==1.8.2
pre_commit==4.2.0
pynwb==2.8.3
pyparsing==3.2.3
pyproj==3.7.1
pyproject-api==1.9.0
pyproject_hooks==1.2.0
pyshp==2.3.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
ruff==0.11.2
scipy==1.15.2
seaborn==0.13.2
setuptools-scm==8.2.0
shapely==2.0.7
simplejson==3.20.1
six==1.17.0
sleap-io==0.2.0
toolz==1.0.0
tox==4.25.0
tqdm==4.67.1
types-attrs==19.1.0
types-pytz==2025.2.0.20250326
types-PyYAML==6.0.12.20250326
types-requests==2.32.0.20250328
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
virtualenv==20.29.3
xarray==2025.3.1
| name: movement
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py311h06a4308_0
- python=3.11.11=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py311h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py311h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- bottleneck==1.4.2
- build==1.2.2.post1
- cachetools==5.5.2
- cartopy==0.24.1
- certifi==2025.1.31
- cfgv==3.4.0
- cftime==1.6.4.post1
- chardet==5.2.0
- charset-normalizer==3.4.1
- check-manifest==0.50
- codespell==2.4.1
- colorama==0.4.6
- contourpy==1.3.1
- coverage==7.8.0
- cycler==0.12.1
- distlib==0.3.9
- execnet==2.1.1
- filelock==3.18.0
- flox==0.10.1
- fonttools==4.56.0
- h5py==3.13.0
- hdmf==3.14.6
- identify==2.6.9
- idna==3.10
- imageio==2.37.0
- imageio-ffmpeg==0.6.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- kiwisolver==1.4.8
- llvmlite==0.44.0
- matplotlib==3.10.1
- movement==0.0.20.dev16+g9ba4302
- mypy==1.15.0
- mypy-extensions==1.0.0
- nc-time-axis==1.4.1
- ndx-pose==0.1.1
- nodeenv==1.9.1
- numba==0.61.0
- numbagg==0.9.0
- numpy==2.1.3
- numpy-groupies==0.11.2
- opt-einsum==3.4.0
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.3.250308
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pooch==1.8.2
- pre-commit==4.2.0
- pynwb==2.8.3
- pyparsing==3.2.3
- pyproj==3.7.1
- pyproject-api==1.9.0
- pyproject-hooks==1.2.0
- pyshp==2.3.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- ruff==0.11.2
- scipy==1.15.2
- seaborn==0.13.2
- setuptools-scm==8.2.0
- shapely==2.0.7
- simplejson==3.20.1
- six==1.17.0
- sleap-io==0.2.0
- toolz==1.0.0
- tox==4.25.0
- tqdm==4.67.1
- types-attrs==19.1.0
- types-pytz==2025.2.0.20250326
- types-pyyaml==6.0.12.20250326
- types-requests==2.32.0.20250328
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- virtualenv==20.29.3
- xarray==2025.3.1
prefix: /opt/conda/envs/movement
| [
"tests/test_unit/test_vector.py::TestVector::test_compute_norm[cart_pol_dataset-expected_exception0]",
"tests/test_unit/test_vector.py::TestVector::test_compute_norm[cart_pol_dataset_with_nan-expected_exception1]",
"tests/test_unit/test_vector.py::TestVector::test_compute_norm[cart_pol_dataset_missing_cart_dim-expected_exception2]",
"tests/test_unit/test_vector.py::TestVector::test_compute_norm[cart_pol_dataset_missing_cart_coords-expected_exception3]",
"tests/test_unit/test_vector.py::TestVector::test_convert_to_unit[cart_pol_dataset-expected_exception0]",
"tests/test_unit/test_vector.py::TestVector::test_convert_to_unit[cart_pol_dataset_with_nan-expected_exception1]",
"tests/test_unit/test_vector.py::TestVector::test_convert_to_unit[cart_pol_dataset_missing_cart_dim-expected_exception2]"
] | [] | [
"tests/test_unit/test_vector.py::TestVector::test_cart2pol[cart_pol_dataset-expected_exception0]",
"tests/test_unit/test_vector.py::TestVector::test_cart2pol[cart_pol_dataset_with_nan-expected_exception1]",
"tests/test_unit/test_vector.py::TestVector::test_cart2pol[cart_pol_dataset_missing_cart_dim-expected_exception2]",
"tests/test_unit/test_vector.py::TestVector::test_cart2pol[cart_pol_dataset_missing_cart_coords-expected_exception3]",
"tests/test_unit/test_vector.py::TestVector::test_pol2cart[cart_pol_dataset-expected_exception0]",
"tests/test_unit/test_vector.py::TestVector::test_pol2cart[cart_pol_dataset_with_nan-expected_exception1]",
"tests/test_unit/test_vector.py::TestVector::test_pol2cart[cart_pol_dataset_missing_pol_dim-expected_exception2]",
"tests/test_unit/test_vector.py::TestVector::test_pol2cart[cart_pol_dataset_missing_pol_coords-expected_exception3]"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,061 | 1,897 | [
"examples/compute_kinematics.py",
"movement/utils/vector.py"
] |
TDAmeritrade__stumpy-1015 | 3077d0ddfb315464321dc86f8ec3bf2cab9ce3b1 | 2024-07-23 17:15:21 | ce0cd8c62be3d59a4cdc80c2870999357424f9d8 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/TDAmeritrade/stumpy/pull/1015?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade) Report
Attention: Patch coverage is `90.90909%` with `2 lines` in your changes missing coverage. Please review.
> Project coverage is 97.40%. Comparing base [(`3077d0d`)](https://app.codecov.io/gh/TDAmeritrade/stumpy/commit/3077d0ddfb315464321dc86f8ec3bf2cab9ce3b1?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade) to head [(`5d5a039`)](https://app.codecov.io/gh/TDAmeritrade/stumpy/commit/5d5a039c2e016ed72c03a08fdcc38a0262940af1?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade).
| [Files](https://app.codecov.io/gh/TDAmeritrade/stumpy/pull/1015?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade) | Patch % | Lines |
|---|---|---|
| [stumpy/aamp\_motifs.py](https://app.codecov.io/gh/TDAmeritrade/stumpy/pull/1015?src=pr&el=tree&filepath=stumpy%2Faamp_motifs.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade#diff-c3R1bXB5L2FhbXBfbW90aWZzLnB5) | 60.00% | [2 Missing :warning: ](https://app.codecov.io/gh/TDAmeritrade/stumpy/pull/1015?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade) |
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #1015 +/- ##
==========================================
- Coverage 97.41% 97.40% -0.02%
==========================================
Files 89 89
Lines 14922 14940 +18
==========================================
+ Hits 14537 14553 +16
- Misses 385 387 +2
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/TDAmeritrade/stumpy/pull/1015?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=TDAmeritrade).
seanlaw: @ejorgensen-wl Thanks again for identifying this bug. The decision to do:
https://github.com/TDAmeritrade/stumpy/blob/3077d0ddfb315464321dc86f8ec3bf2cab9ce3b1/stumpy/motifs.py#L336-L337
was decided a long time ago and, in hindsight, that was a poor choice. Instead, I think we could've/should've been more specific and done:
```
if max_matches is None: # pragma: no cover
max_matches = P.shape[-1]
```
`P.shape[-1]` would be the same as the maximum number of subsequences in `T` that are possible and is a more explicit number than `np.inf`, which isn't even an integer (it's a floating point value).
What do you think about this remedy instead? Would this work?
seanlaw: > That's an excellent and much cleaner solution than what I was thinking, thanks! I implemented your suggestions in my latest commit.
@ejorgensen-wl Thank you for your patience and for working through this with us! We really appreciate the PR. Once the tests all pass, I'll merge the PR (unless you had anything further to add?).
ejorgensen-wl: I like it as is - thanks! | diff --git a/stumpy/aamp_motifs.py b/stumpy/aamp_motifs.py
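A small sketch of the reasoning above, with illustrative numbers: for a series of length `n` and window `m`, the profile has `n - m + 1` entries, one per subsequence, so it is always a valid integer cap for slicing, unlike `np.inf`:
```python
import numpy as np

n, m = 12, 3
num_subsequences = n - m + 1  # equals P.shape[-1] for stumpy.stump(T, m)

matches = np.arange(5)
print(matches[:num_subsequences])  # an int slice is always valid
# matches[:np.inf]                 # TypeError: slice indices must be integers ...
```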
index 0d5f67d..c7a852f 100644
--- a/stumpy/aamp_motifs.py
+++ b/stumpy/aamp_motifs.py
@@ -268,7 +268,7 @@ def aamp_motifs(
m = T.shape[-1] - P.shape[-1] + 1
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
if max_matches is None: # pragma: no cover
- max_matches = np.inf
+ max_matches = P.shape[-1]
if cutoff is None: # pragma: no cover
P_copy = P.copy().astype(np.float64)
P_copy[np.isinf(P_copy)] = np.nan
diff --git a/stumpy/motifs.py b/stumpy/motifs.py
index eaad4cf..8c560f6 100644
--- a/stumpy/motifs.py
+++ b/stumpy/motifs.py
@@ -334,7 +334,7 @@ def motifs(
m = T.shape[-1] - P.shape[-1] + 1
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
if max_matches is None: # pragma: no cover
- max_matches = np.inf
+ max_matches = P.shape[-1]
if cutoff is None: # pragma: no cover
P_copy = P.copy().astype(np.float64)
P_copy[np.isinf(P_copy)] = np.nan
| _motifs max_matches=None bug
When finding motifs, the function encounters a bug when `max_matches` is set to `None` to not restrict the number of matches. If the `max_matches` argument is set to `None` for `stumpy.motifs`, it assigns `max_matches = np.inf` before passing to `_motifs` which then tries to slice the `query_matches` with `np.inf` as seen in the error below.
Example:
```
>>> import numpy as np
>>> import stumpy
>>> arr = np.array([0.,1,2,2,0,1,2,2,0,0,0,0])
>>> mp = stumpy.stump(arr, m=3)
>>> stumpy.motifs(arr, mp[:,0], max_matches=None)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/user0/code/venv/lib/python3.10/site-packages/stumpy/core.py", line 138, in inner_wrapper
return norm(*args, **kwargs)
File "/Users/user0/code/venv/lib/python3.10/site-packages/stumpy/motifs.py", line 361, in motifs
motif_distances, motif_indices = _motifs(
File "/Users/user0/code/venv/lib/python3.10/site-packages/stumpy/motifs.py", line 134, in _motifs
motif_distances.append(query_matches[:max_matches, 0])
TypeError: slice indices must be integers or None or have an __index__ method
``` | TDAmeritrade/stumpy | diff --git a/tests/test_motifs.py b/tests/test_motifs.py
index e7d571f..61e9ded 100644
--- a/tests/test_motifs.py
+++ b/tests/test_motifs.py
@@ -656,3 +656,30 @@ def test_motifs_with_isconstant():
npt.assert_almost_equal(ref_distances, comp_distance)
npt.assert_almost_equal(ref_indices, comp_indices)
+
+
+def test_motifs_with_max_matches_none():
+ T = np.random.rand(16)
+ m = 3
+
+ max_motifs = 1
+ max_matches = None
+ max_distance = np.inf
+ cutoff = np.inf
+
+ # performant
+ mp = naive.stump(T, m, row_wise=True)
+ comp_distance, comp_indices = motifs(
+ T,
+ mp[:, 0].astype(np.float64),
+ min_neighbors=1,
+ max_distance=max_distance,
+ cutoff=cutoff,
+ max_matches=max_matches,
+ max_motifs=max_motifs,
+ )
+
+ ref_len = len(T) - m + 1
+
+ npt.assert_(ref_len >= comp_distance.shape[1])
+ npt.assert_(ref_len >= comp_indices.shape[1])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 1.13 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[ci]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"black",
"isort"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | black==25.1.0
click==8.1.8
cloudpickle==3.1.1
coverage==7.8.0
dask==2024.8.0
distributed==2024.8.0
exceptiongroup==1.2.2
flake8==7.2.0
flake8-docstrings==1.7.0
fsspec==2025.3.2
importlib_metadata==8.6.1
iniconfig==2.1.0
isort==6.0.1
Jinja2==3.1.6
llvmlite==0.43.0
locket==1.0.0
MarkupSafe==3.0.2
mccabe==0.7.0
msgpack==1.1.0
mypy-extensions==1.0.0
numba==0.60.0
numpy==2.0.2
packaging==24.2
pandas==2.2.3
partd==1.4.2
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
psutil==7.0.0
pycodestyle==2.13.0
pydocstyle==6.3.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
scipy==1.13.1
six==1.17.0
snowballstemmer==2.2.0
sortedcontainers==2.4.0
-e git+https://github.com/TDAmeritrade/stumpy.git@3077d0ddfb315464321dc86f8ec3bf2cab9ce3b1#egg=stumpy
tbb==2022.1.0
tblib==3.1.0
tcmlib==1.3.0
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
zict==3.0.0
zipp==3.21.0
| name: stumpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- black==25.1.0
- click==8.1.8
- cloudpickle==3.1.1
- coverage==7.8.0
- dask==2024.8.0
- distributed==2024.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- flake8-docstrings==1.7.0
- fsspec==2025.3.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isort==6.0.1
- jinja2==3.1.6
- llvmlite==0.43.0
- locket==1.0.0
- markupsafe==3.0.2
- mccabe==0.7.0
- msgpack==1.1.0
- mypy-extensions==1.0.0
- numba==0.60.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- partd==1.4.2
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- psutil==7.0.0
- pycodestyle==2.13.0
- pydocstyle==6.3.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- scipy==1.13.1
- six==1.17.0
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- stumpy==1.13.0
- tbb==2022.1.0
- tblib==3.1.0
- tcmlib==1.3.0
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- zict==3.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/stumpy
| [
"tests/test_motifs.py::test_motifs_with_max_matches_none"
] | [] | [
"tests/test_motifs.py::test_motifs_one_motif",
"tests/test_motifs.py::test_motifs_two_motifs",
"tests/test_motifs.py::test_motifs_max_matches",
"tests/test_motifs.py::test_motifs_max_matches_max_distances_inf",
"tests/test_motifs.py::test_naive_match_exclusion_zone",
"tests/test_motifs.py::test_match[Q0-T0]",
"tests/test_motifs.py::test_match[Q1-T1]",
"tests/test_motifs.py::test_match[Q2-T2]",
"tests/test_motifs.py::test_match_mean_stddev[Q0-T0]",
"tests/test_motifs.py::test_match_mean_stddev[Q1-T1]",
"tests/test_motifs.py::test_match_mean_stddev[Q2-T2]",
"tests/test_motifs.py::test_match_isconstant[Q0-T0]",
"tests/test_motifs.py::test_match_isconstant[Q1-T1]",
"tests/test_motifs.py::test_match_isconstant[Q2-T2]",
"tests/test_motifs.py::test_match_mean_stddev_isconstant[Q0-T0]",
"tests/test_motifs.py::test_match_mean_stddev_isconstant[Q1-T1]",
"tests/test_motifs.py::test_match_mean_stddev_isconstant[Q2-T2]",
"tests/test_motifs.py::test_multi_match",
"tests/test_motifs.py::test_multi_match_isconstant",
"tests/test_motifs.py::test_motifs",
"tests/test_motifs.py::test_motifs_with_isconstant"
] | [] | 3-Clause BSD license | 19,064 | 375 | [
"stumpy/aamp_motifs.py",
"stumpy/motifs.py"
] |
OpenMDAO__dymos-1087 | d61cd9792489e4cbd428d9f5b9fbe4799a75797f | 2024-07-23 20:52:06 | a7b77502f96cc7ee49b166fbde6f635b763214ef | coveralls:
[](https://coveralls.io/builds/68912280)
coverage: 92.532% (-0.2%) from 92.757%
when pulling **73a6363672317bd0530b9f5d5e2dd5267ef16572 on robfalck:load_case_set_state_vals**
into **d61cd9792489e4cbd428d9f5b9fbe4799a75797f on OpenMDAO:master**.
| diff --git a/dymos/phase/phase.py b/dymos/phase/phase.py
index 58047247..26088aaf 100644
--- a/dymos/phase/phase.py
+++ b/dymos/phase/phase.py
@@ -1,7 +1,6 @@
-from collections.abc import Iterable, Callable, Sequence
+from collections.abc import Iterable, Callable
from copy import deepcopy
import inspect
-from os import path
import warnings
import numpy as np
@@ -29,7 +28,7 @@ from ..utils.indexing import get_constraint_flat_idxs
from ..utils.introspection import configure_time_introspection, _configure_constraint_introspection, \
configure_controls_introspection, configure_parameters_introspection, \
configure_timeseries_output_introspection, classify_var, configure_timeseries_expr_introspection
-from ..utils.misc import _unspecified
+from ..utils.misc import _unspecified, create_subprob
from ..utils.lgl import lgl
@@ -58,6 +57,12 @@ class Phase(om.Group):
A phase instance from which the initialized phase should copy its data.
**kwargs : dict
Dictionary of optional phase arguments.
+
+ Attributes
+ ----------
+ sim_prob : Problem or None
+ The OpenMDAO problem used for trajectory simulation.
+ This is None unless the simulate method has been called.
"""
def __init__(self, from_phase=None, **kwargs):
_kwargs = kwargs.copy()
@@ -69,10 +74,6 @@ class Phase(om.Group):
# These are the options which will be set at setup time.
# Prior to setup, the options are placed into the user_*_options dictionaries.
- # self.time_options = TimeOptionsDictionary()
- # self.state_options = {}
- # self.control_options = {}
- # self.parameter_options = {}
self.refine_options = GridRefinementOptionsDictionary()
self.simulate_options = SimulateOptionsDictionary()
self.timeseries_ec_vars = {}
@@ -87,6 +88,7 @@ class Phase(om.Group):
'subset': 'all',
'outputs': {}}}
self._objectives = {}
+ self.sim_prob = None
super(Phase, self).__init__(**_kwargs)
@@ -2699,7 +2701,7 @@ class Phase(om.Group):
prob.set_val(prob_path, val)
def simulate(self, times_per_seg=None, method=_unspecified, atol=_unspecified, rtol=_unspecified,
- first_step=_unspecified, max_step=_unspecified, record_file=None):
+ first_step=_unspecified, max_step=_unspecified, record_file=None, reports=False):
"""
Simulate the Phase using scipy.integrate.solve_ivp.
@@ -2721,6 +2723,8 @@ class Phase(om.Group):
record_file : str or None
If a string, the file to which the result of the simulation will be saved.
If None, no record of the simulation will be saved.
+ reports : bool or None or str or Sequence
+            Reports setting for the subproblems run under simulate.
Returns
-------
@@ -2729,7 +2733,9 @@ class Phase(om.Group):
can be interrogated to obtain timeseries outputs in the same manner as other Phases
to obtain results at the requested times.
"""
- sim_prob = om.Problem(model=om.Group())
+ self.sim_prob = sim_prob = create_subprob(base_name=f'{self.name}_simulation',
+ comm=self.comm,
+ reports=reports)
sim_phase = self.get_simulation_phase(times_per_seg=times_per_seg, method=method, atol=atol, rtol=rtol,
first_step=first_step, max_step=max_step)
@@ -2740,7 +2746,10 @@ class Phase(om.Group):
rec = om.SqliteRecorder(record_file)
sim_prob.add_recorder(rec)
- sim_prob.setup(check=True)
+ if om_version <= (3, 42, 2):
+ sim_prob.setup(check=True)
+ else:
+ sim_prob.setup(check=True, parent=self)
sim_prob.final_setup()
sim_phase.set_vals_from_phase(from_phase=self)
@@ -3064,8 +3073,7 @@ class Phase(om.Group):
prev_time_val = np.flip(prev_time_val, axis=0)
unique_idxs = np.flip(unique_idxs, axis=0)
- self.set_val('t_initial', t_initial, units=prev_time_units)
- self.set_val('t_duration', t_duration, units=prev_time_units)
+ self.set_time_val(initial=t_initial, duration=t_duration, units=prev_time_units)
# Interpolate the timeseries state outputs from the previous solution onto the new grid.
if not isinstance(self, dm.AnalyticPhase):
@@ -3081,19 +3089,12 @@ class Phase(om.Group):
prev_state_val = prev_vars[prev_state_path]['val']
prev_state_units = prev_vars[prev_state_path]['units']
- interp_vals = self.interp(name=state_name,
- xs=prev_time_val,
- ys=prev_state_val[unique_idxs],
- kind='slinear')
if options['lower'] is not None or options['upper'] is not None:
- interp_vals = interp_vals.clip(options['lower'], options['upper'])
- self.set_val(f'states:{state_name}',
- interp_vals,
- units=prev_state_units)
- try:
- self.set_val(f'initial_states:{state_name}', prev_state_val[0, ...], units=prev_state_units)
- except KeyError:
- pass
+ prev_state_val = prev_state_val.clip(options['lower'], options['upper'])
+ self.set_state_val(state_name,
+ vals=prev_state_val[unique_idxs],
+ time_vals=prev_time_val,
+ units=prev_state_units)
if options['fix_final']:
warning_message = f"{phase_name}.states:{state_name} specifies 'fix_final=True'. " \
@@ -3114,13 +3115,12 @@ class Phase(om.Group):
prev_control_val = prev_vars[prev_control_path]['val']
prev_control_units = prev_vars[prev_control_path]['units']
- interp_vals = self.interp(name=control_name,
- xs=prev_time_val,
- ys=prev_control_val[unique_idxs],
- kind='slinear')
if options['lower'] is not None or options['upper'] is not None:
- interp_vals = interp_vals.clip(options['lower'], options['upper'])
- self.set_val(f'controls:{control_name}', interp_vals, units=prev_control_units)
+ prev_control_val = prev_control_val.clip(options['lower'], options['upper'])
+ self.set_control_val(control_name,
+ vals=prev_control_val[unique_idxs],
+ time_vals=prev_time_val,
+ units=prev_control_units)
if options['fix_final']:
warning_message = f"{phase_name}.controls:{control_name} specifies 'fix_final=True'. " \
f"If the given restart file has a" \
@@ -3132,7 +3132,7 @@ class Phase(om.Group):
if f'{prev_phase_prom_path}.parameter_vals:{param_name}' in prev_vars:
prev_param_val = prev_vars[f'{prev_phase_prom_path}.parameter_vals:{param_name}']['val']
prev_param_units = prev_vars[f'{prev_phase_prom_path}.parameter_vals:{param_name}']['units']
- self.set_val(f'parameters:{param_name}', prev_param_val[0, ...], units=prev_param_units)
+ self.set_parameter_val(param_name, prev_param_val[0, ...], units=prev_param_units)
else:
issue_warning(f'Unable to find "{prev_phase_prom_path}.parameter_vals:{param_name}" '
f'in data from case being loaded.')
diff --git a/dymos/trajectory/trajectory.py b/dymos/trajectory/trajectory.py
index 4060f5a4..f54c093c 100644
--- a/dymos/trajectory/trajectory.py
+++ b/dymos/trajectory/trajectory.py
@@ -11,6 +11,7 @@ from openmdao.utils.units import unit_conversion
import numpy as np
import networkx as nx
+import openmdao
import openmdao.api as om
from openmdao.utils.mpi import MPI
@@ -21,11 +22,14 @@ from .phase_linkage_comp import PhaseLinkageComp
from ..phase.analytic_phase import AnalyticPhase
from ..phase.options import TrajParameterOptionsDictionary
from ..transcriptions.common import ParameterComp
-from ..utils.misc import get_rate_units, _unspecified, _none_or_unspecified
+from ..utils.misc import create_subprob, get_rate_units, _unspecified, _none_or_unspecified
from ..utils.introspection import get_promoted_vars, get_source_metadata, _get_common_metadata
from .._options import options as dymos_options
+om_version = tuple([int(s) for s in openmdao.__version__.split('-')[0].split('.')])
+
+
class Trajectory(om.Group):
"""
Class definition for a Trajectory group.
@@ -44,6 +48,9 @@ class Trajectory(om.Group):
A dictionary of parameter names and their associated TrajectoryParameterOptionsDictionary
phases : om.Group or om.ParallelGroup
The Group which contains phases for this Trajectory.
+ sim_prob : Problem or None
+ The OpenMDAO problem used for trajectory simulation.
+ This is None unless the simulate method has been called.
_linkages : OrderedDict
A dictionary containing phase linkage information for the Trajectory.
@@ -59,6 +66,7 @@ class Trajectory(om.Group):
self._phases = {}
self._phase_graph = nx.DiGraph()
self._has_connected_phases = False
+ self.sim_prob = None
self.phases = om.ParallelGroup() if self.options['parallel_phases'] else om.Group()
@@ -1455,7 +1463,6 @@ class Trajectory(om.Group):
for name, phs in self._phases.items():
if phs.simulate_options is None:
continue
-
sim_phs = phs.get_simulation_phase(times_per_seg=times_per_seg, method=method,
atol=atol, rtol=rtol, first_step=first_step,
max_step=max_step, reports=reports)
@@ -1466,7 +1473,9 @@ class Trajectory(om.Group):
sim_traj.parameter_options.update(self.parameter_options)
- sim_prob = om.Problem(model=om.Group(), reports=reports, comm=self.comm)
+ self.sim_prob = sim_prob = create_subprob(base_name=f'{self.name}_simulation',
+ comm=self.comm,
+ reports=reports)
traj_name = self.name if self.name else 'sim_traj'
sim_prob.model.add_subsystem(traj_name, sim_traj)
@@ -1483,7 +1492,10 @@ class Trajectory(om.Group):
# fault of the user.
warnings.filterwarnings(action='ignore', category=om.UnusedOptionWarning)
warnings.filterwarnings(action='ignore', category=om.SetupWarning)
- sim_prob.setup()
+ if om_version <= (3, 42, 2):
+ sim_prob.setup(check=True)
+ else:
+ sim_prob.setup(check=True, parent=self)
sim_prob.final_setup()
# Assign trajectory parameter values
diff --git a/dymos/transcriptions/explicit_shooting/ode_integration_comp.py b/dymos/transcriptions/explicit_shooting/ode_integration_comp.py
index 7f3984d0..d53f605f 100644
--- a/dymos/transcriptions/explicit_shooting/ode_integration_comp.py
+++ b/dymos/transcriptions/explicit_shooting/ode_integration_comp.py
@@ -1,10 +1,16 @@
import numpy as np
-import openmdao.api as om
from scipy.integrate import solve_ivp
+import openmdao
+import openmdao.api as om
+
from ..._options import options as dymos_options
from .ode_evaluation_group import ODEEvaluationGroup
+from dymos.utils.misc import create_subprob
+
+
+om_version = tuple([int(s) for s in openmdao.__version__.split('-')[0].split('.')])
class ODEIntegrationComp(om.ExplicitComponent):
@@ -93,7 +99,10 @@ class ODEIntegrationComp(om.ExplicitComponent):
'transcription where the number of nodes per segment can exceed 20 to 30.')
def _setup_subprob(self):
- self._eval_subprob = p = om.Problem(comm=self.comm, reports=self._reports)
+ self._eval_subprob = p = create_subprob(base_name=f'{self.pathname}_subprob',
+ comm=self.comm,
+ reports=self._reports)
+
p.model.add_subsystem('ode_eval',
ODEEvaluationGroup(ode_class=self.options['ode_class'],
time_options=self.time_options,
@@ -107,7 +116,10 @@ class ODEIntegrationComp(om.ExplicitComponent):
promotes_inputs=['*'],
promotes_outputs=['*'])
- p.setup()
+ if om_version <= (3, 34, 2):
+ p.setup()
+ else:
+ p.setup(parent=self)
p.final_setup()
def _set_segment_index(self, idx):
diff --git a/dymos/transcriptions/pseudospectral/pseudospectral_base.py b/dymos/transcriptions/pseudospectral/pseudospectral_base.py
index 750874a6..e360fa5e 100644
--- a/dymos/transcriptions/pseudospectral/pseudospectral_base.py
+++ b/dymos/transcriptions/pseudospectral/pseudospectral_base.py
@@ -686,7 +686,7 @@ class PseudospectralBase(TranscriptionBase):
if np.isscalar(vals):
interp_vals = vals
else:
- interp_vals = phase.interp(name, vals, time_vals,
+ interp_vals = phase.interp(name, ys=vals, xs=time_vals,
nodes='state_input',
kind=interpolation_kind)
input_data[f'states:{name}'] = interp_vals
diff --git a/dymos/utils/misc.py b/dymos/utils/misc.py
index fd55f736..60240ba5 100644
--- a/dymos/utils/misc.py
+++ b/dymos/utils/misc.py
@@ -2,9 +2,11 @@ from collections.abc import Iterable
import numpy as np
+import openmdao.api as om
+from openmdao.core.constants import _ReprClass
+
from .constants import INF_BOUND
from .indexing import get_desvar_indices
-from openmdao.core.constants import _ReprClass
# unique object to check if default is given (when None is an allowed value)
@@ -212,3 +214,44 @@ def GroupWrapperConfig(comp_class, config_io_args=None):
self.configure_io(*args)
return WrappedClass
+
+
+def create_subprob(base_name, comm, reports=False):
+ """
+ Create a new problem using basename possibly appended with unique identifiers if name collisions occur.
+
+ Parameters
+ ----------
+ base_name : str
+ The base name of the problem. This may be appended by `_{int}` to obtain a unique problem name.
+ In the event of running under MPI, an 8-character hash may further append the name to ensure
+ it is unique.
+ comm : comm
+ The MPI comm to be used by the subproblem.
+ reports : bool or None or str or Sequence
+        Reports setting for the subproblems run under simulate.
+
+ Returns
+ -------
+ Problem
+ The instantiated OpenMDAO problem instance.
+ """
+ from openmdao.core.problem import _problem_names
+
+ # Find a unique sim problem name. This mostly causes problems
+ # when many simulations are being run in a single process, as in testing.
+ i = 0
+ sim_prob_name = f'{base_name}_{i}'
+ while sim_prob_name in _problem_names:
+ i += 1
+ sim_prob_name = f'{base_name}_{i}'
+
+ try:
+ p = om.Problem(comm=comm, reports=reports, name=sim_prob_name)
+ except ValueError:
+ # Testing under MPI, we still might have name collisions. In that case, add a random hash
+ # to the end of the problem name.
+ import hashlib
+        str_hash = hashlib.sha256(sim_prob_name.encode(), usedforsecurity=False).hexdigest()[:8]
+ p = om.Problem(comm=comm, reports=reports, name=f'{sim_prob_name}_{str_hash}')
+ return p
| `phase.load_case` should use the new `phase.set_xxx_vals` API
### Description
`phase.load_case` was not using the set_xxx_vals API.
As a result, under the Birkhoff transcription, the initial and final state values were not being properly set.
### Example
N/A
### Dymos Version
1.10.1-dev
### Relevant environment information
_No response_ | OpenMDAO/dymos | diff --git a/dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py b/dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py
index a01a8ad0..e4899487 100644
--- a/dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py
+++ b/dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py
@@ -4,21 +4,24 @@ import pathlib
import os
from packaging.version import Version
+import openmdao
import openmdao.api as om
import openmdao.core.problem
from openmdao.utils.testing_utils import use_tempdirs
from openmdao.utils.tests.test_hooks import hooks_active
from openmdao.visualization.n2_viewer.n2_viewer import _default_n2_filename
from openmdao.visualization.scaling_viewer.scaling_report import _default_scaling_filename
-from openmdao import __version__ as openmdao_version
import dymos as dm
from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE
-def setup_model_radau(do_reports):
- p = om.Problem(model=om.Group())
+om_version = tuple([int(s) for s in openmdao.__version__.split('-')[0].split('.')])
+
+
+def setup_model_radau(do_reports, probname):
+ p = om.Problem(model=om.Group(), name=probname)
p.driver = om.ScipyOptimizeDriver()
p.driver.declare_coloring(tol=1.0E-12)
@@ -64,16 +67,13 @@ def setup_model_radau(do_reports):
phase.set_control_val('theta', [5, 100.5])
phase.set_parameter_val('g', 9.80665)
- if do_reports:
- dm.run_problem(p, run_driver=True, simulate=True, simulate_kwargs={'reports': True})
- else:
- dm.run_problem(p, run_driver=True, simulate=True)
+ dm.run_problem(p, run_driver=True, simulate=True, simulate_kwargs={'reports': do_reports})
return p
-def setup_model_shooting(do_reports):
- prob = om.Problem()
+def setup_model_shooting(do_reports, probname):
+ prob = om.Problem(name=probname)
prob.driver = om.ScipyOptimizeDriver()
prob.driver.declare_coloring(tol=1.0E-12)
@@ -112,198 +112,97 @@ def setup_model_shooting(do_reports):
phase.set_control_val('theta', [0.01, 90], units='deg')
phase.set_parameter_val('g', 1.0)
- dm.run_problem(prob, run_driver=True, simulate=False)
+ dm.run_problem(prob, run_driver=True, simulate=True)
return prob
-# reports API between 3.18 and 3.19, so handle it here in order to be able to test against older
-# versions of openmdao
-if Version(openmdao_version) > Version("3.18"):
- from openmdao.utils.reports_system import get_reports_dir, clear_reports
-
- @use_tempdirs
- class TestSubproblemReportToggle(unittest.TestCase):
-
- def setUp(self):
- self.n2_filename = _default_n2_filename
- self.scaling_filename = _default_scaling_filename
-
- # set things to a known initial state for all the test runs
- openmdao.core.problem._problem_names = [] # need to reset these to simulate separate runs
- os.environ.pop('OPENMDAO_REPORTS', None)
- os.environ.pop('OPENMDAO_REPORTS_DIR', None)
- # We need to remove the TESTFLO_RUNNING environment variable for these tests to run.
- # The reports code checks to see if TESTFLO_RUNNING is set and will not do anything if set
- # But we need to remember whether it was set so we can restore it
- self.testflo_running = os.environ.pop('TESTFLO_RUNNING', None)
- clear_reports()
-
- self.count = 0
-
- def tearDown(self):
- # restore what was there before running the test
- if self.testflo_running is not None:
- os.environ['TESTFLO_RUNNING'] = self.testflo_running
-
- @hooks_active
- def test_no_sim_reports(self):
- p = setup_model_radau(do_reports=False)
-
- report_subdirs = sorted([e for e in pathlib.Path(get_reports_dir()).iterdir() if e.is_dir()])
-
- # Test that a report subdir was made
- self.assertEqual(len(report_subdirs), 1)
-
- path = pathlib.Path(report_subdirs[0]).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(report_subdirs[0]).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
-
- @hooks_active
- def test_make_sim_reports(self):
- p = setup_model_radau(do_reports=True)
-
- report_subdirs = sorted([e for e in pathlib.Path(get_reports_dir()).iterdir() if e.is_dir()])
-
- # Test that a report subdir was made
- # There is the nominal problem, the simulation problem, and a subproblem for the simulation.
- self.assertEqual(len(report_subdirs), 3)
-
- for subdir in report_subdirs:
- path = pathlib.Path(subdir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
-
- @hooks_active
- def test_explicitshooting_no_subprob_reports(self):
- p = setup_model_shooting(do_reports=False)
-
- report_subdirs = sorted([e for e in pathlib.Path(get_reports_dir()).iterdir() if e.is_dir()])
-
- # Test that a report subdir was made
- self.assertEqual(len(report_subdirs), 1)
-
- path = pathlib.Path(report_subdirs[0]).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(report_subdirs[0]).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
-
- @hooks_active
- def test_explicitshooting_make_subprob_reports(self):
- p = setup_model_shooting(do_reports=True)
-
- report_subdirs = sorted([e for e in pathlib.Path(get_reports_dir()).iterdir() if e.is_dir()])
-
- # Test that a report subdir was made
- # There is the nominal problem, a subproblem for integration, and a subproblem for the derivatives.
- self.assertEqual(len(report_subdirs), 2)
-
- path = pathlib.Path(report_subdirs[0]).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(report_subdirs[0]).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
-
- for subdir in report_subdirs:
- path = pathlib.Path(subdir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
-
-else: # old OM versions before reports API changed...
- from openmdao.utils.reports_system import set_default_reports_dir, _reports_dir, clear_reports, \
- setup_default_reports
-
- @use_tempdirs
- class TestSubproblemReportToggle(unittest.TestCase):
+@use_tempdirs
+class TestSubproblemReportToggle(unittest.TestCase):
- def setUp(self):
- self.n2_filename = _default_n2_filename
- self.scaling_filename = _default_scaling_filename
+ def setUp(self):
+ self.n2_filename = _default_n2_filename
+ self.scaling_filename = _default_scaling_filename
- # set things to a known initial state for all the test runs
- openmdao.core.problem._problem_names = [] # need to reset these to simulate separate runs
- os.environ.pop('OPENMDAO_REPORTS', None)
- os.environ.pop('OPENMDAO_REPORTS_DIR', None)
- # We need to remove the TESTFLO_RUNNING environment variable for these tests to run.
- # The reports code checks to see if TESTFLO_RUNNING is set and will not do anything if set
- # But we need to remember whether it was set so we can restore it
- self.testflo_running = os.environ.pop('TESTFLO_RUNNING', None)
- clear_reports()
- set_default_reports_dir(_reports_dir)
+ # set things to a known initial state for all the test runs
+ openmdao.core.problem._problem_names = [] # need to reset these to simulate separate runs
+ os.environ.pop('OPENMDAO_REPORTS', None)
+ os.environ.pop('OPENMDAO_REPORTS_DIR', None)
+ # We need to remove the TESTFLO_RUNNING environment variable for these tests to run.
+ # The reports code checks to see if TESTFLO_RUNNING is set and will not do anything if set
+ # But we need to remember whether it was set so we can restore it
+ self.testflo_running = os.environ.pop('TESTFLO_RUNNING', None)
- self.count = 0
+ self.count = 0
- def tearDown(self):
- # restore what was there before running the test
- if self.testflo_running is not None:
- os.environ['TESTFLO_RUNNING'] = self.testflo_running
+ def tearDown(self):
+ # restore what was there before running the test
+ if self.testflo_running is not None:
+ os.environ['TESTFLO_RUNNING'] = self.testflo_running
- @hooks_active
- def test_no_sim_reports(self):
- setup_default_reports()
+ @unittest.skipIf(om_version <= (3, 34, 2), 'Requires OpenMDAO version later than 3.34.2')
+ @hooks_active
+ def test_no_sim_reports(self):
+ p = setup_model_radau(do_reports=False, probname='test_no_sim_reports')
- p = setup_model_radau(do_reports=False)
+ main_outputs_dir = p.get_outputs_dir()
+ main_reports_dir = p.get_reports_dir()
- problem_reports_dir = pathlib.Path(_reports_dir).joinpath(p._name)
- report_subdirs = sorted([e for e in pathlib.Path(_reports_dir).iterdir() if e.is_dir()])
+ sim_outputs_dir = main_outputs_dir / 'traj0_simulation_out'
+ sim_reports_dir = sim_outputs_dir / 'reports'
- # Test that a report subdir was made
- self.assertEqual(len(report_subdirs), 1)
+ self.assertFalse(sim_reports_dir.exists())
- path = pathlib.Path(problem_reports_dir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(problem_reports_dir).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
+ @unittest.skipIf(om_version <= (3, 34, 2), 'Requires OpenMDAO version later than 3.34.2')
+ @hooks_active
+ def test_make_sim_reports(self):
+ p = setup_model_radau(do_reports=True, probname='test_make_sim_reports')
- @hooks_active
- def test_make_sim_reports(self):
- setup_default_reports()
+ main_reports_dir = p.get_reports_dir()
- p = setup_model_radau(do_reports=True)
+ traj = p.model._get_subsystem('traj0')
+ sim_reports_dir = traj.sim_prob.get_reports_dir()
- report_subdirs = sorted([e for e in pathlib.Path(_reports_dir).iterdir() if e.is_dir()])
+ self.assertTrue((main_reports_dir / self.n2_filename).exists())
+ self.assertTrue(sim_reports_dir.exists())
+ self.assertTrue((sim_reports_dir / self.n2_filename).exists())
- # Test that a report subdir was made
- # # There is the nominal problem, the simulation problem, and a subproblem for each segment in the simulation.
- self.assertEqual(len(report_subdirs), 12)
+ @unittest.skipIf(om_version <= (3, 34, 2), 'Requires OpenMDAO version later than 3.34.2')
+ @hooks_active
+ def test_explicitshooting_no_subprob_reports(self):
+ p = setup_model_shooting(do_reports=False,
+ probname='test_explicitshooting_no_subprob_reports')
- for subdir in report_subdirs:
- path = pathlib.Path(subdir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
+ main_reports_dir = p.get_reports_dir()
+ subprob_reports_dir = p.model.phase0.integrator._eval_subprob.get_reports_dir()
- @hooks_active
- def test_explicitshooting_no_subprob_reports(self):
- setup_default_reports()
+ main_reports = os.listdir(main_reports_dir)
- p = setup_model_shooting(do_reports=False)
+ self.assertFalse(subprob_reports_dir.exists())
- problem_reports_dir = pathlib.Path(_reports_dir).joinpath(p._name)
- report_subdirs = sorted([e for e in pathlib.Path(_reports_dir).iterdir() if e.is_dir()])
+ self.assertIn(self.n2_filename, main_reports)
+ self.assertIn(self.scaling_filename, main_reports)
- # Test that a report subdir was made
- self.assertEqual(len(report_subdirs), 1)
+ @unittest.skipIf(om_version <= (3, 34, 2), 'Requires OpenMDAO version later than 3.34.2')
+ @hooks_active
+ def test_explicitshooting_make_subprob_reports(self):
+ p = setup_model_shooting(do_reports=True,
+ probname='test_explicitshooting_make_subprob_reports')
- path = pathlib.Path(problem_reports_dir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(problem_reports_dir).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
+ main_reports_dir = p.get_reports_dir()
+ subprob_reports_dir = p.model.phase0.integrator._eval_subprob.get_reports_dir()
- @hooks_active
- def test_explicitshooting_make_subprob_reports(self):
- setup_default_reports()
+ main_reports = os.listdir(main_reports_dir)
+ subprob_reports = os.listdir(subprob_reports_dir)
- p = setup_model_shooting(do_reports=True)
+ self.assertIn(self.n2_filename, main_reports)
+ self.assertIn(self.n2_filename, subprob_reports)
- problem_reports_dir = pathlib.Path(_reports_dir).joinpath(p._name)
- report_subdirs = sorted([e for e in pathlib.Path(_reports_dir).iterdir() if e.is_dir()])
+ self.assertIn(self.scaling_filename, main_reports)
- # Test that a report subdir was made
- # There is the nominal problem and a subproblem for integration
- self.assertEqual(len(report_subdirs), 2)
+ # The subprob has no optimization, so should not have a scaling report
+ self.assertNotIn(self.scaling_filename, subprob_reports)
- path = pathlib.Path(problem_reports_dir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
- path = pathlib.Path(problem_reports_dir).joinpath(self.scaling_filename)
- self.assertTrue(path.is_file(), f'The scaling report file, {str(path)}, was not found')
- for subdir in report_subdirs:
- path = pathlib.Path(subdir).joinpath(self.n2_filename)
- self.assertTrue(path.is_file(), f'The N2 report file, {str(path)} was not found')
+if __name__ == '__main__':
+ unittest.main()
diff --git a/dymos/test/test_load_case.py b/dymos/test/test_load_case.py
index 7cbab5fe..1f07393a 100644
--- a/dymos/test/test_load_case.py
+++ b/dymos/test/test_load_case.py
@@ -184,6 +184,76 @@ class TestLoadCase(unittest.TestCase):
q.model.phase0.interp(xs=time_q, ys=theta_q, nodes='all'),
tolerance=1.0E-2)
+ def test_load_case_radau_to_birkhoff(self):
+ import openmdao.api as om
+ from openmdao.utils.assert_utils import assert_near_equal
+ import dymos as dm
+
+ p = setup_problem(dm.Radau(num_segments=20))
+
+ # Solve for the optimal trajectory
+ dm.run_problem(p)
+
+ # Load the solution
+ case = om.CaseReader('dymos_solution.db').get_case('final')
+
+ # create a problem with a different transcription with a different number of variables
+ q = setup_problem(dm.Birkhoff(grid=dm.BirkhoffGrid(num_nodes=50)))
+
+ # Fill q with junk so that we can be sure load_case worked
+ q['phase0.t_initial'] = -88
+ q['phase0.t_duration'] = 88
+
+ q['phase0.states:x'] = -88
+ q['phase0.states:y'] = -88
+ q['phase0.states:v'] = -88
+
+ q['phase0.initial_states:x'] = -88
+ q['phase0.initial_states:y'] = -88
+ q['phase0.initial_states:v'] = -88
+
+ q['phase0.final_states:x'] = -88
+ q['phase0.final_states:y'] = -88
+ q['phase0.final_states:v'] = -88
+
+ # Load the values from the previous solution
+ q.load_case(case)
+
+ # Run the model to ensure we find the same output values as those that we recorded
+ q.run_model()
+
+ time_p = case.get_val('phase0.timeseries.time')
+ theta_p = case.get_val('phase0.timeseries.theta')
+
+ time_q = q.get_val('phase0.timeseries.time')
+ theta_q = q.get_val('phase0.timeseries.theta')
+
+ x_p = case.get_val('phase0.timeseries.x')
+ y_p = case.get_val('phase0.timeseries.y')
+ v_p = case.get_val('phase0.timeseries.v')
+
+ x0_q = q.get_val('phase0.initial_states:x')
+ xf_q = q.get_val('phase0.final_states:x')
+
+ y0_q = q.get_val('phase0.initial_states:y')
+ yf_q = q.get_val('phase0.final_states:y')
+
+ v0_q = q.get_val('phase0.initial_states:v')
+ vf_q = q.get_val('phase0.final_states:v')
+
+ assert_near_equal(q.model.phase0.interp(xs=time_p, ys=theta_p, nodes='all'),
+ q.model.phase0.interp(xs=time_q, ys=theta_q, nodes='all'),
+ tolerance=1.0E-2)
+
+ assert_near_equal(x_p[0, ...], x0_q, tolerance=1.0E-5)
+ assert_near_equal(x_p[-1, ...], xf_q, tolerance=1.0E-5)
+
+ assert_near_equal(y_p[0, ...], y0_q, tolerance=1.0E-5)
+ assert_near_equal(y_p[-1, ...], yf_q, tolerance=1.0E-5)
+
+ assert_near_equal(v_p[0, ...], v0_q, tolerance=1.0E-5)
+ assert_near_equal(v_p[-1, ...], vf_q, tolerance=1.0E-5)
+
def test_load_case_warn_fix_final_states(self):
import openmdao.api as om
from openmdao.utils.assert_utils import assert_warnings
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 5
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"testflo",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"docs/dymos_book/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accessible-pygments==0.0.5
aiounittest==1.5.0
alabaster==0.7.16
anyio==4.9.0
appdirs==1.4.4
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==3.0.0
async-lru==2.0.5
attrs==25.3.0
babel==2.17.0
baron==0.10.1
beautifulsoup4==4.13.3
bleach==6.2.0
bokeh==3.4.3
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
comm==0.2.2
contourpy==1.3.0
coverage==7.8.0
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
docutils==0.18.1
-e git+https://github.com/OpenMDAO/dymos.git@d61cd9792489e4cbd428d9f5b9fbe4799a75797f#egg=dymos
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
fonttools==4.56.0
fqdn==1.5.1
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
jax==0.4.30
jaxlib==0.4.30
jedi==0.19.2
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-book==0.14.0
jupyter-cache==0.6.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kiwisolver==1.4.7
latexcodec==3.0.0
linkify-it-py==2.0.3
llvmlite==0.43.0
markdown-it-py==2.2.0
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mdit-py-plugins==0.3.5
mdurl==0.1.2
mistune==3.1.3
ml_dtypes==0.5.1
myst-nb==0.17.2
myst-parser==0.18.1
nbclient==0.7.4
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.2.1
notebook==7.3.3
notebook_shim==0.2.4
numba==0.60.0
numpy==2.0.2
numpydoc==1.6.0
openmdao==3.38.0
opt_einsum==3.4.0
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
playwright==1.51.0
pluggy==1.5.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pybtex==0.24.0
pybtex-docutils==1.0.3
pycodestyle==2.13.0
pycparser==2.22
pydata-sphinx-theme==0.12.0
pyee==12.1.1
Pygments==2.19.1
pyparsing==3.2.3
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
redbaron==0.9.2
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
rply==0.7.8
scipy==1.13.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==5.0.2
sphinx-book-theme==0.4.0rc1
sphinx-comments==0.0.3
sphinx-copybutton==0.5.2
sphinx-jupyterbook-latex==0.5.2
sphinx-multitoc-numbering==0.1.3
sphinx-thebe==0.2.1
sphinx-togglebutton==0.3.2
sphinx_design==0.3.0
sphinx_external_toc==0.3.1
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-bibtex==2.5.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
stack-data==0.6.3
tabulate==0.9.0
terminado==0.18.1
testflo==1.4.19
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
tzdata==2025.2
uc-micro-py==1.0.3
uri-template==1.3.0
urllib3==2.3.0
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
wrapt==1.17.2
xyzservices==2025.1.0
zipp==3.21.0
| name: dymos
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- accessible-pygments==0.0.5
- aiounittest==1.5.0
- alabaster==0.7.16
- anyio==4.9.0
- appdirs==1.4.4
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asttokens==3.0.0
- async-lru==2.0.5
- attrs==25.3.0
- babel==2.17.0
- baron==0.10.1
- beautifulsoup4==4.13.3
- bleach==6.2.0
- bokeh==3.4.3
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- comm==0.2.2
- contourpy==1.3.0
- coverage==7.8.0
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- docutils==0.18.1
- dymos==1.10.1.dev0
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- fonttools==4.56.0
- fqdn==1.5.1
- greenlet==3.1.1
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jax==0.4.30
- jaxlib==0.4.30
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-book==0.14.0
- jupyter-cache==0.6.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.7
- latexcodec==3.0.0
- linkify-it-py==2.0.3
- llvmlite==0.43.0
- markdown-it-py==2.2.0
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mdit-py-plugins==0.3.5
- mdurl==0.1.2
- mistune==3.1.3
- ml-dtypes==0.5.1
- myst-nb==0.17.2
- myst-parser==0.18.1
- nbclient==0.7.4
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- networkx==3.2.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numba==0.60.0
- numpy==2.0.2
- numpydoc==1.6.0
- openmdao==3.38.0
- opt-einsum==3.4.0
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- playwright==1.51.0
- pluggy==1.5.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pybtex==0.24.0
- pybtex-docutils==1.0.3
- pycodestyle==2.13.0
- pycparser==2.22
- pydata-sphinx-theme==0.12.0
- pyee==12.1.1
- pygments==2.19.1
- pyparsing==3.2.3
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- redbaron==0.9.2
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- rply==0.7.8
- scipy==1.13.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==5.0.2
- sphinx-book-theme==0.4.0rc1
- sphinx-comments==0.0.3
- sphinx-copybutton==0.5.2
- sphinx-design==0.3.0
- sphinx-external-toc==0.3.1
- sphinx-jupyterbook-latex==0.5.2
- sphinx-multitoc-numbering==0.1.3
- sphinx-thebe==0.2.1
- sphinx-togglebutton==0.3.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-bibtex==2.5.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- stack-data==0.6.3
- tabulate==0.9.0
- terminado==0.18.1
- testflo==1.4.19
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- tzdata==2025.2
- uc-micro-py==1.0.3
- uri-template==1.3.0
- urllib3==2.3.0
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- wrapt==1.17.2
- xyzservices==2025.1.0
- zipp==3.21.0
prefix: /opt/conda/envs/dymos
| [
"dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py::TestSubproblemReportToggle::test_make_sim_reports"
] | [
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_lgl_to_radau",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_radau_to_birkhoff",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_radau_to_lgl",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_unchanged_grid",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_unchanged_grid_polynomial_control",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_warn_fix_final_control",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_warn_fix_final_polynomial_control",
"dymos/test/test_load_case.py::TestLoadCase::test_load_case_warn_fix_final_states"
] | [
"dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py::TestSubproblemReportToggle::test_explicitshooting_make_subprob_reports",
"dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py::TestSubproblemReportToggle::test_explicitshooting_no_subprob_reports",
"dymos/examples/brachistochrone/test/test_brachistochrone_subprob_reports.py::TestSubproblemReportToggle::test_no_sim_reports"
] | [] | Apache License 2.0 | 19,068 | 3,879 | [
"dymos/phase/phase.py",
"dymos/trajectory/trajectory.py",
"dymos/transcriptions/explicit_shooting/ode_integration_comp.py",
"dymos/transcriptions/pseudospectral/pseudospectral_base.py",
"dymos/utils/misc.py"
] |
pymc-devs__pymc-7428 | e988bc5416d8df6d32e4e984173d0f91c9644823 | 2024-07-24 08:03:07 | b9fbfeda3dd8fdb081d538684bd2dcc81b14fb61 | diff --git a/pymc/printing.py b/pymc/printing.py
index 6695cf38f..56445ab9e 100644
--- a/pymc/printing.py
+++ b/pymc/printing.py
@@ -232,6 +232,12 @@ def _str_for_expression(var: Variable, formatting: str) -> str:
if x.owner and isinstance(x.owner.op, RandomVariable | SymbolicRandomVariable):
parents.append(x)
xname = x.name
+ if xname is None:
+ # If the variable is unnamed, we show the op's name as we do
+ # with constants
+ opname = x.owner.op.name
+ if opname is not None:
+ xname = rf"<{opname}>"
assert xname is not None
names.append(xname)
| The model string representation fails if there are unnamed RVs in the graph
### Describe the issue:
If a model defines components that use pytensor random variables that don't have a name, trying to get the model's string representation raises an error.
### Reproduceable code example:
```python
import pymc as pm
with pm.Model() as m:
    a = pm.Deterministic("a", pm.Normal.dist())

m.str_repr()
```
### Error message:
```shell
----> 1 m.str_repr()
File ~/repos/pymc/pymc/printing.py:109, in str_for_model(model, formatting, include_params)
107 free_rv_reprs = [sfd(dist) for dist in model.free_RVs]
108 observed_rv_reprs = [sfd(rv) for rv in model.observed_RVs]
--> 109 det_reprs = [sfp(dist, dist_name="Deterministic") for dist in model.deterministics]
110 potential_reprs = [sfp(pot, dist_name="Potential") for pot in model.potentials]
112 var_reprs = free_rv_reprs + det_reprs + observed_rv_reprs + potential_reprs
File ~/repos/pymc/pymc/printing.py:109, in <listcomp>(.0)
107 free_rv_reprs = [sfd(dist) for dist in model.free_RVs]
108 observed_rv_reprs = [sfd(rv) for rv in model.observed_RVs]
--> 109 det_reprs = [sfp(dist, dist_name="Deterministic") for dist in model.deterministics]
110 potential_reprs = [sfp(pot, dist_name="Potential") for pot in model.potentials]
112 var_reprs = free_rv_reprs + det_reprs + observed_rv_reprs + potential_reprs
File ~/repos/pymc/pymc/printing.py:156, in str_for_potential_or_deterministic(var, formatting, include_params, dist_name)
154 else: # plain
155 if include_params:
--> 156 return rf"{print_name} ~ {dist_name}({_str_for_expression(var, formatting=formatting)})"
157 else:
158 return rf"{print_name} ~ {dist_name}"
File ~/repos/pymc/pymc/printing.py:235, in _str_for_expression(var, formatting)
233 parents.append(x)
234 xname = x.name
--> 235 assert xname is not None
236 names.append(xname)
238 if "latex" in formatting:
AssertionError:
```
### PyMC version information:
5.16.2
### Context for the issue:
_No response_ | pymc-devs/pymc | diff --git a/tests/test_printing.py b/tests/test_printing.py
index 95a1e812e..406032b12 100644
--- a/tests/test_printing.py
+++ b/tests/test_printing.py
@@ -125,8 +125,11 @@ class TestMonolith(BaseTestStrAndLatexRepr):
# add a potential as well
pot = Potential("pot", mu**2)
+ # add a deterministic that depends on an unnamed random variable
+ pred = Deterministic("pred", Normal.dist(0, 1))
+
self.distributions = [alpha, sigma, mu, b, Z, nb2, zip, w, nested_mix, Y_obs, pot]
- self.deterministics_or_potentials = [mu, pot]
+ self.deterministics_or_potentials = [mu, pot, pred]
# tuples of (formatting, include_params)
self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
self.expected = {
@@ -146,6 +149,7 @@ class TestMonolith(BaseTestStrAndLatexRepr):
),
r"Y_obs ~ Normal(mu, sigma)",
r"pot ~ Potential(f(beta, alpha))",
+ r"pred ~ Deterministic(f(<normal>))",
],
("plain", False): [
r"alpha ~ Normal",
@@ -159,6 +163,7 @@ class TestMonolith(BaseTestStrAndLatexRepr):
r"nested_mix ~ MarginalMixture",
r"Y_obs ~ Normal",
r"pot ~ Potential",
+ r"pred ~ Deterministic",
],
("latex", True): [
r"$\text{alpha} \sim \operatorname{Normal}(0,~10)$",
@@ -176,6 +181,7 @@ class TestMonolith(BaseTestStrAndLatexRepr):
),
r"$\text{Y_obs} \sim \operatorname{Normal}(\text{mu},~\text{sigma})$",
r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$",
+ r"$\text{pred} \sim \operatorname{Deterministic}(f(\text{<normal>}))",
],
("latex", False): [
r"$\text{alpha} \sim \operatorname{Normal}$",
@@ -189,6 +195,7 @@ class TestMonolith(BaseTestStrAndLatexRepr):
r"$\text{nested_mix} \sim \operatorname{MarginalMixture}$",
r"$\text{Y_obs} \sim \operatorname{Normal}$",
r"$\text{pot} \sim \operatorname{Potential}$",
+ r"$\text{pred} \sim \operatorname{Deterministic}",
],
}
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 5.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arviz==0.21.0
cachetools==5.5.2
cloudpickle==3.1.1
cons==0.4.6
contourpy==1.3.1
coverage==7.8.0
cycler==0.12.1
etuples==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
fonttools==4.56.0
h5netcdf==1.6.1
h5py==3.13.0
iniconfig==2.1.0
kiwisolver==1.4.8
logical-unification==0.4.6
markdown-it-py==3.0.0
matplotlib==3.10.1
mdurl==0.1.2
miniKanren==1.0.3
multipledispatch==1.0.0
numpy==1.26.4
packaging==24.2
pandas==2.2.3
pillow==11.1.0
pluggy==1.5.0
Pygments==2.19.1
-e git+https://github.com/pymc-devs/pymc.git@e988bc5416d8df6d32e4e984173d0f91c9644823#egg=pymc
pyparsing==3.2.3
pytensor==2.25.5
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
rich==14.0.0
scipy==1.15.2
six==1.17.0
threadpoolctl==3.6.0
tomli==2.2.1
toolz==1.0.0
typing_extensions==4.13.0
tzdata==2025.2
xarray==2025.3.1
xarray-einstats==0.8.0
| name: pymc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arviz==0.21.0
- cachetools==5.5.2
- cloudpickle==3.1.1
- cons==0.4.6
- contourpy==1.3.1
- coverage==7.8.0
- cycler==0.12.1
- etuples==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- fonttools==4.56.0
- h5netcdf==1.6.1
- h5py==3.13.0
- iniconfig==2.1.0
- kiwisolver==1.4.8
- logical-unification==0.4.6
- markdown-it-py==3.0.0
- matplotlib==3.10.1
- mdurl==0.1.2
- minikanren==1.0.3
- multipledispatch==1.0.0
- numpy==1.26.4
- packaging==24.2
- pandas==2.2.3
- pillow==11.1.0
- pluggy==1.5.0
- pygments==2.19.1
- pymc==5.16.2+8.ge988bc541
- pyparsing==3.2.3
- pytensor==2.25.5
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- rich==14.0.0
- scipy==1.15.2
- six==1.17.0
- threadpoolctl==3.6.0
- tomli==2.2.1
- toolz==1.0.0
- typing-extensions==4.13.0
- tzdata==2025.2
- xarray==2025.3.1
- xarray-einstats==0.8.0
prefix: /opt/conda/envs/pymc
| [
"tests/test_printing.py::TestMonolith::test__repr_latex_",
"tests/test_printing.py::TestMonolith::test_str_repr"
] | [] | [
"tests/test_printing.py::TestData::test__repr_latex_",
"tests/test_printing.py::TestData::test_str_repr",
"tests/test_printing.py::test_model_latex_repr_three_levels_model",
"tests/test_printing.py::test_model_latex_repr_mixture_model",
"tests/test_printing.py::test_model_repr_variables_without_monkey_patched_repr",
"tests/test_printing.py::test_truncated_repr",
"tests/test_printing.py::test_custom_dist_repr"
] | [] | Apache License 2.0 | 19,073 | 203 | [
"pymc/printing.py"
] |
|
tobymao__sqlglot-3806 | 5c93acd7046cdd1ed1f872fa024c1bb85da282c8 | 2024-07-24 10:36:10 | 416f4a1b6a04b858ff8ed94509aacd9bacca145b | diff --git a/sqlglot/dialects/tsql.py b/sqlglot/dialects/tsql.py
index f233e5d4..fd3e48cc 100644
--- a/sqlglot/dialects/tsql.py
+++ b/sqlglot/dialects/tsql.py
@@ -855,6 +855,7 @@ class TSQL(Dialect):
transforms.eliminate_qualify,
]
),
+ exp.Stddev: rename_func("STDEV"),
exp.StrPosition: lambda self, e: self.func(
"CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
),
diff --git a/sqlglot/expressions.py b/sqlglot/expressions.py
index f28da45c..3368b56b 100644
--- a/sqlglot/expressions.py
+++ b/sqlglot/expressions.py
@@ -1394,6 +1394,7 @@ class Create(DDL):
"end": False,
"clone": False,
"concurrently": False,
+ "clustered": False,
}
@property
@@ -5887,7 +5888,7 @@ class Sqrt(Func):
class Stddev(AggFunc):
- pass
+ _sql_names = ["STDDEV", "STDEV"]
class StddevPop(AggFunc):
diff --git a/sqlglot/generator.py b/sqlglot/generator.py
index a6bbd396..d91b6f4f 100644
--- a/sqlglot/generator.py
+++ b/sqlglot/generator.py
@@ -1027,6 +1027,14 @@ class Generator(metaclass=_Generator):
replace = " OR REPLACE" if expression.args.get("replace") else ""
unique = " UNIQUE" if expression.args.get("unique") else ""
+ clustered = expression.args.get("clustered")
+ if clustered is None:
+ clustered_sql = ""
+ elif clustered:
+ clustered_sql = " CLUSTERED COLUMNSTORE"
+ else:
+ clustered_sql = " NONCLUSTERED COLUMNSTORE"
+
postcreate_props_sql = ""
if properties_locs.get(exp.Properties.Location.POST_CREATE):
postcreate_props_sql = self.properties(
@@ -1036,7 +1044,7 @@ class Generator(metaclass=_Generator):
wrapped=False,
)
- modifiers = "".join((replace, unique, postcreate_props_sql))
+ modifiers = "".join((clustered_sql, replace, unique, postcreate_props_sql))
postexpression_props_sql = ""
if properties_locs.get(exp.Properties.Location.POST_EXPRESSION):
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index c340a9e7..0026ae7a 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -1663,6 +1663,15 @@ class Parser(metaclass=_Parser):
unique = self._match(TokenType.UNIQUE)
+ if self._match_text_seq("CLUSTERED", "COLUMNSTORE"):
+ clustered = True
+ elif self._match_text_seq("NONCLUSTERED", "COLUMNSTORE") or self._match_text_seq(
+ "COLUMNSTORE"
+ ):
+ clustered = False
+ else:
+ clustered = None
+
if self._match_pair(TokenType.TABLE, TokenType.FUNCTION, advance=False):
self._advance()
@@ -1804,6 +1813,7 @@ class Parser(metaclass=_Parser):
end=end,
clone=clone,
concurrently=concurrently,
+ clustered=clustered,
)
def _parse_sequence_properties(self) -> t.Optional[exp.SequenceProperties]:
| STDDEV parsing for TSQL dialect
**Before you file an issue**
- Make sure you specify the "read" dialect eg. `parse_one(sql, read="spark")`
- Make sure you specify the "write" dialect eg. `ast.sql(dialect="duckdb")`
- Check if the issue still exists on main
**Fully reproducible code snippet**
1. Parsing `STDDEV` from any dialect to `tsql` produces STDDEV instead of STDEV
```python
from sqlglot import parse_one
sample_q = 'SELECT STDDEV("test_column") FROM "test_table"'
sample_pg = parse_one(sample_q, read="postgres")
print(sample_pg.sql("tsql"))
```
Output:
```SQL
SELECT STDDEV([test_column]) FROM [test_table]
```
Expected:
```SQL
SELECT STDEV([test_column]) FROM [test_table]
```
2. Parsing `STDEV` as `tsql` and writing to any dialect also produces STDEV instead of STDDEV:
sample_q_tsql = "SELECT STDEV([test_column]) FROM [test_table]"
sample_tsql = parse_one(sample_q_tsql, read="tsql")
print(sample_tsql.sql("postgres"))
```
Output:
```SQL
SELECT STDEV("test_column") FROM "test_table"
```
Expected:
```
SELECT STDDEV("test_column") FROM "test_table"
```
**Official Documentation**
Please include links to official SQL documentation related to your issue.
| tobymao/sqlglot | diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 11d60e7f..9658a02c 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -391,6 +391,17 @@ class TestTSQL(Validator):
self.validate_identity("HASHBYTES('MD2', 'x')")
self.validate_identity("LOG(n, b)")
+ self.validate_all(
+ "STDEV(x)",
+ read={
+ "": "STDDEV(x)",
+ },
+ write={
+ "": "STDDEV(x)",
+ "tsql": "STDEV(x)",
+ },
+ )
+
def test_option(self):
possible_options = [
"HASH GROUP",
@@ -888,6 +899,14 @@ class TestTSQL(Validator):
},
)
+ for colstore in ("NONCLUSTERED COLUMNSTORE", "CLUSTERED COLUMNSTORE"):
+ self.validate_identity(f"CREATE {colstore} INDEX index_name ON foo.bar")
+
+ self.validate_identity(
+ "CREATE COLUMNSTORE INDEX index_name ON foo.bar",
+ "CREATE NONCLUSTERED COLUMNSTORE INDEX index_name ON foo.bar",
+ )
+
def test_insert_cte(self):
self.validate_all(
"INSERT INTO foo.bar WITH cte AS (SELECT 1 AS one) SELECT * FROM cte",
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 25.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@5c93acd7046cdd1ed1f872fa024c1bb85da282c8#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_tsql.py::TestTSQL::test_ddl",
"tests/dialects/test_tsql.py::TestTSQL::test_tsql"
] | [] | [
"tests/dialects/test_tsql.py::TestTSQL::test_add_date",
"tests/dialects/test_tsql.py::TestTSQL::test_charindex",
"tests/dialects/test_tsql.py::TestTSQL::test_commit",
"tests/dialects/test_tsql.py::TestTSQL::test_convert",
"tests/dialects/test_tsql.py::TestTSQL::test_current_user",
"tests/dialects/test_tsql.py::TestTSQL::test_date_diff",
"tests/dialects/test_tsql.py::TestTSQL::test_datefromparts",
"tests/dialects/test_tsql.py::TestTSQL::test_datename",
"tests/dialects/test_tsql.py::TestTSQL::test_datepart",
"tests/dialects/test_tsql.py::TestTSQL::test_declare",
"tests/dialects/test_tsql.py::TestTSQL::test_eomonth",
"tests/dialects/test_tsql.py::TestTSQL::test_format",
"tests/dialects/test_tsql.py::TestTSQL::test_fullproc",
"tests/dialects/test_tsql.py::TestTSQL::test_hints",
"tests/dialects/test_tsql.py::TestTSQL::test_identifier_prefixes",
"tests/dialects/test_tsql.py::TestTSQL::test_insert_cte",
"tests/dialects/test_tsql.py::TestTSQL::test_isnull",
"tests/dialects/test_tsql.py::TestTSQL::test_json",
"tests/dialects/test_tsql.py::TestTSQL::test_lateral_subquery",
"tests/dialects/test_tsql.py::TestTSQL::test_lateral_table_valued_function",
"tests/dialects/test_tsql.py::TestTSQL::test_len",
"tests/dialects/test_tsql.py::TestTSQL::test_openjson",
"tests/dialects/test_tsql.py::TestTSQL::test_option",
"tests/dialects/test_tsql.py::TestTSQL::test_procedure_keywords",
"tests/dialects/test_tsql.py::TestTSQL::test_qualify_derived_table_outputs",
"tests/dialects/test_tsql.py::TestTSQL::test_replicate",
"tests/dialects/test_tsql.py::TestTSQL::test_rollback",
"tests/dialects/test_tsql.py::TestTSQL::test_scope_resolution_op",
"tests/dialects/test_tsql.py::TestTSQL::test_set",
"tests/dialects/test_tsql.py::TestTSQL::test_string",
"tests/dialects/test_tsql.py::TestTSQL::test_system_time",
"tests/dialects/test_tsql.py::TestTSQL::test_temporal_table",
"tests/dialects/test_tsql.py::TestTSQL::test_top",
"tests/dialects/test_tsql.py::TestTSQL::test_transaction",
"tests/dialects/test_tsql.py::TestTSQL::test_types",
"tests/dialects/test_tsql.py::TestTSQL::test_types_bin",
"tests/dialects/test_tsql.py::TestTSQL::test_types_date",
"tests/dialects/test_tsql.py::TestTSQL::test_types_decimals",
"tests/dialects/test_tsql.py::TestTSQL::test_types_ints",
"tests/dialects/test_tsql.py::TestTSQL::test_types_string",
"tests/dialects/test_tsql.py::TestTSQL::test_udf"
] | [] | MIT License | 19,075 | 872 | [
"sqlglot/dialects/tsql.py",
"sqlglot/expressions.py",
"sqlglot/generator.py",
"sqlglot/parser.py"
] |
|
tobymao__sqlglot-3816 | 804edc5b8673725d4e1e83b9967d815073580d89 | 2024-07-25 09:45:58 | 416f4a1b6a04b858ff8ed94509aacd9bacca145b | diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index 3a611e86..99ac0f8e 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -1149,6 +1149,17 @@ class Parser(metaclass=_Parser):
**dict.fromkeys(("BINDING", "COMPENSATION", "EVOLUTION"), tuple()),
}
+ KEY_CONSTRAINT_OPTIONS: OPTIONS_TYPE = {
+ "NOT": ("ENFORCED",),
+ "MATCH": (
+ "FULL",
+ "PARTIAL",
+ "SIMPLE",
+ ),
+ "INITIALLY": ("DEFERRED", "IMMEDIATE"),
+ **dict.fromkeys(("DEFERRABLE", "NORELY"), tuple()),
+ }
+
INSERT_ALTERNATIVES = {"ABORT", "FAIL", "IGNORE", "REPLACE", "ROLLBACK"}
CLONE_KEYWORDS = {"CLONE", "COPY"}
@@ -5282,18 +5293,13 @@ class Parser(metaclass=_Parser):
self.raise_error("Invalid key constraint")
options.append(f"ON {on} {action}")
- elif self._match_text_seq("NOT", "ENFORCED"):
- options.append("NOT ENFORCED")
- elif self._match_text_seq("DEFERRABLE"):
- options.append("DEFERRABLE")
- elif self._match_text_seq("INITIALLY", "DEFERRED"):
- options.append("INITIALLY DEFERRED")
- elif self._match_text_seq("NORELY"):
- options.append("NORELY")
- elif self._match_text_seq("MATCH", "FULL"):
- options.append("MATCH FULL")
else:
- break
+ var = self._parse_var_from_options(
+ self.KEY_CONSTRAINT_OPTIONS, raise_unmatched=False
+ )
+ if not var:
+ break
+ options.append(var.name)
return options
| ParseError `Expecting )` for CREATE TABLE statement PostgreSQL
**Fully reproducible code snippet**
A test file:
```py
from sqlglot import parse_one
query = """
CREATE TABLE IF NOT EXISTS public.rental
(
rental_id integer NOT NULL DEFAULT nextval('rental_rental_id_seq'::regclass),
rental_date timestamp without time zone NOT NULL,
inventory_id integer NOT NULL,
customer_id integer NOT NULL,
return_date timestamp without time zone,
staff_id integer NOT NULL,
last_update timestamp without time zone NOT NULL DEFAULT now(),
CONSTRAINT rental_pkey PRIMARY KEY (rental_id),
CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id)
REFERENCES public.customer (customer_id) MATCH SIMPLE
ON UPDATE CASCADE
ON DELETE RESTRICT,
CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id)
REFERENCES public.inventory (inventory_id) MATCH SIMPLE
ON UPDATE CASCADE
ON DELETE RESTRICT,
CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id)
REFERENCES public.staff (staff_id) MATCH SIMPLE
ON UPDATE CASCADE
ON DELETE RESTRICT
)
"""
dialect = "postgres"
print(parse_one(query, read=dialect))
```
An exception:
```py
Traceback (most recent call last):
File "/Users/user/folder/api/test.py", line 31, in <module>
print(parse_one(query, read=dialect))
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/__init__.py", line 136, in parse_one
result = dialect.parse(sql, **opts)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/dialects/dialect.py", line 695, in parse
return self.parser(**opts).parse(self.tokenize(sql), sql)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1300, in parse
return self._parse(
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1369, in _parse
expressions.append(parse_method(self))
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1601, in _parse_statement
return self.STATEMENT_PARSERS[self._prev.token_type](self)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 709, in <lambda>
TokenType.CREATE: lambda self: self._parse_create(),
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1743, in _parse_create
this = self._parse_schema(this=table_parts)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 4986, in _parse_schema
self._match_r_paren()
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 6706, in _match_r_paren
self.raise_error("Expecting )")
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1413, in raise_error
raise error
sqlglot.errors.ParseError: Expecting ). Line 13, Col: 47.
rental_customer_id_fkey FOREIGN KEY (customer_id)
REFERENCES public.customer (customer_id) MATCH SIMPLE
ON UPDATE CASCADE
ON DELETE RESTRICT,
CONSTRAINT rental_inventory_id_fke
```
**Official Documentation**
[PostgreSQL 16 CREATE TABLE docs](https://www.postgresql.org/docs/16/sql-createtable.html) (please look at `and table_constraint is` section)
| tobymao/sqlglot | diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 92e8443d..c7688f59 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -1003,6 +1003,29 @@ class TestPostgres(Validator):
"CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_table_id ON tbl USING btree(id)"
)
+ self.validate_identity(
+ """
+ CREATE TABLE IF NOT EXISTS public.rental
+ (
+ inventory_id INT NOT NULL,
+ CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id)
+ REFERENCES public.customer (customer_id) MATCH FULL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id)
+ REFERENCES public.inventory (inventory_id) MATCH PARTIAL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id)
+ REFERENCES public.staff (staff_id) MATCH SIMPLE
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ INITIALLY IMMEDIATE
+ )
+ """,
+ "CREATE TABLE IF NOT EXISTS public.rental (inventory_id INT NOT NULL, CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id) REFERENCES public.customer (customer_id) MATCH FULL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id) REFERENCES public.inventory (inventory_id) MATCH PARTIAL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id) REFERENCES public.staff (staff_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE RESTRICT, INITIALLY IMMEDIATE)",
+ )
+
with self.assertRaises(ParseError):
transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
with self.assertRaises(ParseError):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 25.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
distlib==0.3.9
duckdb==1.2.1
filelock==3.18.0
identify==2.6.9
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pre_commit==4.2.0
Pygments==2.19.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@804edc5b8673725d4e1e83b9967d815073580d89#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- distlib==0.3.9
- duckdb==1.2.1
- filelock==3.18.0
- identify==2.6.9
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pre-commit==4.2.0
- pygments==2.19.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_postgres.py::TestPostgres::test_ddl"
] | [] | [
"tests/dialects/test_postgres.py::TestPostgres::test_array_offset",
"tests/dialects/test_postgres.py::TestPostgres::test_bool_or",
"tests/dialects/test_postgres.py::TestPostgres::test_operator",
"tests/dialects/test_postgres.py::TestPostgres::test_postgres",
"tests/dialects/test_postgres.py::TestPostgres::test_regexp_binary",
"tests/dialects/test_postgres.py::TestPostgres::test_rows_from",
"tests/dialects/test_postgres.py::TestPostgres::test_string_concat",
"tests/dialects/test_postgres.py::TestPostgres::test_unnest",
"tests/dialects/test_postgres.py::TestPostgres::test_unnest_json_array",
"tests/dialects/test_postgres.py::TestPostgres::test_variance"
] | [] | MIT License | 19,086 | 458 | [
"sqlglot/parser.py"
] |
|
tobymao__sqlglot-3820 | 804edc5b8673725d4e1e83b9967d815073580d89 | 2024-07-25 12:09:37 | 416f4a1b6a04b858ff8ed94509aacd9bacca145b | diff --git a/sqlglot/dialects/snowflake.py b/sqlglot/dialects/snowflake.py
index e9cf487d..4a425d29 100644
--- a/sqlglot/dialects/snowflake.py
+++ b/sqlglot/dialects/snowflake.py
@@ -614,7 +614,7 @@ class Snowflake(Dialect):
# can be joined in a query with a comma separator, as well as closing paren
# in case of subqueries
while self._is_connected() and not self._match_set(
- (TokenType.COMMA, TokenType.R_PAREN), advance=False
+ (TokenType.COMMA, TokenType.L_PAREN, TokenType.R_PAREN), advance=False
):
parts.append(self._advance_any(ignore_reserved=True))
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index 3a611e86..7978474a 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -929,7 +929,8 @@ class Parser(metaclass=_Parser):
enforced=self._match_text_seq("ENFORCED"),
),
"COLLATE": lambda self: self.expression(
- exp.CollateColumnConstraint, this=self._parse_var(any_token=True)
+ exp.CollateColumnConstraint,
+ this=self._parse_identifier() or self._parse_column(),
),
"COMMENT": lambda self: self.expression(
exp.CommentColumnConstraint, this=self._parse_string()
@@ -1149,6 +1150,17 @@ class Parser(metaclass=_Parser):
**dict.fromkeys(("BINDING", "COMPENSATION", "EVOLUTION"), tuple()),
}
+ KEY_CONSTRAINT_OPTIONS: OPTIONS_TYPE = {
+ "NOT": ("ENFORCED",),
+ "MATCH": (
+ "FULL",
+ "PARTIAL",
+ "SIMPLE",
+ ),
+ "INITIALLY": ("DEFERRED", "IMMEDIATE"),
+ **dict.fromkeys(("DEFERRABLE", "NORELY"), tuple()),
+ }
+
INSERT_ALTERNATIVES = {"ABORT", "FAIL", "IGNORE", "REPLACE", "ROLLBACK"}
CLONE_KEYWORDS = {"CLONE", "COPY"}
@@ -5282,18 +5294,13 @@ class Parser(metaclass=_Parser):
self.raise_error("Invalid key constraint")
options.append(f"ON {on} {action}")
- elif self._match_text_seq("NOT", "ENFORCED"):
- options.append("NOT ENFORCED")
- elif self._match_text_seq("DEFERRABLE"):
- options.append("DEFERRABLE")
- elif self._match_text_seq("INITIALLY", "DEFERRED"):
- options.append("INITIALLY DEFERRED")
- elif self._match_text_seq("NORELY"):
- options.append("NORELY")
- elif self._match_text_seq("MATCH", "FULL"):
- options.append("MATCH FULL")
else:
- break
+ var = self._parse_var_from_options(
+ self.KEY_CONSTRAINT_OPTIONS, raise_unmatched=False
+ )
+ if not var:
+ break
+ options.append(var.name)
return options
 | ParseError `Expecting )` for collation when using a `CREATE TABLE` statement in PostgreSQL
**Fully reproducible code snippet**
A test file is:
```py
from sqlglot import parse_one
query = """
CREATE TABLE IF NOT EXISTS public.language
(
language_id integer NOT NULL DEFAULT nextval('language_language_id_seq'::regclass),
name character(20) COLLATE pg_catalog."default" NOT NULL,
last_update timestamp without time zone NOT NULL DEFAULT now(),
CONSTRAINT language_pkey PRIMARY KEY (language_id)
)
"""
dialect = "postgres"
print(parse_one(query, read=dialect))
```
A traceback is:
```py
Traceback (most recent call last):
File "/Users/user/folder/api/test.py", line 15, in <module>
print(parse_one(query, read=dialect))
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/__init__.py", line 136, in parse_one
result = dialect.parse(sql, **opts)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/dialects/dialect.py", line 695, in parse
return self.parser(**opts).parse(self.tokenize(sql), sql)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1300, in parse
return self._parse(
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1369, in _parse
expressions.append(parse_method(self))
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1601, in _parse_statement
return self.STATEMENT_PARSERS[self._prev.token_type](self)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 709, in <lambda>
TokenType.CREATE: lambda self: self._parse_create(),
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1743, in _parse_create
this = self._parse_schema(this=table_parts)
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 4986, in _parse_schema
self._match_r_paren()
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 6706, in _match_r_paren
self.raise_error("Expecting )")
File "/Users/user/Library/Caches/pypoetry/virtualenvs/folder-venv/lib/python3.10/site-packages/sqlglot/parser.py", line 1413, in raise_error
raise error
sqlglot.errors.ParseError: Expecting ). Line 5, Col: 39.
ULL DEFAULT nextval('language_language_id_seq'::regclass),
name character(20) COLLATE pg_catalog."default" NOT NULL,
last_update timestamp without time zone NOT NULL DEFAULT now(),
CONSTRAI
```
**Official Documentation**
[PostgreSQL 16 CREATE TABLE docs](https://www.postgresql.org/docs/16/sql-createtable.html)
[PostgreSQL 16 Collation Support docs](https://www.postgresql.org/docs/16/collation.html)
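For reference, the failure can be reduced to just the schema-qualified, quoted collation name (a hedged reduction of the report; before the fix this raises the same `Expecting )` error):

```python
from sqlglot import parse_one

# Minimal trigger: only the dotted, quoted collation is needed.
parse_one('CREATE TABLE x (a TEXT COLLATE pg_catalog."default")', read="postgres")
```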
| tobymao/sqlglot | diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 92e8443d..21c7e2bc 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -17,9 +17,6 @@ class TestPostgres(Validator):
)
self.validate_identity("SHA384(x)")
- self.validate_identity(
- 'CREATE TABLE x (a TEXT COLLATE "de_DE")', "CREATE TABLE x (a TEXT COLLATE de_DE)"
- )
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
@@ -792,6 +789,8 @@ class TestPostgres(Validator):
cdef.args["kind"].assert_is(exp.DataType)
self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL DAY)")
+ self.validate_identity('CREATE TABLE x (a TEXT COLLATE "de_DE")')
+ self.validate_identity('CREATE TABLE x (a TEXT COLLATE pg_catalog."default")')
self.validate_identity("CREATE TABLE t (col INT[3][5])")
self.validate_identity("CREATE TABLE t (col INT[3])")
self.validate_identity("CREATE INDEX IF NOT EXISTS ON t(c)")
@@ -1003,6 +1002,29 @@ class TestPostgres(Validator):
"CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_table_id ON tbl USING btree(id)"
)
+ self.validate_identity(
+ """
+ CREATE TABLE IF NOT EXISTS public.rental
+ (
+ inventory_id INT NOT NULL,
+ CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id)
+ REFERENCES public.customer (customer_id) MATCH FULL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id)
+ REFERENCES public.inventory (inventory_id) MATCH PARTIAL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id)
+ REFERENCES public.staff (staff_id) MATCH SIMPLE
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ INITIALLY IMMEDIATE
+ )
+ """,
+ "CREATE TABLE IF NOT EXISTS public.rental (inventory_id INT NOT NULL, CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id) REFERENCES public.customer (customer_id) MATCH FULL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id) REFERENCES public.inventory (inventory_id) MATCH PARTIAL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id) REFERENCES public.staff (staff_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE RESTRICT, INITIALLY IMMEDIATE)",
+ )
+
with self.assertRaises(ParseError):
transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
with self.assertRaises(ParseError):
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index ce8a8053..3686de5a 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -919,6 +919,11 @@ WHERE
"SELECT * FROM @foo/bar (FILE_FORMAT => ds_sandbox.test.my_csv_format, PATTERN => 'test') AS bla",
)
+ self.validate_identity(
+ "SELECT * FROM @test.public.thing/location/somefile.csv( FILE_FORMAT => 'fmt' )",
+ "SELECT * FROM @test.public.thing/location/somefile.csv (FILE_FORMAT => 'fmt')",
+ )
+
def test_sample(self):
self.validate_identity("SELECT * FROM testtable TABLESAMPLE BERNOULLI (20.3)")
self.validate_identity("SELECT * FROM testtable TABLESAMPLE SYSTEM (3) SEED (82)")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 25.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest -xvs"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@804edc5b8673725d4e1e83b9967d815073580d89#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_postgres.py::TestPostgres::test_ddl",
"tests/dialects/test_postgres.py::TestPostgres::test_operator",
"tests/dialects/test_postgres.py::TestPostgres::test_postgres",
"tests/dialects/test_postgres.py::TestPostgres::test_regexp_binary",
"tests/dialects/test_postgres.py::TestPostgres::test_rows_from",
"tests/dialects/test_postgres.py::TestPostgres::test_string_concat",
"tests/dialects/test_postgres.py::TestPostgres::test_unnest",
"tests/dialects/test_postgres.py::TestPostgres::test_unnest_json_array",
"tests/dialects/test_postgres.py::TestPostgres::test_variance",
"tests/dialects/test_snowflake.py::TestSnowflake::test_alter_set_unset",
"tests/dialects/test_snowflake.py::TestSnowflake::test_copy",
"tests/dialects/test_snowflake.py::TestSnowflake::test_ddl",
"tests/dialects/test_snowflake.py::TestSnowflake::test_describe_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_flatten",
"tests/dialects/test_snowflake.py::TestSnowflake::test_from_changes",
"tests/dialects/test_snowflake.py::TestSnowflake::test_historical_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_match_recognize",
"tests/dialects/test_snowflake.py::TestSnowflake::test_minus",
"tests/dialects/test_snowflake.py::TestSnowflake::test_null_treatment",
"tests/dialects/test_snowflake.py::TestSnowflake::test_parse_like_any",
"tests/dialects/test_snowflake.py::TestSnowflake::test_querying_semi_structured_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_replace",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_substr",
"tests/dialects/test_snowflake.py::TestSnowflake::test_sample",
"tests/dialects/test_snowflake.py::TestSnowflake::test_semi_structured_types",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_columns",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_imported_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_objects",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_primary_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_schemas",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_sequences",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_tables",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_unique_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_users",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_views",
"tests/dialects/test_snowflake.py::TestSnowflake::test_snowflake",
"tests/dialects/test_snowflake.py::TestSnowflake::test_staged_files",
"tests/dialects/test_snowflake.py::TestSnowflake::test_storage_integration",
"tests/dialects/test_snowflake.py::TestSnowflake::test_stored_procedures",
"tests/dialects/test_snowflake.py::TestSnowflake::test_swap",
"tests/dialects/test_snowflake.py::TestSnowflake::test_table_literal",
"tests/dialects/test_snowflake.py::TestSnowflake::test_timestamps",
"tests/dialects/test_snowflake.py::TestSnowflake::test_try_cast",
"tests/dialects/test_snowflake.py::TestSnowflake::test_user_defined_functions",
"tests/dialects/test_snowflake.py::TestSnowflake::test_values"
] | [] | [
"tests/dialects/test_postgres.py::TestPostgres::test_array_offset",
"tests/dialects/test_postgres.py::TestPostgres::test_bool_or"
] | [] | MIT License | 19,087 | 759 | [
"sqlglot/dialects/snowflake.py",
"sqlglot/parser.py"
] |
|
ceb10n__pydantic-settings-aws-25 | c8aca4e4597d379a6111deff2bb4cd989b4ec106 | 2024-07-25 18:20:51 | 4de0bd045a44209e98691a3e44ed059b0bd6ce9c | diff --git a/pydantic_settings_aws/aws.py b/pydantic_settings_aws/aws.py
index 6ee7b82..f862433 100644
--- a/pydantic_settings_aws/aws.py
+++ b/pydantic_settings_aws/aws.py
@@ -12,6 +12,8 @@ AWSService = Literal["ssm", "secretsmanager"]
ClientParam = Literal["secrets_client", "ssm_client"]
+_client_cache: Dict[str, Any] = {}
+
def get_ssm_content(
settings: Type[BaseSettings],
@@ -153,8 +155,16 @@ def _create_boto3_client(session_args: AwsSession, service: AWSService): # type
Returns:
boto3.client: An aws service boto3 client.
"""
+ cache_key = service + "_" + session_args.session_key()
+
+ if cache_key in _client_cache:
+ return _client_cache[cache_key]
+
session: boto3.Session = boto3.Session(
**session_args.model_dump(by_alias=True, exclude_none=True)
)
- return session.client(service)
+ client = session.client(service)
+ _client_cache[cache_key] = client
+
+ return client
diff --git a/pydantic_settings_aws/models.py b/pydantic_settings_aws/models.py
index 6804a5e..57e7371 100644
--- a/pydantic_settings_aws/models.py
+++ b/pydantic_settings_aws/models.py
@@ -19,3 +19,18 @@ class AwsSession(BaseModel):
aws_access_key_id: Optional[str] = None
aws_secret_access_key: Optional[str] = None
aws_session_token: Optional[str] = None
+
+ def session_key(self) -> str:
+ key = ""
+ for k in self.model_fields.keys():
+ # session token is too long
+ if k != "aws_session_token":
+ v = getattr(self, k)
+ if v:
+ key += f"{v}_"
+ print(key)
+
+ if not key:
+ key = "default"
+
+ return key.rstrip("_")
| Add boto3 client cache
Add boto3 client cache to same service, region and account | ceb10n/pydantic-settings-aws | diff --git a/tests/aws_mocks.py b/tests/aws_mocks.py
index 2795237..4bb75ae 100644
--- a/tests/aws_mocks.py
+++ b/tests/aws_mocks.py
@@ -4,13 +4,9 @@ from .boto3_mocks import ClientMock
TARGET_SESSION = "pydantic_settings_aws.aws.boto3.Session"
-TARGET_SECRETS_BOTO3_CLIENT = "pydantic_settings_aws.aws._get_secrets_boto3_client"
-
-TARGET_SSM_BOTO3_CLIENT = "pydantic_settings_aws.aws._get_ssm_boto3_client"
-
-TARGET_SECRETS_CLIENT = "pydantic_settings_aws.aws._create_boto3_client"
-
-TARGET_CREATE_CLIENT_FROM_SETTINGS = "pydantic_settings_aws.aws._create_client_from_settings"
+TARGET_CREATE_CLIENT_FROM_SETTINGS = (
+ "pydantic_settings_aws.aws._create_client_from_settings"
+)
TARGET_SECRET_CONTENT = "pydantic_settings_aws.aws._get_secrets_content"
@@ -23,7 +19,14 @@ def mock_secrets_content_empty(*args):
return ClientMock(secret_string=None)
-def mock_ssm(*args):
+def mock_ssm(
+ region_name=None,
+ profile_name=None,
+ aws_access_key_id=None,
+ aws_secret_access_key=None,
+ aws_session_token=None,
+ *args
+):
return ClientMock(ssm_value="value")
diff --git a/tests/aws_test.py b/tests/aws_test.py
index d44fd1f..6273f9a 100644
--- a/tests/aws_test.py
+++ b/tests/aws_test.py
@@ -9,8 +9,6 @@ from pydantic_settings_aws import aws
from .aws_mocks import (
TARGET_CREATE_CLIENT_FROM_SETTINGS,
TARGET_SECRET_CONTENT,
- TARGET_SECRETS_BOTO3_CLIENT,
- TARGET_SECRETS_CLIENT,
TARGET_SESSION,
BaseSettingsMock,
mock_create_client,
@@ -23,6 +21,7 @@ from .boto3_mocks import SessionMock
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_ssm)
def test_get_ssm_content_must_return_parameter_content_if_annotated_with_parameter_name(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
parameter_value = aws.get_ssm_content(settings, "field", "my/parameter/name")
@@ -33,6 +32,7 @@ def test_get_ssm_content_must_return_parameter_content_if_annotated_with_paramet
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_ssm)
def test_get_ssm_content_must_return_parameter_content_if_annotated_with_dict_args(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
parameter_value = aws.get_ssm_content(settings, "field", {"ssm": "my/parameter/name"})
@@ -43,6 +43,7 @@ def test_get_ssm_content_must_return_parameter_content_if_annotated_with_dict_ar
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_ssm)
def test_get_ssm_content_must_use_client_if_present_in_metadata(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
parameter_value = aws.get_ssm_content(settings, "field", {"ssm": "my/parameter/name", "ssm_client": mock_ssm()})
@@ -53,6 +54,7 @@ def test_get_ssm_content_must_use_client_if_present_in_metadata(*args):
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_ssm)
def test_get_ssm_content_must_use_field_name_if_ssm_name_not_in_metadata(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
parameter_value = aws.get_ssm_content(settings, "field", None)
@@ -63,6 +65,7 @@ def test_get_ssm_content_must_use_field_name_if_ssm_name_not_in_metadata(*args):
@mock.patch(TARGET_SESSION, SessionMock)
def test_create_ssm_client(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
client = aws._create_client_from_settings(settings, "ssm", "ssm_client")
@@ -72,6 +75,7 @@ def test_create_ssm_client(*args):
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_create_client)
def test_get_ssm_boto3_client_must_create_a_client_if_its_not_given(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {}
client = aws._create_client_from_settings(settings, "ssm", "ssm_client")
@@ -81,6 +85,7 @@ def test_get_ssm_boto3_client_must_create_a_client_if_its_not_given(*args):
@mock.patch(TARGET_SESSION, SessionMock)
def test_create_secrets_client(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
client = aws._create_client_from_settings(settings, "secretsmanager", "secrets_client")
@@ -90,6 +95,7 @@ def test_create_secrets_client(*args):
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_create_client)
def test_get_secrets_boto3_client_must_create_a_client_if_its_not_given(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {}
client = aws._create_client_from_settings(settings, "secretsmanager", "secrets_client")
@@ -102,6 +108,7 @@ def test_get_secrets_boto3_client_must_create_a_client_if_its_not_given(*args):
def test_get_secrets_content_must_raise_value_error_if_secrets_content_is_none(
*args,
):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {
"secrets_name": "secrets/name",
@@ -115,6 +122,7 @@ def test_get_secrets_content_must_raise_value_error_if_secrets_content_is_none(
@mock.patch(TARGET_CREATE_CLIENT_FROM_SETTINGS, mock_secrets_content_invalid_json)
def test_should_not_obfuscate_json_error_in_case_of_invalid_secrets(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {
"secrets_name": "secrets/name",
@@ -127,6 +135,7 @@ def test_should_not_obfuscate_json_error_in_case_of_invalid_secrets(*args):
def test_get_secrets_content_must_get_binary_content_if_string_is_not_set(*args):
+ aws._client_cache = {}
content = {
"SecretBinary": json.dumps({"username": "admin"}).encode("utf-8")
}
@@ -136,6 +145,7 @@ def test_get_secrets_content_must_get_binary_content_if_string_is_not_set(*args)
def test_get_secrets_content_must_not_hide_decode_error_if_not_binary_in_secret_binary(*args):
+ aws._client_cache = {}
content = {
"SecretBinary": json.dumps({"username": "admin"})
}
@@ -145,12 +155,14 @@ def test_get_secrets_content_must_not_hide_decode_error_if_not_binary_in_secret_
def test_get_secrets_content_must_return_none_if_neither_string_nor_binary_are_present(*args):
+ aws._client_cache = {}
secret_content = aws._get_secrets_content({})
assert secret_content is None
def test_get_secrets_content_must_return_none_if_binary_is_present_but_none(*args):
+ aws._client_cache = {}
content = {
"SecretBinary": None
}
@@ -160,8 +172,22 @@ def test_get_secrets_content_must_return_none_if_binary_is_present_but_none(*arg
def test_get_secrets_args_must_not_shadow_pydantic_validation_if_required_args_are_not_present(*args):
+ aws._client_cache = {}
settings = BaseSettingsMock()
settings.model_config = {}
with pytest.raises(ValidationError):
aws._get_secrets_args(settings)
+
+
[email protected](TARGET_SESSION, mock_ssm)
+def test_must_cache_boto3_clients_for_the_same_service_region_and_account(*args):
+ aws._client_cache = {}
+
+ settings = BaseSettingsMock()
+ settings.model_config = {"aws_region": "region", "aws_profile": "profile"}
+ aws._create_client_from_settings(settings, "secretsmanager", "secrets_client")
+ aws._create_client_from_settings(settings, "secretsmanager", "secrets_client")
+ aws._create_client_from_settings(settings, "ssm", "ssm_client")
+
+ assert len(aws._client_cache) == 2
diff --git a/tests/boto3_mocks.py b/tests/boto3_mocks.py
index 935d623..6e94d64 100644
--- a/tests/boto3_mocks.py
+++ b/tests/boto3_mocks.py
@@ -20,6 +20,9 @@ class ClientMock:
self.secret_bytes = secret_bytes
self.ssm_value = ssm_value
+ def client(self, *args):
+ return self
+
def get_parameter(self, Name=None, WithDecryption=None):
return {
"Parameter": {
diff --git a/tests/models_test.py b/tests/models_test.py
new file mode 100644
index 0000000..2a3c988
--- /dev/null
+++ b/tests/models_test.py
@@ -0,0 +1,6 @@
+from pydantic_settings_aws.models import AwsSession
+
+
+def test_aws_session_key_must_be_default_if_all_values_are_none():
+ session = AwsSession()
+ assert session.session_key() == "default"
diff --git a/tests/settings_mocks.py b/tests/settings_mocks.py
index 3875c1a..77a6d13 100644
--- a/tests/settings_mocks.py
+++ b/tests/settings_mocks.py
@@ -70,6 +70,7 @@ class ParameterWithTwoSSMClientSettings(ParameterStoreBaseSettings):
)
my_ssm: Annotated[str, {"ssm": "my/parameter", "ssm_client": ClientMock(ssm_value="value")}]
+ my_ssm_1: Annotated[str, {"ssm": "my/parameter", "ssm_client": ClientMock(ssm_value="value1")}]
my_ssm_2: Annotated[str, "my/ssm/2/parameter"]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"black",
"ruff",
"mypy",
"pre-commit",
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt",
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | annotated-types==0.7.0
black==25.1.0
boto3==1.37.23
botocore==1.37.23
cfgv==3.4.0
click==8.1.8
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
jmespath==1.0.1
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
packaging==24.2
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
pydantic==2.11.1
pydantic-settings==2.8.1
-e git+https://github.com/ceb10n/pydantic-settings-aws.git@c8aca4e4597d379a6111deff2bb4cd989b4ec106#egg=pydantic_settings_aws
pydantic_core==2.33.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
python-dotenv==1.1.0
PyYAML==6.0.2
ruff==0.11.2
s3transfer==0.11.4
six==1.17.0
tomli==2.2.1
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==1.26.20
virtualenv==20.29.3
| name: pydantic-settings-aws
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- annotated-types==0.7.0
- black==25.1.0
- boto3==1.37.23
- botocore==1.37.23
- cfgv==3.4.0
- click==8.1.8
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jmespath==1.0.1
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- packaging==24.2
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pydantic==2.11.1
- pydantic-core==2.33.0
- pydantic-settings==2.8.1
- pydantic-settings-aws==0.0.2
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- python-dotenv==1.1.0
- pyyaml==6.0.2
- ruff==0.11.2
- s3transfer==0.11.4
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==1.26.20
- virtualenv==20.29.3
prefix: /opt/conda/envs/pydantic-settings-aws
| [
"tests/aws_test.py::test_must_cache_boto3_clients_for_the_same_service_region_and_account",
"tests/models_test.py::test_aws_session_key_must_be_default_if_all_values_are_none"
] | [] | [
"tests/aws_test.py::test_get_ssm_content_must_return_parameter_content_if_annotated_with_parameter_name",
"tests/aws_test.py::test_get_ssm_content_must_return_parameter_content_if_annotated_with_dict_args",
"tests/aws_test.py::test_get_ssm_content_must_use_client_if_present_in_metadata",
"tests/aws_test.py::test_get_ssm_content_must_use_field_name_if_ssm_name_not_in_metadata",
"tests/aws_test.py::test_create_ssm_client",
"tests/aws_test.py::test_get_ssm_boto3_client_must_create_a_client_if_its_not_given",
"tests/aws_test.py::test_create_secrets_client",
"tests/aws_test.py::test_get_secrets_boto3_client_must_create_a_client_if_its_not_given",
"tests/aws_test.py::test_get_secrets_content_must_raise_value_error_if_secrets_content_is_none",
"tests/aws_test.py::test_should_not_obfuscate_json_error_in_case_of_invalid_secrets",
"tests/aws_test.py::test_get_secrets_content_must_get_binary_content_if_string_is_not_set",
"tests/aws_test.py::test_get_secrets_content_must_not_hide_decode_error_if_not_binary_in_secret_binary",
"tests/aws_test.py::test_get_secrets_content_must_return_none_if_neither_string_nor_binary_are_present",
"tests/aws_test.py::test_get_secrets_content_must_return_none_if_binary_is_present_but_none",
"tests/aws_test.py::test_get_secrets_args_must_not_shadow_pydantic_validation_if_required_args_are_not_present"
] | [] | MIT License | 19,090 | 507 | [
"pydantic_settings_aws/aws.py",
"pydantic_settings_aws/models.py"
] |
|
kiorky__croniter-86 | b46e567be24103f3bf5d234d9f32bba123fe784c | 2024-07-26 08:21:44 | b46e567be24103f3bf5d234d9f32bba123fe784c | diff --git a/src/croniter/croniter.py b/src/croniter/croniter.py
index 8a01cf1..4f75398 100644
--- a/src/croniter/croniter.py
+++ b/src/croniter/croniter.py
@@ -235,10 +235,10 @@ class croniter(object):
def get_next(self, ret_type=None, start_time=None, update_current=True):
if start_time and self._expand_from_start_time:
raise ValueError("start_time is not supported when using expand_from_start_time = True.")
- return self._get_next(ret_type or self._ret_type, is_prev=False, update_current=update_current)
+ return self._get_next(ret_type or self._ret_type, start_time=start_time, is_prev=False, update_current=update_current)
def get_prev(self, ret_type=None, start_time=None, update_current=True):
- return self._get_next(ret_type or self._ret_type, is_prev=True, update_current=update_current)
+ return self._get_next(ret_type or self._ret_type, start_time=start_time, is_prev=True, update_current=update_current)
def get_current(self, ret_type=None):
ret_type = ret_type or self._ret_type
@@ -382,6 +382,7 @@ class croniter(object):
self._is_prev = False
yield self._get_next(ret_type or self._ret_type,
start_time=start_time, update_current=update_current)
+ start_time = None
except CroniterBadDateError:
if self._max_years_btw_matches_explicitly_set:
return
@@ -395,6 +396,7 @@ class croniter(object):
self._is_prev = True
yield self._get_next(ret_type or self._ret_type,
start_time=start_time, update_current=update_current)
+ start_time = None
except CroniterBadDateError:
if self._max_years_btw_matches_explicitly_set:
return
| start_time not respected in get_next/get_prev/all_next/all_prev
The argument start_time does not have any effect with version 3.0.1
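The report gives no snippet; a minimal sketch of the described behaviour (assuming croniter 3.0.1 and the optional trailing seconds field) could look like this:

```python
from datetime import datetime
from croniter import croniter

# Iterator anchored at 2024-07-01; the start_time below is expected to move
# the iteration point to 2024-07-12, but in 3.0.1 it was silently ignored.
cron = croniter("* * * * * */3", datetime(2024, 7, 1))
print(cron.get_next(datetime, start_time=datetime(2024, 7, 12)))
# expected: 2024-07-12 00:00:03, observed with 3.0.1: 2024-07-01 00:00:03
```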
| kiorky/croniter | diff --git a/src/croniter/tests/test_croniter.py b/src/croniter/tests/test_croniter.py
index 7944c17..31391dc 100755
--- a/src/croniter/tests/test_croniter.py
+++ b/src/croniter/tests/test_croniter.py
@@ -1896,6 +1896,9 @@ class CroniterTest(base.TestCase):
(datetime(2024, 7, 12, 0, 0, 3), datetime(2024, 7, 12, 0, 0, 3))]
)
+ retns = (cron.get_next(datetime, start_time=datetime(2024, 7, 12)), cron.get_current(datetime))
+ self.assertEqual(retn[0], retns)
+
cron.set_current(datetime(2024, 7, 12), force=True)
retp = [(cron.get_prev(datetime), cron.get_current(datetime))
for a in range(3)]
@@ -1906,16 +1909,27 @@ class CroniterTest(base.TestCase):
(datetime(2024, 7, 11, 23, 59, 57), datetime(2024, 7, 11, 23, 59, 57))]
)
+ retps = (cron.get_prev(datetime, start_time=datetime(2024, 7, 12)), cron.get_current(datetime))
+ self.assertEqual(retp[0], retps)
+
cron.set_current(datetime(2024, 7, 12), force=True)
r = cron.all_next(datetime)
retan = [(next(r), cron.get_current(datetime)) for a in range(3)]
+ r = cron.all_next(datetime, start_time=datetime(2024, 7, 12))
+ retans = [(next(r), cron.get_current(datetime)) for a in range(3)]
+
cron.set_current(datetime(2024, 7, 12), force=True)
r = cron.all_prev(datetime)
retap = [(next(r), cron.get_current(datetime)) for a in range(3)]
+ r = cron.all_prev(datetime, start_time=datetime(2024, 7, 12))
+ retaps = [(next(r), cron.get_current(datetime)) for a in range(3)]
+
self.assertEqual(retp, retap)
+ self.assertEqual(retp, retaps)
self.assertEqual(retn, retan)
+ self.assertEqual(retn, retans)
cron.set_current(datetime(2024, 7, 12), force=True)
uretn = [(cron.get_next(datetime, update_current=False), cron.get_current(datetime))
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 3.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
coveralls==4.0.1
-e git+https://github.com/kiorky/croniter.git@b46e567be24103f3bf5d234d9f32bba123fe784c#egg=croniter
distlib==0.3.9
docopt==0.6.2
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pyproject-api==1.9.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.32.3
six==1.17.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
tzlocal==5.3.1
urllib3==2.3.0
virtualenv==20.29.3
| name: croniter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- coveralls==4.0.1
- distlib==0.3.9
- docopt==0.6.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pyproject-api==1.9.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- tzlocal==5.3.1
- urllib3==2.3.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/croniter
| [
"src/croniter/tests/test_croniter.py::CroniterTest::test_get_next_update_current"
] | [] | [
"src/croniter/tests/test_croniter.py::CroniterTest::testBlockDupRanges",
"src/croniter/tests/test_croniter.py::CroniterTest::testBug1",
"src/croniter/tests/test_croniter.py::CroniterTest::testBug2",
"src/croniter/tests/test_croniter.py::CroniterTest::testBug3",
"src/croniter/tests/test_croniter.py::CroniterTest::testBug57",
"src/croniter/tests/test_croniter.py::CroniterTest::testDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testDay2",
"src/croniter/tests/test_croniter.py::CroniterTest::testDomDowVixieCronBug",
"src/croniter/tests/test_croniter.py::CroniterTest::testError",
"src/croniter/tests/test_croniter.py::CroniterTest::testGetCurrent",
"src/croniter/tests/test_croniter.py::CroniterTest::testHour",
"src/croniter/tests/test_croniter.py::CroniterTest::testISOWeekday",
"src/croniter/tests/test_croniter.py::CroniterTest::testInitNoStartTime",
"src/croniter/tests/test_croniter.py::CroniterTest::testLastDayOfMonth",
"src/croniter/tests/test_croniter.py::CroniterTest::testMinute",
"src/croniter/tests/test_croniter.py::CroniterTest::testMonth",
"src/croniter/tests/test_croniter.py::CroniterTest::testNthWeekDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testOptimizeCronExpressions",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevDayOfMonthWithCrossing",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevLastDayOfMonth",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevMinute",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevNthWeekDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevWeekDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testPrevWeekDay2",
"src/croniter/tests/test_croniter.py::CroniterTest::testPreviousDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testPreviousDow",
"src/croniter/tests/test_croniter.py::CroniterTest::testPreviousHour",
"src/croniter/tests/test_croniter.py::CroniterTest::testPreviousMonth",
"src/croniter/tests/test_croniter.py::CroniterTest::testRangeWithUppercaseLastDayOfMonth",
"src/croniter/tests/test_croniter.py::CroniterTest::testSecond",
"src/croniter/tests/test_croniter.py::CroniterTest::testSecondRepeat",
"src/croniter/tests/test_croniter.py::CroniterTest::testSecondSec",
"src/croniter/tests/test_croniter.py::CroniterTest::testSundayToThursdayWithAlphaConversion",
"src/croniter/tests/test_croniter.py::CroniterTest::testTimezone",
"src/croniter/tests/test_croniter.py::CroniterTest::testTimezoneDateutil",
"src/croniter/tests/test_croniter.py::CroniterTest::testTimezoneSummerTime",
"src/croniter/tests/test_croniter.py::CroniterTest::testTimezoneWinterTime",
"src/croniter/tests/test_croniter.py::CroniterTest::testWeekDay",
"src/croniter/tests/test_croniter.py::CroniterTest::testWeekDayDayAnd",
"src/croniter/tests/test_croniter.py::CroniterTest::test_bug34",
"src/croniter/tests/test_croniter.py::CroniterTest::test_bug_62_leap",
"src/croniter/tests/test_croniter.py::CroniterTest::test_configure_second_location",
"src/croniter/tests/test_croniter.py::CroniterTest::test_confirm_sort",
"src/croniter/tests/test_croniter.py::CroniterTest::test_dst_issue90_st31ny",
"src/croniter/tests/test_croniter.py::CroniterTest::test_dst_iter",
"src/croniter/tests/test_croniter.py::CroniterTest::test_error_alpha_cron",
"src/croniter/tests/test_croniter.py::CroniterTest::test_error_bad_cron",
"src/croniter/tests/test_croniter.py::CroniterTest::test_exactly_the_same_minute",
"src/croniter/tests/test_croniter.py::CroniterTest::test_expand_from_start_time_date",
"src/croniter/tests/test_croniter.py::CroniterTest::test_expand_from_start_time_day_of_week",
"src/croniter/tests/test_croniter.py::CroniterTest::test_expand_from_start_time_hour",
"src/croniter/tests/test_croniter.py::CroniterTest::test_expand_from_start_time_minute",
"src/croniter/tests/test_croniter.py::CroniterTest::test_expand_from_start_time_month",
"src/croniter/tests/test_croniter.py::CroniterTest::test_explicit_year_forward",
"src/croniter/tests/test_croniter.py::CroniterTest::test_get_next_fails_with_expand_from_start_time_true",
"src/croniter/tests/test_croniter.py::CroniterTest::test_hash_mixup_all_fri_3rd_sat",
"src/croniter/tests/test_croniter.py::CroniterTest::test_invalid_question_mark",
"src/croniter/tests/test_croniter.py::CroniterTest::test_invalid_year",
"src/croniter/tests/test_croniter.py::CroniterTest::test_invalid_zerorepeat",
"src/croniter/tests/test_croniter.py::CroniterTest::test_is_valid",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue145_getnext",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue151",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue156",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_142_dow",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_47",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_k11",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_k12",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_k33",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_k34",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_k6",
"src/croniter/tests/test_croniter.py::CroniterTest::test_issue_monsun_117",
"src/croniter/tests/test_croniter.py::CroniterTest::test_last_out_of_range",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_friday",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_friday_2hours",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_friday_2xh_2xm",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_mixup_4th_and_last",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_mixup_all_fri_last_sat",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_mixup_firstlast_sat",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_saturday_rev",
"src/croniter/tests/test_croniter.py::CroniterTest::test_lwom_tue_thu",
"src/croniter/tests/test_croniter.py::CroniterTest::test_match",
"src/croniter/tests/test_croniter.py::CroniterTest::test_match_handle_bad_cron",
"src/croniter/tests/test_croniter.py::CroniterTest::test_match_range",
"src/croniter/tests/test_croniter.py::CroniterTest::test_milliseconds",
"src/croniter/tests/test_croniter.py::CroniterTest::test_mixdow",
"src/croniter/tests/test_croniter.py::CroniterTest::test_multiple_months",
"src/croniter/tests/test_croniter.py::CroniterTest::test_next_when_now_satisfies_cron",
"src/croniter/tests/test_croniter.py::CroniterTest::test_nth_as_last_wday_simple",
"src/croniter/tests/test_croniter.py::CroniterTest::test_nth_out_of_range",
"src/croniter/tests/test_croniter.py::CroniterTest::test_nth_wday_simple",
"src/croniter/tests/test_croniter.py::CroniterTest::test_overflow",
"src/croniter/tests/test_croniter.py::CroniterTest::test_question_mark",
"src/croniter/tests/test_croniter.py::CroniterTest::test_rangeGenerator",
"src/croniter/tests/test_croniter.py::CroniterTest::test_std_dst",
"src/croniter/tests/test_croniter.py::CroniterTest::test_std_dst2",
"src/croniter/tests/test_croniter.py::CroniterTest::test_std_dst3",
"src/croniter/tests/test_croniter.py::CroniterTest::test_wdom_core_leap_year",
"src/croniter/tests/test_croniter.py::CroniterTest::test_weekday_range",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year_bad_date_error",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year_get_prev",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year_match",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year_with_other_field",
"src/croniter/tests/test_croniter.py::CroniterTest::test_year_with_second_at_beginning"
] | [] | MIT License | 19,095 | 459 | [
"src/croniter/croniter.py"
] |
|
tobymao__sqlglot-3839 | 11853754b70b00d627c77599590cb1a0af7fd9e2 | 2024-07-26 14:36:04 | d71eb4ebc2a0f82c567b32de51298f0d82f400a1 | diff --git a/sqlglot/dialects/bigquery.py b/sqlglot/dialects/bigquery.py
index e6fcf356..58f459b9 100644
--- a/sqlglot/dialects/bigquery.py
+++ b/sqlglot/dialects/bigquery.py
@@ -322,6 +322,7 @@ class BigQuery(Dialect):
"ANY TYPE": TokenType.VARIANT,
"BEGIN": TokenType.COMMAND,
"BEGIN TRANSACTION": TokenType.BEGIN,
+ "BYTEINT": TokenType.INT,
"BYTES": TokenType.BINARY,
"CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
"DATETIME": TokenType.TIMESTAMP,
diff --git a/sqlglot/dialects/clickhouse.py b/sqlglot/dialects/clickhouse.py
index 6303d739..9c3df9ae 100644
--- a/sqlglot/dialects/clickhouse.py
+++ b/sqlglot/dialects/clickhouse.py
@@ -81,6 +81,14 @@ def _build_count_if(args: t.List) -> exp.CountIf | exp.CombinedAggFunc:
return exp.CombinedAggFunc(this="countIf", expressions=args, parts=("count", "If"))
+def _build_str_to_date(args: t.List) -> exp.Cast | exp.Anonymous:
+ if len(args) == 3:
+ return exp.Anonymous(this="STR_TO_DATE", expressions=args)
+
+ strtodate = exp.StrToDate.from_arg_list(args)
+ return exp.cast(strtodate, exp.DataType.build(exp.DataType.Type.DATETIME))
+
+
def _datetime_delta_sql(name: str) -> t.Callable[[Generator, DATEΤΙΜΕ_DELTA], str]:
def _delta_sql(self: Generator, expression: DATEΤΙΜΕ_DELTA) -> str:
if not expression.unit:
@@ -181,6 +189,7 @@ class ClickHouse(Dialect):
"MAP": parser.build_var_map,
"MATCH": exp.RegexpLike.from_arg_list,
"RANDCANONICAL": exp.Rand.from_arg_list,
+ "STR_TO_DATE": _build_str_to_date,
"TUPLE": exp.Struct.from_arg_list,
"TIMESTAMP_SUB": build_date_delta(exp.TimestampSub, default_unit=None),
"TIMESTAMPSUB": build_date_delta(exp.TimestampSub, default_unit=None),
@@ -836,6 +845,24 @@ class ClickHouse(Dialect):
"NAMED COLLECTION",
}
+ def strtodate_sql(self, expression: exp.StrToDate) -> str:
+ strtodate_sql = super().strtodate_sql(expression)
+
+ if not isinstance(expression.parent, exp.Cast):
+ # StrToDate returns DATEs in other dialects (eg. postgres), so
+ # this branch aims to improve the transpilation to clickhouse
+ return f"CAST({strtodate_sql} AS DATE)"
+
+ return strtodate_sql
+
+ def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
+ this = expression.this
+
+ if isinstance(this, exp.StrToDate) and expression.to == exp.DataType.build("datetime"):
+ return self.sql(this)
+
+ return super().cast_sql(expression, safe_prefix=safe_prefix)
+
def _jsonpathsubscript_sql(self, expression: exp.JSONPathSubscript) -> str:
this = self.json_path_part(expression.this)
return str(int(this) + 1) if is_int(this) else this
 | TO_DATE() is not converted to toDate() when transpiling from postgres to clickhouse
Hello,
When transpiling TO_DATE() from postgres to clickhouse, it is not converted to toDate(); the call is left unchanged.
Fully reproducible code snippet
with sqlglot 25.7.1 or 25.3.1
`sqlglot.transpile("SELECT TO_DATE(date) AS date", read="postgres", write="clickhouse")`
output:
`['SELECT TO_DATE(date) AS date']` | tobymao/sqlglot | diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index e3fb04f9..45f3856c 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -289,6 +289,10 @@ LANGUAGE js AS
r"REGEXP_EXTRACT(svc_plugin_output, r'\\\((.*)')",
r"REGEXP_EXTRACT(svc_plugin_output, '\\\\\\((.*)')",
)
+ self.validate_identity(
+ "SELECT CAST(1 AS BYTEINT)",
+ "SELECT CAST(1 AS INT64)",
+ )
self.validate_all(
"SAFE_CAST(some_date AS DATE FORMAT 'DD MONTH YYYY')",
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index 92fbd4bf..2356ad00 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -28,6 +28,8 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)
+ self.validate_identity("SELECT STR_TO_DATE(str, fmt, tz)")
+ self.validate_identity("SELECT STR_TO_DATE('05 12 2000', '%d %m %Y')")
self.validate_identity("SELECT EXTRACT(YEAR FROM toDateTime('2023-02-01'))")
self.validate_identity("extract(haystack, pattern)")
self.validate_identity("SELECT * FROM x LIMIT 1 UNION ALL SELECT * FROM y")
@@ -153,6 +155,17 @@ class TestClickhouse(Validator):
"SELECT SUM(1) AS impressions FROM (SELECT ['Istanbul', 'Berlin', 'Bobruisk'] AS cities) WHERE arrayJoin(cities) IN ('Istanbul', 'Berlin')",
)
+ self.validate_all(
+ "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
+ read={
+ "clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
+ "postgres": "SELECT TO_DATE('05 12 2000', 'DD MM YYYY')",
+ },
+ write={
+ "clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
+ "postgres": "SELECT CAST(CAST(TO_DATE('05 12 2000', 'DD MM YYYY') AS TIMESTAMP) AS DATE)",
+ },
+ )
self.validate_all(
"SELECT * FROM x PREWHERE y = 1 WHERE z = 2",
write={
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 25.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"ruff",
"mypy"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@11853754b70b00d627c77599590cb1a0af7fd9e2#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_bigquery.py::TestBigQuery::test_bigquery",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_clickhouse"
] | [] | [
"tests/dialects/test_bigquery.py::TestBigQuery::test_errors",
"tests/dialects/test_bigquery.py::TestBigQuery::test_gap_fill",
"tests/dialects/test_bigquery.py::TestBigQuery::test_group_concat",
"tests/dialects/test_bigquery.py::TestBigQuery::test_inline_constructor",
"tests/dialects/test_bigquery.py::TestBigQuery::test_json_object",
"tests/dialects/test_bigquery.py::TestBigQuery::test_merge",
"tests/dialects/test_bigquery.py::TestBigQuery::test_mod",
"tests/dialects/test_bigquery.py::TestBigQuery::test_models",
"tests/dialects/test_bigquery.py::TestBigQuery::test_pushdown_cte_column_names",
"tests/dialects/test_bigquery.py::TestBigQuery::test_remove_precision_parameterized_types",
"tests/dialects/test_bigquery.py::TestBigQuery::test_rename_table",
"tests/dialects/test_bigquery.py::TestBigQuery::test_user_defined_functions",
"tests/dialects/test_bigquery.py::TestBigQuery::test_warnings",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_agg_functions",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_cte",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_datetime_funcs",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ddl",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_drop_on_cluster",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_parameterization",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_signed_and_unsigned_types",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ternary"
] | [] | MIT License | 19,102 | 800 | [
"sqlglot/dialects/bigquery.py",
"sqlglot/dialects/clickhouse.py"
] |
|
reagento__dishka-189 | e676d5a80b228888df88e1680776ab415cf63572 | 2024-07-26 21:47:21 | 80fc5564b2688a76bfdce013bf0e96af9f9ca847 | diff --git a/src/dishka/exceptions.py b/src/dishka/exceptions.py
index 1a05e98..fe8a810 100644
--- a/src/dishka/exceptions.py
+++ b/src/dishka/exceptions.py
@@ -30,7 +30,12 @@ class CycleDependenciesError(InvalidGraphError):
self.path = path
def __str__(self):
- return "Cycle dependencies detected.\n" + _renderer.render(self.path)
+ if len(self.path) == 1:
+ hint = " Did you mean @decorate instead of @provide?"
+ else:
+ hint = ""
+ details = _renderer.render(self.path)
+ return f"Cycle dependencies detected.{hint}\n{details}"
class ExitError(ExceptionGroup, DishkaError):
diff --git a/src/dishka/registry.py b/src/dishka/registry.py
index 4e3b09b..f1e9ca2 100644
--- a/src/dishka/registry.py
+++ b/src/dishka/registry.py
@@ -163,6 +163,11 @@ class GraphValidator:
self, factory: Factory, registry_index: int,
):
self.path[factory.provides] = factory
+ if (
+ factory.provides in factory.kw_dependencies.values() or
+ factory.provides in factory.dependencies
+ ):
+ raise CycleDependenciesError([factory])
try:
for dep in factory.dependencies:
# ignore TypeVar parameters
| Missing exception for provider when trying to use and return same type of dependency
## Description
You cannot create a dependency that relies on another dependency of the same type. This rule applies without exception, and if you try to do so, you will encounter a "RecursionError" instead of a library-level exception. This can make it difficult to understand the root cause of the issue.
## Code example
```python
class DBProvider(Provider):
@provide(scope=Scope.REQUEST)
async def get_transaction(self, db: DB) -> AsyncIterable[DB]:
db.begin()
yield db # RecursionError: maximum recursion depth exceeded
db.commit()
``` | reagento/dishka | diff --git a/tests/unit/container/test_decorator.py b/tests/unit/container/test_decorator.py
index 3236f9b..b11cc7d 100644
--- a/tests/unit/container/test_decorator.py
+++ b/tests/unit/container/test_decorator.py
@@ -10,7 +10,7 @@ from dishka import (
make_container,
provide,
)
-from dishka.exceptions import NoFactoryError
+from dishka.exceptions import CycleDependenciesError, NoFactoryError
class A:
@@ -149,3 +149,19 @@ def test_missing_factory():
with pytest.raises(NoFactoryError) as e:
make_container(MyProvider())
assert e.value.requested == DependencyKey(int, component=DEFAULT_COMPONENT)
+
+
+def test_expected_decorator():
+ class MyProvider(Provider):
+ scope = Scope.REQUEST
+
+ @provide(scope=Scope.APP)
+ def bar(self) -> A:
+ return A()
+
+ @provide
+ def foo(self, a: A) -> A:
+ return a
+
+ with pytest.raises(CycleDependenciesError):
+ make_container(MyProvider())
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
-e git+https://github.com/reagento/dishka.git@e676d5a80b228888df88e1680776ab415cf63572#egg=dishka
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: dishka
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py310h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py310h06a4308_0
- pip=25.0=py310h06a4308_0
- pluggy=1.5.0=py310h06a4308_0
- pytest=8.3.4=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py310h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- dishka==0.1
- pytest-cov==6.0.0
prefix: /opt/conda/envs/dishka
| [
"tests/unit/container/test_decorator.py::test_expected_decorator"
] | [] | [
"tests/unit/container/test_decorator.py::test_simple",
"tests/unit/container/test_decorator.py::test_decorator",
"tests/unit/container/test_decorator.py::test_kwargs",
"tests/unit/container/test_decorator.py::test_decorator_with_provides",
"tests/unit/container/test_decorator.py::test_alias",
"tests/unit/container/test_decorator.py::test_double",
"tests/unit/container/test_decorator.py::test_double_ok",
"tests/unit/container/test_decorator.py::test_missing_factory"
] | [] | Apache License 2.0 | 19,109 | 351 | [
"src/dishka/exceptions.py",
"src/dishka/registry.py"
] |
|
tefra__xsdata-1066 | cce0a16680b59ce50d1349f9cc0a891054838ad0 | 2024-07-28 05:46:01 | 6eccffb886ca537c4d1b1fa0f59cac637311e43f | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/tefra/xsdata/pull/1066?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Chris+Tsou) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 99.74%. Comparing base [(`cce0a16`)](https://app.codecov.io/gh/tefra/xsdata/commit/cce0a16680b59ce50d1349f9cc0a891054838ad0?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Chris+Tsou) to head [(`1f05835`)](https://app.codecov.io/gh/tefra/xsdata/commit/1f0583529c8f77c08b84888b047d7a1ffadccfa2?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Chris+Tsou).
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #1066 +/- ##
===========================================
- Coverage 100.00% 99.74% -0.26%
===========================================
Files 115 115
Lines 9238 9259 +21
Branches 2179 2103 -76
===========================================
- Hits 9238 9235 -3
- Misses 0 20 +20
- Partials 0 4 +4
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/tefra/xsdata/pull/1066?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Chris+Tsou).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Chris+Tsou).
| diff --git a/xsdata/formats/dataclass/parsers/nodes/union.py b/xsdata/formats/dataclass/parsers/nodes/union.py
index 68377336..6839ad58 100644
--- a/xsdata/formats/dataclass/parsers/nodes/union.py
+++ b/xsdata/formats/dataclass/parsers/nodes/union.py
@@ -1,7 +1,8 @@
import copy
+import functools
from contextlib import suppress
from dataclasses import replace
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Tuple, Type
from xsdata.exceptions import ParserError
from xsdata.formats.dataclass.context import XmlContext
@@ -40,6 +41,7 @@ class UnionNode(XmlNode):
"context",
"level",
"events",
+ "candidates",
)
def __init__(
@@ -60,8 +62,41 @@ class UnionNode(XmlNode):
self.config = config
self.context = context
self.level = 0
+ self.candidates = self.filter_candidates()
self.events: List[Tuple[str, str, Any, Any]] = []
+ def filter_candidates(self) -> List[Type]:
+ """Filter union candidates by fixed attributes."""
+ candidates = list(self.var.types)
+ fixed_attribute = functools.partial(
+ self.filter_fixed_attrs, parent_ns=target_uri(self.var.qname)
+ )
+
+ return list(filter(fixed_attribute, candidates))
+
+ def filter_fixed_attrs(self, candidate: Type, parent_ns: str) -> bool:
+ """Return whether the node attrs are incompatible with fixed attrs.
+
+ Args:
+ candidate: The candidate type
+ parent_ns: The parent namespace
+ """
+ if not self.context.class_type.is_model(candidate):
+ return not self.attrs
+
+ meta = self.context.build(candidate, parent_ns=parent_ns)
+ for qname, value in self.attrs.items():
+ var = meta.find_attribute(qname)
+ if not var or var.init:
+ continue
+
+ try:
+ ParserUtils.validate_fixed_value(meta, var, value)
+ except ParserError:
+ return False
+
+ return True
+
def child(self, qname: str, attrs: Dict, ns_map: Dict, position: int) -> XmlNode:
"""Record the event for the child element.
@@ -120,29 +155,31 @@ class UnionNode(XmlNode):
parent_namespace = target_uri(qname)
config = replace(self.config, fail_on_converter_warnings=True)
- for clazz in self.var.types:
- candidate = None
+ for candidate in self.candidates:
+ result = None
with suppress(Exception):
- if self.context.class_type.is_model(clazz):
- self.context.build(clazz, parent_ns=parent_namespace)
+ if self.context.class_type.is_model(candidate):
+ self.context.build(candidate, parent_ns=parent_namespace)
parser = NodeParser(
- config=config, context=self.context, handler=EventsHandler
+ config=config,
+ context=self.context,
+ handler=EventsHandler,
)
- candidate = parser.parse(self.events, clazz)
+ result = parser.parse(self.events, candidate)
else:
- candidate = ParserUtils.parse_var(
+ result = ParserUtils.parse_var(
meta=self.meta,
var=self.var,
config=config,
value=text,
- types=[clazz],
+ types=[candidate],
ns_map=self.ns_map,
)
- score = self.context.class_type.score_object(candidate)
+ score = self.context.class_type.score_object(result)
if score > max_score:
max_score = score
- obj = candidate
+ obj = result
if obj:
objects.append((self.var.qname, obj))
diff --git a/xsdata/formats/dataclass/parsers/utils.py b/xsdata/formats/dataclass/parsers/utils.py
index 1a774210..1c3b1ba4 100644
--- a/xsdata/formats/dataclass/parsers/utils.py
+++ b/xsdata/formats/dataclass/parsers/utils.py
@@ -228,7 +228,7 @@ class ParserUtils:
Special cases
- float nans are never equal in python
- strings with whitespaces, need trimming
-
+ - comparing raw str values
"""
default_value = var.default() if callable(var.default) else var.default
@@ -244,6 +244,9 @@ class ParserUtils:
):
return
+ if isinstance(value, str) and not isinstance(default_value, str):
+ default_value = converter.serialize(default_value, format=var.format)
+
if default_value != value:
raise ParserError(
f"Fixed value mismatch {meta.qname}:{var.qname}, `{default_value} != {value}`"
| XML parsing approximately 200 times slower than parsing to in-built xml.etree.ElementTree.Element
Using:
- xsdata 24.6
- Python 3.12.3
I'd like to ask about the performance of xsdata XML parsing. In my benchmarking I found it to be approximately 200 times slower than parsing to the in-built xml.etree.ElementTree.Element. I was expecting xsdata to be a little slower but this difference seems to be extreme. I tried both XmlEventHandler and LxmlEventHandler and got similar results.
Is this difference expected? If it's expected then I apologise for raising this as an issue.
Test script:
```python
from pathlib import Path
import time
import xml.etree.ElementTree
import my_dataclass
from xsdata.formats.dataclass.parsers import XmlParser
from xsdata.formats.dataclass.parsers.handlers import XmlEventHandler
TEST_ITERATIONS = 2000
my_path = Path(__file__).parent
xml_file = my_path / "input.xml"
def main():
with xml_file.open() as f:
file_contents = f.read()
start_time = time.time()
using_in_built_element(file_contents, TEST_ITERATIONS)
end_time = time.time()
time_using_in_built_element = end_time - start_time
print("Time using Python xml.etree.ElementTree.Element:", time_using_in_built_element)
start_time = time.time()
using_xsdata(file_contents, TEST_ITERATIONS)
end_time = time.time()
time_using_xsdata = end_time - start_time
print("Time using xsdata:", time_using_xsdata)
print ("Ratio:", time_using_xsdata / time_using_in_built_element)
def using_in_built_element(xml_string, iterations):
for _ in range(iterations):
xml_root = xml.etree.ElementTree.fromstring(xml_string)
def using_xsdata(xml_string, iterations):
parser = XmlParser(handler=XmlEventHandler)
for _ in range(iterations):
record_as_obj = parser.from_string(xml_string, my_dataclass.LogRecord)
if __name__ == "__main__":
main()
```
My results:
```
Time using Python xml.etree.ElementTree.Element: 0.2820000648498535
Time using xsdata: 55.9834668636322
Ratio: 198.52288648741876
```
I've attached this script, "input.xml" and "my_dataclass.py": [xsdata_xml_parse_performance.zip](https://github.com/user-attachments/files/15958630/xsdata_xml_parse_performance.zip)
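As a follow-up diagnostic (not part of the original report), one way to narrow down where the time goes is to profile a single parse with the standard-library profiler; `input.xml` and `my_dataclass.LogRecord` below refer to the attached script and files:

```python
import cProfile
import pstats
from pathlib import Path

import my_dataclass  # the attached module
from xsdata.formats.dataclass.parsers import XmlParser
from xsdata.formats.dataclass.parsers.handlers import XmlEventHandler

file_contents = Path("input.xml").read_text()
parser = XmlParser(handler=XmlEventHandler)

with cProfile.Profile() as profiler:
    parser.from_string(file_contents, my_dataclass.LogRecord)

# Print the 20 most expensive calls by cumulative time.
pstats.Stats(profiler).sort_stats("cumulative").print_stats(20)
```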
| tefra/xsdata | diff --git a/tests/formats/dataclass/parsers/nodes/test_union.py b/tests/formats/dataclass/parsers/nodes/test_union.py
index 15a44b51..67f533b9 100644
--- a/tests/formats/dataclass/parsers/nodes/test_union.py
+++ b/tests/formats/dataclass/parsers/nodes/test_union.py
@@ -1,4 +1,4 @@
-from dataclasses import make_dataclass
+from dataclasses import field, make_dataclass
from typing import Union
from unittest import TestCase
@@ -60,6 +60,41 @@ class UnionNodeTests(TestCase):
self.assertEqual(0, node.level)
self.assertEqual([("end", "bar", "text", "tail")], node.events)
+ def test_filter_fixed_attrs(self):
+ a = make_dataclass(
+ "A",
+ [("x", int, field(init=False, default=1, metadata={"type": "Attribute"}))],
+ )
+ b = make_dataclass(
+ "A",
+ [("x", int, field(init=False, default=2, metadata={"type": "Attribute"}))],
+ )
+
+ root = make_dataclass("Root", [("value", Union[a, b, int])])
+ meta = self.context.build(root)
+ var = next(meta.find_children("value"))
+ node = UnionNode(
+ meta=meta,
+ var=var,
+ position=0,
+ config=self.config,
+ context=self.context,
+ attrs={"x": 2},
+ ns_map={},
+ )
+ self.assertEqual([b], node.candidates)
+
+ node = UnionNode(
+ meta=meta,
+ var=var,
+ position=0,
+ config=self.config,
+ context=self.context,
+ attrs={},
+ ns_map={},
+ )
+ self.assertEqual([a, b, int], node.candidates)
+
def test_bind_returns_best_matching_object(self):
item = make_dataclass(
"Item", [("value", str), ("a", int, attribute()), ("b", int, attribute())]
@@ -95,8 +130,15 @@ class UnionNodeTests(TestCase):
self.assertIsNot(node.attrs, node.events[0][2])
self.assertIs(node.ns_map, node.events[0][3])
- node.events.clear()
- node.attrs.clear()
+ node = UnionNode(
+ meta=meta,
+ var=var,
+ position=0,
+ config=self.config,
+ context=self.context,
+ attrs={},
+ ns_map=ns_map,
+ )
self.assertTrue(node.bind("item", "1", None, objects))
self.assertEqual(1, objects[-1][1])
diff --git a/tests/formats/dataclass/parsers/test_utils.py b/tests/formats/dataclass/parsers/test_utils.py
index 61844489..4dfaca43 100644
--- a/tests/formats/dataclass/parsers/test_utils.py
+++ b/tests/formats/dataclass/parsers/test_utils.py
@@ -7,7 +7,7 @@ from xsdata.formats.converter import ConverterFactory
from xsdata.formats.dataclass.context import XmlContext
from xsdata.formats.dataclass.parsers.config import ParserConfig
from xsdata.formats.dataclass.parsers.utils import ParserUtils
-from xsdata.models.enums import Namespace, QNames
+from xsdata.models.enums import Namespace, ProcessType, QNames
from xsdata.utils.testing import FactoryTestCase, XmlMetaFactory, XmlVarFactory
@@ -117,6 +117,9 @@ class ParserUtilsTests(FactoryTestCase):
var = XmlVarFactory.create("fixed", default=lambda: float("nan"))
ParserUtils.validate_fixed_value(meta, var, float("nan"))
+ var = XmlVarFactory.create("fixed", default=lambda: ProcessType.LAX)
+ ParserUtils.validate_fixed_value(meta, var, "lax")
+
def test_parse_var_with_error(self):
meta = XmlMetaFactory.create(clazz=TypeA, qname="foo")
var = XmlVarFactory.create("fixed", default="a")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 24.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[cli,lxml,soap]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-benchmark",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
click-default-group==1.2.4
coverage==7.8.0
docformatter==1.7.5
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
lxml==5.3.1
MarkupSafe==3.0.2
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
py-cpuinfo==9.0.0
pytest @ file:///croot/pytest_1738938843180/work
pytest-benchmark==5.1.0
pytest-cov==6.0.0
requests==2.32.3
ruff==0.11.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
toposort==1.10
typing_extensions==4.13.0
untokenize==0.1.1
urllib3==2.3.0
-e git+https://github.com/tefra/xsdata.git@cce0a16680b59ce50d1349f9cc0a891054838ad0#egg=xsdata
| name: xsdata
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- click-default-group==1.2.4
- coverage==7.8.0
- docformatter==1.7.5
- idna==3.10
- jinja2==3.1.6
- lxml==5.3.1
- markupsafe==3.0.2
- py-cpuinfo==9.0.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- requests==2.32.3
- ruff==0.11.2
- toposort==1.10
- typing-extensions==4.13.0
- untokenize==0.1.1
- urllib3==2.3.0
- xsdata==24.6.1
prefix: /opt/conda/envs/xsdata
| [
"tests/formats/dataclass/parsers/nodes/test_union.py::UnionNodeTests::test_filter_fixed_attrs",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_validate_fixed_value"
] | [] | [
"tests/formats/dataclass/parsers/nodes/test_union.py::UnionNodeTests::test_bind_appends_end_event_when_level_not_zero",
"tests/formats/dataclass/parsers/nodes/test_union.py::UnionNodeTests::test_bind_raises_parser_error_on_failure",
"tests/formats/dataclass/parsers/nodes/test_union.py::UnionNodeTests::test_bind_returns_best_matching_object",
"tests/formats/dataclass/parsers/nodes/test_union.py::UnionNodeTests::test_child",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_any_attribute",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_any_attributes",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_value",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_value_with_format",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_value_with_ns_map",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_value_with_tokens_true",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_parse_var_with_error",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_xsi_nil",
"tests/formats/dataclass/parsers/test_utils.py::ParserUtilsTests::test_xsi_type"
] | [] | MIT License | 19,119 | 1,113 | [
"xsdata/formats/dataclass/parsers/nodes/union.py",
"xsdata/formats/dataclass/parsers/utils.py"
] |
Toloka__crowd-kit-109 | 380db1badd7705c43ae5f523ebebb6223abf5e53 | 2024-07-29 05:01:39 | eac37e325b1e5edd06a878134df35248c93ef893 | shenxiangzhuang: Hi, this is a simple implementation that supports #107. I'm wondering whether the changes are acceptable for crowd-kit or not. If they are, I would like to add more documentation and tests for this.
dustalov: Crowd-Kit is currently in a relatively mature development stage, yet occasionally we accept new functionality. Support for true labels is a reasonable extension that I would happily accept.
Could you please make a separate PR with additional docs and tests? | diff --git a/crowdkit/aggregation/classification/dawid_skene.py b/crowdkit/aggregation/classification/dawid_skene.py
index 50b6c2c..2382545 100644
--- a/crowdkit/aggregation/classification/dawid_skene.py
+++ b/crowdkit/aggregation/classification/dawid_skene.py
@@ -146,11 +146,15 @@ class DawidSkene(BaseClassificationAggregator):
entropy = -(np.log(probas) * probas).sum().sum()
return float(joint_expectation + entropy)
- def fit(self, data: pd.DataFrame) -> "DawidSkene":
+ def fit(
+ self, data: pd.DataFrame, true_labels: Optional["pd.Series[Any]"] = None
+ ) -> "DawidSkene":
"""Fits the model to the training data with the EM algorithm.
Args:
data (DataFrame): The training dataset of workers' labeling results
which is represented as the `pandas.DataFrame` data containing `task`, `worker`, and `label` columns.
+ true_labels (Series): The ground truth labels of tasks. The `pandas.Series` data is indexed by `task`
+ so that `labels.loc[task]` is the task ground truth label.
Returns:
DawidSkene: self.
"""
@@ -167,6 +171,9 @@ class DawidSkene(BaseClassificationAggregator):
# Initialization
probas = MajorityVote().fit_predict_proba(data)
+ # correct the probas by true_labels
+ if true_labels is not None:
+ probas = self._correct_probas_with_golden(probas, true_labels)
priors = probas.mean()
errors = self._m_step(data, probas)
loss = -np.inf
@@ -175,6 +182,9 @@ class DawidSkene(BaseClassificationAggregator):
# Updating proba and errors n_iter times
for _ in range(self.n_iter):
probas = self._e_step(data, priors, errors)
+ # correct the probas by true_labels
+ if true_labels is not None:
+ probas = self._correct_probas_with_golden(probas, true_labels)
priors = probas.mean()
errors = self._m_step(data, probas)
new_loss = self._evidence_lower_bound(data, probas, priors, errors) / len(
@@ -225,6 +235,29 @@ class DawidSkene(BaseClassificationAggregator):
assert self.labels_ is not None, "no labels_"
return self.labels_
+ @staticmethod
+ def _correct_probas_with_golden(
+ probas: pd.DataFrame, true_labels: "pd.Series[Any]"
+ ) -> pd.DataFrame:
+ """
+ Correct the probas by `true_labels`
+ """
+ corrected_probas = probas
+
+ # Iterate over the unique labels present in true_labels
+ for label in true_labels.unique():
+ # Find the indices in both probas and true_labels where the true label is the current label
+ indices = true_labels[true_labels == label].index.intersection(probas.index)
+ # Set the corresponding probabilities to 1 for the correct label and 0 for others
+ corrected_probas.loc[indices] = (
+ 0 # Set all columns to 0 for the given indices
+ )
+ corrected_probas.loc[indices, label] = (
+ 1 # Set the correct label column to 1
+ )
+
+ return corrected_probas
+
@attr.s
class OneCoinDawidSkene(DawidSkene):
@@ -308,7 +341,7 @@ class OneCoinDawidSkene(DawidSkene):
skills = skilled_data.groupby(["worker"], sort=False)["skill"].mean()
return skills
- def fit(self, data: pd.DataFrame) -> "OneCoinDawidSkene":
+ def fit(self, data: pd.DataFrame) -> "OneCoinDawidSkene": # type: ignore[override]
"""Fits the model to the training data with the EM algorithm.
Args:
data (DataFrame): The training dataset of workers' labeling results
| [FEATURE] Make the DS algorithm take the information from gold standard questions
### Problem description
In the real world, when doing truth inference, we have a small set of gold standard questions mixed in with other unlabeled questions. I thought it would help if we could use the true labels of these gold standard questions to adjust the estimation of the user confusion matrix and the prior distribution of the options.
### Feature description
Make the DS algorithm accept an optional argument, e.g. `gt`, which can be used to adjust the estimation of the user's confusion matrix and the prior distribution of options.
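A minimal usage sketch of what this could look like; the data here is made up, and the argument is named `true_labels` to match the accompanying patch (the text above calls it `gt`):

```python
import pandas as pd
from crowdkit.aggregation import DawidSkene

answers = pd.DataFrame({
    "task":   ["t1", "t1", "t1", "t2", "t2", "t2"],
    "worker": ["w1", "w2", "w3", "w1", "w2", "w3"],
    "label":  ["yes", "yes", "no", "no", "no", "yes"],
})
# Gold standard (control) tasks: a Series indexed by task with the known answers.
golden = pd.Series({"t1": "yes"})

ds = DawidSkene(n_iter=100).fit(answers, true_labels=golden)
print(ds.labels_)  # aggregated labels; the posteriors of gold tasks are pinned to the truth
```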
### Potential alternatives
_No response_
### Papers connected with feature
_No response_
### Additional information
This feature may make the EM process in DS more complicated if we add the argument and make some changes directly to the original implementation. Maybe there are better ways to do this? | Toloka/crowd-kit | diff --git a/tests/aggregation/test_ds_aggregation.py b/tests/aggregation/test_ds_aggregation.py
index ba78ae0..51101fe 100644
--- a/tests/aggregation/test_ds_aggregation.py
+++ b/tests/aggregation/test_ds_aggregation.py
@@ -13,6 +13,21 @@ from pandas.testing import assert_frame_equal, assert_series_equal
from crowdkit.aggregation import DawidSkene, OneCoinDawidSkene
[email protected]("n_iter, tol", [(10, 0), (100500, 1e-5)])
+def test_aggregate_ds_gold_on_toy_ysda(
+ n_iter: int,
+ tol: float,
+ toy_answers_df: pd.DataFrame,
+ toy_ground_truth_df: "pd.Series[Any]",
+ toy_gold_df: "pd.Series[Any]",
+) -> None:
+ np.random.seed(42)
+ assert_series_equal(
+ DawidSkene(n_iter=n_iter, tol=tol).fit(toy_answers_df, toy_gold_df).labels_.sort_index(), # type: ignore
+ toy_ground_truth_df.sort_index(),
+ )
+
+
@pytest.mark.parametrize("n_iter, tol", [(10, 0), (100500, 1e-5)])
def test_aggregate_ds_on_toy_ysda(
n_iter: int,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==3.0.0
async-lru==2.0.5
attrs==25.3.0
autopep8==2.3.2
babel==2.17.0
backports.tarfile==1.2.0
beautifulsoup4==4.13.3
black==25.1.0
bleach==6.2.0
build==1.2.2.post1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
click==8.1.8
codecov==2.1.13
comm==0.2.2
coverage==7.8.0
-e git+https://github.com/Toloka/crowd-kit.git@380db1badd7705c43ae5f523ebebb6223abf5e53#egg=crowd_kit
cryptography==44.0.2
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
docutils==0.21.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
flake8==7.2.0
fqdn==1.5.1
fsspec==2025.3.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
huggingface-hub==0.30.0
id==1.5.0
identify==2.6.9
idna==3.10
importlib_metadata==8.6.1
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
isort==6.0.1
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.2
jeepney==0.9.0
Jinja2==3.1.6
joblib==1.4.2
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
keyring==25.6.0
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mccabe==0.7.0
mdurl==0.1.2
mistune==3.1.3
more-itertools==10.6.0
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbqa==1.9.1
nest-asyncio==1.6.0
nh3==0.2.21
nltk==3.9.1
nodeenv==1.9.1
notebook==7.3.3
notebook_shim==0.2.4
numpy==2.0.2
overrides==7.7.0
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
pandas-stubs==2.2.2.240807
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==19.0.1
pycodestyle==2.13.0
pycparser==2.22
pyflakes==3.3.2
Pygments==2.19.1
pyproject_hooks==1.2.0
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
pyupgrade==3.19.1
PyYAML==6.0.2
pyzmq==26.3.0
readme_renderer==44.0
referencing==0.36.2
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
rfc3339-validator==0.1.4
rfc3986==2.0.0
rfc3986-validator==0.1.1
rich==14.0.0
rpds-py==0.24.0
safetensors==0.5.3
scikit-learn==1.6.1
scipy==1.13.1
SecretStorage==3.3.3
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
soupsieve==2.6
stack-data==0.6.3
terminado==0.18.1
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenize_rt==6.1.0
tokenizers==0.21.1
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==6.4.2
tqdm==4.67.1
traitlets==5.14.3
transformers==4.50.3
twine==6.1.0
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
types-requests==2.32.0.20250328
types-tqdm==4.67.0.20250319
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: crowd-kit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asttokens==3.0.0
- async-lru==2.0.5
- attrs==25.3.0
- autopep8==2.3.2
- babel==2.17.0
- backports-tarfile==1.2.0
- beautifulsoup4==4.13.3
- black==25.1.0
- bleach==6.2.0
- build==1.2.2.post1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- click==8.1.8
- codecov==2.1.13
- comm==0.2.2
- coverage==7.8.0
- crowd-kit==1.3.0.post0
- cryptography==44.0.2
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- docutils==0.21.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- flake8==7.2.0
- fqdn==1.5.1
- fsspec==2025.3.1
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- huggingface-hub==0.30.0
- id==1.5.0
- identify==2.6.9
- idna==3.10
- importlib-metadata==8.6.1
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- isort==6.0.1
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jedi==0.19.2
- jeepney==0.9.0
- jinja2==3.1.6
- joblib==1.4.2
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- keyring==25.6.0
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mdurl==0.1.2
- mistune==3.1.3
- more-itertools==10.6.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbqa==1.9.1
- nest-asyncio==1.6.0
- nh3==0.2.21
- nltk==3.9.1
- nodeenv==1.9.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==2.0.2
- overrides==7.7.0
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.12.1
- pexpect==4.9.0
- platformdirs==4.3.7
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pyarrow==19.0.1
- pycodestyle==2.13.0
- pycparser==2.22
- pyflakes==3.3.2
- pygments==2.19.1
- pyproject-hooks==1.2.0
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyupgrade==3.19.1
- pyyaml==6.0.2
- pyzmq==26.3.0
- readme-renderer==44.0
- referencing==0.36.2
- regex==2024.11.6
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3339-validator==0.1.4
- rfc3986==2.0.0
- rfc3986-validator==0.1.1
- rich==14.0.0
- rpds-py==0.24.0
- safetensors==0.5.3
- scikit-learn==1.6.1
- scipy==1.13.1
- secretstorage==3.3.3
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- soupsieve==2.6
- stack-data==0.6.3
- terminado==0.18.1
- threadpoolctl==3.6.0
- tinycss2==1.4.0
- tokenize-rt==6.1.0
- tokenizers==0.21.1
- tornado==6.4.2
- tqdm==4.67.1
- traitlets==5.14.3
- transformers==4.50.3
- twine==6.1.0
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- types-requests==2.32.0.20250328
- types-tqdm==4.67.0.20250319
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/crowd-kit
| [
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_gold_on_toy_ysda[10-0]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_gold_on_toy_ysda[100500-1e-05]"
] | [] | [
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_on_toy_ysda[10-0]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_on_toy_ysda[100500-1e-05]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_hds_on_toy_ysda[10-0]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_hds_on_toy_ysda[100500-1e-05]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_on_simple[10-0]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_ds_on_simple[100500-1e-05]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_hds_on_simple[10-0]",
"tests/aggregation/test_ds_aggregation.py::test_aggregate_hds_on_simple[100500-1e-05]",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_step_by_step[0]",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_step_by_step[1]",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_on_empty_input",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_overlap[3]",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_overlap[300]",
"tests/aggregation/test_ds_aggregation.py::test_dawid_skene_overlap[30000]",
"tests/aggregation/test_ds_aggregation.py::test_ds_on_bool_labels",
"tests/aggregation/test_ds_aggregation.py::test_hds_on_bool_labels"
] | [] | Apache License 2.0 | 19,124 | 980 | [
"crowdkit/aggregation/classification/dawid_skene.py"
] |
materialsvirtuallab__monty-703 | 069ee2c997c2c0f11a8b18df3758e8e9ebb60cc9 | 2024-07-29 11:20:52 | 069ee2c997c2c0f11a8b18df3758e8e9ebb60cc9 | diff --git a/src/monty/shutil.py b/src/monty/shutil.py
index 37d28a3..98047f5 100644
--- a/src/monty/shutil.py
+++ b/src/monty/shutil.py
@@ -54,11 +54,14 @@ def gzip_dir(path: str | Path, compresslevel: int = 6) -> None:
compresslevel (int): Level of compression, 1-9. 9 is default for
GzipFile, 6 is default for gzip.
"""
- path = Path(path)
- for root, _, files in os.walk(path):
+ for root, _, files in os.walk(Path(path)):
for f in files:
full_f = Path(root, f).resolve()
if Path(f).suffix.lower() != ".gz" and not full_f.is_dir():
+ if os.path.exists(f"{full_f}.gz"):
+ warnings.warn(f"Both {f} and {f}.gz exist.", stacklevel=2)
+ continue
+
with (
open(full_f, "rb") as f_in,
GzipFile(
| [Dev] Unit test for `tempfile` alters test file in place
The unit test for `tempfile` (`test_tempfile.py`) alters the test file `3000_lines.txt.gz` in place (because of re-gzip), which could lead to repeated commits of this file, for example in fd6b16ddde887573d1f8b986d00bdf5a2407f840.
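The scenario boils down to a directory that contains both a file and its gzipped twin: `gzip_dir` used to rebuild the archive from the plain file and clobber the existing `.gz`. A minimal sketch of the behaviour after the fix (file names are made up):

```python
import gzip
import os
import tempfile
import warnings

from monty.shutil import gzip_dir

d = tempfile.mkdtemp()
with open(os.path.join(d, "data.txt"), "w") as fh:
    fh.write("plain text")
with gzip.open(os.path.join(d, "data.txt.gz"), "wb") as fh:
    fh.write(b"already gzipped")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    gzip_dir(d)  # warns "Both data.txt and data.txt.gz exist." and leaves the archive untouched

print([str(w.message) for w in caught])
```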
Triggered by the following test: https://github.com/materialsvirtuallab/monty/blob/09d36926b6e613738adf19e61da61bc9f85f11e2/tests/test_tempfile.py#L47-L66 | materialsvirtuallab/monty | diff --git a/tests/test_shutil.py b/tests/test_shutil.py
index 98ddc1a..16cd9aa 100644
--- a/tests/test_shutil.py
+++ b/tests/test_shutil.py
@@ -127,7 +127,7 @@ class TestGzipDir:
self.mtime = os.path.getmtime(os.path.join(test_dir, "gzip_dir", "tempfile"))
- def test_gzip(self):
+ def test_gzip_dir(self):
full_f = os.path.join(test_dir, "gzip_dir", "tempfile")
gzip_dir(os.path.join(test_dir, "gzip_dir"))
@@ -139,6 +139,29 @@ class TestGzipDir:
assert os.path.getmtime(f"{full_f}.gz") == pytest.approx(self.mtime, 4)
+ def test_gzip_dir_file_coexist(self):
+ """Test case where both file and file.gz exist."""
+ full_f = os.path.join(test_dir, "gzip_dir", "temptestfile")
+ gz_f = f"{full_f}.gz"
+
+ # Create both the file and its gzipped version
+ with open(full_f, "w") as f:
+ f.write("not gzipped")
+ with GzipFile(gz_f, "wb") as g:
+ g.write(b"gzipped")
+
+ with pytest.warns(
+ UserWarning, match="Both temptestfile and temptestfile.gz exist."
+ ):
+ gzip_dir(os.path.join(test_dir, "gzip_dir"))
+
+ # Verify contents of the files
+ with open(full_f, "r") as f:
+ assert f.read() == "not gzipped"
+
+ with GzipFile(gz_f, "rb") as g:
+ assert g.read() == b"gzipped"
+
def test_handle_sub_dirs(self):
sub_dir = os.path.join(test_dir, "gzip_dir", "sub_dir")
sub_file = os.path.join(sub_dir, "new_tempfile")
diff --git a/tests/test_tempfile.py b/tests/test_tempfile.py
index 4bce408..4dfd3b1 100644
--- a/tests/test_tempfile.py
+++ b/tests/test_tempfile.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import os
import shutil
+import pytest
from monty.tempfile import ScratchDir
TEST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_files")
@@ -49,23 +50,24 @@ class TestScratchDir:
# We write a pre-scratch file.
with open("pre_scratch_text", "w") as f:
f.write("write")
- init_gz = [f for f in os.listdir(os.getcwd()) if f.endswith(".gz")]
- with (
- ScratchDir(
- self.scratch_root,
- copy_from_current_on_enter=True,
- copy_to_current_on_exit=True,
- gzip_on_exit=True,
- ),
- open("scratch_text", "w") as f,
- ):
- f.write("write")
+ init_gz_files = [f for f in os.listdir(os.getcwd()) if f.endswith(".gz")]
+ with pytest.warns(match="Both 3000_lines.txt and 3000_lines.txt.gz exist."):
+ with (
+ ScratchDir(
+ self.scratch_root,
+ copy_from_current_on_enter=True,
+ copy_to_current_on_exit=True,
+ gzip_on_exit=True,
+ ),
+ open("scratch_text", "w") as f,
+ ):
+ f.write("write")
files = os.listdir(os.getcwd())
- # Make sure the stratch_text.gz exists
+ # Make sure the scratch_text.gz exists
assert "scratch_text.gz" in files
for f in files:
- if f.endswith(".gz") and f not in init_gz:
+ if f.endswith(".gz") and f not in init_gz_files:
os.remove(f)
os.remove("pre_scratch_text")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_git_commit_hash",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 2024.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=8",
"pytest-cov>=4",
"types-requests",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/materialsvirtuallab/monty.git@069ee2c997c2c0f11a8b18df3758e8e9ebb60cc9#egg=monty
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
types-requests==2.32.0.20250328
urllib3==2.3.0
| name: monty
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- monty==2024.7.30
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
- types-requests==2.32.0.20250328
- urllib3==2.3.0
prefix: /opt/conda/envs/monty
| [
"tests/test_shutil.py::TestGzipDir::test_gzip_dir_file_coexist",
"tests/test_tempfile.py::TestScratchDir::test_with_copy_gzip"
] | [] | [
"tests/test_shutil.py::TestCopyR::test_recursive_copy_and_compress",
"tests/test_shutil.py::TestCopyR::test_pathlib",
"tests/test_shutil.py::TestCompressFileDir::test_compress_and_decompress_file",
"tests/test_shutil.py::TestCompressFileDir::test_compress_and_decompress_with_target_dir",
"tests/test_shutil.py::TestGzipDir::test_gzip_dir",
"tests/test_shutil.py::TestGzipDir::test_handle_sub_dirs",
"tests/test_shutil.py::TestRemove::test_remove_file",
"tests/test_shutil.py::TestRemove::test_remove_folder",
"tests/test_shutil.py::TestRemove::test_remove_symlink",
"tests/test_shutil.py::TestRemove::test_remove_symlink_follow",
"tests/test_tempfile.py::TestScratchDir::test_with_copy",
"tests/test_tempfile.py::TestScratchDir::test_with_copy_nodelete",
"tests/test_tempfile.py::TestScratchDir::test_no_copy",
"tests/test_tempfile.py::TestScratchDir::test_symlink",
"tests/test_tempfile.py::TestScratchDir::test_bad_root"
] | [] | MIT License | 19,127 | 262 | [
"src/monty/shutil.py"
] |
|
holoviz__holoviews-6346 | c1e22a3e2d38046da285ae3c67f076e60c19226a | 2024-07-29 13:49:11 | 6a9643355f12027207eb0d427dc81ca8798cb78c | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/holoviz/holoviews/pull/6346?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=holoviz) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 88.50%. Comparing base [(`55856e1`)](https://app.codecov.io/gh/holoviz/holoviews/commit/55856e1b17850129144b9dd0d27fcc2b770c76a3?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=holoviz) to head [(`3368ebf`)](https://app.codecov.io/gh/holoviz/holoviews/commit/3368ebffa841cfdf3e7e96bbb528776060b7435c?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=holoviz).
> Report is 3 commits behind head on main.
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #6346 +/- ##
==========================================
- Coverage 88.51% 88.50% -0.02%
==========================================
Files 323 323
Lines 68098 68110 +12
==========================================
+ Hits 60276 60279 +3
- Misses 7822 7831 +9
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/holoviz/holoviews/pull/6346?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=holoviz).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=holoviz).
| diff --git a/holoviews/core/data/pandas.py b/holoviews/core/data/pandas.py
index 300391cbf..c09b0348e 100644
--- a/holoviews/core/data/pandas.py
+++ b/holoviews/core/data/pandas.py
@@ -7,6 +7,7 @@ from .. import util
from ..dimension import Dimension, dimension_name
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
+from ..util import PANDAS_GE_210
from .interface import DataError, Interface
from .util import finite_range
@@ -256,10 +257,9 @@ class PandasInterface(Interface, PandasAPI):
# when iterating over a groupby with a grouper equal to a list of length 1.
# Don't supply a list with a single grouper to avoid this warning.
group_by = group_by[0]
- if util.pandas_version >= Version("2.1.0"):
- groupby_kwargs = {"sort": False, "observed": False}
- else:
- groupby_kwargs = {"sort": False}
+ groupby_kwargs = {"sort": False}
+ if PANDAS_GE_210:
+ groupby_kwargs["observed"] = False
data = [(k, group_type(v, **group_kwargs)) for k, v in
dataset.data.groupby(group_by, **groupby_kwargs)]
if issubclass(container_type, NdMapping):
@@ -295,7 +295,10 @@ class PandasInterface(Interface, PandasAPI):
c for c, d in zip(reindexed.columns, reindexed.dtypes)
if is_numeric_dtype(d) and c not in cols
]
- grouped = reindexed.groupby(cols, sort=False)
+ groupby_kwargs = {"sort": False}
+ if PANDAS_GE_210:
+ groupby_kwargs["observed"] = False
+ grouped = reindexed.groupby(cols, **groupby_kwargs)
df = grouped[numeric_cols].aggregate(fn, **kwargs).reset_index()
else:
agg = reindexed.apply(fn, **kwargs)
diff --git a/holoviews/core/util.py b/holoviews/core/util.py
index 24e95e271..590b7ab6e 100644
--- a/holoviews/core/util.py
+++ b/holoviews/core/util.py
@@ -43,6 +43,7 @@ param_version = Version(param.__version__)
pandas_version = Version(pd.__version__)
NUMPY_GE_200 = numpy_version >= Version("2")
+PANDAS_GE_210 = pandas_version >= Version("2.1")
# Types
generator_types = (zip, range, types.GeneratorType)
diff --git a/holoviews/element/util.py b/holoviews/element/util.py
index 19d47ba5e..6e6f9edce 100644
--- a/holoviews/element/util.py
+++ b/holoviews/element/util.py
@@ -10,6 +10,7 @@ from ..core.data import PandasInterface, default_datatype
from ..core.operation import Operation
from ..core.sheetcoords import Slice
from ..core.util import (
+ PANDAS_GE_210,
cartesian_product,
datetime_types,
is_cyclic,
@@ -200,7 +201,10 @@ class categorical_aggregate2d(Operation):
def _aggregate_dataset_pandas(self, obj):
index_cols = [d.name for d in obj.kdims]
- df = obj.data.set_index(index_cols).groupby(index_cols, sort=False).first()
+ groupby_kwargs = {"sort": False}
+ if PANDAS_GE_210:
+ groupby_kwargs["observed"] = False
+ df = obj.data.set_index(index_cols).groupby(index_cols, **groupby_kwargs).first()
label = 'unique' if len(df) == len(obj) else 'non-unique'
levels = self._get_coords(obj)
index = pd.MultiIndex.from_product(levels, names=df.index.names)
| Pandas `observed=False` warnings
The warnings:
```
/Users/mliquet/dev/holoviews/holoviews/core/data/pandas.py:298: FutureWarning: The default of observed=False is deprecated and will be changed to True in a future version of pandas. Pass observed=False to retain current behavior or observed=True to adopt the future default and silence this warning.
grouped = reindexed.groupby(cols, sort=False)
/Users/mliquet/dev/holoviews/holoviews/element/util.py:203: FutureWarning: The default of observed=False is deprecated and will be changed to True in a future version of pandas. Pass observed=False to retain current behavior or observed=True to adopt the future default and silence this warning.
df = obj.data.set_index(index_cols).groupby(index_cols, sort=False).first()
```
```python
import holoviews as hv
import numpy as np
import pandas as pd
hv.extension('bokeh')
df = pd.DataFrame({
'X': pd.Series(['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c',], dtype='category'),
'Y': pd.Series(['O', 'P', 'Q', 'O', 'P', 'Q', 'O', 'P', 'Q',], dtype='category'),
'Z': [1, 2, 3, 4, 5, 6, 7, 8, 9],
})
hv.HeatMap(df, ['X', 'Y'], 'Z').aggregate(function=np.mean)
```

| holoviz/holoviews | diff --git a/holoviews/tests/plotting/bokeh/test_heatmapplot.py b/holoviews/tests/plotting/bokeh/test_heatmapplot.py
index f44e69e61..67f315692 100644
--- a/holoviews/tests/plotting/bokeh/test_heatmapplot.py
+++ b/holoviews/tests/plotting/bokeh/test_heatmapplot.py
@@ -1,4 +1,5 @@
import numpy as np
+import pandas as pd
from bokeh.models import FactorRange, HoverTool, Range1d
from holoviews.element import HeatMap, Image, Points
@@ -140,3 +141,18 @@ class TestHeatMapPlot(TestBokehPlot):
self.assertEqual(cds.data['col'], np.array([1, 1, 2, 2]))
self.assertEqual(cds.data['alpha'], np.array([0, 1, 0, 0]))
self.assertEqual(cds.data['zvalues'], np.array([0.5, 0.1, 0.2, 0.6]))
+
+ def test_heatmap_pandas_categorial(self):
+ # Test for https://github.com/holoviz/holoviews/issues/6313
+ df = pd.DataFrame({
+ 'X': pd.Series(['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c',], dtype='category'),
+ 'Y': pd.Series(['O', 'P', 'Q', 'O', 'P', 'Q', 'O', 'P', 'Q',], dtype='category'),
+ 'Z': [1, 2, 3, 4, 5, 6, 7, 8, 9],
+ })
+
+ hm = HeatMap(df, ['X', 'Y'], 'Z').aggregate(function=np.mean)
+ plot = bokeh_renderer.get_plot(hm)
+ data = plot.handles["cds"].data
+ np.testing.assert_array_equal(data["X"], df["X"])
+ np.testing.assert_array_equal(data["Y"], df["Y"])
+ np.testing.assert_array_equal(data["zvalues"], df["Z"])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 1.19 | {
"env_vars": null,
"env_yml_path": [
"binder/environment.yml"
],
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"codecov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | atpublic @ file:///home/conda/feedstock_root/build_artifacts/atpublic_1737771474411/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
bidict @ file:///home/conda/feedstock_root/build_artifacts/bidict_1734272627465/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
bokeh==3.4.3
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725571112467/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400455427/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
cloudpickle @ file:///home/conda/feedstock_root/build_artifacts/cloudpickle_1736947526808/work
codecov==2.1.13
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
colorcet @ file:///home/conda/feedstock_root/build_artifacts/colorcet_1734007314889/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1727293517607/work
coverage==7.8.0
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
cytoolz @ file:///home/conda/feedstock_root/build_artifacts/cytoolz_1734107207199/work
dask @ file:///home/conda/feedstock_root/build_artifacts/dask-core_1722976580461/work
dask-expr @ file:///home/conda/feedstock_root/build_artifacts/dask-expr_1722982607046/work
datashader @ file:///home/conda/feedstock_root/build_artifacts/datashader_1734341715727/work
distributed @ file:///home/conda/feedstock_root/build_artifacts/distributed_1722982528621/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
filelock @ file:///home/conda/feedstock_root/build_artifacts/filelock_1741969488311/work
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fsspec @ file:///home/conda/feedstock_root/build_artifacts/fsspec_1743361113926/work
graphviz @ file:///home/conda/feedstock_root/build_artifacts/python-graphviz_1733791968395/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
-e git+https://github.com/holoviz/holoviews.git@c1e22a3e2d38046da285ae3c67f076e60c19226a#egg=holoviews
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
ibis-framework @ file:///home/conda/feedstock_root/build_artifacts/ibis-framework-ext_1726155380758/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig==2.1.0
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1725459266648/work
linkify-it-py==2.0.3
llvmlite==0.43.0
locket @ file:///home/conda/feedstock_root/build_artifacts/locket_1650660393415/work
lz4 @ file:///home/conda/feedstock_root/build_artifacts/lz4_1733474248677/work
Markdown @ file:///home/conda/feedstock_root/build_artifacts/markdown_1710435156458/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.9.4
mdit-py-plugins==0.4.2
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
msgpack @ file:///home/conda/feedstock_root/build_artifacts/msgpack-python_1725975012026/work
multipledispatch @ file:///home/conda/feedstock_root/build_artifacts/multipledispatch_1721907546485/work
munkres==1.1.4
narwhals @ file:///home/conda/feedstock_root/build_artifacts/narwhals_1742841036354/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253078561/work
networkx @ file:///home/conda/feedstock_root/build_artifacts/networkx_1698504735452/work
numba @ file:///home/conda/feedstock_root/build_artifacts/numba_1718888028049/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1732314280888/work/dist/numpy-2.0.2-cp39-cp39-linux_x86_64.whl#sha256=62d98eb3da9f13e6b227c430d01026b7427f341b3fdcb838430f2a9e520417b1
outcome @ file:///home/conda/feedstock_root/build_artifacts/outcome_1733406188332/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandas @ file:///home/conda/feedstock_root/build_artifacts/pandas_1736810577256/work
panel==1.4.5
param==2.2.0
parsy @ file:///home/conda/feedstock_root/build_artifacts/parsy_1734616013612/work
partd @ file:///home/conda/feedstock_root/build_artifacts/partd_1715026491486/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929703139/work
plotly @ file:///home/conda/feedstock_root/build_artifacts/plotly_1742240435426/work
pluggy==1.5.0
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663125313/work
pyarrow==19.0.1
pyarrow-hotfix @ file:///home/conda/feedstock_root/build_artifacts/pyarrow-hotfix_1734380560621/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pyct @ file:///home/conda/feedstock_root/build_artifacts/pyct_1734342495516/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
PySide6==6.8.3
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest==8.3.5
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1706886791323/work
pyviz_comms @ file:///home/conda/feedstock_root/build_artifacts/pyviz_comms_1736890319493/work
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
regex @ file:///home/conda/feedstock_root/build_artifacts/regex_1730952178579/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
retrying @ file:///home/conda/feedstock_root/build_artifacts/retrying_1740053923454/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
scipy @ file:///home/conda/feedstock_root/build_artifacts/scipy-split_1716470218293/work/dist/scipy-1.13.1-cp39-cp39-linux_x86_64.whl#sha256=e6696cb8683d94467891b7648e068a3970f6bc0a1b3c1aa7f9bc89458eafd2f0
selenium @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_selenium_1742656691/work
shiboken6==6.8.3
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
sortedcontainers @ file:///home/conda/feedstock_root/build_artifacts/sortedcontainers_1738440353519/work
spatialpandas @ file:///home/conda/feedstock_root/build_artifacts/spatialpandas_1704978503377/work
sqlglot @ file:///home/conda/feedstock_root/build_artifacts/sqlglot_1725660219720/work
streamz @ file:///home/conda/feedstock_root/build_artifacts/streamz_1734714199010/work
tblib @ file:///home/conda/feedstock_root/build_artifacts/tblib_1733842374544/work
tomli==2.2.1
toolz @ file:///home/conda/feedstock_root/build_artifacts/toolz_1706112571092/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615921868/work
tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
trio @ file:///home/conda/feedstock_root/build_artifacts/trio_1739529631723/work
trio-websocket @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_trio-websocket_1740493956/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
tzdata @ file:///home/conda/feedstock_root/build_artifacts/python-tzdata_1742745135198/work
uc-micro-py==1.0.3
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692503055/work
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
wsproto @ file:///home/conda/feedstock_root/build_artifacts/wsproto_1733060193308/work
xarray @ file:///home/conda/feedstock_root/build_artifacts/xarray_1722348170975/work
xyzservices==2025.1.0
zict @ file:///home/conda/feedstock_root/build_artifacts/zict_1733261551178/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: holoviews
channels:
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- adwaita-icon-theme=48.0=unix_0
- alsa-lib=1.2.13=hb9d3cd8_0
- aom=3.9.1=hac33072_0
- at-spi2-atk=2.38.0=h0630a04_3
- at-spi2-core=2.40.3=h0630a04_0
- atk-1.0=2.38.0=h04ea711_2
- atpublic=5.1=pyhd8ed1ab_0
- attr=2.5.1=h166bdaf_1
- attrs=25.3.0=pyh71513ae_0
- aws-c-auth=0.8.6=hd08a7f5_4
- aws-c-cal=0.8.7=h043a21b_0
- aws-c-common=0.12.0=hb9d3cd8_0
- aws-c-compression=0.3.1=h3870646_2
- aws-c-event-stream=0.5.4=h04a3f94_2
- aws-c-http=0.9.4=hb9b18c6_4
- aws-c-io=0.17.0=h3dad3f2_6
- aws-c-mqtt=0.12.2=h108da3e_2
- aws-c-s3=0.7.13=h822ba82_2
- aws-c-sdkutils=0.2.3=h3870646_2
- aws-checksums=0.2.3=h3870646_2
- aws-crt-cpp=0.31.0=h55f77e1_4
- aws-sdk-cpp=1.11.510=h37a5c72_3
- azure-core-cpp=1.14.0=h5cfcd09_0
- azure-identity-cpp=1.10.0=h113e628_0
- azure-storage-blobs-cpp=12.13.0=h3cf044e_1
- azure-storage-common-cpp=12.8.0=h736e048_1
- azure-storage-files-datalake-cpp=12.12.0=ha633028_1
- bidict=0.23.1=pyhd8ed1ab_1
- bleach=6.2.0=pyh29332c3_4
- blosc=1.21.6=he440d0b_1
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py39hf88036b_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cairo=1.18.4=h3394656_0
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.17.1=py39h15c3d72_0
- cftime=1.6.4=py39hf3d9206_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- cloudpickle=3.1.1=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_1
- colorcet=3.1.0=pyhd8ed1ab_1
- contourpy=1.3.0=py39h74842e3_2
- cycler=0.12.1=pyhd8ed1ab_1
- cyrus-sasl=2.1.27=h54b06d7_7
- cytoolz=1.0.1=py39h8cd3c5a_0
- dask=2024.8.0=pyhd8ed1ab_0
- dask-core=2024.8.0=pyhd8ed1ab_0
- dask-expr=1.1.10=pyhd8ed1ab_0
- datashader=0.16.3=pyhd8ed1ab_1
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- distributed=2024.8.0=pyhd8ed1ab_0
- double-conversion=3.3.1=h5888daf_0
- epoxy=1.5.10=h166bdaf_1
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- expat=2.6.4=h5888daf_0
- ffmpeg=7.1.1=gpl_h0b79d52_704
- filelock=3.18.0=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py39h9399b63_0
- freetype=2.13.3=h48d6fc4_0
- fribidi=1.0.10=h36c2ea0_0
- fsspec=2025.3.1=pyhd8ed1ab_0
- gdk-pixbuf=2.42.12=hb9ae30d_0
- gettext=0.23.1=h5888daf_0
- gettext-tools=0.23.1=h5888daf_0
- gflags=2.2.2=h5888daf_1005
- glib-tools=2.84.0=h4833e2c_0
- glog=0.7.1=hbabe93e_0
- gmp=6.3.0=hac33072_2
- graphite2=1.3.13=h59595ed_1003
- graphviz=12.2.1=h5ae0cbf_1
- gtk3=3.24.43=h0c6a113_5
- gts=0.7.6=h977cf35_4
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.4=nompi_h2d575fe_105
- hicolor-icon-theme=0.17=ha770c72_2
- hpack=4.1.0=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- ibis-framework-core=9.5.0=pyhd8ed1ab_0
- ibis-sqlite=9.5.0=hd8ed1ab_0
- icu=75.1=he02047a_0
- idna=3.10=pyhd8ed1ab_1
- importlib-metadata=8.6.1=pyha770c72_0
- importlib-resources=6.5.2=pyhd8ed1ab_0
- importlib_metadata=8.6.1=hd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- jack=1.9.22=h7c63dc7_2
- jinja2=3.1.6=pyhd8ed1ab_0
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.7=py39h74842e3_0
- krb5=1.21.3=h659f571_0
- lame=3.100=h166bdaf_1003
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- level-zero=1.21.6=h84d6215_0
- libabseil=20250127.1=cxx17_hbbce691_0
- libaec=1.1.3=h59595ed_0
- libarrow=19.0.1=h120c447_5_cpu
- libarrow-acero=19.0.1=hcb10f89_5_cpu
- libarrow-dataset=19.0.1=hcb10f89_5_cpu
- libarrow-substrait=19.0.1=h1bed206_5_cpu
- libasprintf=0.23.1=h8e693c7_0
- libasprintf-devel=0.23.1=h8e693c7_0
- libass=0.17.3=h52826cd_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcap=2.75=h39aace5_0
- libcblas=3.9.0=31_he106b2a_openblas
- libclang-cpp20.1=20.1.1=default_hb5137d0_0
- libclang13=20.1.1=default_h9c6a7e4_0
- libcrc32c=1.1.2=h9c3ff4c_0
- libcups=2.3.3=h4637d8d_4
- libcurl=8.12.1=h332b0f4_0
- libdb=6.2.32=h9c3ff4c_0
- libdeflate=1.23=h4ddbbb0_0
- libdrm=2.4.124=hb9d3cd8_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libegl=1.7.0=ha4b6fd6_2
- libev=4.33=hd590300_2
- libevent=2.1.12=hf998b51_1
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libflac=1.4.3=h59595ed_0
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgcrypt-lib=1.11.0=hb9d3cd8_2
- libgd=2.3.3=h6f5c62b_11
- libgettextpo=0.23.1=h5888daf_0
- libgettextpo-devel=0.23.1=h5888daf_0
- libgfortran=14.2.0=h69a702a_2
- libgfortran-ng=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgl=1.7.0=ha4b6fd6_2
- libglib=2.84.0=h2ff4ddf_0
- libglvnd=1.7.0=ha4b6fd6_2
- libglx=1.7.0=ha4b6fd6_2
- libgomp=14.2.0=h767d61c_2
- libgoogle-cloud=2.36.0=hc4361e1_1
- libgoogle-cloud-storage=2.36.0=h0121fbd_1
- libgpg-error=1.51=hbd13f7d_1
- libgrpc=1.71.0=he753a82_0
- libhwloc=2.11.2=default_h0d58e46_1001
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- liblapack=3.9.0=31_h7ac8fdf_openblas
- libllvm14=14.0.6=hcd5def8_4
- libllvm20=20.1.1=ha7bfdaf_0
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h5ddbaa4_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libntlm=1.8=hb9d3cd8_0
- libogg=1.3.5=h4ab18f5_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libopengl=1.7.0=ha4b6fd6_2
- libopentelemetry-cpp=1.19.0=hd1b1c89_0
- libopentelemetry-cpp-headers=1.19.0=ha770c72_0
- libopenvino=2025.0.0=hdc3f47d_3
- libopenvino-auto-batch-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-auto-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-hetero-plugin=2025.0.0=h981d57b_3
- libopenvino-intel-cpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-gpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-npu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-ir-frontend=2025.0.0=h981d57b_3
- libopenvino-onnx-frontend=2025.0.0=h0e684df_3
- libopenvino-paddle-frontend=2025.0.0=h0e684df_3
- libopenvino-pytorch-frontend=2025.0.0=h5888daf_3
- libopenvino-tensorflow-frontend=2025.0.0=h684f15b_3
- libopenvino-tensorflow-lite-frontend=2025.0.0=h5888daf_3
- libopus=1.3.1=h7f98852_1
- libparquet=19.0.1=h081d1f1_5_cpu
- libpciaccess=0.18=hd590300_0
- libpng=1.6.47=h943b412_0
- libpq=17.4=h27ae623_0
- libprotobuf=5.29.3=h501fc15_0
- libre2-11=2024.07.02=hba17884_3
- librsvg=2.58.4=he92a37e_3
- libsndfile=1.2.2=hc60ed4a_1
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libsystemd0=257.4=h4e0b6ca_1
- libthrift=0.21.0=h0e7cc3e_0
- libtiff=4.7.0=hd9ff511_3
- libudev1=257.4=hbe16f8c_1
- libunwind=1.6.2=h9c3ff4c_0
- liburing=2.9=h84d6215_0
- libusb=1.0.28=hb9d3cd8_0
- libutf8proc=2.10.0=h4c51ac1_0
- libuuid=2.38.1=h0b41bf4_0
- libva=2.22.0=h4f16b4b_2
- libvorbis=1.3.7=h9c3ff4c_0
- libvpx=1.14.1=hac33072_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- llvmlite=0.43.0=py39hf8b6b1a_1
- locket=1.0.0=pyhd8ed1ab_0
- lz4=4.3.3=py39h92207c2_2
- lz4-c=1.10.0=h5888daf_1
- markdown=3.6=pyhd8ed1ab_0
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py39h9399b63_1
- matplotlib=3.9.4=py39hf3d152e_0
- matplotlib-base=3.9.4=py39h16632d1_0
- mdurl=0.1.2=pyhd8ed1ab_1
- mpg123=1.32.9=hc50e24c_0
- msgpack-python=1.1.0=py39h74842e3_0
- multipledispatch=0.6.0=pyhd8ed1ab_1
- munkres=1.1.4=pyh9f0ad1d_0
- mysql-common=9.0.1=h266115a_5
- mysql-libs=9.0.1=he0572af_5
- narwhals=1.32.0=pyhd8ed1ab_0
- ncurses=6.5=h2d0b736_3
- netcdf4=1.7.2=nompi_py39h1defa26_101
- networkx=3.2.1=pyhd8ed1ab_0
- nlohmann_json=3.11.3=he02047a_1
- numba=0.60.0=py39h0320e7d_0
- numpy=2.0.2=py39h9cb892a_1
- ocl-icd=2.3.2=hb9d3cd8_2
- opencl-headers=2024.10.24=h5888daf_0
- openh264=2.6.0=hc22cd8d_0
- openjpeg=2.5.3=h5fbd93e_0
- openldap=2.6.9=he970967_0
- openssl=3.4.1=h7b32b05_0
- orc=2.1.1=h17f744e_1
- outcome=1.3.0.post0=pyhd8ed1ab_1
- packaging=24.2=pyhd8ed1ab_2
- pandas=2.2.3=py39h3b40f6f_2
- pango=1.56.3=h9ac818e_1
- parsy=2.1=pyhd8ed1ab_1
- partd=1.4.2=pyhd8ed1ab_0
- pcre2=10.44=hba22ea6_2
- phantomjs=2.1.1=ha770c72_1
- pillow=11.1.0=py39h15c0740_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- plotly=6.0.1=pyhd8ed1ab_0
- prometheus-cpp=1.3.0=ha5d0236_0
- psutil=7.0.0=py39h8cd3c5a_0
- pthread-stubs=0.4=hb9d3cd8_1002
- pugixml=1.15=h3f63f65_0
- pulseaudio-client=17.0=hac146a9_1
- pyarrow=19.0.1=py39hf3d152e_0
- pyarrow-core=19.0.1=py39h6117c73_0_cpu
- pyarrow-hotfix=0.6=pyhd8ed1ab_1
- pycparser=2.22=pyh29332c3_1
- pyct=0.5.0=pyhd8ed1ab_1
- pygments=2.19.1=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyside6=6.8.3=py39h0383914_0
- pysocks=1.7.1=pyha55dd90_7
- python=3.9.21=h9c0c6dc_1_cpython
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-graphviz=0.20.3=pyh91182bf_2
- python-tzdata=2025.2=pyhd8ed1ab_0
- python_abi=3.9=5_cp39
- pytz=2024.1=pyhd8ed1ab_0
- pyviz_comms=3.0.4=pyhd8ed1ab_1
- pyyaml=6.0.2=py39h9399b63_2
- qhull=2020.2=h434a139_5
- qt6-main=6.8.3=h6441bc3_1
- re2=2024.07.02=h9925aae_3
- readline=8.2=h8c095d6_2
- regex=2024.11.6=py39h8cd3c5a_0
- requests=2.32.3=pyhd8ed1ab_1
- retrying=1.3.4=pyhd8ed1ab_0
- rich=14.0.0=pyh29332c3_0
- s2n=1.5.14=h6c98b2b_0
- scipy=1.13.1=py39haf93ffa_0
- sdl2=2.32.50=h9b8e6db_1
- sdl3=3.2.8=h3083f51_0
- selenium=4.30.0=pyh29332c3_0
- selenium-manager=4.30.0=h6c30b3d_0
- setuptools=75.8.2=pyhff2d567_0
- six=1.17.0=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- sniffio=1.3.1=pyhd8ed1ab_1
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- spatialpandas=0.4.10=pyhd8ed1ab_1
- sqlglot=25.20.1=pyhd8ed1ab_0
- streamz=0.6.4=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tbb=2022.0.0=hceb3a55_0
- tblib=3.0.0=pyhd8ed1ab_1
- tk=8.6.13=noxft_h4845f30_101
- toolz=0.12.1=pyhd8ed1ab_0
- tornado=6.4.2=py39h8cd3c5a_0
- tqdm=4.67.1=pyhd8ed1ab_1
- trio=0.29.0=py39hf3d152e_0
- trio-websocket=0.12.2=pyh29332c3_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- tzdata=2025b=h78e105d_0
- unicodedata2=16.0.0=py39h8cd3c5a_0
- urllib3=2.3.0=pyhd8ed1ab_0
- wayland=1.23.1=h3e06ad9_0
- wayland-protocols=1.42=hd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- wsproto=1.2.0=pyhd8ed1ab_1
- x264=1!164.3095=h166bdaf_2
- x265=3.5=h924138e_3
- xarray=2024.7.0=pyhd8ed1ab_0
- xcb-util=0.4.1=hb711507_2
- xcb-util-cursor=0.1.5=hb9d3cd8_0
- xcb-util-image=0.4.0=hb711507_2
- xcb-util-keysyms=0.4.1=hb711507_0
- xcb-util-renderutil=0.3.10=hb711507_0
- xcb-util-wm=0.4.2=hb711507_0
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxinerama=1.1.5=h5888daf_1
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxscrnsaver=1.2.4=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xorg-libxxf86vm=1.1.6=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- zict=3.0.0=pyhd8ed1ab_1
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py39h8cd3c5a_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- bokeh==3.4.3
- codecov==2.1.13
- coverage==7.8.0
- holoviews==1.19.1.post1.dev6+gc1e22a3e2
- iniconfig==2.1.0
- linkify-it-py==2.0.3
- mdit-py-plugins==0.4.2
- panel==1.4.5
- param==2.2.0
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
- uc-micro-py==1.0.3
- xyzservices==2025.1.0
prefix: /opt/conda/envs/holoviews
| [
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_pandas_categorial"
] | [] | [
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_alpha_dim",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_categorical_axes_string_int",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_categorical_axes_string_int_invert_xyaxis",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_categorical_axes_string_int_inverted",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_colormapping",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_custom_string_tooltip_hover",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_dilate",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_hover_ensure_kdims_sanitized",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_hover_ensure_vdims_sanitized",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_invert_axes",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_points_categorical_axes_string_int",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_points_categorical_axes_string_int_inverted",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_single_x_value",
"holoviews/tests/plotting/bokeh/test_heatmapplot.py::TestHeatMapPlot::test_heatmap_single_y_value"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,130 | 970 | [
"holoviews/core/data/pandas.py",
"holoviews/core/util.py",
"holoviews/element/util.py"
] |
dag-hammarskjold-library__dlx-351 | e30412f241b5165636e6728ab5bf31653dc070cd | 2024-07-30 16:16:40 | 7bc9c436adc657b48945eafa70a190381864e26c | diff --git a/dlx/marc/__init__.py b/dlx/marc/__init__.py
index 943a46a..0990edd 100644
--- a/dlx/marc/__init__.py
+++ b/dlx/marc/__init__.py
@@ -1712,14 +1712,10 @@ class Auth(Marc):
def count(lookup_class, xref):
tags = list(Config.bib_authority_controlled.keys()) if lookup_class == Bib else list(Config.auth_authority_controlled.keys())
-
- total = 0
-
- for tag in tags:
- total += lookup_class.count_documents({f'{tag}.subfields.xref': xref})
-
- return total
-
+ set_class = BibSet if lookup_class == Bib else AuthSet
+
+ return set_class.from_query({'$or': [{f'{tag}.subfields.xref': xref} for tag in tags]}).count
+
if usage_type is None:
total = 0
@@ -1849,27 +1845,33 @@ class Diff():
The fields unique to record "b"
c : list(dlx.marc.Field)
The fields common to both records
+ d : list(dlx.marc.Field)
+ Fields that are common to both records but in a different order
+ e : list(dlx.marc.Field)
+ Fields that are duplicated in both records a different number of times
"""
- def __init__(self, a, b):
- """Initilizes the object. Sets attribute "a" to a list of the fields
- unique to record a. Sets attribute "b" to a list of the fields unique
- to record b. Sets attribute "c" to a list of the fields common to both
- records.
-
- Positional arguments
- --------------------
- a : Marc
- b : Marc
- """
- assert isinstance(a, Marc)
- assert isinstance(b, Marc)
+ def __init__(self, a: Marc, b: Marc) -> None:
+ assert all([isinstance(x, Marc) for x in (a, b)])
+ self.records = (a, b)
+ # fields unique to record a
self.a = list(filter(lambda x: x not in b.fields, a.fields))
+
+ # fields unique to record b
self.b = list(filter(lambda x: x not in a.fields, b.fields))
+
+ # fields commone to both records
self.c = list(filter(lambda x: x in b.fields, a.fields))
+
+ # field orders are different
+ self.d = [x for x in self.c if self.records[0].get_fields(x.tag).index(x) != self.records[1].get_fields(x.tag).index(x)]
- # todo: check if any duplciated fields are duplicated the same number of times in both
+ # fields that are duplicated a different number of times
+ a_fields = Counter([x.to_mrk() for x in a.fields])
+ b_fields = Counter([x.to_mrk() for x in b.fields])
+
+ self.e = [field for field in self.c if a_fields[field.to_mrk()] != b_fields[field.to_mrk()]]
### Field classes
@@ -1884,6 +1886,9 @@ class Controlfield(Field):
def __eq__(self, other):
if not isinstance(other, Controlfield):
return False
+
+ if self.tag != other.tag:
+ return False
return self.value == other.value
@@ -1909,6 +1914,9 @@ class Datafield(Field):
if not isinstance(other, Datafield):
return False
+ if self.tag != other.tag:
+ return False
+
return self.to_dict() == other.to_dict()
@classmethod
| marc.Diff not taking into account tag or field order
* compare tags when comparing fields
* compare field order of fields that are common to both records | dag-hammarskjold-library/dlx | diff --git a/tests/test_marc.py b/tests/test_marc.py
index 78be271..1ae64f4 100644
--- a/tests/test_marc.py
+++ b/tests/test_marc.py
@@ -762,12 +762,25 @@ def test_diff(db):
bib1, bib2 = Bib.from_id(1), Bib.from_id(2)
diff = bib1.diff(bib2)
+
assert isinstance(diff, Diff)
+ assert diff.records == (bib1, bib2)
assert len(diff.a) == 5
assert len(diff.b) == 1
assert len(diff.c) == 2
+ assert len(diff.d) == 0
+
+ bib1.set('999', 'a', 'abc', address='+')
+ bib1.set('999', 'a', 'xyz', address='+')
+ bib2.set('999', 'a', 'xyz', address='+')
+ bib2.set('999', 'a', 'abc', address='+')
+
+ assert len(Diff(bib1, bib2).d) == 2
+
+ bib2.set('999', 'a', 'abc', address='+')
+ assert len(Diff(bib1, bib2).e) == 1
- for field in diff.a + diff.b + diff.c:
+ for field in diff.a + diff.b + diff.c + diff.d:
assert isinstance(field, Field)
def test_blank_fields(db):
diff --git a/tests/test_scripts.py b/tests/test_scripts.py
index 614285c..d5246e9 100644
--- a/tests/test_scripts.py
+++ b/tests/test_scripts.py
@@ -65,7 +65,7 @@ def test_auth_merge(db):
from dlx.scripts import auth_merge
auth_merge.run(connect='mongomock://localhost', gaining_id=1, losing_id=2, user='test', skip_prompt=True)
- assert Auth.from_id(1).in_use() == 2
+ assert Auth.from_id(1).in_use(usage_type='bib') == 2
assert Auth.from_id(2) is None
def test_import_marc(db, bibs, auths):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==23.2.0
boto3==1.34.100
botocore==1.34.100
certifi==2024.7.4
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
cryptography==42.0.7
-e git+https://github.com/dag-hammarskjold-library/dlx.git@e30412f241b5165636e6728ab5bf31653dc070cd#egg=dlx
dnspython==2.6.1
exceptiongroup==1.2.1
idna==3.7
iniconfig==2.0.0
Jinja2==3.1.4
jmespath==1.0.1
joblib==1.4.2
jsonschema==4.0.0
lxml==5.2.1
MarkupSafe==2.1.5
mongomock==4.1.2
moto==5.0.8
nltk==3.8.1
packaging==24.0
pluggy==1.5.0
pycparser==2.22
pymongo==4.6.3
pyrsistent==0.20.0
pytest==8.2.0
python-dateutil==2.9.0.post0
pytz==2024.1
PyYAML==6.0.1
regex==2024.4.28
requests==2.32.3
responses==0.25.0
s3transfer==0.10.1
sentinels==1.0.0
six==1.16.0
tomli==2.0.1
tqdm==4.66.4
urllib3==1.26.19
Werkzeug==3.0.3
xlrd==1.2.0
xmldiff==2.4
xmltodict==0.13.0
| name: dlx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==23.2.0
- boto3==1.34.100
- botocore==1.34.100
- certifi==2024.7.4
- cffi==1.16.0
- charset-normalizer==3.3.2
- click==8.1.7
- cryptography==42.0.7
- dnspython==2.6.1
- exceptiongroup==1.2.1
- idna==3.7
- iniconfig==2.0.0
- jinja2==3.1.4
- jmespath==1.0.1
- joblib==1.4.2
- jsonschema==4.0.0
- lxml==5.2.1
- markupsafe==2.1.5
- mongomock==4.1.2
- moto==5.0.8
- nltk==3.8.1
- packaging==24.0
- pluggy==1.5.0
- pycparser==2.22
- pymongo==4.6.3
- pyrsistent==0.20.0
- pytest==8.2.0
- python-dateutil==2.9.0.post0
- pytz==2024.1
- pyyaml==6.0.1
- regex==2024.4.28
- requests==2.32.3
- responses==0.25.0
- s3transfer==0.10.1
- sentinels==1.0.0
- six==1.16.0
- tomli==2.0.1
- tqdm==4.66.4
- urllib3==1.26.19
- werkzeug==3.0.3
- xlrd==1.2.0
- xmldiff==2.4
- xmltodict==0.13.0
prefix: /opt/conda/envs/dlx
| [
"tests/test_marc.py::test_diff"
] | [] | [
"tests/test_marc.py::test_init_marc",
"tests/test_marc.py::test_init_bib",
"tests/test_marc.py::test_init_auth",
"tests/test_marc.py::test_init_auth_check",
"tests/test_marc.py::test_commit",
"tests/test_marc.py::test_delete",
"tests/test_marc.py::test_from_id",
"tests/test_marc.py::test_querydocument",
"tests/test_marc.py::test_from_query",
"tests/test_marc.py::test_querystring",
"tests/test_marc.py::test_from_aggregation",
"tests/test_marc.py::test_atlasquery",
"tests/test_marc.py::test_get_field",
"tests/test_marc.py::test_field_get_value",
"tests/test_marc.py::test_set_field",
"tests/test_marc.py::test_get_value",
"tests/test_marc.py::test_get_xref",
"tests/test_marc.py::test_set",
"tests/test_marc.py::test_zmerge",
"tests/test_marc.py::test_xmerge",
"tests/test_marc.py::test_set_008",
"tests/test_marc.py::test_delete_field",
"tests/test_marc.py::test_auth_lookup",
"tests/test_marc.py::test_xlookup",
"tests/test_marc.py::test_auth_control",
"tests/test_marc.py::test_language",
"tests/test_marc.py::test_to_xml",
"tests/test_marc.py::test_xml_encoding",
"tests/test_marc.py::test_to_mrc",
"tests/test_marc.py::test_to_mrk",
"tests/test_marc.py::test_from_mrk",
"tests/test_marc.py::test_from_json",
"tests/test_marc.py::test_to_jmarcnx",
"tests/test_marc.py::test_field_from_json",
"tests/test_marc.py::test_partial_lookup",
"tests/test_marc.py::test_blank_fields",
"tests/test_marc.py::test_auth_in_use",
"tests/test_marc.py::test_catch_delete_auth",
"tests/test_marc.py::test_from_xml",
"tests/test_marc.py::test_auth_use_count",
"tests/test_marc.py::test_auth_merge",
"tests/test_marc.py::test_logical_fields",
"tests/test_marc.py::test_bib_files",
"tests/test_marc.py::test_list_attached",
"tests/test_scripts.py::test_init_db",
"tests/test_scripts.py::test_build_logical_fields",
"tests/test_scripts.py::test_build_text_collections",
"tests/test_scripts.py::test_auth_merge",
"tests/test_scripts.py::test_import_marc"
] | [] | null | 19,142 | 890 | [
"dlx/marc/__init__.py"
] |
|
globocom__m3u8-374 | 54f5288d23129506168d34d87482f4dcbfa25c62 | 2024-07-30 16:21:43 | 54f5288d23129506168d34d87482f4dcbfa25c62 | diff --git a/m3u8/parser.py b/m3u8/parser.py
index cb16fe3..e2df4e2 100644
--- a/m3u8/parser.py
+++ b/m3u8/parser.py
@@ -320,9 +320,14 @@ def _parse_attribute_list(prefix, line, attribute_parser, default_parser=None):
attributes = {}
for param in params:
- name, value = param.split("=", 1)
- name = normalize_attribute(name)
+ param_parts = param.split("=", 1)
+ if len(param_parts) == 1:
+ name = ""
+ value = param_parts[0]
+ else:
+ name, value = param_parts
+ name = normalize_attribute(name)
if name in attribute_parser:
value = attribute_parser[name](value)
elif default_parser is not None:
@@ -548,16 +553,6 @@ def _parse_cueout_cont(line, state, **kwargs):
if len(elements) != 2:
return
- # EXT-X-CUE-OUT-CONT:2.436/120 style
- res = re.match(
- r"^[-+]?([0-9]+(\.[0-9]+)?|\.[0-9]+)/[-+]?([0-9]+(\.[0-9]+)?|\.[0-9]+)$",
- elements[1],
- )
- if res:
- state["current_cue_out_elapsedtime"] = res.group(1)
- state["current_cue_out_duration"] = res.group(3)
- return
-
# EXT-X-CUE-OUT-CONT:ElapsedTime=10,Duration=60,SCTE35=... style
cue_info = _parse_attribute_list(
protocol.ext_x_cue_out_cont,
@@ -565,6 +560,16 @@ def _parse_cueout_cont(line, state, **kwargs):
remove_quotes_parser("duration", "elapsedtime", "scte35"),
)
+ # EXT-X-CUE-OUT-CONT:2.436/120 style
+ progress = cue_info.get("")
+ if progress:
+ progress_parts = progress.split("/", 1)
+ if len(progress_parts) == 1:
+ state["current_cue_out_duration"] = progress_parts[0]
+ else:
+ state["current_cue_out_elapsedtime"] = progress_parts[0]
+ state["current_cue_out_duration"] = progress_parts[1]
+
duration = cue_info.get("duration")
if duration:
state["current_cue_out_duration"] = duration
@@ -578,55 +583,28 @@ def _parse_cueout_cont(line, state, **kwargs):
state["current_cue_out_elapsedtime"] = elapsedtime
-def _cueout_no_duration(line):
- # this needs to be called first since line.split in all other
- # parsers will throw a ValueError if passed just this tag
- if line == protocol.ext_x_cue_out:
- return (None, None)
-
-
-def _cueout_envivio(line, state):
- param, value = line.split(":", 1)
- res = re.match('.*DURATION=(.*),.*,CUE="(.*)"', value)
- if res:
- return (res.group(2), res.group(1))
- else:
- return None
-
-
-def _cueout_duration(line):
- # This was added separately rather than modifying "simple"
- param, value = line.split(":", 1)
- res = re.match(r"DURATION=(.*)", value)
- if res:
- return (None, res.group(1))
-
-
-def _cueout_simple(line):
- param, value = line.split(":", 1)
- res = re.match(r"^(\d+(?:\.\d)?\d*)$", value)
- if res:
- return (None, res.group(1))
-
-
def _parse_cueout(line, state, **kwargs):
- _cueout_state = (
- _cueout_no_duration(line)
- or _cueout_envivio(line, state)
- or _cueout_duration(line)
- or _cueout_simple(line)
- )
- if _cueout_state:
- cue_out_scte35, cue_out_duration = _cueout_state
- current_cue_out_scte35 = state.get("current_cue_out_scte35")
- state["current_cue_out_scte35"] = cue_out_scte35 or current_cue_out_scte35
- state["current_cue_out_duration"] = cue_out_duration
-
state["cue_out_start"] = True
state["cue_out"] = True
if "DURATION" in line.upper():
state["cue_out_explicitly_duration"] = True
+ elements = line.split(":", 1)
+ if len(elements) != 2:
+ return
+
+ cue_info = _parse_attribute_list(
+ protocol.ext_x_cue_out,
+ line,
+ remove_quotes_parser("cue"),
+ )
+ cue_out_scte35 = cue_info.get("cue")
+ cue_out_duration = cue_info.get("duration") or cue_info.get("")
+
+ current_cue_out_scte35 = state.get("current_cue_out_scte35")
+ state["current_cue_out_scte35"] = cue_out_scte35 or current_cue_out_scte35
+ state["current_cue_out_duration"] = cue_out_duration
+
def _parse_server_control(line, data, **kwargs):
attribute_parser = {
| MediaConvert EXT-CUE-OUT-CONT still problematic
Hi,
I still have problems with some HLS manifests generated by AWS MediaConvert, even after the fix to #349.
Here is an example: https://bpkio-cs-demos.s3-eu-west-1.amazonaws.com/fabre/mediaconvert-conditioning/output/hls/cts-laworder720p.m3u8
The lib fails on this line:
`#EXT-X-CUE-OUT-CONT:10/4, SpliceType=VOD_DAI,Action=REPLACE,PAID=amazon.com/TEST2014020500000346,Acds=BA`.
Note that this type of markup is coming straight from the (poor) AWS documentation at https://docs.aws.amazon.com/mediaconvert/latest/ug/example-esam-xml-manifest-conditioning.html | globocom/m3u8 | diff --git a/tests/playlists.py b/tests/playlists.py
index 4a5e497..c2d249a 100755
--- a/tests/playlists.py
+++ b/tests/playlists.py
@@ -781,6 +781,31 @@ segment_19980228.ts
segment_19980229.ts
"""
+CUE_OUT_MEDIACONVERT_PLAYLIST = """\
+#EXTM3U
+#EXT-X-VERSION:3
+#EXT-X-TARGETDURATION:11
+#EXT-X-MEDIA-SEQUENCE:1
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:10,
+segment_00001.ts
+#EXT-X-CUE-OUT:4,SpliceType=VOD_DAI,Action=REPLACE, PAID=example.com/2024073010700,Acds=BA
+#EXTINF:10,
+segment_00002.ts
+#EXT-X-CUE-OUT-CONT:10/4, SpliceType=VOD_DAI,Action=REPLACE,PAID=example.com/2024073010700,Acds=BA
+#EXTINF:10,
+segment_00003.ts
+#EXTINF:10,
+segment_00004.ts
+#EXT-X-CUE-IN:4,SpliceType=VOD_DAI
+#EXTINF:0,
+segment_00005.ts
+#EXTINF:10,
+segment_00006.ts
+#EXT-X-ENDLIST
+"""
+
+
CUE_OUT_ENVIVIO_PLAYLIST = """
#EXTM3U
#EXT-X-VERSION:3
diff --git a/tests/test_model.py b/tests/test_model.py
index 97d3bcd..dd66a4b 100755
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -257,6 +257,14 @@ def test_segment_cue_out_cont_alt():
assert segments[3].scte35_duration == "120.0"
+def test_segment_cue_out_cont_mediaconvert():
+ obj = m3u8.M3U8(playlists.CUE_OUT_MEDIACONVERT_PLAYLIST)
+ segments = obj.segments
+
+ assert segments[2].scte35_elapsedtime == "10"
+ assert segments[2].scte35_duration == "4"
+
+
def test_segment_envivio_scte35_attribute():
obj = m3u8.M3U8(playlists.CUE_OUT_ENVIVIO_PLAYLIST)
segments = obj.segments
@@ -273,7 +281,7 @@ def test_segment_envivio_scte35_attribute():
def test_segment_unknown_scte35_attribute():
obj = m3u8.M3U8(playlists.CUE_OUT_INVALID_PLAYLIST)
assert obj.segments[0].scte35 is None
- assert obj.segments[0].scte35_duration is None
+ assert obj.segments[0].scte35_duration == "INVALID"
def test_segment_cue_out_no_duration():
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 5.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | backports-datetime-fromisoformat==2.0.3
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/globocom/m3u8.git@54f5288d23129506168d34d87482f4dcbfa25c62#egg=m3u8
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
| name: m3u8
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-datetime-fromisoformat==2.0.3
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/m3u8
| [
"tests/test_model.py::test_segment_cue_out_cont_mediaconvert",
"tests/test_model.py::test_segment_unknown_scte35_attribute"
] | [
"tests/test_model.py::test_dump_should_raise_if_create_sub_directories_fails"
] | [
"tests/test_model.py::test_base_path_playlist_with_slash_in_query_string",
"tests/test_model.py::test_target_duration_attribute",
"tests/test_model.py::test_media_sequence_attribute",
"tests/test_model.py::test_program_date_time_attribute",
"tests/test_model.py::test_program_date_time_attribute_for_each_segment",
"tests/test_model.py::test_program_date_time_attribute_with_discontinuity",
"tests/test_model.py::test_program_date_time_attribute_without_discontinuity",
"tests/test_model.py::test_segment_discontinuity_attribute",
"tests/test_model.py::test_segment_cue_out_attribute",
"tests/test_model.py::test_segment_cue_out_start_attribute",
"tests/test_model.py::test_segment_cue_in_attribute",
"tests/test_model.py::test_segment_cue_out_cont_dumps",
"tests/test_model.py::test_segment_cue_out_cont_attributes_dumps",
"tests/test_model.py::test_segment_oatcls_scte35_cue_out_dumps",
"tests/test_model.py::test_segment_oatcls_scte35_non_cue_out_dumps",
"tests/test_model.py::test_segment_cue_out_start_dumps",
"tests/test_model.py::test_segment_cue_out_start_explicit_dumps",
"tests/test_model.py::test_segment_cue_out_start_no_duration_dumps",
"tests/test_model.py::test_segment_cue_out_in_dumps",
"tests/test_model.py::test_segment_elemental_scte35_attribute",
"tests/test_model.py::test_segment_cue_out_cont_alt",
"tests/test_model.py::test_segment_envivio_scte35_attribute",
"tests/test_model.py::test_segment_cue_out_no_duration",
"tests/test_model.py::test_segment_asset_metadata_dumps",
"tests/test_model.py::test_keys_on_clear_playlist",
"tests/test_model.py::test_keys_on_simple_encrypted_playlist",
"tests/test_model.py::test_key_attribute",
"tests/test_model.py::test_key_attribute_on_none",
"tests/test_model.py::test_key_attribute_without_initialization_vector",
"tests/test_model.py::test_session_keys_on_clear_playlist",
"tests/test_model.py::test_session_keys_on_simple_encrypted_playlist",
"tests/test_model.py::test_session_key_attribute",
"tests/test_model.py::test_session_key_attribute_on_none",
"tests/test_model.py::test_session_key_attribute_without_initialization_vector",
"tests/test_model.py::test_segments_attribute",
"tests/test_model.py::test_segments_attribute_without_title",
"tests/test_model.py::test_segments_attribute_without_duration",
"tests/test_model.py::test_segments_attribute_with_byterange",
"tests/test_model.py::test_segment_attribute_with_multiple_keys",
"tests/test_model.py::test_segment_title_dumps",
"tests/test_model.py::test_is_variant_attribute",
"tests/test_model.py::test_is_endlist_attribute",
"tests/test_model.py::test_is_i_frames_only_attribute",
"tests/test_model.py::test_playlists_attribute",
"tests/test_model.py::test_playlists_attribute_without_program_id",
"tests/test_model.py::test_playlists_attribute_with_resolution",
"tests/test_model.py::test_iframe_playlists_attribute",
"tests/test_model.py::test_version_attribute",
"tests/test_model.py::test_version_settable_as_int",
"tests/test_model.py::test_version_settable_as_string",
"tests/test_model.py::test_allow_cache_attribute",
"tests/test_model.py::test_files_attribute_should_list_all_files_including_segments_and_key",
"tests/test_model.py::test_vod_playlist_type_should_be_imported_as_a_simple_attribute",
"tests/test_model.py::test_event_playlist_type_should_be_imported_as_a_simple_attribute",
"tests/test_model.py::test_independent_segments_should_be_true",
"tests/test_model.py::test_independent_segments_should_be_false",
"tests/test_model.py::test_no_playlist_type_leaves_attribute_empty",
"tests/test_model.py::test_dump_playlists_with_resolution",
"tests/test_model.py::test_dump_should_build_file_with_same_content",
"tests/test_model.py::test_dump_should_create_sub_directories",
"tests/test_model.py::test_dump_should_work_for_variant_streams",
"tests/test_model.py::test_dump_should_work_for_variant_playlists_with_iframe_playlists",
"tests/test_model.py::test_dump_should_work_for_iframe_playlists",
"tests/test_model.py::test_dump_should_include_program_date_time",
"tests/test_model.py::test_dump_segment_honors_timespec",
"tests/test_model.py::test_dump_honors_timespec",
"tests/test_model.py::test_dump_should_not_ignore_zero_duration",
"tests/test_model.py::test_dump_should_use_decimal_floating_point_for_very_short_durations",
"tests/test_model.py::test_dump_should_include_segment_level_program_date_time",
"tests/test_model.py::test_dump_should_include_segment_level_program_date_time_without_discontinuity",
"tests/test_model.py::test_dump_should_include_map_attributes",
"tests/test_model.py::test_multiple_map_attributes",
"tests/test_model.py::test_dump_should_include_multiple_map_attributes",
"tests/test_model.py::test_dump_should_work_for_playlists_using_byteranges",
"tests/test_model.py::test_should_dump_with_endlist_tag",
"tests/test_model.py::test_should_dump_without_endlist_tag",
"tests/test_model.py::test_should_dump_multiple_keys",
"tests/test_model.py::test_should_dump_unencrypted_encrypted_keys_together",
"tests/test_model.py::test_should_dump_complex_unencrypted_encrypted_keys",
"tests/test_model.py::test_should_dump_complex_unencrypted_encrypted_keys_no_uri_attr",
"tests/test_model.py::test_should_dump_session_data",
"tests/test_model.py::test_should_dump_multiple_session_data",
"tests/test_model.py::test_length_segments_by_key",
"tests/test_model.py::test_list_segments_by_key",
"tests/test_model.py::test_replace_segment_key",
"tests/test_model.py::test_keyformat_and_keyformatversion",
"tests/test_model.py::test_should_dump_program_datetime_and_discontinuity",
"tests/test_model.py::test_should_normalize_segments_and_key_urls_if_base_path_passed_to_constructor",
"tests/test_model.py::test_should_normalize_session_key_urls_if_base_path_passed_to_constructor",
"tests/test_model.py::test_should_normalize_variant_streams_urls_if_base_path_passed_to_constructor",
"tests/test_model.py::test_should_normalize_segments_and_key_urls_if_base_path_attribute_updated",
"tests/test_model.py::test_playlist_type_dumped_to_appropriate_m3u8_field",
"tests/test_model.py::test_empty_playlist_type_is_gracefully_ignored",
"tests/test_model.py::test_none_playlist_type_is_gracefully_ignored",
"tests/test_model.py::test_0_media_sequence_added_to_file",
"tests/test_model.py::test_none_media_sequence_gracefully_ignored",
"tests/test_model.py::test_0_discontinuity_sequence_added_to_file",
"tests/test_model.py::test_none_discontinuity_sequence_gracefully_ignored",
"tests/test_model.py::test_non_zero_discontinuity_sequence_added_to_file",
"tests/test_model.py::test_should_correctly_update_base_path_if_its_blank",
"tests/test_model.py::test_base_path_should_just_return_uri_if_absolute",
"tests/test_model.py::test_m3u8_should_propagate_base_uri_to_segments",
"tests/test_model.py::test_m3u8_should_propagate_base_uri_to_key",
"tests/test_model.py::test_m3u8_should_propagate_base_uri_to_session_key",
"tests/test_model.py::test_base_path_with_optional_uri_should_do_nothing",
"tests/test_model.py::test_medialist_uri_method",
"tests/test_model.py::test_segment_map_uri_attribute",
"tests/test_model.py::test_segment_map_uri_attribute_with_byterange",
"tests/test_model.py::test_start_with_negative_offset",
"tests/test_model.py::test_start_with_precise",
"tests/test_model.py::test_playlist_stream_info_contains_group_id_refs",
"tests/test_model.py::test_should_dump_frame_rate",
"tests/test_model.py::test_should_round_frame_rate",
"tests/test_model.py::test_add_segment_to_playlist",
"tests/test_model.py::test_segment_str_method",
"tests/test_model.py::test_attribute_denormaliser",
"tests/test_model.py::test_find_key_throws_when_no_match",
"tests/test_model.py::test_ll_playlist",
"tests/test_model.py::test_add_rendition_report_to_playlist",
"tests/test_model.py::test_add_part_to_segment",
"tests/test_model.py::test_partial_segment_gap_and_byterange",
"tests/test_model.py::test_session_data_with_value",
"tests/test_model.py::test_session_data_with_uri",
"tests/test_model.py::test_session_data_cannot_be_created_with_value_and_uri_at_the_same_time",
"tests/test_model.py::test_endswith_newline",
"tests/test_model.py::test_init_section_base_path_update",
"tests/test_model.py::test_iframe_playlists_base_path_update",
"tests/test_model.py::test_partial_segment_base_path_update",
"tests/test_model.py::test_add_preload_hint",
"tests/test_model.py::test_add_daterange",
"tests/test_model.py::test_daterange_simple",
"tests/test_model.py::test_daterange_scte_out_and_in",
"tests/test_model.py::test_daterange_enddate_sctecmd",
"tests/test_model.py::test_daterange_in_parts",
"tests/test_model.py::test_add_gap",
"tests/test_model.py::test_gap",
"tests/test_model.py::test_gap_in_parts",
"tests/test_model.py::test_skip_dateranges",
"tests/test_model.py::test_add_skip",
"tests/test_model.py::test_content_steering",
"tests/test_model.py::test_add_content_steering",
"tests/test_model.py::test_content_steering_base_path_update",
"tests/test_model.py::test_add_content_steering_base_uri_update",
"tests/test_model.py::test_dump_should_work_for_variant_playlists_with_image_playlists",
"tests/test_model.py::test_segment_media_sequence",
"tests/test_model.py::test_low_latency_output"
] | [] | MIT License | 19,143 | 1,306 | [
"m3u8/parser.py"
] |
|
modin-project__modin-7353 | a40cef7f54c54571008346a7e7882add12ac9dc1 | 2024-07-30 23:04:27 | f3c0a63579bb6cee861ea04344ddedd72221634e | diff --git a/modin/pandas/base.py b/modin/pandas/base.py
index 51c9cd81..04dd8459 100644
--- a/modin/pandas/base.py
+++ b/modin/pandas/base.py
@@ -99,7 +99,13 @@ sentinel = object()
# Do not lookup certain attributes in columns or index, as they're used for some
# special purposes, like serving remote context
-_ATTRS_NO_LOOKUP = {"__name__", "_cache"}
+_ATTRS_NO_LOOKUP = {
+ "__name__",
+ "_cache",
+ "_ipython_canary_method_should_not_exist_",
+ "_ipython_display_",
+ "_repr_mimebundle_",
+}
_DEFAULT_BEHAVIOUR = {
"__init__",
diff --git a/modin/utils.py b/modin/utils.py
index 34071be1..86237326 100644
--- a/modin/utils.py
+++ b/modin/utils.py
@@ -462,7 +462,18 @@ def _inherit_docstrings_in_place(
if doc_module != DocModule.default and "pandas" in str(
getattr(parent, "__module__", "")
):
- parent = getattr(imported_doc_module, getattr(parent, "__name__", ""), parent)
+ parent_name = (
+ # DocModule should use the class BasePandasDataset to override the
+ # docstrings of BasePandasDataset, even if BasePandasDataset
+ # normally inherits docstrings from a different `parent`.
+ "BasePandasDataset"
+ if getattr(cls_or_func, "__name__", "") == "BasePandasDataset"
+ # For other classes, override docstrings with the class that has the
+ # same name as the `parent` class, e.g. DataFrame inherits
+ # docstrings from doc_module.DataFrame.
+ else getattr(parent, "__name__", "")
+ )
+ parent = getattr(imported_doc_module, parent_name, parent)
if parent != default_parent:
# Reset API link in case the docs are overridden.
apilink = None
| BUG: BasePandasDataset tries to inherit custom docstrings from DataFrame instead of from a separate base class
Modin's `BasePandasDataset` inherits docstrings from `pandas.DataFrame`, so any docstring overrides for dataframe affect `BasePandasDataset`. Some options:
1. we could have the base dataset inherit from the pandas base class instead, but that might change some existing docstrings
2. we could explicitly choose a parent docstring class for BasePandasDataset in _inherit_docstrings
3. we could accept some incorrect docstrings in BasePandasDataset
@devin-petersohn and I prefer option 2) | modin-project/modin | diff --git a/modin/tests/config/docs_module/__init__.py b/modin/tests/config/docs_module/__init__.py
index aa21549f..5f617d68 100644
--- a/modin/tests/config/docs_module/__init__.py
+++ b/modin/tests/config/docs_module/__init__.py
@@ -11,7 +11,7 @@
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
-from .classes import DataFrame, Series
+from .classes import BasePandasDataset, DataFrame, Series
from .functions import read_csv
-__all__ = ["DataFrame", "Series", "read_csv"]
+__all__ = ["BasePandasDataset", "DataFrame", "Series", "read_csv"]
diff --git a/modin/tests/config/docs_module/classes.py b/modin/tests/config/docs_module/classes.py
index 8dc152e2..235c99bd 100644
--- a/modin/tests/config/docs_module/classes.py
+++ b/modin/tests/config/docs_module/classes.py
@@ -22,3 +22,11 @@ class Series:
def isna(self):
"""This is a test of the documentation module for Series."""
return
+
+
+class BasePandasDataset:
+ """This is a test of the documentation module for BasePandasDataSet."""
+
+ def apply():
+ """This is a test of the documentation module for BasePandasDataSet.apply."""
+ return
diff --git a/modin/tests/config/test_envvars.py b/modin/tests/config/test_envvars.py
index 384bd5f1..d057ecb0 100644
--- a/modin/tests/config/test_envvars.py
+++ b/modin/tests/config/test_envvars.py
@@ -20,6 +20,7 @@ import modin.config as cfg
import modin.pandas as pd
from modin.config.envvars import _check_vars
from modin.config.pubsub import _UNSET, ExactStr
+from modin.pandas.base import BasePandasDataset
def reset_vars(*vars: tuple[cfg.Parameter]):
@@ -89,6 +90,12 @@ class TestDocModule:
cfg.DocModule.put("modin.tests.config.docs_module")
# Test for override
+ assert BasePandasDataset.__doc__ == (
+ "This is a test of the documentation module for BasePandasDataSet."
+ )
+ assert BasePandasDataset.apply.__doc__ == (
+ "This is a test of the documentation module for BasePandasDataSet.apply."
+ )
assert (
pd.DataFrame.apply.__doc__
== "This is a test of the documentation module for DataFrame."
@@ -96,6 +103,7 @@ class TestDocModule:
# Test for pandas doc when method is not defined on the plugin module
assert pandas.DataFrame.isna.__doc__ in pd.DataFrame.isna.__doc__
assert pandas.DataFrame.isnull.__doc__ in pd.DataFrame.isnull.__doc__
+ assert BasePandasDataset.astype.__doc__ in pd.DataFrame.astype.__doc__
# Test for override
assert (
pd.Series.isna.__doc__
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.31 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y libhdf5-dev"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.21.1
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aioitertools==0.12.0
aiosignal==1.3.2
alabaster==0.7.16
anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==3.0.0
asv==0.5.1
async-lru==2.0.5
async-timeout==5.0.1
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
black==25.1.0
bleach==6.2.0
blinker==1.9.0
blosc2==2.5.1
bokeh==3.4.3
boto3==1.37.1
botocore==1.37.1
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==3.1.1
comm==0.2.2
connectorx==0.3.4a3
contourpy==1.3.0
coverage==7.8.0
cramjam==2.9.1
cryptography==44.0.2
cycler==0.12.1
dask==2024.8.0
dask-expr==1.1.10
dataframe_api_compat==0.2.7
db-dtypes==1.4.2
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
Deprecated==1.2.18
distributed==2024.8.0
docutils==0.21.2
et_xmlfile==2.0.0
exceptiongroup==1.2.2
execnet==2.1.1
executing==2.2.0
Faker==37.1.0
fastjsonschema==2.21.1
fastparquet==2024.11.0
filelock==3.18.0
flake8==7.2.0
flake8-no-implicit-concat==0.3.7
flake8-print==5.0.0
Flask==3.1.0
flask-cors==5.0.1
fonttools==4.56.0
fqdn==1.5.1
frozenlist==1.5.0
fsspec==2025.3.1
fuzzydata==0.0.11
google-api-core==2.24.2
google-auth==2.38.0
google-auth-oauthlib==1.2.1
google-cloud-bigquery==3.31.0
google-cloud-core==2.4.3
google-crc32c==1.7.1
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
greenlet==3.1.1
grpcio==1.71.0
grpcio-status==1.71.0
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
isort==6.0.1
itsdangerous==2.2.0
jedi==0.19.2
Jinja2==3.1.6
jmespath==1.0.1
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kiwisolver==1.4.7
locket==1.0.0
lxml==5.3.1
lz4==4.4.3
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mccabe==0.7.0
mistune==3.1.3
-e git+https://github.com/modin-project/modin.git@a40cef7f54c54571008346a7e7882add12ac9dc1#egg=modin
modin-spreadsheet @ git+https://github.com/modin-project/modin-spreadsheet.git@49ffd89f683f54c311867d602c55443fb11bf2a5
more-itertools==10.6.0
moto==5.1.2
msgpack==1.1.0
multidict==6.2.0
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
ndindex==1.9.2
nest-asyncio==1.6.0
networkx==3.2.1
notebook==7.3.3
notebook_shim==0.2.4
numexpr==2.10.2
numpy==2.0.2
numpydoc==1.1.0
oauthlib==3.2.2
openpyxl==3.1.5
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandas-gbq==0.28.0
pandas-stubs==2.2.2.240807
pandocfilters==1.5.1
parso==0.8.4
partd==1.4.2
pathspec==0.12.1
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
propcache==0.3.1
proto-plus==1.26.1
protobuf==5.29.4
psutil==7.0.0
psycopg2-binary==2.9.10
ptyprocess==0.7.0
pure_eval==0.2.3
py-cpuinfo==9.0.0
pyarrow==19.0.1
pyarrow-hotfix==0.6
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycodestyle==2.13.0
pycparser==2.22
pydata-google-auth==1.9.1
pyflakes==3.3.1
pygit2==1.15.1
PyGithub==2.6.1
Pygments==2.19.1
PyJWT==2.10.1
pymssql==2.3.2
PyNaCl==1.5.0
pyparsing==3.2.3
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
ray==2.44.1
referencing==0.36.2
requests==2.32.3
requests-oauthlib==2.0.0
responses==0.25.7
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
rsa==4.9
s3fs==2025.3.1
s3transfer==0.11.3
scipy==1.13.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
sortedcontainers==2.4.0
soupsieve==2.6
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
stack-data==0.6.3
tables==3.9.2
tblib==3.0.0
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
tqdm==4.67.1
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==1.26.20
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
Werkzeug==3.1.3
widgetsnbextension==4.0.13
wrapt==1.17.2
xarray==2024.7.0
xlrd==2.0.1
xmltodict==0.14.2
xyzservices==2025.1.0
yarl==1.18.3
zict==3.0.0
zipp==3.21.0
| name: modin
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.21.1
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aioitertools==0.12.0
- aiosignal==1.3.2
- alabaster==0.7.16
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asttokens==3.0.0
- asv==0.5.1
- async-lru==2.0.5
- async-timeout==5.0.1
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- black==25.1.0
- bleach==6.2.0
- blinker==1.9.0
- blosc2==2.5.1
- bokeh==3.4.3
- boto3==1.37.1
- botocore==1.37.1
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==3.1.1
- comm==0.2.2
- connectorx==0.3.4a3
- contourpy==1.3.0
- coverage==7.8.0
- cramjam==2.9.1
- cryptography==44.0.2
- cycler==0.12.1
- dask==2024.8.0
- dask-expr==1.1.10
- dataframe-api-compat==0.2.7
- db-dtypes==1.4.2
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- deprecated==1.2.18
- distributed==2024.8.0
- docutils==0.21.2
- et-xmlfile==2.0.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- executing==2.2.0
- faker==37.1.0
- fastjsonschema==2.21.1
- fastparquet==2024.11.0
- filelock==3.18.0
- flake8==7.2.0
- flake8-no-implicit-concat==0.3.7
- flake8-print==5.0.0
- flask==3.1.0
- flask-cors==5.0.1
- fonttools==4.56.0
- fqdn==1.5.1
- frozenlist==1.5.0
- fsspec==2025.3.1
- fuzzydata==0.0.11
- google-api-core==2.24.2
- google-auth==2.38.0
- google-auth-oauthlib==1.2.1
- google-cloud-bigquery==3.31.0
- google-cloud-core==2.4.3
- google-crc32c==1.7.1
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- greenlet==3.1.1
- grpcio==1.71.0
- grpcio-status==1.71.0
- h11==0.14.0
- httpcore==1.0.7
- httpx==0.28.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- isort==6.0.1
- itsdangerous==2.2.0
- jedi==0.19.2
- jinja2==3.1.6
- jmespath==1.0.1
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.7
- locket==1.0.0
- lxml==5.3.1
- lz4==4.4.3
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mistune==3.1.3
- modin-spreadsheet==0.1.2+3.g49ffd89
- more-itertools==10.6.0
- moto==5.1.2
- msgpack==1.1.0
- multidict==6.2.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- ndindex==1.9.2
- nest-asyncio==1.6.0
- networkx==3.2.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numexpr==2.10.2
- numpy==2.0.2
- numpydoc==1.1.0
- oauthlib==3.2.2
- openpyxl==3.1.5
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandas-gbq==0.28.0
- pandas-stubs==2.2.2.240807
- pandocfilters==1.5.1
- parso==0.8.4
- partd==1.4.2
- pathspec==0.12.1
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- propcache==0.3.1
- proto-plus==1.26.1
- protobuf==5.29.4
- psutil==7.0.0
- psycopg2-binary==2.9.10
- ptyprocess==0.7.0
- pure-eval==0.2.3
- py-cpuinfo==9.0.0
- pyarrow==19.0.1
- pyarrow-hotfix==0.6
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycodestyle==2.13.0
- pycparser==2.22
- pydata-google-auth==1.9.1
- pyflakes==3.3.1
- pygit2==1.15.1
- pygithub==2.6.1
- pygments==2.19.1
- pyjwt==2.10.1
- pymssql==2.3.2
- pynacl==1.5.0
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- ray==2.44.1
- referencing==0.36.2
- requests==2.32.3
- requests-oauthlib==2.0.0
- responses==0.25.7
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- rsa==4.9
- s3fs==2025.3.1
- s3transfer==0.11.3
- scipy==1.13.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- stack-data==0.6.3
- tables==3.9.2
- tblib==3.0.0
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- tqdm==4.67.1
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==1.26.20
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- werkzeug==3.1.3
- widgetsnbextension==4.0.13
- wrapt==1.17.2
- xarray==2024.7.0
- xlrd==2.0.1
- xmltodict==0.14.2
- xyzservices==2025.1.0
- yarl==1.18.3
- zict==3.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/modin
| [
"modin/tests/config/test_envvars.py::TestDocModule::test_overrides"
] | [] | [
"modin/tests/config/test_envvars.py::test_unknown",
"modin/tests/config/test_envvars.py::test_custom_default[str]",
"modin/tests/config/test_envvars.py::test_custom_default[ExactStr]",
"modin/tests/config/test_envvars.py::test_custom_set[str]",
"modin/tests/config/test_envvars.py::test_custom_set[ExactStr]",
"modin/tests/config/test_envvars.py::test_custom_help[str]",
"modin/tests/config/test_envvars.py::test_custom_help[ExactStr]",
"modin/tests/config/test_envvars.py::TestDocModule::test_not_redefining_classes_modin_issue_7138",
"modin/tests/config/test_envvars.py::test_ray_cluster_resources",
"modin/tests/config/test_envvars.py::test_context_manager_update_config[modify_config0]",
"modin/tests/config/test_envvars.py::test_wrong_values[NPartitions]",
"modin/tests/config/test_envvars.py::test_wrong_values[CpuCount]",
"modin/tests/config/test_envvars.py::test_wrong_values[LogMemoryInterval]",
"modin/tests/config/test_envvars.py::test_wrong_values[LogFileSize]",
"modin/tests/config/test_envvars.py::test_wrong_values[MinRowPartitionSize]",
"modin/tests/config/test_envvars.py::test_wrong_values[MinColumnPartitionSize]"
] | [] | Apache License 2.0 | 19,145 | 489 | [
"modin/pandas/base.py",
"modin/utils.py"
] |
|
globocom__m3u8-375 | e537b2ddd960450cbe7b8b45306b5bfb91c5e9e5 | 2024-07-31 02:42:18 | e537b2ddd960450cbe7b8b45306b5bfb91c5e9e5 | diff --git a/m3u8/httpclient.py b/m3u8/httpclient.py
index 28025c0..a6babad 100644
--- a/m3u8/httpclient.py
+++ b/m3u8/httpclient.py
@@ -1,6 +1,6 @@
+import gzip
import ssl
import urllib.request
-
from urllib.parse import urljoin
@@ -15,9 +15,15 @@ class DefaultHTTPClient:
opener.addheaders = headers.items()
resource = opener.open(uri, timeout=timeout)
base_uri = urljoin(resource.geturl(), ".")
- content = resource.read().decode(
- resource.headers.get_content_charset(failobj="utf-8")
- )
+
+ if resource.info().get("Content-Encoding") == "gzip":
+ content = gzip.decompress(resource.read()).decode(
+ resource.headers.get_content_charset(failobj="utf-8")
+ )
+ else:
+ content = resource.read().decode(
+ resource.headers.get_content_charset(failobj="utf-8")
+ )
return content, base_uri
| UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte
I get this error when doing
`m3u8.load("https://cdn-vod-1.nxplay.com.br/hls/movies/94934c51-70e8-4583-8fac-2e045529c6d6/media_0.mp4/index-v1-a1.m3u8")`
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte
Not quite sure why...
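(Editorial note, not part of the original report: byte `0x8b` at position 1 is the second byte of the gzip magic number `0x1f 0x8b`, so the response body is almost certainly gzip-compressed rather than plain UTF-8 text. A minimal sketch using the standard-library `gzip` module; the sample playlist bytes are made up for illustration.)
```python
import gzip

# The first two bytes of any gzip stream are 0x1f 0x8b, which is why decoding
# the raw body directly as UTF-8 fails at position 1 with byte 0x8b.
raw = gzip.compress(b"#EXTM3U\n")
assert raw[:2] == b"\x1f\x8b"

# Decompress first, then decode the playlist text.
content = gzip.decompress(raw).decode("utf-8")
assert content == "#EXTM3U\n"
```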
It seems to be the code in httpclient.py, but can't get my head around it. | globocom/m3u8 | diff --git a/tests/test_http_client.py b/tests/test_http_client.py
new file mode 100644
index 0000000..3f1946e
--- /dev/null
+++ b/tests/test_http_client.py
@@ -0,0 +1,63 @@
+import gzip
+import unittest
+from http.client import HTTPResponse
+from unittest.mock import Mock, patch
+
+from m3u8.httpclient import DefaultHTTPClient
+
+
+class MockHeaders:
+ def __init__(self, encoding=None):
+ self.encoding = encoding
+
+ def get_content_charset(self, failobj="utf-8"):
+ return self.encoding or failobj
+
+
+class TestDefaultHTTPClient(unittest.TestCase):
+ @patch("urllib.request.OpenerDirector.open")
+ def test_download_normal_content(self, mock_open):
+ client = DefaultHTTPClient()
+ mock_response = Mock(spec=HTTPResponse)
+ mock_response.read.return_value = b"playlist content"
+ mock_response.info.return_value = {}
+ mock_response.geturl.return_value = "http://example.com/index.m3u8"
+ mock_response.headers = MockHeaders()
+ mock_open.return_value = mock_response
+
+ content, base_uri = client.download("http://example.com/index.m3u8")
+
+ self.assertEqual(content, "playlist content")
+ self.assertEqual(base_uri, "http://example.com/")
+
+ @patch("urllib.request.OpenerDirector.open")
+ def test_download_gzipped_content(self, mock_open):
+ client = DefaultHTTPClient()
+ original_content = "playlist gzipped content"
+ gzipped_content = gzip.compress(original_content.encode("utf-8"))
+ mock_response = Mock(spec=HTTPResponse)
+ mock_response.read.return_value = gzipped_content
+ mock_response.info.return_value = {"Content-Encoding": "gzip"}
+ mock_response.geturl.return_value = "http://example.com/index.m3u8"
+ mock_response.headers = MockHeaders("utf-8")
+ mock_open.return_value = mock_response
+
+ content, base_uri = client.download("http://example.com/index.m3u8")
+
+ self.assertEqual(content, original_content)
+ self.assertEqual(base_uri, "http://example.com/")
+
+ @patch("urllib.request.OpenerDirector.open")
+ def test_download_with_proxy(self, mock_open):
+ client = DefaultHTTPClient(proxies={"http": "http://proxy.example.com"})
+ mock_response = Mock(spec=HTTPResponse)
+ mock_response.read.return_value = b"playlist proxied content"
+ mock_response.info.return_value = {}
+ mock_response.geturl.return_value = "http://example.com/index.m3u8"
+ mock_response.headers = MockHeaders()
+ mock_open.return_value = mock_response
+
+ content, base_uri = client.download("http://example.com/index.m3u8")
+
+ self.assertEqual(content, "playlist proxied content")
+ self.assertEqual(base_uri, "http://example.com/")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 5.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | backports-datetime-fromisoformat==2.0.3
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/globocom/m3u8.git@e537b2ddd960450cbe7b8b45306b5bfb91c5e9e5#egg=m3u8
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
| name: m3u8
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-datetime-fromisoformat==2.0.3
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/m3u8
| [
"tests/test_http_client.py::TestDefaultHTTPClient::test_download_gzipped_content"
] | [] | [
"tests/test_http_client.py::TestDefaultHTTPClient::test_download_normal_content",
"tests/test_http_client.py::TestDefaultHTTPClient::test_download_with_proxy"
] | [] | MIT License | 19,149 | 253 | [
"m3u8/httpclient.py"
] |
|
asottile__babi-350 | ea70fd2eab90a406f582e2bd886a30c9bad46c50 | 2024-08-01 12:59:00 | ea70fd2eab90a406f582e2bd886a30c9bad46c50 | diff --git a/babi/file.py b/babi/file.py
index 852c197..0165154 100644
--- a/babi/file.py
+++ b/babi/file.py
@@ -189,6 +189,11 @@ class _SearchIter:
def __iter__(self) -> _SearchIter:
return self
+ def replaced(self, y: int, match: Match[str], new: str) -> None:
+ if not self.wrapped or y != self._start_y:
+ return
+ self._start_x += len(new) - match.end() - match.start()
+
def _stop_if_past_original(self, y: int, match: Match[str]) -> Found:
if (
self.wrapped and (
@@ -487,6 +492,7 @@ class File:
count += 1
with self.edit_action_context('replace', final=True):
replaced = match.expand(replace)
+ search.replaced(line_y, match, replaced)
line = screen.file.buf[line_y]
if '\n' in replaced:
replaced_lines = replaced.split('\n')
| replace all does not account for line change
The search and replace functionality does not account for the change in line length when replacing all occurrences.
Example:
Write a simple file with the following content:
```
A_A_
```
and have the cursor at the last position.
When replacing every `A` with `XXX` (or anything of length 3 or greater) the following change will be made:
```
XXX_A_
```
The expected behavior would be to have the following result:
```
XXX_XXX_
```
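(Editorial sketch, not from the original report: a replacement that is longer or shorter than the matched text shifts every later match on the same line by the length difference, so a replace-all loop must carry that offset forward. The snippet below is a standalone illustration using only the standard `re` module, not babi's actual code.)
```python
import re

# Replacing "A" with "XXX" shifts every later match on the same line by
# len("XXX") - len("A") = 2, so a running offset has to be applied per match.
text = "A_A_"
offset = 0
for m in list(re.finditer("A", text)):
    start, end = m.start() + offset, m.end() + offset
    text = text[:start] + "XXX" + text[end:]
    offset += len("XXX") - (m.end() - m.start())

assert text == "XXX_XXX_"
```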
| asottile/babi | diff --git a/tests/features/replace_test.py b/tests/features/replace_test.py
index f864450..262dd1a 100644
--- a/tests/features/replace_test.py
+++ b/tests/features/replace_test.py
@@ -239,6 +239,19 @@ def test_replace_multiple_occurrences_in_line(run):
h.await_text('bqbq')
+def test_replace_multiple_occurences_with_line_length_change(run):
+ with run() as h, and_exit(h):
+ h.press('a_a_')
+ h.press('^\\')
+ h.await_text('search (to replace):')
+ h.press_and_enter('a')
+ h.await_text('replace with:')
+ h.press_and_enter('XXX')
+ h.await_text('replace [yes, no, all]?')
+ h.press('a')
+ h.await_text('XXX_XXX_')
+
+
def test_replace_after_wrapping(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press('Down')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/asottile/babi.git@ea70fd2eab90a406f582e2bd886a30c9bad46c50#egg=babi
babi_grammars==0.0.62
cffi==1.17.1
covdefaults==2.3.0
coverage==7.8.0
exceptiongroup==1.2.2
hecate @ git+https://github.com/asottile/hecate@875567f2ca2a58220c4f1f70b0db9a79c018e2ae
identify==2.6.9
iniconfig==2.1.0
onigurumacffi==1.4.1
packaging==24.2
pluggy==1.5.0
pycparser==2.22
pytest==8.3.5
remote-pdb==2.1.0
tomli==2.2.1
| name: babi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- babi-grammars==0.0.62
- cffi==1.17.1
- covdefaults==2.3.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- hecate==0.1.0
- identify==2.6.9
- iniconfig==2.1.0
- onigurumacffi==1.4.1
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pytest==8.3.5
- remote-pdb==2.1.0
- tomli==2.2.1
prefix: /opt/conda/envs/babi
| [
"tests/features/replace_test.py::test_replace_multiple_occurences_with_line_length_change[fake]"
] | [
"tests/features/replace_test.py::test_replace_cancel[tmux-^C]",
"tests/features/replace_test.py::test_replace_cancel[tmux-Enter]",
"tests/features/replace_test.py::test_replace_invalid_regex[tmux]",
"tests/features/replace_test.py::test_replace_invalid_replacement[tmux]",
"tests/features/replace_test.py::test_replace_cancel_at_replace_string[tmux]",
"tests/features/replace_test.py::test_replace_actual_contents[tmux-y]",
"tests/features/replace_test.py::test_replace_actual_contents[tmux-Y]",
"tests/features/replace_test.py::test_replace_sets_x_hint_properly[tmux]",
"tests/features/replace_test.py::test_replace_cancel_at_individual_replace[tmux]",
"tests/features/replace_test.py::test_replace_unknown_characters_at_individual_replace[tmux]",
"tests/features/replace_test.py::test_replace_say_no_to_individual_replace[tmux]",
"tests/features/replace_test.py::test_replace_all[tmux]",
"tests/features/replace_test.py::test_replace_with_empty_string[tmux]",
"tests/features/replace_test.py::test_replace_search_not_found[tmux]",
"tests/features/replace_test.py::test_replace_small_window_size[tmux]",
"tests/features/replace_test.py::test_replace_height_1_highlight[tmux]",
"tests/features/replace_test.py::test_replace_line_goes_off_screen[tmux]",
"tests/features/replace_test.py::test_replace_undo_undoes_only_one[tmux]",
"tests/features/replace_test.py::test_replace_multiple_occurrences_in_line[tmux]",
"tests/features/replace_test.py::test_replace_multiple_occurences_with_line_length_change[tmux]",
"tests/features/replace_test.py::test_replace_after_wrapping[tmux]",
"tests/features/replace_test.py::test_replace_after_cursor_after_wrapping[tmux]",
"tests/features/replace_test.py::test_replace_separate_line_after_wrapping[tmux]",
"tests/features/replace_test.py::test_replace_with_newline_characters[tmux]",
"tests/features/replace_test.py::test_replace_with_multiple_newline_characters[tmux]",
"tests/features/replace_test.py::test_replace_end_of_file[tmux]"
] | [
"tests/features/replace_test.py::test_replace_cancel[fake-^C]",
"tests/features/replace_test.py::test_replace_cancel[fake-Enter]",
"tests/features/replace_test.py::test_replace_invalid_regex[fake]",
"tests/features/replace_test.py::test_replace_invalid_replacement[fake]",
"tests/features/replace_test.py::test_replace_cancel_at_replace_string[fake]",
"tests/features/replace_test.py::test_replace_actual_contents[fake-y]",
"tests/features/replace_test.py::test_replace_actual_contents[fake-Y]",
"tests/features/replace_test.py::test_replace_sets_x_hint_properly[fake]",
"tests/features/replace_test.py::test_replace_cancel_at_individual_replace[fake]",
"tests/features/replace_test.py::test_replace_unknown_characters_at_individual_replace[fake]",
"tests/features/replace_test.py::test_replace_say_no_to_individual_replace[fake]",
"tests/features/replace_test.py::test_replace_all[fake]",
"tests/features/replace_test.py::test_replace_with_empty_string[fake]",
"tests/features/replace_test.py::test_replace_search_not_found[fake]",
"tests/features/replace_test.py::test_replace_small_window_size[fake]",
"tests/features/replace_test.py::test_replace_height_1_highlight[fake]",
"tests/features/replace_test.py::test_replace_line_goes_off_screen[fake]",
"tests/features/replace_test.py::test_replace_undo_undoes_only_one[fake]",
"tests/features/replace_test.py::test_replace_multiple_occurrences_in_line[fake]",
"tests/features/replace_test.py::test_replace_after_wrapping[fake]",
"tests/features/replace_test.py::test_replace_after_cursor_after_wrapping[fake]",
"tests/features/replace_test.py::test_replace_separate_line_after_wrapping[fake]",
"tests/features/replace_test.py::test_replace_with_newline_characters[fake]",
"tests/features/replace_test.py::test_replace_with_multiple_newline_characters[fake]",
"tests/features/replace_test.py::test_replace_end_of_file[fake]"
] | [] | MIT License | 19,162 | 260 | [
"babi/file.py"
] |
|
conan-io__conan-16762 | e1063a152b8a969383f35c122a8b26eca58a0820 | 2024-08-01 14:08:04 | 8ebf1ca548854919398c38d9f15912ad01f3b18a | CLAassistant: [](https://cla-assistant.io/conan-io/conan?pullRequest=16762) <br/>Thank you for your submission! We really appreciate it. Like many open source projects, we ask that you sign our [Contributor License Agreement](https://cla-assistant.io/conan-io/conan?pullRequest=16762) before we can accept your contribution.<br/><hr/>**Peter Würth** seems not to be a GitHub user. You need a GitHub account to be able to sign the CLA. If you have already a GitHub account, please [add the email address used for this commit to your account](https://help.github.com/articles/why-are-my-commits-linked-to-the-wrong-user/#commits-are-not-linked-to-any-user).<br/><sub>You have signed the CLA already but the status is still pending? Let us [recheck](https://cla-assistant.io/check/conan-io/conan?pullRequest=16762) it.</sub> | diff --git a/conan/tools/cmake/presets.py b/conan/tools/cmake/presets.py
index b7e616150..43739a955 100644
--- a/conan/tools/cmake/presets.py
+++ b/conan/tools/cmake/presets.py
@@ -5,7 +5,7 @@ import textwrap
from conan.api.output import ConanOutput, Color
from conan.tools.cmake.layout import get_build_folder_custom_vars
-from conan.tools.cmake.toolchain.blocks import GenericSystemBlock
+from conan.tools.cmake.toolchain.blocks import GenericSystemBlock, CompilersBlock
from conan.tools.cmake.utils import is_multi_configuration
from conan.tools.build import build_jobs
from conan.tools.microsoft import is_msvc
@@ -158,6 +158,13 @@ class _CMakePresets:
"strategy": "external"
}
+ # Set the compiler like in the toolchain. Some IDEs like VS or VSCode require the compiler
+ # being set to cl.exe in order to activate the environment using vcvarsall.bat according to
+ # the toolset and architecture settings.
+ compilers = CompilersBlock.get_compilers(conanfile)
+ for lang, compiler in compilers.items():
+ ret["cacheVariables"][f"CMAKE_{lang}_COMPILER"] = compiler.replace("\\", "/")
+
ret["toolchainFile"] = toolchain_file
if conanfile.build_folder:
# If we are installing a ref: "conan install <ref>", we don't have build_folder, because
diff --git a/conan/tools/cmake/toolchain/blocks.py b/conan/tools/cmake/toolchain/blocks.py
index 425e62b2a..1343865c9 100644
--- a/conan/tools/cmake/toolchain/blocks.py
+++ b/conan/tools/cmake/toolchain/blocks.py
@@ -852,9 +852,13 @@ class CompilersBlock(Block):
""")
def context(self):
+ return {"compilers": self.get_compilers(self._conanfile)}
+
+ @staticmethod
+ def get_compilers(conanfile):
# Reading configuration from "tools.build:compiler_executables" -> {"C": "/usr/bin/gcc"}
- compilers_by_conf = self._conanfile.conf.get("tools.build:compiler_executables", default={},
- check_type=dict)
+ compilers_by_conf = conanfile.conf.get("tools.build:compiler_executables", default={},
+ check_type=dict)
# Map the possible languages
compilers = {}
# Allowed <LANG> variables (and <LANG>_LAUNCHER)
@@ -865,7 +869,7 @@ class CompilersBlock(Block):
# To set CMAKE_<LANG>_COMPILER
if comp in compilers_by_conf:
compilers[lang] = compilers_by_conf[comp]
- return {"compilers": compilers}
+ return compilers
class GenericSystemBlock(Block):
diff --git a/conans/model/requires.py b/conans/model/requires.py
index 23987d0dc..b1974a7dc 100644
--- a/conans/model/requires.py
+++ b/conans/model/requires.py
@@ -270,15 +270,18 @@ class Requirement:
if require.build: # public!
# TODO: To discuss if this way of conflicting build_requires is actually useful or not
+ # Build-requires will propagate its main trait for running exes/shared to downstream
+ # consumers so run=require.run, irrespective of the 'self.run' trait
downstream_require = Requirement(require.ref, headers=False, libs=False, build=True,
- run=False, visible=self.visible, direct=False)
+ run=require.run, visible=self.visible, direct=False)
return downstream_require
if self.build: # Build-requires
# If the above is shared or the requirement is explicit run=True
+ # visible=self.visible will further propagate it downstream
if dep_pkg_type is PackageType.SHARED or require.run:
downstream_require = Requirement(require.ref, headers=False, libs=False, build=True,
- run=True, visible=False, direct=False)
+ run=True, visible=self.visible, direct=False)
return downstream_require
return
| [feature] CMakePresets support for msvc toolset version
### What is your suggestion?
## Suggestion
Would it be possible for the Conan generated CMakePresets.json to include `msvc.toolset` version details (i.e. `v143`, `v142`, etc.) when populating the `toolset.value` field?
This would allow IDEs to know which `vcvarsall.bat` environment to use when building the project.
## Expected Behavior
For example, when cross-compiling with VS2022 x64 to x86, I'd expect to see the conan generated CMakePresets.json look something like:
```diff
"generator": "Ninja",
"cacheVariables": {
"CMAKE_POLICY_DEFAULT_CMP0091": "NEW",
"CMAKE_BUILD_TYPE": "RelWithDebInfo"
},
"architecture": {
"value": "x86",
"strategy": "external"
},
+"toolset": {
+ "value": "v143,host=x64",
+ "strategy": "external"
+},
"toolchainFile": "C:\\projects\\build\\foo\\RelWithDebInfo\\generators\\conan_toolchain.cmake",
"binaryDir": "C:\\projects\\build\\foo\\RelWithDebInfo
```
_I can use `tools.cmake.cmaketoolchain:toolset_arch=x64` to add the "host=x64" portion, but I don't have any way to get `v143` added._
VSCode recognizes this `toolset` / `architecture` combination and correctly chooses VS2022 and calls `vcvarsall.bat amd64_x86` prior to calling CMake.
## Current Behavior
Conan *doesn't* include `toolset` details in the generated CMakePresets.json file, so in order to get IDEs like VSCode to find the right environment, we need to adjust CMakePresets.json (as described above) or, worse, adjust the CMakeUserPresets.json file as follows:
```diff
{
"version": 4,
"vendor": {
"conan": {}
},
"include": [
"C:\\path\\to\\conan\\CMakePresets.json"
- ]
+ ],
+ "configurePresets": [
+ {
+ "name": "RelWithDebInfo",
+ "displayName": "RelWithDebInfo",
+ "inherits": "relwithdebinfo",
+ "toolset": {
+ "value": "v143,host=x64",
+ "strategy": "external"
+ }
+ ],
+ "buildPresets": [
+ {
+ "name": "RelWithDebInfo",
+ "displayName": "RelWithDebInfo",
+ "configurePreset": "RelWithDebInfo",
+ "configuration": "RelWithDebInfo"
+ }
+ ]
}
```
Conan already knows *which* vcvarsall to call, and ensures the correct one gets called in the generated `conanvcvars.bat`.
Conan also appears to know about `msvc.toolset` version information:
https://github.com/conan-io/conan/blob/e33b55a486fd208223524dda49cce02dbe70c214/conans/client/conf/__init__.py#L117
_At least for the legacy toolsets_
And seems to know about the mapping:
https://github.com/conan-io/conan/blob/e0a8ee058bc8dc2d9811b8aeb6999f69aeb78d85/conan/tools/microsoft/visual.py#L53-L60
It seems like Conan should be able to use the `msvc.toolset` information from the build profile and populate the version field correctly.
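(Editorial sketch: the mapping below from the `msvc` `compiler.version` setting to a VS toolset name is an assumption inferred from the `visual.py` lines linked above, and `preset_toolset` is a hypothetical helper for illustration, not an existing Conan API.)
```python
# Assumed msvc compiler.version -> VS toolset mapping (mirrors the visual.py
# table linked above; treat it as illustrative rather than authoritative).
MSVC_TO_TOOLSET = {"190": "v140", "191": "v141", "192": "v142", "193": "v143"}

def preset_toolset(compiler_version: str, host_arch: str = "x64") -> dict:
    # Build the "toolset" entry that IDEs such as VSCode read from CMakePresets.json.
    return {
        "value": f"{MSVC_TO_TOOLSET[compiler_version]},host={host_arch}",
        "strategy": "external",
    }

assert preset_toolset("193") == {"value": "v143,host=x64", "strategy": "external"}
```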
## Context
Follow up to https://github.com/conan-io/conan/issues/11623
Some IDEs are capable of targeting multiple toolsets (e.g. VS2015, VS2019, VS2022), and `toolset.value` is the way for the IDE to know which `vcvarsall.bat` to load and use internally while compiling.
> It's the same step that Visual Studio takes for you when the IDE invokes CMake. **Visual Studio parses the active Configure Preset for the host and target architecture specified by toolset and architecture. Visual Studio then sources the specified environment from vcvarsall.bat.** When you build from the Windows command line with Ninja, you'll need to take this step yourself.
_Excerpt from MSDN [Sourcing the environment when building with command-line generators on Windows](https://learn.microsoft.com/en-us/cpp/build/cmake-presets-vs?view=msvc-170#sourcing-the-environment-when-building-with-command-line-generators-on-windows)_
IDE | Support
----|----------
VSCode | [Already Supported](https://github.com/microsoft/vscode-cmake-tools/search?q=varsForVSInstallation) <br> https://github.com/microsoft/vscode-cmake-tools/pull/2524
QtCreator | [In Progress](https://codereview.qt-project.org/c/qt-creator/qt-creator/+/457588/8/src/plugins/cmakeprojectmanager/cmakeprojectimporter.cpp)<br> [QTCREATOR-BUG 28693](https://bugreports.qt.io/browse/QTCREATORBUG-28693)
CLion | [Feature Request](https://youtrack.jetbrains.com/issue/CPP-31353/Support-external-strategy-of-toolset-and-architecture-fields-in-CMakePresets.json-for-MSVC-compilers.)
Thanks for your support!
### Have you read the CONTRIBUTING guide?
- [X] I've read the CONTRIBUTING guide | conan-io/conan | diff --git a/test/integration/graph/core/test_build_requires.py b/test/integration/graph/core/test_build_requires.py
index acf8c40e2..7e396044b 100644
--- a/test/integration/graph/core/test_build_requires.py
+++ b/test/integration/graph/core/test_build_requires.py
@@ -639,7 +639,89 @@ class PublicBuildRequiresTest(GraphManagerTest):
# node, include, link, build, run
_check_transitive(lib, [(cmake, False, False, True, True)])
_check_transitive(app, [(lib, True, True, False, False),
- (cmake, False, False, True, False)])
+ (cmake, False, False, True, True)])
+
+ def test_deep_dependency_tree(self):
+ # app -> liba -> libb-(br public) -> sfun -> libsfun -> libx -> liby -> libz
+ # -(normal req) -> libsfun -> libx -> liby -> libz
+ self.recipe_conanfile("libz/0.1", GenConanfile())
+ self.recipe_conanfile("liby/0.1", GenConanfile().with_requirement("libz/0.1", run=True))
+ self.recipe_conanfile("libx/0.1", GenConanfile().with_requirement("liby/0.1", run=True))
+ self.recipe_conanfile("libsfun/0.1", GenConanfile().with_requirement("libx/0.1", run=True))
+ self.recipe_conanfile("sfun/0.1", GenConanfile().with_requirement("libsfun/0.1", run=True))
+ self.recipe_conanfile("libb/0.1", GenConanfile()
+ .with_tool_requirement("sfun/0.1", visible=True)
+ .with_requirement("libsfun/0.1", run=True))
+ self.recipe_conanfile("liba/0.1", GenConanfile().with_requirement("libb/0.1", run=True))
+ deps_graph = self.build_graph(GenConanfile("app", "0.1").with_requirement("liba/0.1", run=True))
+
+ # Build requires always apply to the consumer
+ self.assertEqual(8 + 4, len(deps_graph.nodes))
+ app = deps_graph.root
+ liba = app.dependencies[0].dst
+ libb = liba.dependencies[0].dst
+ libsfun = libb.dependencies[0].dst
+ libx = libsfun.dependencies[0].dst
+ liby = libx.dependencies[0].dst
+ libz = liby.dependencies[0].dst
+ sfun = libb.dependencies[1].dst
+ libsfun_build = sfun.dependencies[0].dst
+ libx_build = libsfun_build.dependencies[0].dst
+ liby_build = libx_build.dependencies[0].dst
+ libz_build = liby_build.dependencies[0].dst
+
+ # TODO non-build-requires
+
+ self._check_node(app, "app/0.1@", deps=[liba], dependents=[])
+ self._check_node(liba, "liba/0.1#123", deps=[libb], dependents=[app])
+ self._check_node(libb, "libb/0.1#123", deps=[sfun, libsfun], dependents=[liba])
+ self._check_node(sfun, "sfun/0.1#123", deps=[libsfun_build], dependents=[libb])
+ self._check_node(libsfun_build, "libsfun/0.1#123", deps=[libx_build], dependents=[sfun])
+ self._check_node(libx_build, "libx/0.1#123", deps=[liby_build], dependents=[libsfun_build])
+ self._check_node(liby_build, "liby/0.1#123", deps=[libz_build], dependents=[libx_build])
+ self._check_node(libz_build, "libz/0.1#123", deps=[], dependents=[liby_build])
+
+ # node, include, link, build, run
+ _check_transitive(liby_build, [(libz_build, True, True, False, True)])
+ _check_transitive(libx_build, [(liby_build, True, True, False, True),
+ (libz_build, True, True, False, True)])
+ _check_transitive(libsfun_build, [(libx_build, True, True, False, True),
+ (liby_build, True, True, False, True),
+ (libz_build, True, True, False, True)])
+ _check_transitive(sfun, [(libsfun_build, True, True, False, True),
+ (libx_build, True, True, False, True),
+ (liby_build, True, True, False, True),
+ (libz_build, True, True, False, True)])
+ _check_transitive(libb, [(libsfun, True, True, False, True),
+ (libx, True, True, False, True),
+ (liby, True, True, False, True),
+ (libz, True, True, False, True),
+ (sfun, False, False, True, True),
+ (libsfun_build, False, False, True, True),
+ (libx_build, False, False, True, True),
+ (liby_build, False, False, True, True),
+ (libz_build, False, False, True, True)])
+ _check_transitive(liba, [(libb, True, True, False, True),
+ (libsfun, True, True, False, True),
+ (libx, True, True, False, True),
+ (liby, True, True, False, True),
+ (libz, True, True, False, True),
+ (sfun, False, False, True, True),
+ (libsfun_build, False, False, True, True),
+ (libx_build, False, False, True, True),
+ (liby_build, False, False, True, True),
+ (libz_build, False, False, True, True)])
+ _check_transitive(app, [(liba, True, True, False, True),
+ (libb, True, True, False, True),
+ (libsfun, True, True, False, True),
+ (libx, True, True, False, True),
+ (liby, True, True, False, True),
+ (libz, True, True, False, True),
+ (sfun, False, False, True, True),
+ (libsfun_build, False, False, True, True),
+ (libx_build, False, False, True, True),
+ (liby_build, False, False, True, True),
+ (libz_build, False, False, True, True)])
def test_conflict_diamond(self):
# app -> libb -(br public)-> cmake/0.1
@@ -668,6 +750,40 @@ class PublicBuildRequiresTest(GraphManagerTest):
self._check_node(libb, "libb/0.1#123", deps=[cmake1], dependents=[app])
self._check_node(cmake1, "cmake/0.1#123", deps=[], dependents=[libb])
+ def test_conflict_diamond_two_levels(self):
+ # app -> libd -> libb -(br public)-> cmake/0.1
+ # \--> libe -> libc -(br public)-> cmake/0.2
+ self.recipe_conanfile("cmake/0.1", GenConanfile())
+ self.recipe_conanfile("cmake/0.2", GenConanfile())
+ self.recipe_conanfile("libb/0.1",
+ GenConanfile().with_tool_requirement("cmake/0.1", visible=True))
+ self.recipe_conanfile("libc/0.1",
+ GenConanfile().with_tool_requirement("cmake/0.2", visible=True))
+ self.recipe_conanfile("libd/0.1", GenConanfile().with_requires("libb/0.1"))
+ self.recipe_conanfile("libe/0.1", GenConanfile().with_requires("libc/0.1"))
+
+ deps_graph = self.build_graph(GenConanfile("app", "0.1").with_requires("libd/0.1",
+ "libe/0.1"),
+ install=False)
+
+ assert type(deps_graph.error) == GraphConflictError
+
+ # Build requires always apply to the consumer
+ self.assertEqual(6, len(deps_graph.nodes))
+ app = deps_graph.root
+ libd = app.dependencies[0].dst
+ libe = app.dependencies[1].dst
+ libb = libd.dependencies[0].dst
+ libc = libe.dependencies[0].dst
+ cmake1 = libb.dependencies[0].dst
+
+ self._check_node(app, "app/0.1@", deps=[libd, libe], dependents=[])
+ self._check_node(libd, "libd/0.1#123", deps=[libb], dependents=[app])
+ self._check_node(libe, "libe/0.1#123", deps=[libc], dependents=[app])
+ self._check_node(libb, "libb/0.1#123", deps=[cmake1], dependents=[libd])
+ self._check_node(libc, "libc/0.1#123", deps=[], dependents=[libe])
+ self._check_node(cmake1, "cmake/0.1#123", deps=[], dependents=[libb])
+
def test_tool_requires(self):
# app -> libb -(br public)-> protobuf/0.1
# \--------------> protobuf/0.2
@@ -695,7 +811,7 @@ class PublicBuildRequiresTest(GraphManagerTest):
# node, headers, lib, build, run
_check_transitive(app, [(libb, True, True, False, False),
(protobuf_host, True, True, False, False),
- (protobuf_build, False, False, True, False)])
+ (protobuf_build, False, False, True, True)])
def test_tool_requires_override(self):
# app -> libb -(br public)-> protobuf/0.1
@@ -726,7 +842,7 @@ class PublicBuildRequiresTest(GraphManagerTest):
# node, headers, lib, build, run
_check_transitive(app, [(libb, True, True, False, False),
(protobuf_host, True, True, False, False),
- (protobuf_build, False, False, True, False)])
+ (protobuf_build, False, False, True, True)])
_check_transitive(libb, [(protobuf_host, True, True, False, False),
(protobuf_build, False, False, True, True)])
diff --git a/test/integration/toolchains/cmake/test_cmaketoolchain.py b/test/integration/toolchains/cmake/test_cmaketoolchain.py
index 38ac9049b..c174193ec 100644
--- a/test/integration/toolchains/cmake/test_cmaketoolchain.py
+++ b/test/integration/toolchains/cmake/test_cmaketoolchain.py
@@ -1046,6 +1046,30 @@ def test_set_cmake_lang_compilers_and_launchers():
assert 'set(CMAKE_RC_COMPILER "C:/local/rc.exe")' in toolchain
+def test_cmake_presets_compiler():
+ profile = textwrap.dedent(r"""
+ [settings]
+ os=Windows
+ arch=x86_64
+ compiler=msvc
+ compiler.version=193
+ compiler.runtime=dynamic
+ [conf]
+ tools.build:compiler_executables={"c": "cl", "cpp": "cl.exe", "rc": "C:\\local\\rc.exe"}
+ """)
+ client = TestClient()
+ conanfile = GenConanfile().with_settings("os", "arch", "compiler")\
+ .with_generator("CMakeToolchain")
+ client.save({"conanfile.py": conanfile,
+ "profile": profile})
+ client.run("install . -pr:b profile -pr:h profile")
+ presets = json.loads(client.load("CMakePresets.json"))
+ cache_variables = presets["configurePresets"][0]["cacheVariables"]
+ assert cache_variables["CMAKE_C_COMPILER"] == "cl"
+ assert cache_variables["CMAKE_CXX_COMPILER"] == "cl.exe"
+ assert cache_variables["CMAKE_RC_COMPILER"] == "C:/local/rc.exe"
+
+
def test_cmake_layout_toolchain_folder():
""" in single-config generators, the toolchain is a different file per configuration
https://github.com/conan-io/conan/issues/12827
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 3
} | 1.65 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"export PYTHONPATH=$PYTHONPATH:$(pwd)"
],
"python": "3.9",
"reqs_path": [
"conans/requirements.txt",
"conans/requirements_server.txt",
"conans/requirements_dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | beautifulsoup4==4.13.3
bottle==0.12.25
certifi==2025.1.31
charset-normalizer==3.4.1
colorama==0.4.6
conan==2.7.0.dev0
distro==1.8.0
docker==7.1.0
exceptiongroup==1.2.2
execnet==2.1.1
fasteners==0.19
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==1.3.0
packaging==24.2
parameterized==0.9.0
patch-ng==1.18.1
pbr==6.1.1
pluggy==1.5.0
pluginbase==1.0.1
PyJWT==2.10.1
pytest==7.4.4
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
six==1.17.0
soupsieve==2.6
tomli==2.2.1
typing_extensions==4.13.0
urllib3==1.26.20
waitress==3.0.2
WebOb==1.8.9
WebTest==2.0.35
| name: conan
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.13.3
- bottle==0.12.25
- certifi==2025.1.31
- charset-normalizer==3.4.1
- colorama==0.4.6
- conan==2.7.0.dev0
- distro==1.8.0
- docker==7.1.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- fasteners==0.19
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==1.3.0
- packaging==24.2
- parameterized==0.9.0
- patch-ng==1.18.1
- pbr==6.1.1
- pluggy==1.5.0
- pluginbase==1.0.1
- pyjwt==2.10.1
- pytest==7.4.4
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- six==1.17.0
- soupsieve==2.6
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==1.26.20
- waitress==3.0.2
- webob==1.8.9
- webtest==2.0.35
prefix: /opt/conda/envs/conan
| [
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_deep_dependency_tree",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_simple",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_tool_requires",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_tool_requires_override",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_compiler"
] | [] | [
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_basic_0_recipe",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_basic_1_profile",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_bootstrap",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_private",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_transitive_0_shared",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_transitive_1_static",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_transitive_2_notrun",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_build_require_transitive_3_run",
"test/integration/graph/core/test_build_requires.py::BuildRequiresGraphTest::test_lib_build_require",
"test/integration/graph/core/test_build_requires.py::TestBuildRequiresTransitivityDiamond::test_build_require_conflict",
"test/integration/graph/core/test_build_requires.py::TestBuildRequiresTransitivityDiamond::test_build_require_transitive_shared",
"test/integration/graph/core/test_build_requires.py::TestBuildRequiresTransitivityDiamond::test_build_require_transitive_static",
"test/integration/graph/core/test_build_requires.py::TestBuildRequiresVisible::test_visible_build",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_basic",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_lib_build_require",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_lib_build_require_transitive",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_test_require_loop",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_test_require_transitive_0_shared",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_test_require_transitive_1_static",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_test_require_transitive_2_notrun",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_test_require_transitive_3_run",
"test/integration/graph/core/test_build_requires.py::TestTestRequire::test_trait_aggregated",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_conflict_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_conflict_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_hybrid_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_hybrid_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_hybrid_conflict_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_fixed_versions_hybrid_conflict_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_conflict_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_conflict_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_hybrid_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_hybrid_1",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_hybrid_conflict_0",
"test/integration/graph/core/test_build_requires.py::TestTestRequiresProblemsShared::test_version_ranges_hybrid_conflict_1",
"test/integration/graph/core/test_build_requires.py::BuildRequiresPackageIDTest::test_default_no_affect",
"test/integration/graph/core/test_build_requires.py::BuildRequiresPackageIDTest::test_minor_mode",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_conflict_diamond",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_conflict_diamond_two_levels",
"test/integration/graph/core/test_build_requires.py::PublicBuildRequiresTest::test_test_require",
"test/integration/graph/core/test_build_requires.py::TestLoops::test_direct_loop_error",
"test/integration/graph/core/test_build_requires.py::TestLoops::test_indirect_loop_error",
"test/integration/graph/core/test_build_requires.py::TestLoops::test_infinite_recursion_test",
"test/integration/graph/core/test_build_requires.py::test_tool_requires",
"test/integration/graph/core/test_build_requires.py::TestDuplicateBuildRequires::test_tool_requires_in_test_package",
"test/integration/graph/core/test_build_requires.py::TestDuplicateBuildRequires::test_test_requires_in_test_package",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_build",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_build_linux_to_macos",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_build_user_toolchain",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_build_user_toolchain_confs",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_no_cross_build",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_arch",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_no_cross_build_arch",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cross_build_conf",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_find_builddirs",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_runtime_lib_dirs_single_conf",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_runtime_lib_dirs_multiconf",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_extra_flags_via_conf",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_binary_dir_available",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_shared_preset[CMakePresets.json]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_shared_preset[CMakeUserPresets.json]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_multiconfig",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_presets_singleconfig",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_toolchain_cache_variables",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_variables_types",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_c_library",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_flag[True]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_flag[False]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_flag[None]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_with_compileflags[True]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_with_compileflags[False]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_android_legacy_toolchain_with_compileflags[None]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_ninja_msvc[x86-x86_64]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_ninja_msvc[x86_64-x86_64]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_ninja_msvc[x86-x86]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_ninja_msvc[x86_64-x86]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_pkg_config_block",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_user_presets_custom_location[subproject]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_user_presets_custom_location[False]",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_set_cmake_lang_compilers_and_launchers",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_cmake_layout_toolchain_folder",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_build_folder_vars_editables",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_set_linker_scripts",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_test_package_layout",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_not_found_error_msg",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_recipe_build_folders_vars",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_build_folder_vars_self_name_version",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_build_folder_vars_constants_user",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_extra_flags",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_avoid_ovewrite_user_cmakepresets",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_presets_njobs",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_add_cmakeexe_to_presets",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_toolchain_ends_newline",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_toolchain_and_compilers_build_context",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_toolchain_keep_absolute_paths",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_output_dirs_gnudirs_local_default",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_output_dirs_gnudirs_local_custom",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_toolchain_extra_variables",
"test/integration/toolchains/cmake/test_cmaketoolchain.py::test_variables_wrong_scaping"
] | [] | MIT License | 19,164 | 982 | [
"conan/tools/cmake/presets.py",
"conan/tools/cmake/toolchain/blocks.py",
"conans/model/requires.py"
] |
iterative__datachain-225 | 7bb9baedc586d2e635893d87555fe069b6fa0662 | 2024-08-02 01:24:41 | 1e5178be8c196b21bcd34604c7993433b10d9cde | cloudflare-pages[bot]: ## Deploying datachain-documentation with <a href="https://pages.dev"><img alt="Cloudflare Pages" src="https://user-images.githubusercontent.com/23264/106598434-9e719e00-654f-11eb-9e59-6167043cfa01.png" width="16"></a> Cloudflare Pages
<table><tr><td><strong>Latest commit:</strong> </td><td>
<code>f3bf112</code>
</td></tr>
<tr><td><strong>Status:</strong></td><td> ✅ Deploy successful!</td></tr>
<tr><td><strong>Preview URL:</strong></td><td>
<a href='https://33809ae1.datachain-documentation.pages.dev'>https://33809ae1.datachain-documentation.pages.dev</a>
</td></tr>
<tr><td><strong>Branch Preview URL:</strong></td><td>
<a href='https://fix-show-limit.datachain-documentation.pages.dev'>https://fix-show-limit.datachain-documentation.pages.dev</a>
</td></tr>
</table>
[View logs](https://dash.cloudflare.com/?to=/:account/pages/view/datachain-documentation/33809ae1-69af-4f51-83bd-000dfb4d245f)
| diff --git a/src/datachain/lib/dc.py b/src/datachain/lib/dc.py
index cae56b5..a91e2fa 100644
--- a/src/datachain/lib/dc.py
+++ b/src/datachain/lib/dc.py
@@ -193,8 +193,6 @@ class DataChain(DatasetQuery):
```
"""
- max_row_count: Optional[int] = None
-
DEFAULT_FILE_RECORD: ClassVar[dict] = {
"source": "",
"name": "",
@@ -1603,18 +1601,7 @@ class DataChain(DatasetQuery):
@detach
def limit(self, n: int) -> "Self":
"""Return the first n rows of the chain."""
- n = max(n, 0)
-
- if self.max_row_count is None:
- self.max_row_count = n
- return super().limit(n)
-
- limit = min(n, self.max_row_count)
- if limit == self.max_row_count:
- return self
-
- self.max_row_count = limit
- return super().limit(self.max_row_count)
+ return super().limit(n)
@detach
def offset(self, offset: int) -> "Self":
diff --git a/src/datachain/query/dataset.py b/src/datachain/query/dataset.py
index 9a05044..5407119 100644
--- a/src/datachain/query/dataset.py
+++ b/src/datachain/query/dataset.py
@@ -1383,6 +1383,9 @@ class DatasetQuery:
@detach
def limit(self, n: int) -> "Self":
query = self.clone(new_table=False)
+ for step in query.steps:
+ if isinstance(step, SQLLimit) and step.n < n:
+ return query
query.steps.append(SQLLimit(n))
return query
| `limit()` ignored after `show()`
In some cases, `.limit()` seems to have no effect. Here's a reproducer:
```python
from datachain import DataChain
img_dc = DataChain.from_storage("gs://datachain-demo/newyorker_caption_contest/images")
img_dc.show(3)
assert img_dc.limit(10).count() == 10
```
Commenting out `img_dc.show(3)` makes the assert succeed as expected.
Note that I've only been able to reproduce the failure with a real bucket, but not in tests.
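(Editorial sketch of the intended semantics: chained `limit()` calls should keep the smallest limit, and calling `limit()` or `show()` should never mutate the chain they were called on. `ToyChain` below is a made-up model for illustration, not DataChain's real implementation.)
```python
# Toy model of the desired behaviour: limit() returns a new object, the
# smallest limit wins, and the original chain is never mutated.
class ToyChain:
    def __init__(self, rows, cap=None):
        self.rows = list(rows)
        self.cap = cap

    def limit(self, n):
        cap = n if self.cap is None else min(self.cap, n)
        return ToyChain(self.rows, cap)

    def count(self):
        return len(self.rows) if self.cap is None else min(len(self.rows), self.cap)

dc = ToyChain(range(5))
assert dc.limit(3).limit(2).count() == 2
assert dc.limit(2).limit(3).count() == 2
dc.limit(1).count()               # emulates show(1): a throwaway, limited view
assert dc.limit(4).count() == 4   # later limits still work on the original chain
assert dc.count() == 5
```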
| iterative/datachain | diff --git a/tests/unit/lib/test_datachain.py b/tests/unit/lib/test_datachain.py
index d7cc1de..2db6f1f 100644
--- a/tests/unit/lib/test_datachain.py
+++ b/tests/unit/lib/test_datachain.py
@@ -1228,3 +1228,33 @@ def test_custom_model_with_nested_lists():
traces_double=[[{"x": 0.5, "y": 0.5}], [{"x": 0.5, "y": 0.5}]],
)
]
+
+
+def test_min_limit():
+ dc = DataChain.from_values(a=[1, 2, 3, 4, 5])
+ assert dc.count() == 5
+ assert dc.limit(4).count() == 4
+ assert dc.count() == 5
+ assert dc.limit(1).count() == 1
+ assert dc.count() == 5
+ assert dc.limit(2).limit(3).count() == 2
+ assert dc.count() == 5
+ assert dc.limit(3).limit(2).count() == 2
+ assert dc.count() == 5
+
+
+def test_show_limit():
+ dc = DataChain.from_values(a=[1, 2, 3, 4, 5])
+ assert dc.count() == 5
+ assert dc.limit(4).count() == 4
+ dc.show(1)
+ assert dc.count() == 5
+ assert dc.limit(1).count() == 1
+ dc.show(1)
+ assert dc.count() == 5
+ assert dc.limit(2).limit(3).count() == 2
+ dc.show(1)
+ assert dc.count() == 5
+ assert dc.limit(3).limit(2).count() == 2
+ dc.show(1)
+ assert dc.count() == 5
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adlfs==2024.12.0
aiobotocore==2.21.1
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aioitertools==0.12.0
aiosignal==1.3.2
aiotools==1.8.2
annotated-types==0.7.0
antlr4-python3-runtime==4.13.2
argcomplete==3.6.1
async-timeout==5.0.1
attrs==25.3.0
aws-sam-translator==1.95.0
aws-xray-sdk==2.14.0
azure-core==1.32.0
azure-datalake-store==0.0.53
azure-identity==1.21.0
azure-storage-blob==12.25.1
babel==2.17.0
backrefs==5.8
black==25.1.0
blinker==1.9.0
boto3==1.37.1
botocore==1.37.1
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
cfn-lint==1.32.1
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==3.1.1
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
-e git+https://github.com/iterative/datachain.git@7bb9baedc586d2e635893d87555fe069b6fa0662#egg=datachain
datamodel-code-generator==0.28.5
decorator==5.2.1
dictdiffer==0.9.0
dill==0.3.8
diskcache==5.6.3
distlib==0.3.9
docker==7.1.0
dulwich==0.22.8
dvc-data==3.16.9
dvc-objects==5.1.0
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
Flask==3.1.0
flask-cors==5.0.1
frozenlist==1.5.0
fsspec==2025.3.2
ftfy==6.3.1
funcy==2.0
gcsfs==2025.3.2
genson==1.3.0
ghp-import==2.1.0
google-api-core==2.24.2
google-auth==2.38.0
google-auth-oauthlib==1.2.1
google-cloud-core==2.4.3
google-cloud-storage==3.1.0
google-crc32c==1.7.1
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
graphql-core==3.2.6
greenlet==3.1.1
griffe==1.7.1
huggingface-hub==0.30.1
hypothesis==6.130.6
idna==3.10
importlib_metadata==8.6.1
inflect==5.6.2
iniconfig==2.1.0
isodate==0.7.2
isort==6.0.1
itsdangerous==2.2.0
Jinja2==3.1.6
jmespath==1.0.1
joserfc==1.0.4
jsonpatch==1.33
jsonpath-ng==1.7.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-path==0.3.4
jsonschema-specifications==2024.10.1
lazy-object-proxy==1.10.0
lz4==4.4.3
Markdown==3.7
MarkupSafe==3.0.2
mergedeep==1.3.4
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-gen-files==0.5.0
mkdocs-get-deps==0.2.0
mkdocs-literate-nav==0.6.2
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocs-section-index==0.3.9
mkdocstrings==0.29.1
mkdocstrings-python==1.16.8
moto==5.1.2
mpmath==1.3.0
msal==1.32.0
msal-extensions==1.3.1
msgpack==1.1.0
multidict==6.2.0
multiprocess==0.70.16
mypy==1.10.1
mypy-extensions==1.0.0
networkx==3.2.1
numpy==2.0.2
nvidia-cublas-cu12==12.4.5.8
nvidia-cuda-cupti-cu12==12.4.127
nvidia-cuda-nvrtc-cu12==12.4.127
nvidia-cuda-runtime-cu12==12.4.127
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.2.1.3
nvidia-curand-cu12==10.3.5.147
nvidia-cusolver-cu12==11.6.1.9
nvidia-cusparse-cu12==12.3.1.170
nvidia-cusparselt-cu12==0.6.2
nvidia-nccl-cu12==2.21.5
nvidia-nvjitlink-cu12==12.4.127
nvidia-nvtx-cu12==12.4.127
oauthlib==3.2.2
open_clip_torch==2.31.0
openapi-schema-validator==0.6.3
openapi-spec-validator==0.7.1
orjson==3.10.16
packaging==24.2
paginate==0.5.7
pandas==2.2.3
pathable==0.4.4
pathspec==0.12.1
pillow==10.4.0
platformdirs==4.3.7
pluggy==1.5.0
ply==3.11
propcache==0.3.1
proto-plus==1.26.1
protobuf==6.30.2
py-cpuinfo==9.0.0
py-partiql-parser==0.6.1
pyarrow==19.0.1
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pygal==3.0.5
pygaljs==1.0.2
Pygments==2.19.1
pygtrie==2.5.0
PyJWT==2.10.1
pymdown-extensions==10.14.3
pyparsing==3.2.3
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-servers==0.5.10
pytest-sugar==1.0.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyyaml_env_tag==0.1
referencing==0.36.2
regex==2024.11.6
requests==2.32.3
requests-mock==1.12.1
requests-oauthlib==2.0.0
responses==0.25.7
rfc3339-validator==0.1.4
rpds-py==0.24.0
rsa==4.9
s3fs==2025.3.2
s3transfer==0.11.3
safetensors==0.5.3
shtab==1.7.1
simsimd==6.2.1
six==1.17.0
sortedcontainers==2.4.0
SQLAlchemy==2.0.40
sqltrie==0.11.2
sympy==1.13.1
termcolor==3.0.0
timm==1.0.15
tokenizers==0.21.1
tomli==2.2.1
tomlkit==0.13.2
torch==2.6.0
torchvision==0.21.0
tqdm==4.67.1
transformers==4.50.3
triton==3.2.0
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
types-PyYAML==6.0.12.20250326
types-requests==2.31.0.6
types-urllib3==1.26.25.14
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
universal_pathlib==0.2.6
urllib3==1.26.20
usearch==2.16.9
virtualenv==20.29.3
watchdog==6.0.0
wcwidth==0.2.13
Werkzeug==3.1.3
wrapt==1.17.2
xmltodict==0.14.2
yarl==1.18.3
zipp==3.21.0
| name: datachain
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adlfs==2024.12.0
- aiobotocore==2.21.1
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aioitertools==0.12.0
- aiosignal==1.3.2
- aiotools==1.8.2
- annotated-types==0.7.0
- antlr4-python3-runtime==4.13.2
- argcomplete==3.6.1
- async-timeout==5.0.1
- attrs==25.3.0
- aws-sam-translator==1.95.0
- aws-xray-sdk==2.14.0
- azure-core==1.32.0
- azure-datalake-store==0.0.53
- azure-identity==1.21.0
- azure-storage-blob==12.25.1
- babel==2.17.0
- backrefs==5.8
- black==25.1.0
- blinker==1.9.0
- boto3==1.37.1
- botocore==1.37.1
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- cfn-lint==1.32.1
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==3.1.1
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- datachain==0.2.16.dev3+g7bb9bae
- datamodel-code-generator==0.28.5
- decorator==5.2.1
- dictdiffer==0.9.0
- dill==0.3.8
- diskcache==5.6.3
- distlib==0.3.9
- docker==7.1.0
- dulwich==0.22.8
- dvc-data==3.16.9
- dvc-objects==5.1.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- flask==3.1.0
- flask-cors==5.0.1
- frozenlist==1.5.0
- fsspec==2025.3.2
- ftfy==6.3.1
- funcy==2.0
- gcsfs==2025.3.2
- genson==1.3.0
- ghp-import==2.1.0
- google-api-core==2.24.2
- google-auth==2.38.0
- google-auth-oauthlib==1.2.1
- google-cloud-core==2.4.3
- google-cloud-storage==3.1.0
- google-crc32c==1.7.1
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- graphql-core==3.2.6
- greenlet==3.1.1
- griffe==1.7.1
- huggingface-hub==0.30.1
- hypothesis==6.130.6
- idna==3.10
- importlib-metadata==8.6.1
- inflect==5.6.2
- iniconfig==2.1.0
- isodate==0.7.2
- isort==6.0.1
- itsdangerous==2.2.0
- jinja2==3.1.6
- jmespath==1.0.1
- joserfc==1.0.4
- jsonpatch==1.33
- jsonpath-ng==1.7.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-path==0.3.4
- jsonschema-specifications==2024.10.1
- lazy-object-proxy==1.10.0
- lz4==4.4.3
- markdown==3.7
- markupsafe==3.0.2
- mergedeep==1.3.4
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-gen-files==0.5.0
- mkdocs-get-deps==0.2.0
- mkdocs-literate-nav==0.6.2
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocs-section-index==0.3.9
- mkdocstrings==0.29.1
- mkdocstrings-python==1.16.8
- moto==5.1.2
- mpmath==1.3.0
- msal==1.32.0
- msal-extensions==1.3.1
- msgpack==1.1.0
- multidict==6.2.0
- multiprocess==0.70.16
- mypy==1.10.1
- mypy-extensions==1.0.0
- networkx==3.2.1
- numpy==2.0.2
- nvidia-cublas-cu12==12.4.5.8
- nvidia-cuda-cupti-cu12==12.4.127
- nvidia-cuda-nvrtc-cu12==12.4.127
- nvidia-cuda-runtime-cu12==12.4.127
- nvidia-cudnn-cu12==9.1.0.70
- nvidia-cufft-cu12==11.2.1.3
- nvidia-curand-cu12==10.3.5.147
- nvidia-cusolver-cu12==11.6.1.9
- nvidia-cusparse-cu12==12.3.1.170
- nvidia-cusparselt-cu12==0.6.2
- nvidia-nccl-cu12==2.21.5
- nvidia-nvjitlink-cu12==12.4.127
- nvidia-nvtx-cu12==12.4.127
- oauthlib==3.2.2
- open-clip-torch==2.31.0
- openapi-schema-validator==0.6.3
- openapi-spec-validator==0.7.1
- orjson==3.10.16
- packaging==24.2
- paginate==0.5.7
- pandas==2.2.3
- pathable==0.4.4
- pathspec==0.12.1
- pillow==10.4.0
- platformdirs==4.3.7
- pluggy==1.5.0
- ply==3.11
- propcache==0.3.1
- proto-plus==1.26.1
- protobuf==6.30.2
- py-cpuinfo==9.0.0
- py-partiql-parser==0.6.1
- pyarrow==19.0.1
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pygal==3.0.5
- pygaljs==1.0.2
- pygments==2.19.1
- pygtrie==2.5.0
- pyjwt==2.10.1
- pymdown-extensions==10.14.3
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-servers==0.5.10
- pytest-sugar==1.0.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- referencing==0.36.2
- regex==2024.11.6
- requests==2.32.3
- requests-mock==1.12.1
- requests-oauthlib==2.0.0
- responses==0.25.7
- rfc3339-validator==0.1.4
- rpds-py==0.24.0
- rsa==4.9
- s3fs==2025.3.2
- s3transfer==0.11.3
- safetensors==0.5.3
- shtab==1.7.1
- simsimd==6.2.1
- six==1.17.0
- sortedcontainers==2.4.0
- sqlalchemy==2.0.40
- sqltrie==0.11.2
- sympy==1.13.1
- termcolor==3.0.0
- timm==1.0.15
- tokenizers==0.21.1
- tomli==2.2.1
- tomlkit==0.13.2
- torch==2.6.0
- torchvision==0.21.0
- tqdm==4.67.1
- transformers==4.50.3
- triton==3.2.0
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- types-pyyaml==6.0.12.20250326
- types-requests==2.31.0.6
- types-urllib3==1.26.25.14
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- universal-pathlib==0.2.6
- urllib3==1.26.20
- usearch==2.16.9
- virtualenv==20.29.3
- watchdog==6.0.0
- wcwidth==0.2.13
- werkzeug==3.1.3
- wrapt==1.17.2
- xmltodict==0.14.2
- yarl==1.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/datachain
| [
"tests/unit/lib/test_datachain.py::test_min_limit",
"tests/unit/lib/test_datachain.py::test_show_limit"
] | [] | [
"tests/unit/lib/test_datachain.py::test_pandas_conversion",
"tests/unit/lib/test_datachain.py::test_pandas_file_column_conflict",
"tests/unit/lib/test_datachain.py::test_pandas_uppercase_columns",
"tests/unit/lib/test_datachain.py::test_pandas_incorrect_column_names",
"tests/unit/lib/test_datachain.py::test_from_features_basic",
"tests/unit/lib/test_datachain.py::test_from_features",
"tests/unit/lib/test_datachain.py::test_datasets",
"tests/unit/lib/test_datachain.py::test_preserve_feature_schema",
"tests/unit/lib/test_datachain.py::test_from_features_simple_types",
"tests/unit/lib/test_datachain.py::test_from_features_more_simple_types",
"tests/unit/lib/test_datachain.py::test_file_list",
"tests/unit/lib/test_datachain.py::test_gen",
"tests/unit/lib/test_datachain.py::test_map",
"tests/unit/lib/test_datachain.py::test_agg",
"tests/unit/lib/test_datachain.py::test_agg_two_params",
"tests/unit/lib/test_datachain.py::test_agg_simple_iterator",
"tests/unit/lib/test_datachain.py::test_agg_simple_iterator_error",
"tests/unit/lib/test_datachain.py::test_agg_tuple_result_iterator",
"tests/unit/lib/test_datachain.py::test_agg_tuple_result_generator",
"tests/unit/lib/test_datachain.py::test_batch_map",
"tests/unit/lib/test_datachain.py::test_batch_map_wrong_size",
"tests/unit/lib/test_datachain.py::test_batch_map_two_params",
"tests/unit/lib/test_datachain.py::test_batch_map_tuple_result_iterator",
"tests/unit/lib/test_datachain.py::test_collect",
"tests/unit/lib/test_datachain.py::test_collect_nested_feature",
"tests/unit/lib/test_datachain.py::test_select_feature",
"tests/unit/lib/test_datachain.py::test_select_columns_intersection",
"tests/unit/lib/test_datachain.py::test_select_except",
"tests/unit/lib/test_datachain.py::test_select_wrong_type",
"tests/unit/lib/test_datachain.py::test_select_except_error",
"tests/unit/lib/test_datachain.py::test_select_restore_from_saving",
"tests/unit/lib/test_datachain.py::test_select_distinct",
"tests/unit/lib/test_datachain.py::test_from_dataset_name_version",
"tests/unit/lib/test_datachain.py::test_chain_of_maps",
"tests/unit/lib/test_datachain.py::test_vector",
"tests/unit/lib/test_datachain.py::test_vector_of_vectors",
"tests/unit/lib/test_datachain.py::test_unsupported_output_type",
"tests/unit/lib/test_datachain.py::test_collect_single_item",
"tests/unit/lib/test_datachain.py::test_default_output_type",
"tests/unit/lib/test_datachain.py::test_parse_tabular",
"tests/unit/lib/test_datachain.py::test_parse_tabular_format",
"tests/unit/lib/test_datachain.py::test_parse_tabular_partitions",
"tests/unit/lib/test_datachain.py::test_parse_tabular_empty",
"tests/unit/lib/test_datachain.py::test_parse_tabular_unify_schema",
"tests/unit/lib/test_datachain.py::test_parse_tabular_output_dict",
"tests/unit/lib/test_datachain.py::test_parse_tabular_output_feature",
"tests/unit/lib/test_datachain.py::test_parse_tabular_output_list",
"tests/unit/lib/test_datachain.py::test_from_csv",
"tests/unit/lib/test_datachain.py::test_from_csv_no_header_error",
"tests/unit/lib/test_datachain.py::test_from_csv_no_header_output_dict",
"tests/unit/lib/test_datachain.py::test_from_csv_no_header_output_feature",
"tests/unit/lib/test_datachain.py::test_from_csv_no_header_output_list",
"tests/unit/lib/test_datachain.py::test_from_csv_tab_delimited",
"tests/unit/lib/test_datachain.py::test_from_csv_null_collect",
"tests/unit/lib/test_datachain.py::test_from_parquet",
"tests/unit/lib/test_datachain.py::test_from_parquet_partitioned",
"tests/unit/lib/test_datachain.py::test_to_parquet",
"tests/unit/lib/test_datachain.py::test_to_parquet_partitioned",
"tests/unit/lib/test_datachain.py::test_parallel[False]",
"tests/unit/lib/test_datachain.py::test_parallel[2]",
"tests/unit/lib/test_datachain.py::test_parallel[True]",
"tests/unit/lib/test_datachain.py::test_exec",
"tests/unit/lib/test_datachain.py::test_extend_features",
"tests/unit/lib/test_datachain.py::test_from_storage_object_name",
"tests/unit/lib/test_datachain.py::test_from_features_object_name",
"tests/unit/lib/test_datachain.py::test_parse_tabular_object_name",
"tests/unit/lib/test_datachain.py::test_sys_feature",
"tests/unit/lib/test_datachain.py::test_to_pandas_multi_level",
"tests/unit/lib/test_datachain.py::test_mutate",
"tests/unit/lib/test_datachain.py::test_order_by_with_nested_columns[True]",
"tests/unit/lib/test_datachain.py::test_order_by_with_nested_columns[False]",
"tests/unit/lib/test_datachain.py::test_order_by_descending[True]",
"tests/unit/lib/test_datachain.py::test_order_by_descending[False]",
"tests/unit/lib/test_datachain.py::test_union",
"tests/unit/lib/test_datachain.py::test_subtract",
"tests/unit/lib/test_datachain.py::test_subtract_error",
"tests/unit/lib/test_datachain.py::test_column_math",
"tests/unit/lib/test_datachain.py::test_from_values_array_of_floats",
"tests/unit/lib/test_datachain.py::test_custom_model_with_nested_lists"
] | [] | Apache License 2.0 | 19,173 | 435 | [
"src/datachain/lib/dc.py",
"src/datachain/query/dataset.py"
] |
Sceptre__sceptre-1494 | deef08e31911493ac18220bf9ed46218210cb94d | 2024-08-03 07:21:52 | deef08e31911493ac18220bf9ed46218210cb94d | diff --git a/sceptre/cli/update.py b/sceptre/cli/update.py
index 35d4239..1502422 100644
--- a/sceptre/cli/update.py
+++ b/sceptre/cli/update.py
@@ -41,6 +41,7 @@ def update_command(
:type verbose: bool
:param yes: A flag to answer 'yes' to all CLI questions.
:type yes: bool
+ :param disable_rollback: A flag to disable cloudformation rollback.
"""
context = SceptreContext(
diff --git a/sceptre/plan/actions.py b/sceptre/plan/actions.py
index 7b7fd57..08994c4 100644
--- a/sceptre/plan/actions.py
+++ b/sceptre/plan/actions.py
@@ -139,6 +139,12 @@ class StackActions:
{"Key": str(k), "Value": str(v)} for k, v in self.stack.tags.items()
],
}
+
+ if self.stack.disable_rollback:
+ update_stack_kwargs.update(
+ {"DisableRollback": self.stack.disable_rollback}
+ )
+
update_stack_kwargs.update(self.stack.template.get_boto_call_parameter())
update_stack_kwargs.update(self._get_role_arn())
response = self.connection_manager.call(
| DisableRollback not fully implemented in stack updates
### Subject of the issue
DisableRollback is handled only during the creation of a stack and not for stack updates. This appears to be an oversight. It is already half-implemented in the CLI.
### Your environment
* version of sceptre (sceptre --version)
* version of python (python --version)
* which OS/distro
### Steps to reproduce
Tell us how to reproduce this issue. Please provide sceptre project files if possible,
you can use https://plnkr.co/edit/ANFHm61Ilt4mQVgF as a base.
### Expected behaviour
Update stack should handle DisableRollback if passed.
### Actual behaviour
Nothing. | Sceptre/sceptre | diff --git a/tests/test_actions.py b/tests/test_actions.py
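A minimal sketch of the expected behaviour, assuming a hypothetical `Stack` stand-in with `external_name` and `disable_rollback` attributes; the update call should only forward `DisableRollback` when it is enabled, mirroring the existing create path:

```python
from collections import namedtuple

# Hypothetical stand-in for the stack configuration, not a Sceptre class.
Stack = namedtuple("Stack", ["external_name", "disable_rollback"])

def build_update_kwargs(stack):
    kwargs = {"StackName": stack.external_name}
    if stack.disable_rollback:
        # Only forwarded when explicitly enabled, matching the create path.
        kwargs["DisableRollback"] = stack.disable_rollback
    return kwargs

print(build_update_kwargs(Stack("my-stack", True)))
# {'StackName': 'my-stack', 'DisableRollback': True}
print(build_update_kwargs(Stack("my-stack", False)))
# {'StackName': 'my-stack'}
```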
index 9e66b76..877ed39 100644
--- a/tests/test_actions.py
+++ b/tests/test_actions.py
@@ -275,6 +275,44 @@ class TestStackActions(object):
sentinel.stack_timeout, boto_response=ANY
)
+ @patch("sceptre.plan.actions.StackActions._wait_for_completion")
+ @patch("sceptre.plan.actions.StackActions._get_stack_timeout")
+ def test_update_disable_rollback_overrides_on_failure(
+ self, mock_get_stack_timeout, mock_wait_for_completion
+ ):
+ self.actions.stack._template = Mock(spec=Template)
+ self.actions.stack._template.get_boto_call_parameter.return_value = {
+ "Template": sentinel.template
+ }
+
+ self.actions.stack.on_failure = "ROLLBACK"
+ self.actions.stack.disable_rollback = True
+
+ mock_get_stack_timeout.return_value = {"TimeoutInMinutes": sentinel.timeout}
+
+ self.actions.update()
+ self.actions.connection_manager.call.assert_called_with(
+ service="cloudformation",
+ command="update_stack",
+ kwargs={
+ "StackName": sentinel.external_name,
+ "Template": sentinel.template,
+ "Parameters": [{"ParameterKey": "key1", "ParameterValue": "val1"}],
+ "Capabilities": [
+ "CAPABILITY_IAM",
+ "CAPABILITY_NAMED_IAM",
+ "CAPABILITY_AUTO_EXPAND",
+ ],
+ "RoleARN": sentinel.cloudformation_service_role,
+ "NotificationARNs": [sentinel.notification],
+ "Tags": [{"Key": "tag1", "Value": "val1"}],
+ "DisableRollback": True,
+ },
+ )
+ mock_wait_for_completion.assert_called_once_with(
+ sentinel.stack_timeout, boto_response=ANY
+ )
+
@patch("sceptre.plan.actions.StackActions._wait_for_completion")
def test_update_cancels_after_timeout(self, mock_wait_for_completion):
self.actions.stack._template = Mock(spec=Template)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 2
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov"
],
"pre_install": null,
"python": "3.8",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
cfn-flip==1.3.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.3
coverage==7.6.1
deepdiff==5.8.1
deprecation==2.1.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
jmespath==1.0.1
jsonschema==3.2.0
MarkupSafe==2.1.5
networkx==2.6.3
ordered-set==4.1.0
packaging==21.3
pluggy==1.5.0
pyparsing==3.1.4
pyrsistent==0.20.0
pytest==8.3.5
pytest-cov==5.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
-e git+https://github.com/Sceptre/sceptre.git@deef08e31911493ac18220bf9ed46218210cb94d#egg=sceptre
sceptre-cmd-resolver==2.0.0
sceptre-file-resolver==1.0.6
six==1.17.0
tomli==2.2.1
urllib3==1.26.20
| name: sceptre
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=24.2=py38h06a4308_0
- python=3.8.20=he870216_0
- readline=8.2=h5eee18b_0
- setuptools=75.1.0=py38h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.44.0=py38h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- cfn-flip==1.3.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.3
- coverage==7.6.1
- deepdiff==5.8.1
- deprecation==2.1.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- jmespath==1.0.1
- jsonschema==3.2.0
- markupsafe==2.1.5
- networkx==2.6.3
- ordered-set==4.1.0
- packaging==21.3
- pluggy==1.5.0
- pyparsing==3.1.4
- pyrsistent==0.20.0
- pytest==8.3.5
- pytest-cov==5.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- sceptre==4.4.2
- sceptre-cmd-resolver==2.0.0
- sceptre-file-resolver==1.0.6
- six==1.17.0
- tomli==2.2.1
- urllib3==1.26.20
prefix: /opt/conda/envs/sceptre
| [
"tests/test_actions.py::TestStackActions::test_update_disable_rollback_overrides_on_failure"
] | [] | [
"tests/test_actions.py::TestStackActions::test_template_loads_template",
"tests/test_actions.py::TestStackActions::test_template_returns_template_if_it_exists",
"tests/test_actions.py::TestStackActions::test_external_name_with_custom_stack_name",
"tests/test_actions.py::TestStackActions::test_create_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_create_disable_rollback_overrides_on_failure",
"tests/test_actions.py::TestStackActions::test_create_sends_correct_request_no_notifications",
"tests/test_actions.py::TestStackActions::test_create_sends_correct_request_with_no_failure_no_timeout",
"tests/test_actions.py::TestStackActions::test_create_stack_already_exists",
"tests/test_actions.py::TestStackActions::test_update_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_update_cancels_after_timeout",
"tests/test_actions.py::TestStackActions::test_update_sends_correct_request_no_notification",
"tests/test_actions.py::TestStackActions::test_update_with_complete_stack_with_no_updates_to_perform",
"tests/test_actions.py::TestStackActions::test_cancel_update_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_launch_with_stack_that_does_not_exist",
"tests/test_actions.py::TestStackActions::test_launch_with_stack_that_failed_to_create",
"tests/test_actions.py::TestStackActions::test_launch_with_stack_in_review_in_progress",
"tests/test_actions.py::TestStackActions::test_launch_with_complete_stack_with_updates_to_perform",
"tests/test_actions.py::TestStackActions::test_launch_with_complete_stack_with_no_updates_to_perform",
"tests/test_actions.py::TestStackActions::test_launch_with_complete_stack_with_unknown_client_error",
"tests/test_actions.py::TestStackActions::test_launch_with_in_progress_stack",
"tests/test_actions.py::TestStackActions::test_launch_with_failed_stack",
"tests/test_actions.py::TestStackActions::test_launch_with_unknown_stack_status",
"tests/test_actions.py::TestStackActions::test_delete_with_created_stack",
"tests/test_actions.py::TestStackActions::test_delete_when_wait_for_completion_raises_stack_does_not_exist_error",
"tests/test_actions.py::TestStackActions::test_delete_when_wait_for_completion_raises_non_existent_client_error",
"tests/test_actions.py::TestStackActions::test_delete_when_wait_for_completion_raises_unexpected_client_error",
"tests/test_actions.py::TestStackActions::test_delete_with_non_existent_stack",
"tests/test_actions.py::TestStackActions::test_describe_stack_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_describe_events_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_describe_resources_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_describe_outputs_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_describe_outputs_handles_stack_with_no_outputs",
"tests/test_actions.py::TestStackActions::test_continue_update_rollback_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_set_stack_policy_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_get_stack_policy_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_create_change_set_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_create_change_set_sends_correct_request_no_notifications",
"tests/test_actions.py::TestStackActions::test_create_change_set_with_non_existent_stack",
"tests/test_actions.py::TestStackActions::test_delete_change_set_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_describe_change_set_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_execute_change_set_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_execute_change_set__change_set_is_failed_for_no_changes__returns_0",
"tests/test_actions.py::TestStackActions::test_execute_change_set__change_set_is_failed_for_no_updates__returns_0",
"tests/test_actions.py::TestStackActions::test_list_change_sets_sends_correct_request",
"tests/test_actions.py::TestStackActions::test_list_change_sets",
"tests/test_actions.py::TestStackActions::test_list_change_sets_url_mode",
"tests/test_actions.py::TestStackActions::test_list_change_sets_empty[True]",
"tests/test_actions.py::TestStackActions::test_list_change_sets_empty[False]",
"tests/test_actions.py::TestStackActions::test_lock_calls_set_stack_policy_with_policy",
"tests/test_actions.py::TestStackActions::test_unlock_calls_set_stack_policy_with_policy",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_sting_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_none_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_none_and_string_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_list_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_none_and_list_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_list_and_string_values",
"tests/test_actions.py::TestStackActions::test_format_parameters_with_none_list_and_string_values",
"tests/test_actions.py::TestStackActions::test_get_status_with_created_stack",
"tests/test_actions.py::TestStackActions::test_get_status_with_non_existent_stack",
"tests/test_actions.py::TestStackActions::test_get_status_with_unknown_clinet_error",
"tests/test_actions.py::TestStackActions::test_get_cloudformation_service_role_without_role",
"tests/test_actions.py::TestStackActions::test_get_role_arn_with_role",
"tests/test_actions.py::TestStackActions::test_protect_execution_without_protection",
"tests/test_actions.py::TestStackActions::test_protect_execution_without_explicit_protection",
"tests/test_actions.py::TestStackActions::test_protect_execution_with_protection",
"tests/test_actions.py::TestStackActions::test_wait_for_completion_calls_log_new_events",
"tests/test_actions.py::TestStackActions::test_get_simplified_status_with_known_stack_statuses[ROLLBACK_COMPLETE-failed]",
"tests/test_actions.py::TestStackActions::test_get_simplified_status_with_known_stack_statuses[STACK_COMPLETE-complete]",
"tests/test_actions.py::TestStackActions::test_get_simplified_status_with_known_stack_statuses[STACK_IN_PROGRESS-in",
"tests/test_actions.py::TestStackActions::test_get_simplified_status_with_known_stack_statuses[STACK_FAILED-failed]",
"tests/test_actions.py::TestStackActions::test_get_simplified_status_with_stack_in_unknown_state",
"tests/test_actions.py::TestStackActions::test_log_new_events_calls_describe_events",
"tests/test_actions.py::TestStackActions::test_log_new_events_prints_correct_event",
"tests/test_actions.py::TestStackActions::test_log_new_events_with_hook_status_prints_correct_event",
"tests/test_actions.py::TestStackActions::test_wait_for_cs_completion_calls_get_cs_status",
"tests/test_actions.py::TestStackActions::test_get_cs_status_handles_all_statuses",
"tests/test_actions.py::TestStackActions::test_get_cs_status_raises_unexpected_exceptions",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template__cloudformation_returns_validation_error__returns_none",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template__calls_cloudformation_get_template",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template__dict_template__returns_json",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template__cloudformation_returns_string_template__returns_that_string",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template_summary__calls_cloudformation_get_template_summary",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template_summary__returns_response_from_cloudformation",
"tests/test_actions.py::TestStackActions::test_fetch_local_template_summary__calls_cloudformation_get_template_summary",
"tests/test_actions.py::TestStackActions::test_fetch_local_template_summary__returns_response_from_cloudformation",
"tests/test_actions.py::TestStackActions::test_fetch_local_template_summary__cloudformation_returns_validation_error_invalid_stack__raises_it",
"tests/test_actions.py::TestStackActions::test_fetch_remote_template_summary__cloudformation_returns_validation_error_for_no_stack__returns_none",
"tests/test_actions.py::TestStackActions::test_diff__invokes_diff_method_on_injected_differ_with_self",
"tests/test_actions.py::TestStackActions::test_diff__returns_result_of_injected_differs_diff_method",
"tests/test_actions.py::TestStackActions::test_drift_detect",
"tests/test_actions.py::TestStackActions::test_drift_show[DETECTION_COMPLETE]",
"tests/test_actions.py::TestStackActions::test_drift_show[DETECTION_FAILED]",
"tests/test_actions.py::TestStackActions::test_drift_show_drift_only",
"tests/test_actions.py::TestStackActions::test_drift_show_with_stack_that_does_not_exist",
"tests/test_actions.py::TestStackActions::test_drift_show_times_out"
] | [] | Apache License 2.0 | 19,182 | 313 | [
"sceptre/cli/update.py",
"sceptre/plan/actions.py"
] |
|
python-wheel-build__fromager-290 | 2a7d14c5282463a3b35d57611c14eeb412020a61 | 2024-08-04 14:38:00 | 2a7d14c5282463a3b35d57611c14eeb412020a61 | diff --git a/src/fromager/sources.py b/src/fromager/sources.py
index 705b2d7..13fc9e4 100644
--- a/src/fromager/sources.py
+++ b/src/fromager/sources.py
@@ -437,7 +437,11 @@ def default_build_sdist(
# The format argument is specified based on
# https://peps.python.org/pep-0517/#build-sdist.
with tarfile.open(sdist_filename, "x:gz", format=tarfile.PAX_FORMAT) as sdist:
- tarballs.tar_reproducible(sdist, sdist_root_dir)
+ tarballs.tar_reproducible(
+ tar=sdist,
+ basedir=sdist_root_dir,
+ prefix=sdist_root_dir.parent,
+ )
return sdist_filename
diff --git a/src/fromager/tarballs.py b/src/fromager/tarballs.py
index c670b55..5e0847b 100644
--- a/src/fromager/tarballs.py
+++ b/src/fromager/tarballs.py
@@ -22,8 +22,17 @@ def _tar_reset(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
return tarinfo
-def tar_reproducible(tar: tarfile.TarFile, basedir: pathlib.Path) -> None:
- """Create reproducible tar file"""
+def tar_reproducible(
+ tar: tarfile.TarFile,
+ basedir: pathlib.Path,
+ prefix: pathlib.Path | None = None,
+) -> None:
+ """Create reproducible tar file
+
+ Add content from basedir to already opened tar. If prefix is provided, use
+ it to set relative paths for the content being added.
+
+ """
content = [str(basedir)] # convert from pathlib.Path, if that's what we have
for root, dirs, files in os.walk(basedir):
@@ -34,4 +43,7 @@ def tar_reproducible(tar: tarfile.TarFile, basedir: pathlib.Path) -> None:
content.sort()
for fn in content:
- tar.add(fn, filter=_tar_reset, recursive=False, arcname=fn)
+ # Ensure that the paths in the tarfile are rooted at the prefix
+ # directory, if we have one.
+ arcname = fn if prefix is None else os.path.relpath(fn, prefix)
+ tar.add(fn, filter=_tar_reset, recursive=False, arcname=arcname)
| Fromager creates bad sdist tar ball
```console
$ tar tf deepspeed-0.14.4.tar.gz | grep PKG-INFO
build-output/work-dir/deepspeed-0.14.4/deepspeed-0.14.4/PKG-INFO
```
The correct path is `deepspeed-0.14.4/PKG-INFO`. The leading segments `build-output/work-dir/deepspeed-0.14.4` are invalid. | python-wheel-build/fromager | diff --git a/tests/test_tarballs.py b/tests/test_tarballs.py
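A minimal sketch of where the extra segments come from and how computing the archive name relative to the parent of the sdist root restores the conventional layout; the paths follow the listing above:

```python
import os

# Directory the sdist content is staged in before it is added to the tarball.
sdist_root_dir = "build-output/work-dir/deepspeed-0.14.4/deepspeed-0.14.4"
member = os.path.join(sdist_root_dir, "PKG-INFO")

# Adding the member with its full on-disk path reproduces the bad layout:
print(member)
# build-output/work-dir/deepspeed-0.14.4/deepspeed-0.14.4/PKG-INFO

# Using the parent of the sdist root as a prefix yields the expected path:
prefix = os.path.dirname(sdist_root_dir)
print(os.path.relpath(member, prefix))
# deepspeed-0.14.4/PKG-INFO
```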
index 46068a6..3704350 100644
--- a/tests/test_tarballs.py
+++ b/tests/test_tarballs.py
@@ -1,3 +1,4 @@
+import os
import tarfile
from fromager import tarballs
@@ -23,3 +24,35 @@ def test_modes_change(tmp_path):
t1_contents = t1.read_bytes()
t2_contents = t2.read_bytes()
assert t1_contents == t2_contents, "file contents differ"
+
+
+def test_prefix_strip(tmp_path):
+ root = tmp_path / "root"
+ root.mkdir()
+ subdir = root / "subdir"
+ subdir.mkdir()
+ a = subdir / "a"
+ a.write_text("this is file a")
+
+ t1 = tmp_path / "out1.tar"
+ with tarfile.open(t1, "w") as tf:
+ tarballs.tar_reproducible(tar=tf, basedir=root, prefix=subdir.parent)
+ with tarfile.open(t1, "r") as tf:
+ names = tf.getnames()
+ assert names == [".", "subdir", "subdir/a"]
+
+
+def test_no_prefix_strip(tmp_path):
+ root = tmp_path / "root"
+ root.mkdir()
+ subdir = root / "subdir"
+ subdir.mkdir()
+ a = subdir / "a"
+ a.write_text("this is file a")
+
+ t1 = tmp_path / "out1.tar"
+ with tarfile.open(t1, "w") as tf:
+ tarballs.tar_reproducible(tar=tf, basedir=root)
+ with tarfile.open(t1, "r") as tf:
+ names = tf.getnames()
+ assert names == [str(p).lstrip(os.sep) for p in [root, subdir, a]]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 0.25 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"requests-mock",
"setuptools_scm>=8",
"setuptools>=64"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.11",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
Deprecated==1.2.18
distlib==0.3.9
filelock==3.18.0
-e git+https://github.com/python-wheel-build/fromager.git@2a7d14c5282463a3b35d57611c14eeb412020a61#egg=fromager
html5lib==1.1
idna==3.10
iniconfig==2.1.0
packaging==24.2
pbr==6.1.1
pkginfo==1.12.1.2
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
PyGithub==2.6.1
PyJWT==2.10.1
PyNaCl==1.5.0
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-pypi-mirror==5.2.1
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
resolvelib==1.1.0
setuptools-scm==8.2.0
six==1.17.0
stevedore==5.4.1
tomlkit==0.13.2
tqdm==4.67.1
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
webencodings==0.5.1
wrapt==1.17.2
| name: fromager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py311h06a4308_0
- python=3.11.11=he870216_0
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py311h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- deprecated==1.2.18
- distlib==0.3.9
- filelock==3.18.0
- fromager==0.25.2.dev8+g2a7d14c
- html5lib==1.1
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pbr==6.1.1
- pkginfo==1.12.1.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pygithub==2.6.1
- pyjwt==2.10.1
- pynacl==1.5.0
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-pypi-mirror==5.2.1
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- resolvelib==1.1.0
- setuptools==71.1.0
- setuptools-scm==8.2.0
- six==1.17.0
- stevedore==5.4.1
- tomlkit==0.13.2
- tqdm==4.67.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- webencodings==0.5.1
- wrapt==1.17.2
prefix: /opt/conda/envs/fromager
| [
"tests/test_tarballs.py::test_prefix_strip"
] | [] | [
"tests/test_tarballs.py::test_modes_change",
"tests/test_tarballs.py::test_no_prefix_strip"
] | [] | Apache License 2.0 | 19,189 | 584 | [
"src/fromager/sources.py",
"src/fromager/tarballs.py"
] |
|
TomerFi__aioswitcher-784 | 1f63d1e17c2c2a68a41bc7f160afccd992dd1ad0 | 2024-08-04 23:26:33 | 1f63d1e17c2c2a68a41bc7f160afccd992dd1ad0 | YogevBokobza: @thecode
Will this fix work for HA? How does it retrieve the MAC there?
YogevBokobza: @thecode
I just wonder how we didn't see this in HA until now.. it looks like an old issue..
thecode: > @thecode I just wonder how we didn't see this in HA until now.. it looks like an old issue..
We did; there were a few reports of it. The shift itself doesn't cause a problem as long as the left bits don't change, so most people probably don't notice it.
I looked at the message history: this was reported to Switcher on 30/11/2022, and they said they would check and report back (still waiting...)
TomerFi: @YogevBokobza - Thank you! | diff --git a/src/aioswitcher/bridge.py b/src/aioswitcher/bridge.py
index 25ae2f3..1676123 100644
--- a/src/aioswitcher/bridge.py
+++ b/src/aioswitcher/bridge.py
@@ -104,7 +104,7 @@ def _parse_device_from_datagram(
parser.get_device_id(),
parser.get_device_key(),
parser.get_ip_type1(),
- parser.get_mac(),
+ parser.get_mac_type1(),
parser.get_name(),
device_type.token_needed,
power_consumption,
@@ -127,7 +127,7 @@ def _parse_device_from_datagram(
parser.get_device_id(),
parser.get_device_key(),
parser.get_ip_type1(),
- parser.get_mac(),
+ parser.get_mac_type1(),
parser.get_name(),
device_type.token_needed,
power_consumption,
@@ -144,7 +144,7 @@ def _parse_device_from_datagram(
parser.get_device_id(),
parser.get_device_key(),
parser.get_ip_type2(),
- parser.get_mac(),
+ parser.get_mac_type2(),
parser.get_name(),
device_type.token_needed,
parser.get_shutter_position(
@@ -168,7 +168,7 @@ def _parse_device_from_datagram(
parser.get_device_id(),
parser.get_device_key(),
parser.get_ip_type2(),
- parser.get_mac(),
+ parser.get_mac_type2(),
parser.get_name(),
device_type.token_needed,
parser.get_shutter_position(
@@ -197,7 +197,7 @@ def _parse_device_from_datagram(
parser.get_device_id(),
parser.get_device_key(),
parser.get_ip_type2(),
- parser.get_mac(),
+ parser.get_mac_type2(),
parser.get_name(),
device_type.token_needed,
parser.get_thermostat_mode(),
@@ -351,8 +351,8 @@ def get_ip_type2(self) -> str:
ip_addr = int(hex_ip[0:2] + hex_ip[2:4] + hex_ip[4:6] + hex_ip[6:8], 16)
return inet_ntoa(pack(">L", ip_addr))
- def get_mac(self) -> str:
- """Extract the MAC address from the broadcast message."""
+ def get_mac_type1(self) -> str:
+ """Extract the MAC address from the broadcast message (Heater, Plug)."""
hex_mac = hexlify(self.message)[160:172].decode().upper()
return (
hex_mac[0:2]
@@ -368,6 +368,23 @@ def get_mac(self) -> str:
+ hex_mac[10:12]
)
+ def get_mac_type2(self) -> str:
+ """Extract the MAC address from the broadcast message (Breeze, Runners)."""
+ hex_mac = hexlify(self.message)[162:174].decode().upper()
+ return (
+ hex_mac[0:2]
+ + ":"
+ + hex_mac[2:4]
+ + ":"
+ + hex_mac[4:6]
+ + ":"
+ + hex_mac[6:8]
+ + ":"
+ + hex_mac[8:10]
+ + ":"
+ + hex_mac[10:12]
+ )
+
def get_name(self) -> str:
"""Extract the device name from the broadcast message."""
return self.message[42:74].decode().rstrip("\x00")
| Incorrect parsing of MAC address for Switcher Runner
### What happened?
Details in https://github.com/home-assistant/core/issues/122715#issuecomment-2254539132: it looks like we have a one-byte shift when parsing the MAC address for Switcher Runner.
@YogevBokobza, you recently checked all runner devices; can you verify that?
### Module Version
3.4.3
### Device Type
Switcher Runner
### Firmware Version
1.79
### Relevant log output
```shell
39:E8:68:E7:80:3B
3C:E8:68:E7:80:3B
3F:E8:68:E7:80:3B
49:E8:68:E7:80:3B
```
The real device MAC starts with `E8`, so the first byte is not related to the MAC.
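A minimal sketch of the shift, using a hypothetical payload in which byte 80 is unrelated data (`0x39` here) and the real six-byte MAC starts at byte 81:

```python
from binascii import hexlify

# Hypothetical datagram fragment: 80 filler bytes, one unrelated byte, then the MAC.
message = bytes(80) + bytes.fromhex("39") + bytes.fromhex("E868E7803B1A") + bytes(20)
hex_msg = hexlify(message).decode().upper()

def fmt(raw):
    return ":".join(raw[i:i + 2] for i in range(0, 12, 2))

print(fmt(hex_msg[160:172]))  # 39:E8:68:E7:80:3B -> offset used for plugs/heaters, off by one byte
print(fmt(hex_msg[162:174]))  # E8:68:E7:80:3B:1A -> offset one byte later, the real MAC
```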
| TomerFi/aioswitcher | diff --git a/tests/test_udp_datagram_parsing.py b/tests/test_udp_datagram_parsing.py
index 326532c..22a19a7 100644
--- a/tests/test_udp_datagram_parsing.py
+++ b/tests/test_udp_datagram_parsing.py
@@ -39,7 +39,7 @@ def test_datagram_state_off(resource_path, type_suffix, expected_type):
assert_that(sut_parser.is_switcher_originator()).is_true()
assert_that(sut_parser.get_ip_type1()).is_equal_to("192.168.1.33")
- assert_that(sut_parser.get_mac()).is_equal_to("12:A1:A2:1A:BC:1A")
+ assert_that(sut_parser.get_mac_type1()).is_equal_to("12:A1:A2:1A:BC:1A")
assert_that(sut_parser.get_name()).is_equal_to("My Switcher Boiler")
assert_that(sut_parser.get_device_id()).is_equal_to("aaaaaa")
assert_that(sut_parser.get_device_state()).is_equal_to(DeviceState.OFF)
@@ -65,7 +65,7 @@ def test_datagram_state_on(resource_path, type_suffix, expected_type):
assert_that(sut_parser.is_switcher_originator()).is_true()
assert_that(sut_parser.get_ip_type1()).is_equal_to("192.168.1.33")
- assert_that(sut_parser.get_mac()).is_equal_to("12:A1:A2:1A:BC:1A")
+ assert_that(sut_parser.get_mac_type1()).is_equal_to("12:A1:A2:1A:BC:1A")
assert_that(sut_parser.get_name()).is_equal_to("My Switcher Boiler")
assert_that(sut_parser.get_device_id()).is_equal_to("aaaaaa")
assert_that(sut_parser.get_device_state()).is_equal_to(DeviceState.ON)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 3.4 | {
"env_vars": null,
"env_yml_path": [],
"install": "poetry install --no-interaction",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
-e git+https://github.com/TomerFi/aioswitcher.git@1f63d1e17c2c2a68a41bc7f160afccd992dd1ad0#egg=aioswitcher
assertpy==1.1
async-timeout==5.0.1
attrs==25.3.0
babel==2.17.0
backrefs==5.8
black==24.10.0
build==0.10.0
CacheControl==0.12.14
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cleo==2.1.0
click==8.1.8
colorama==0.4.6
coverage==7.8.0
crashtest==0.4.1
cryptography==44.0.2
distlib==0.3.9
dulwich==0.21.7
exceptiongroup==1.2.2
filelock==3.18.0
flake8==6.1.0
flake8-docstrings==1.7.0
Flake8-pyproject==1.2.3
frozenlist==1.5.0
ghp-import==2.1.0
gitdb==4.0.12
GitPython==3.1.44
griffe==0.32.3
html5lib==1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
installer==0.7.0
isort==5.13.2
jaraco.classes==3.4.0
jeepney==0.9.0
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
keyring==23.13.1
lockfile==0.12.2
Markdown==3.7
MarkupSafe==3.0.2
mccabe==0.7.0
mergedeep==1.3.4
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-get-deps==0.2.0
mkdocs-git-revision-date-plugin==0.3.2
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocstrings==0.22.0
mkdocstrings-python==1.3.0
more-itertools==10.6.0
msgpack==1.1.0
multidict==6.2.0
mypy==1.4.1
mypy-extensions==1.0.0
packaging==24.2
paginate==0.5.7
pastel==0.2.1
pathspec==0.12.1
pexpect==4.9.0
pkginfo==1.12.1.2
platformdirs==4.3.7
pluggy==1.5.0
poethepoet==0.21.1
poetry==1.5.1
poetry-core==1.6.1
poetry-plugin-export==1.5.0
propcache==0.3.1
ptyprocess==0.7.0
pycodestyle==2.11.1
pycparser==2.22
pycryptodome==3.22.0
pydocstyle==6.3.0
pyflakes==3.1.0
Pygments==2.19.1
pymdown-extensions==10.14.3
pyproject_hooks==1.2.0
pytest==7.4.4
pytest-asyncio==0.21.2
pytest-cov==4.1.0
pytest-mockservers==0.6.0
pytest-resource-path==1.3.0
pytest-sugar==0.9.7
python-dateutil==2.9.0.post0
PyYAML==6.0.2
pyyaml_env_tag==0.1
RapidFuzz==3.12.2
referencing==0.36.2
requests==2.32.3
requests-toolbelt==1.0.0
rpds-py==0.24.0
SecretStorage==3.3.3
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
snowballstemmer==2.2.0
termcolor==3.0.0
time-machine==2.16.0
tomli==2.2.1
tomlkit==0.13.2
trove-classifiers==2025.3.19.19
types-requests==2.32.0.20250328
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.30.0
watchdog==6.0.0
webencodings==0.5.1
yamllint==1.37.0
yarl==1.18.3
zipp==3.21.0
| name: aioswitcher
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- aioswitcher==3.4.4.dev0
- assertpy==1.1
- async-timeout==5.0.1
- attrs==25.3.0
- babel==2.17.0
- backrefs==5.8
- black==24.10.0
- build==0.10.0
- cachecontrol==0.12.14
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cleo==2.1.0
- click==8.1.8
- colorama==0.4.6
- coverage==7.8.0
- crashtest==0.4.1
- cryptography==44.0.2
- distlib==0.3.9
- dulwich==0.21.7
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==6.1.0
- flake8-docstrings==1.7.0
- flake8-pyproject==1.2.3
- frozenlist==1.5.0
- ghp-import==2.1.0
- gitdb==4.0.12
- gitpython==3.1.44
- griffe==0.32.3
- html5lib==1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- installer==0.7.0
- isort==5.13.2
- jaraco-classes==3.4.0
- jeepney==0.9.0
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- keyring==23.13.1
- lockfile==0.12.2
- markdown==3.7
- markupsafe==3.0.2
- mccabe==0.7.0
- mergedeep==1.3.4
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-get-deps==0.2.0
- mkdocs-git-revision-date-plugin==0.3.2
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocstrings==0.22.0
- mkdocstrings-python==1.3.0
- more-itertools==10.6.0
- msgpack==1.1.0
- multidict==6.2.0
- mypy==1.4.1
- mypy-extensions==1.0.0
- packaging==24.2
- paginate==0.5.7
- pastel==0.2.1
- pathspec==0.12.1
- pexpect==4.9.0
- pkginfo==1.12.1.2
- platformdirs==4.3.7
- pluggy==1.5.0
- poethepoet==0.21.1
- poetry==1.5.1
- poetry-core==1.6.1
- poetry-plugin-export==1.5.0
- propcache==0.3.1
- ptyprocess==0.7.0
- pycodestyle==2.11.1
- pycparser==2.22
- pycryptodome==3.22.0
- pydocstyle==6.3.0
- pyflakes==3.1.0
- pygments==2.19.1
- pymdown-extensions==10.14.3
- pyproject-hooks==1.2.0
- pytest==7.4.4
- pytest-asyncio==0.21.2
- pytest-cov==4.1.0
- pytest-mockservers==0.6.0
- pytest-resource-path==1.3.0
- pytest-sugar==0.9.7
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- rapidfuzz==3.12.2
- referencing==0.36.2
- requests==2.32.3
- requests-toolbelt==1.0.0
- rpds-py==0.24.0
- secretstorage==3.3.3
- shellingham==1.5.4
- six==1.17.0
- smmap==5.0.2
- snowballstemmer==2.2.0
- termcolor==3.0.0
- time-machine==2.16.0
- tomli==2.2.1
- tomlkit==0.13.2
- trove-classifiers==2025.3.19.19
- types-requests==2.32.0.20250328
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.30.0
- watchdog==6.0.0
- webencodings==0.5.1
- yamllint==1.37.0
- yarl==1.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/aioswitcher
| [
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[mini-DeviceType.MINI]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[power_plug-DeviceType.POWER_PLUG]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[touch-DeviceType.TOUCH]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[v2_esp-DeviceType.V2_ESP]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[v2_qca-DeviceType.V2_QCA]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_off[v4-DeviceType.V4]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[mini-DeviceType.MINI]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[power_plug-DeviceType.POWER_PLUG]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[touch-DeviceType.TOUCH]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[v2_esp-DeviceType.V2_ESP]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[v2_qca-DeviceType.V2_QCA]",
"tests/test_udp_datagram_parsing.py::test_datagram_state_on[v4-DeviceType.V4]"
] | [] | [
"tests/test_udp_datagram_parsing.py::test_a_faulty_datagram[too_short]",
"tests/test_udp_datagram_parsing.py::test_a_faulty_datagram[wrong_start]"
] | [] | Apache License 2.0 | 19,191 | 819 | [
"src/aioswitcher/bridge.py"
] |
tenable__pyTenable-826 | eeb4cab2c02b4c5219a70fc79ec1fd615efeb81b | 2024-08-05 02:45:37 | eeb4cab2c02b4c5219a70fc79ec1fd615efeb81b | diff --git a/tenable/io/exports/api.py b/tenable/io/exports/api.py
index d17be9c..8e91551 100644
--- a/tenable/io/exports/api.py
+++ b/tenable/io/exports/api.py
@@ -365,8 +365,6 @@ class ExportsAPI(APIEndpoint):
Returns Compliance findings for the specified list of plugin names.
plugin_id (list[int], optional):
Returns Compliance findings for the specified list of plugin IDs.
- asset_tags (list[str], optional):
- Returns Compliance findings for the specified list of asset tags.
audit_name (str, optional):
Restricts compliance findings to those associated with the specified audit.
audit_file_name (str, optional):
@@ -384,6 +382,11 @@ class ExportsAPI(APIEndpoint):
Vulnerability Management on or after the specified unix timestamp.
state (list[str], optional):
Restricts compliance findings to those associated with the provided list of states, such as open, reopened and fixed.
+ tags (list[tuple[str, list[str]]], optional):
+ A list of tag pairs to filter the results on. The tag pairs
+ should be presented as ``('CATEGORY', ['VALUE'])``.
+ network_id (str, optional):
+ Returns Compliance findings for the specified network ID.
num_findings (int):
The number of findings to return per chunk of data. If left
unspecified, the default is ``5000``.
@@ -551,3 +554,18 @@ class ExportsAPI(APIEndpoint):
... )
'''
return self._export('vulns', VulnExportSchema(), **kwargs)
+
+ def list_compliance_export_jobs(self):
+ """
+ Returns a list of the last 1,000 compliance export requests along with their statuses
+ and related metadata.
+
+ Returns:
+ :obj:`list`:
+ List of job records.
+
+ Examples:
+ >>> for compliance_job in tio.exports.list_compliance_export_jobs():
+ ... pprint(compliance_job)
+ """
+ return self._api.get('compliance/export/status').json()["exports"]
\ No newline at end of file
diff --git a/tenable/io/exports/schema.py b/tenable/io/exports/schema.py
index d6f75ea..63e5c5e 100644
--- a/tenable/io/exports/schema.py
+++ b/tenable/io/exports/schema.py
@@ -18,6 +18,24 @@ def serialize_tags(data: Dict) -> Dict:
data[tag_name].append(tag[1])
return data
+def serialize_compliance_tags(data: Dict) -> Dict:
+ """
+ Converts the tag tuples into a list of objects
+ """
+ tags = data.pop("tags", [])
+ modified_tags = []
+ for tag in tags:
+ category = tag[0]
+ values = tag[1]
+ modified_tags.append({
+ "category": category,
+ "values": values
+ })
+
+ if tags:
+ data["tags"] = modified_tags
+
+ return data
class AssetExportSchema(Schema):
'''
@@ -119,7 +137,6 @@ class ComplianceExportSchema(Schema):
ipv6_addresses = fields.List(fields.Str())
plugin_name = fields.List(fields.Str())
plugin_id = fields.List(fields.Int())
- asset_tags = fields.List(fields.Str())
audit_name = fields.Str()
audit_file_name = fields.Str()
compliance_results = fields.List(fields.Str())
@@ -127,6 +144,8 @@ class ComplianceExportSchema(Schema):
indexed_at = fields.Int()
since = fields.Int()
state = fields.List(fields.Str())
+ tags = fields.List(fields.Tuple((fields.Str(), fields.List(fields.Str()))))
+ network_id = fields.Str()
# Other params
asset = fields.List(fields.UUID())
@@ -134,7 +153,7 @@ class ComplianceExportSchema(Schema):
@post_dump
def post_serialization(self, data, **kwargs): # noqa PLR0201 PLW0613
- data = serialize_tags(data)
+ data = serialize_compliance_tags(data)
data = envelope(data, 'filters', excludes=['asset',
'num_findings'
])
| Support Phase 2 filters and endpoints for VM Compliance Export
**Is your feature request related to a problem? Please describe.**
I want pyTenable to support Phase 2 filters and endpoints for VM Compliance Export, namely `tags` and `network_id`, and the job status list endpoint.
**Describe the solution you'd like**
I want pyTenable to support Phase 2 filters and endpoints for VM Compliance Export, namely `tags` and `network_id`, and the job status list endpoint.
**Describe alternatives you've considered**
NA
**Additional context**
NA
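A minimal sketch of the intended usage, following the filter shapes described above (`tags` as category/value tuples, `network_id` as a string); credentials are placeholders:

```python
from tenable.io import TenableIO

tio = TenableIO("ACCESS_KEY", "SECRET_KEY")

# Compliance export restricted by tag pairs and a network ID.
export = tio.exports.compliance(
    tags=[("Category", ["value1", "value2"])],
    network_id="d6797cf4-42b9-4cad-8591-9dd91c3f0fc3",
)
for finding in export:
    print(finding)

# Status listing for the last compliance export jobs.
for job in tio.exports.list_compliance_export_jobs():
    print(job)
```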
| tenable/pyTenable | diff --git a/tests/io/exports/cassettes/test_list_compliance_export_jobs_returns_a_list.yaml b/tests/io/exports/cassettes/test_list_compliance_export_jobs_returns_a_list.yaml
new file mode 100644
index 0000000..cf86017
--- /dev/null
+++ b/tests/io/exports/cassettes/test_list_compliance_export_jobs_returns_a_list.yaml
@@ -0,0 +1,63 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - Integration/1.0 (pytest; pytenable-automated-testing; Build/unknown) pyTenable/1.5.0
+ (Restfly/1.4.7; Python/3.10.6; Darwin/arm64)
+ X-APIKeys:
+ - accessKey=TIO_ACCESS_KEY;secretKey=TIO_SECRET_KEY
+ method: GET
+ uri: https://cloud.tenable.com/compliance/export/status
+ response:
+ body:
+ string: '{"exports":[]}'
+ headers:
+ Accept-Ranges:
+ - bytes
+ Cache-Control:
+ - no-store
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '14'
+ Content-Type:
+ - application/json; charset=utf-8
+ Date:
+ - Mon, 05 Aug 2024 02:21:24 GMT
+ Expect-CT:
+ - enforce, max-age=86400
+ Pragma:
+ - no-cache
+ Referrer-Policy:
+ - strict-origin-when-cross-origin
+ Set-Cookie:
+ - nginx-cloud-site-id=qa-develop; path=/; HttpOnly; SameSite=Strict; Secure
+ Strict-Transport-Security:
+ - max-age=63072000; includeSubDomains
+ Vary:
+ - origin
+ X-Content-Type-Options:
+ - nosniff
+ X-Download-Options:
+ - noopen
+ X-Frame-Options:
+ - DENY
+ X-Gateway-Site-ID:
+ - service-nginx-router-us-east-1-eng-5689f7f454-gbj9n
+ X-Path-Handler:
+ - tenable-io
+ X-Request-Uuid:
+ - '"f1adf991e7bd765c92b6c693a08dc846"'
+ X-Xss-Protection:
+ - 1; mode=block
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/tests/io/exports/test_api.py b/tests/io/exports/test_api.py
index 3e88732..b00227e 100644
--- a/tests/io/exports/test_api.py
+++ b/tests/io/exports/test_api.py
@@ -131,3 +131,8 @@ def test_export_adoption(tvm):
assert UUID(job_id) == tvm.exports.initiate_export('vulns')
with pytest.raises(RequestConflictError):
tvm.exports.initiate_export('vulns', adopt_existing=False)
+
[email protected]()
+def test_list_compliance_export_jobs_returns_a_list(api):
+ jobs = api.exports.list_compliance_export_jobs()
+ assert isinstance(jobs, list)
diff --git a/tests/io/exports/test_schema.py b/tests/io/exports/test_schema.py
index 7cf3b9f..310ccbb 100644
--- a/tests/io/exports/test_schema.py
+++ b/tests/io/exports/test_schema.py
@@ -85,7 +85,7 @@ def compliance_export():
@pytest.fixture
-def compliance_export_phase_1_schema():
+def compliance_export_phase_1_and_2_schema():
"""
Example compliance export request with phase 1 filters
"""
@@ -100,14 +100,17 @@ def compliance_export_phase_1_schema():
'ipv6_addresses': ['2001:0db8:85a3:0000:0000:8a2e:0370:7334'],
'plugin_name': ['Debian dla-3719 : php-seclib - security update', 'Debian dsa-5607 : chromium - security update'],
'plugin_id': [189491, 189490],
- 'asset_tags': ['tag-a', 'tag-b'],
'audit_name': 'my-audit-name',
'audit_file_name': 'my-audit-file-name',
'compliance_results': ['PASSED'],
'last_observed': 1635798607,
'indexed_at': 1635798607,
'since': 1635798607,
- 'state': ['Active']
+ 'state': ['Active'],
+ 'tags': [
+ ('Category', ['value1', 'value2'])
+ ],
+ 'network_id': 'd6797cf4-42b9-4cad-8591-9dd91c3f0fc3'
}
@@ -272,12 +275,14 @@ def test_asset_export_schema_without_open_ports(asset_export_with_out_open_ports
schema_dump = schema.dump(schema.load(asset_export_with_out_open_ports))
assert "include_open_ports" not in schema_dump
-def test_compliance_export_phase_1_filters(compliance_export_phase_1_schema):
+def test_compliance_export_phase_1_and_2_filters(compliance_export_phase_1_and_2_schema):
"""
Test Compliance Export Phase 1 Filter Schema
"""
schema = ComplianceExportSchema()
- schema_dump = schema.dump(schema.load(compliance_export_phase_1_schema))
+ schema_dump = schema.dump(schema.load(compliance_export_phase_1_and_2_schema))
# checking random element
- assert schema_dump["filters"]["state"][0] == "Active"
\ No newline at end of file
+ assert schema_dump["filters"]["state"][0] == "Active"
+ assert len(schema_dump["filters"]["tags"]) == 1
+ assert schema_dump["filters"]["network_id"] == "d6797cf4-42b9-4cad-8591-9dd91c3f0fc3"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-vcr",
"pytest-datafiles",
"pytest-cov",
"responses"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arrow==1.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
defusedxml==0.7.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
marshmallow==3.26.1
multidict==6.2.0
packaging==24.2
pluggy==1.5.0
propcache==0.3.1
-e git+https://github.com/tenable/pyTenable.git@eeb4cab2c02b4c5219a70fc79ec1fd615efeb81b#egg=pyTenable
pytest==8.3.5
pytest-cov==6.0.0
pytest-datafiles==3.0.0
pytest-vcr==1.0.2
python-box==7.3.2
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-toolbelt==1.0.0
responses==0.25.7
restfly==1.5.1
semver==3.0.4
six==1.17.0
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
urllib3==1.26.18
vcrpy==7.0.0
wrapt==1.17.2
yarl==1.18.3
| name: pyTenable
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arrow==1.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- defusedxml==0.7.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- marshmallow==3.26.1
- multidict==6.2.0
- packaging==24.2
- pluggy==1.5.0
- propcache==0.3.1
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-datafiles==3.0.0
- pytest-vcr==1.0.2
- python-box==7.3.2
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-toolbelt==1.0.0
- responses==0.25.7
- restfly==1.5.1
- semver==3.0.4
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- urllib3==1.26.18
- vcrpy==7.0.0
- wrapt==1.17.2
- yarl==1.18.3
prefix: /opt/conda/envs/pyTenable
| [
"tests/io/exports/test_api.py::test_list_compliance_export_jobs_returns_a_list",
"tests/io/exports/test_schema.py::test_compliance_export_phase_1_and_2_filters"
] | [] | [
"tests/io/exports/test_api.py::test_status",
"tests/io/exports/test_api.py::test_cancel",
"tests/io/exports/test_api.py::test_download_chunk",
"tests/io/exports/test_api.py::test_jobs",
"tests/io/exports/test_api.py::test_base_export",
"tests/io/exports/test_api.py::test_asset_export",
"tests/io/exports/test_api.py::test_vuln_export",
"tests/io/exports/test_api.py::test_compliance_export",
"tests/io/exports/test_api.py::test_initiate_export",
"tests/io/exports/test_api.py::test_export_adoption",
"tests/io/exports/test_schema.py::test_assetschema",
"tests/io/exports/test_schema.py::test_complianceschema",
"tests/io/exports/test_schema.py::test_vulnerabilityschema",
"tests/io/exports/test_schema.py::test_asset_export_schema_for_open_ports_true",
"tests/io/exports/test_schema.py::test_asset_export_schema_for_open_ports_false",
"tests/io/exports/test_schema.py::test_asset_export_schema_without_open_ports"
] | [] | MIT License | 19,192 | 996 | [
"tenable/io/exports/api.py",
"tenable/io/exports/schema.py"
] |
|
softlayer__softlayer-python-2178 | 40b7c8298cc324c6f21499f4bcba1bbf77041cdc | 2024-08-05 22:51:45 | 26c7d62691a85f252b4c07b9ada7961f07b3b6e4 | diff --git a/SoftLayer/CLI/dns/zone_delete.py b/SoftLayer/CLI/dns/zone_delete.py
index 83eb1127..cca4c9c9 100644
--- a/SoftLayer/CLI/dns/zone_delete.py
+++ b/SoftLayer/CLI/dns/zone_delete.py
@@ -17,6 +17,7 @@ def cli(env, zone):
"""Delete zone.
Example::
+
slcli dns zone-delete ibm.com
This command deletes a zone that is named ibm.com
"""
diff --git a/SoftLayer/CLI/formatting.py b/SoftLayer/CLI/formatting.py
index c4c28463..0e51eb30 100644
--- a/SoftLayer/CLI/formatting.py
+++ b/SoftLayer/CLI/formatting.py
@@ -254,8 +254,7 @@ def confirm(prompt_str, default=False):
def no_going_back(confirmation):
"""Show a confirmation to a user.
- :param confirmation str: the string the user has to enter in order to
- confirm their action.
+ :param confirmation str: the string the user has to enter in order to confirm their action.
"""
if not confirmation:
confirmation = 'yes'
diff --git a/SoftLayer/CLI/globalip/assign.py b/SoftLayer/CLI/globalip/assign.py
index 1e793761..a03d8374 100644
--- a/SoftLayer/CLI/globalip/assign.py
+++ b/SoftLayer/CLI/globalip/assign.py
@@ -5,27 +5,51 @@
import SoftLayer
from SoftLayer.CLI import environment
+from SoftLayer.CLI import helpers
-target_types = {'vlan': 'SoftLayer_Network_Vlan',
- 'ip': 'SoftLayer_Network_Subnet_IpAddress',
- 'hardware': 'SoftLayer_Hardware_Server',
- 'vsi': 'SoftLayer_Virtual_Guest'}
+
+# pylint: disable=unused-argument
+def targetipcallback(ctx, param, value):
+ """This is here to allow for using --target-id in some cases. Takes the first value and returns it"""
+ if value:
+ return value[0]
+ return value
@click.command(cls=SoftLayer.CLI.command.SLCommand, epilog="More information about types and identifiers "
"on https://sldn.softlayer.com/reference/services/SoftLayer_Network_Subnet/route/")
[email protected]('identifier')
[email protected]('globalip')
[email protected]('targetip', nargs=-1, callback=targetipcallback)
@click.option('--target', type=click.Choice(['vlan', 'ip', 'hardware', 'vsi']),
help='choose the type. vlan, ip, hardware, vsi')
[email protected]('--target-id', help='The identifier for the destination resource to route this subnet to. ')
[email protected]('--target-id', help='The identifier for the destination resource to route this subnet to.')
@environment.pass_env
-def cli(env, identifier, target, target_id):
- """Assigns the subnet to a target.
+def cli(env, globalip, targetip, target, target_id):
+ """Assigns the GLOBALIP to TARGETIP.
+ GLOBALIP should be either the Global IP address, or the SoftLayer_Network_Subnet_IpAddress_Global id
+ See `slcli globalip list`
+ TARGETIP should be either the target IP address, or the SoftLayer_Network_Subnet_IpAddress id
+ See `slcli subnet list`
Example::
+
slcli globalip assign 12345678 9.111.123.456
- This command assigns IP address with ID 12345678 to a target device whose IP address is 9.111.123.456
- """
+ This command assigns Global IP address with ID 12345678 to a target device whose IP address is 9.111.123.456
+ slcli globalip assign 123.4.5.6 6.5.4.123
+ Global IPs can be specified by their IP address
+ """
mgr = SoftLayer.NetworkManager(env.client)
- mgr.route(identifier, target_types.get(target), target_id)
+ # Find SoftLayer_Network_Subnet_IpAddress_Global::id
+ global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, globalip, name='Global IP')
+
+ # Find Global IPs SoftLayer_Network_Subnet::id
+ mask = "mask[id,ipAddress[subnetId]]"
+ subnet = env.client.call('SoftLayer_Network_Subnet_IpAddress_Global', 'getObject', id=global_ip_id, mask=mask)
+ subnet_id = subnet.get('ipAddress', {}).get('subnetId')
+
+ # For backwards compatibility
+ if target_id:
+ targetip = target_id
+
+ mgr.route(subnet_id, 'SoftLayer_Network_Subnet_IpAddress', targetip)
diff --git a/SoftLayer/CLI/globalip/cancel.py b/SoftLayer/CLI/globalip/cancel.py
index 0d9394b2..920d07c7 100644
--- a/SoftLayer/CLI/globalip/cancel.py
+++ b/SoftLayer/CLI/globalip/cancel.py
@@ -18,12 +18,12 @@ def cli(env, identifier, force):
"""Cancel global IP.
Example::
+
slcli globalip cancel 12345
"""
mgr = SoftLayer.NetworkManager(env.client)
- global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier,
- name='global ip')
+ global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier, name='global ip')
if not force:
if not (env.skip_confirmations or
diff --git a/SoftLayer/CLI/globalip/unassign.py b/SoftLayer/CLI/globalip/unassign.py
index 563ebb10..564c74a8 100644
--- a/SoftLayer/CLI/globalip/unassign.py
+++ b/SoftLayer/CLI/globalip/unassign.py
@@ -12,9 +12,21 @@
@click.argument('identifier')
@environment.pass_env
def cli(env, identifier):
- """Unassigns a global IP from a target."""
+ """Unroutes IDENTIFIER
+
+ IDENTIFIER should be either the Global IP address, or the SoftLayer_Network_Subnet_IpAddress_Global id
+ Example::
+
+ slcli globalip unassign 123456
+
+ slcli globalip unassign 123.43.22.11
+"""
mgr = SoftLayer.NetworkManager(env.client)
- global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier,
- name='global ip')
- mgr.unassign_global_ip(global_ip_id)
+ global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier, name='global ip')
+
+ # Find Global IPs SoftLayer_Network_Subnet::id
+ mask = "mask[id,ipAddress[subnetId]]"
+ subnet = env.client.call('SoftLayer_Network_Subnet_IpAddress_Global', 'getObject', id=global_ip_id, mask=mask)
+ subnet_id = subnet.get('ipAddress', {}).get('subnetId')
+ mgr.clear_route(subnet_id)
diff --git a/SoftLayer/fixtures/SoftLayer_Network_Subnet_IpAddress_Global.py b/SoftLayer/fixtures/SoftLayer_Network_Subnet_IpAddress_Global.py
index 89cd22f5..39244730 100644
--- a/SoftLayer/fixtures/SoftLayer_Network_Subnet_IpAddress_Global.py
+++ b/SoftLayer/fixtures/SoftLayer_Network_Subnet_IpAddress_Global.py
@@ -1,3 +1,3 @@
route = True
unroute = True
-getObject = {'id': 1234, 'billingItem': {'id': 1234}}
+getObject = {'id': 1234, 'billingItem': {'id': 1234}, 'ipAddress': {'subnetId': 9988}}
diff --git a/SoftLayer/managers/network.py b/SoftLayer/managers/network.py
index 20186460..49af7197 100644
--- a/SoftLayer/managers/network.py
+++ b/SoftLayer/managers/network.py
@@ -841,12 +841,12 @@ def get_closed_pods(self):
def route(self, subnet_id, type_serv, target):
"""Assigns a subnet to a specified target.
- :param int subnet_id: The ID of the global IP being assigned
+ https://sldn.softlayer.com/reference/services/SoftLayer_Network_Subnet/route/
+ :param int subnet_id: The ID of the SoftLayer_Network_Subnet_IpAddress being routed
:param string type_serv: The type service to assign
:param string target: The instance to assign
"""
- return self.client.call('SoftLayer_Network_Subnet', 'route',
- type_serv, target, id=subnet_id, )
+ return self.client.call('SoftLayer_Network_Subnet', 'route', type_serv, target, id=subnet_id, )
def get_datacenter(self, _filter=None, datacenter=None):
"""Calls SoftLayer_Location::getDatacenters()
| `slcli globalip assign` should only take in an IP as an argument
`slcli globalip assign 12345678 9.111.123.456` is the expected syntax, since you cannot route global IPs to subnets/devices/etc.
It should still support the `--target` and `--target-id` options, of course.
Update the help text to explain this a bit as well.
This command changed in #1614 | softlayer/softlayer-python | diff --git a/tests/CLI/modules/globalip_tests.py b/tests/CLI/modules/globalip_tests.py
index 43c5b0f4..a309d563 100644
--- a/tests/CLI/modules/globalip_tests.py
+++ b/tests/CLI/modules/globalip_tests.py
@@ -12,15 +12,9 @@
import json
-class DnsTests(testing.TestCase):
+class GlobalIpTests(testing.TestCase):
- def test_ip_assign(self):
- result = self.run_command(['globalip', 'assign', '1'])
-
- self.assert_no_fail(result)
- self.assertEqual(result.output, "")
-
- @mock.patch('SoftLayer.CLI.formatting.no_going_back')
+ @mock.patch('SoftLayer.CLI.formatting.confirm')
def test_ip_cancel(self, no_going_back_mock):
# Test using --really flag
result = self.run_command(['--really', 'globalip', 'cancel', '1'])
@@ -39,7 +33,7 @@ def test_ip_cancel(self, no_going_back_mock):
no_going_back_mock.return_value = False
result = self.run_command(['globalip', 'cancel', '1'])
- self.assertEqual(result.exit_code, 0)
+ self.assertEqual(result.exit_code, 2)
def test_ip_list(self):
result = self.run_command(['globalip', 'list', '--ip-version=v4'])
@@ -84,6 +78,31 @@ def test_ip_unassign(self):
result = self.run_command(['globalip', 'unassign', '1'])
self.assert_no_fail(result)
self.assertEqual(result.output, "")
+ self.assert_called_with('SoftLayer_Network_Subnet', 'clearRoute', identifier=9988)
+
+ def test_ip_assign(self):
+ result = self.run_command(['globalip', 'assign', '1', '999'])
+ self.assert_no_fail(result)
+ self.assertEqual(result.output, "")
+ service = 'SoftLayer_Network_Subnet_IpAddress'
+ self.assert_called_with('SoftLayer_Network_Subnet', 'route', identifier=9988, args=(service, '999'))
+
+ def test_ip_assign_target(self):
+ result = self.run_command(['globalip', 'assign', '1', '--target-id=8123'])
+ self.assert_no_fail(result)
+ self.assertEqual(result.output, "")
+ service = 'SoftLayer_Network_Subnet_IpAddress'
+ self.assert_called_with('SoftLayer_Network_Subnet', 'route', identifier=9988, args=(service, '8123'))
+
+ def test_ip_assign_ip(self):
+ mock_api = self.set_mock('SoftLayer_Account', 'getGlobalIpRecords')
+ mock_api.return_value = [{"id": 112233}]
+ result = self.run_command(['globalip', 'assign', '192.168.1.1', '1.2.3.4'])
+ self.assert_no_fail(result)
+ self.assertEqual(result.output, "")
+ service = 'SoftLayer_Network_Subnet_IpAddress'
+ self.assert_called_with(f"{service}_Global", "getObject", identifier=112233)
+ self.assert_called_with('SoftLayer_Network_Subnet', 'route', identifier=9988, args=(service, '1.2.3.4'))
def test_ip_cancel_force(self):
result = self.run_command(['globalip', 'cancel', '1', '--force'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 7
} | 6.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
markdown-it-py==3.0.0
mdurl==0.1.2
packaging==24.2
pluggy==1.5.0
prettytable==3.16.0
prompt_toolkit==3.0.50
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
rich==13.7.1
-e git+https://github.com/softlayer/softlayer-python.git@40b7c8298cc324c6f21499f4bcba1bbf77041cdc#egg=SoftLayer
tomli==2.2.1
urllib3==2.3.0
wcwidth==0.2.13
| name: softlayer-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- markdown-it-py==3.0.0
- mdurl==0.1.2
- packaging==24.2
- pluggy==1.5.0
- prettytable==3.16.0
- prompt-toolkit==3.0.50
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- rich==13.7.1
- tomli==2.2.1
- urllib3==2.3.0
- wcwidth==0.2.13
prefix: /opt/conda/envs/softlayer-python
| [
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_assign",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_assign_ip",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_assign_target",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_unassign"
] | [] | [
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_create",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_cancel",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_cancel_abort",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_cancel_force",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_cancel_no_abort",
"tests/CLI/modules/globalip_tests.py::GlobalIpTests::test_ip_list"
] | [] | MIT License | 19,207 | 2,210 | [
"SoftLayer/CLI/dns/zone_delete.py",
"SoftLayer/CLI/formatting.py",
"SoftLayer/CLI/globalip/assign.py",
"SoftLayer/CLI/globalip/cancel.py",
"SoftLayer/CLI/globalip/unassign.py",
"SoftLayer/fixtures/SoftLayer_Network_Subnet_IpAddress_Global.py",
"SoftLayer/managers/network.py"
] |
|
rayokota__jsonata-python-5 | cda7eb6c4860dcb38a546d30bd454fd774a30210 | 2024-08-07 01:49:39 | cda7eb6c4860dcb38a546d30bd454fd774a30210 | diff --git a/src/jsonata/functions.py b/src/jsonata/functions.py
index cdea849..fd69643 100644
--- a/src/jsonata/functions.py
+++ b/src/jsonata/functions.py
@@ -755,22 +755,82 @@ class Functions:
if isinstance(pattern, str):
if not pattern:
raise jexception.JException("Second argument of replace function cannot be an empty string", 0)
- if limit is None:
- if isinstance(pattern, str):
- return re.sub(pattern, str(replacement), string)
- else:
- return Functions.safe_replace_all(string, pattern, replacement)
- else:
+
+ if limit is not None and limit < 0:
+ raise jexception.JException("Fourth argument of replace function must evaluate to a positive number", 0)
+
+ def string_replacer(match):
+ result = ''
+ position = 0
+ repl = str(replacement)
+ while position < len(repl):
+ index = repl.find('$', position)
+ if index == -1:
+ result += repl[position:]
+ break
+ result += repl[position:index]
+ position = index + 1
+ if position < len(repl):
+ dollar_val = repl[position]
+ if dollar_val == '$':
+ result += '$'
+ position += 1
+ elif dollar_val == '0':
+ result += match.group(0)
+ position += 1
+ else:
+ max_digits = len(str(len(match.groups())))
+ group_num = repl[position:position+max_digits]
+ if group_num.isdigit():
+ group_index = int(group_num)
+ if 0 < group_index <= len(match.groups()):
+ result += match.group(group_index) or ''
+ position += len(group_num)
+ else:
+ result += '$'
+ else:
+ result += '$'
+ else:
+ result += '$'
+ return result
- if limit < 0:
- raise jexception.JException("Fourth argument of replace function must evaluate to a positive number", 0)
+ if callable(replacement):
+ replacer = lambda m: replacement(m.groupdict())
+ elif isinstance(replacement, str):
+ replacer = string_replacer
+ else:
+ replacer = lambda m: str(replacement)
- for i in range(0, limit):
- if isinstance(pattern, str):
- string = re.sub(pattern, str(replacement), string, 1)
- else:
- string = Functions.safe_replace_first(string, pattern, str(replacement))
- return string
+ if isinstance(pattern, str):
+ # Use string methods for literal string patterns
+ result = ''
+ position = 0
+ count = 0
+ while True:
+ if limit is not None and count >= limit:
+ result += string[position:]
+ break
+ index = string.find(pattern, position)
+ if index == -1:
+ result += string[position:]
+ break
+ result += string[position:index]
+ match = re.match(re.escape(pattern), string[index:])
+ result += replacer(match)
+ position = index + len(pattern)
+ count += 1
+ return result
+ else:
+ # Use regex for pattern objects
+ if limit is None:
+ return Functions.safe_replace_all(string, pattern, replacement)
+ else:
+ count = 0
+ result = string
+ while count < limit:
+ result = Functions.safe_replace_first(result, pattern, str(replacement))
+ count += 1
+ return result
#
# Base64 encode a string
| a `.` in $replace doesn't match reference implementation
Running a `$replace` in the js version of the library returns the full string:
<img width="277" alt="image" src="https://github.com/user-attachments/assets/3bda4c30-2e30-4e67-b3fb-b6ebe6774fcb">
But in `jsonata-python` it returns an empty string:
```
echo '{}' | python3 -m jsonata.cli '$replace("hello", ".", "")'
```
This is due to a difference between the `$replace` implementations in the `js` version of the library and the `python` library.
The `js` library does a simple replace:
<img width="730" alt="image" src="https://github.com/user-attachments/assets/9ec3fe39-0977-470c-a445-466e85bbcc43">
While the `python` library does a `re.sub` in this case:
<img width="927" alt="image" src="https://github.com/user-attachments/assets/cb8c1a17-0424-4384-b269-9c77736bbd8d">
| rayokota/jsonata-python | diff --git a/tests/string_test.py b/tests/string_test.py
index 4fcd09c..9b09b21 100644
--- a/tests/string_test.py
+++ b/tests/string_test.py
@@ -32,6 +32,12 @@ class TestString:
assert jsonata.Jsonata("$string($)").evaluate({"a": str('\n')}) == "{\"a\":\"\\n\"}"
assert jsonata.Jsonata("$string($)").evaluate({"a": "</"}) == "{\"a\":\"</\"}"
+ def test_replace(self):
+ assert jsonata.Jsonata("$replace('hello', '.', '')").evaluate(None) == "hello"
+ assert jsonata.Jsonata("$replace('hello', 'l', 'x')").evaluate(None) == "hexxo"
+ assert jsonata.Jsonata("$replace('h.ello', '.', '')").evaluate(None) == "hello"
+ assert jsonata.Jsonata("$replace('h.e.l.l.o', '.', '',2)").evaluate(None) == "hel.l.o"
+
#
# Additional $split tests
#
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/rayokota/jsonata-python.git@cda7eb6c4860dcb38a546d30bd454fd774a30210#egg=jsonata_python
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-asyncio==0.26.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions==4.13.0
| name: jsonata-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- jsonata-python==0.3.0
- pytest-asyncio==0.26.0
- typing-extensions==4.13.0
prefix: /opt/conda/envs/jsonata-python
| [
"tests/string_test.py::TestString::test_replace"
] | [] | [
"tests/string_test.py::TestString::test_string",
"tests/string_test.py::TestString::test_boolean",
"tests/string_test.py::TestString::test_number",
"tests/string_test.py::TestString::test_array",
"tests/string_test.py::TestString::test_map",
"tests/string_test.py::TestString::test_map2",
"tests/string_test.py::TestString::test_escape",
"tests/string_test.py::TestString::test_split",
"tests/string_test.py::TestString::test_trim"
] | [] | Apache License 2.0 | 19,219 | 838 | [
"src/jsonata/functions.py"
] |
|
tobymao__sqlglot-3885 | 62ceed2fa3cd7b41919839d837b860f3814fa769 | 2024-08-07 10:33:29 | 2ad9bfef71ae707b83f604f16b47aa583d082c3b | diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index d43a916e..3a0f529a 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -3198,6 +3198,15 @@ class Parser(metaclass=_Parser):
self._match_set(self.JOIN_KINDS) and self._prev,
)
+ def _parse_using_identifiers(self) -> t.List[exp.Expression]:
+ def _parse_column_as_identifier() -> t.Optional[exp.Expression]:
+ this = self._parse_column()
+ if isinstance(this, exp.Column):
+ return this.this
+ return this
+
+ return self._parse_wrapped_csv(_parse_column_as_identifier, optional=True)
+
def _parse_join(
self, skip_join_token: bool = False, parse_bracket: bool = False
) -> t.Optional[exp.Join]:
@@ -3238,7 +3247,7 @@ class Parser(metaclass=_Parser):
if self._match(TokenType.ON):
kwargs["on"] = self._parse_assignment()
elif self._match(TokenType.USING):
- kwargs["using"] = self._parse_wrapped_id_vars()
+ kwargs["using"] = self._parse_using_identifiers()
elif not isinstance(kwargs["this"], exp.Unnest) and not (
kind and kind.token_type == TokenType.CROSS
):
@@ -3248,7 +3257,7 @@ class Parser(metaclass=_Parser):
if joins and self._match(TokenType.ON):
kwargs["on"] = self._parse_assignment()
elif joins and self._match(TokenType.USING):
- kwargs["using"] = self._parse_wrapped_id_vars()
+ kwargs["using"] = self._parse_using_identifiers()
else:
joins = None
self._retreat(index)
| Error in parsing of `JOIN ... USING` expressions in Snowflake
In Snowflake (maybe in other dialects too), it is valid to specify the column name in a `USING` expression as `TABLE.COL_NAME` (in addition to the more commonly used format of `USING COL_NAME`, without specifying a table).
For example, the following query is syntactically valid in Snowflake:
```sql
SELECT *
FROM T1
LEFT JOIN T2 USING(T1.a)
```
When trying to parse such query with sqlglot an exception is raised:
```
sqlglot.errors.ParseError: Expecting ). Line 3, Col: 22.
SELECT * FROM T1 LEFT JOIN T2 USING(T1.a)
```
**Fully reproducible code snippet**
```python
sqlglot.parse(
sql="""
SELECT *
FROM T1
LEFT JOIN T2 USING(T1.a)
""",
dialect=sqlglot.Dialects.SNOWFLAKE)
```
**Official Documentation**
https://docs.snowflake.com/en/sql-reference/constructs/join | tobymao/sqlglot | diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index 16ef0196..a84d82a7 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -138,6 +138,10 @@ WHERE
self.validate_identity(
"SELECT * FROM DATA AS DATA_L ASOF JOIN DATA AS DATA_R MATCH_CONDITION (DATA_L.VAL > DATA_R.VAL) ON DATA_L.ID = DATA_R.ID"
)
+ self.validate_identity(
+ "SELECT * FROM t1 INNER JOIN t2 USING (t1.col)",
+ "SELECT * FROM t1 INNER JOIN t2 USING (col)",
+ )
self.validate_identity(
"CURRENT_TIMESTAMP - INTERVAL '1 w' AND (1 = 1)",
"CURRENT_TIMESTAMP() - INTERVAL '1 WEEK' AND (1 = 1)",
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 25.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@62ceed2fa3cd7b41919839d837b860f3814fa769#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_snowflake.py::TestSnowflake::test_snowflake"
] | [] | [
"tests/dialects/test_snowflake.py::TestSnowflake::test_alter_set_unset",
"tests/dialects/test_snowflake.py::TestSnowflake::test_copy",
"tests/dialects/test_snowflake.py::TestSnowflake::test_ddl",
"tests/dialects/test_snowflake.py::TestSnowflake::test_describe_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_flatten",
"tests/dialects/test_snowflake.py::TestSnowflake::test_from_changes",
"tests/dialects/test_snowflake.py::TestSnowflake::test_historical_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_match_recognize",
"tests/dialects/test_snowflake.py::TestSnowflake::test_minus",
"tests/dialects/test_snowflake.py::TestSnowflake::test_null_treatment",
"tests/dialects/test_snowflake.py::TestSnowflake::test_parse_like_any",
"tests/dialects/test_snowflake.py::TestSnowflake::test_querying_semi_structured_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_replace",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_substr",
"tests/dialects/test_snowflake.py::TestSnowflake::test_sample",
"tests/dialects/test_snowflake.py::TestSnowflake::test_semi_structured_types",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_columns",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_imported_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_objects",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_primary_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_schemas",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_sequences",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_tables",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_unique_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_users",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_views",
"tests/dialects/test_snowflake.py::TestSnowflake::test_staged_files",
"tests/dialects/test_snowflake.py::TestSnowflake::test_storage_integration",
"tests/dialects/test_snowflake.py::TestSnowflake::test_stored_procedures",
"tests/dialects/test_snowflake.py::TestSnowflake::test_swap",
"tests/dialects/test_snowflake.py::TestSnowflake::test_table_literal",
"tests/dialects/test_snowflake.py::TestSnowflake::test_timestamps",
"tests/dialects/test_snowflake.py::TestSnowflake::test_try_cast",
"tests/dialects/test_snowflake.py::TestSnowflake::test_user_defined_functions",
"tests/dialects/test_snowflake.py::TestSnowflake::test_values"
] | [] | MIT License | 19,222 | 428 | [
"sqlglot/parser.py"
] |
|
tsdat__tsdat-222 | b2792ec429c9666daff1937a1890b1166f1ea549 | 2024-08-08 14:29:16 | b2792ec429c9666daff1937a1890b1166f1ea549 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/tsdat/tsdat/pull/222?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=tsdat) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 92.52%. Comparing base [(`b2792ec`)](https://app.codecov.io/gh/tsdat/tsdat/commit/b2792ec429c9666daff1937a1890b1166f1ea549?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=tsdat) to head [(`c06dc9f`)](https://app.codecov.io/gh/tsdat/tsdat/commit/c06dc9f3ecd86703a12f5a87e4044a982b7b5233?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=tsdat).
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #222 +/- ##
=======================================
Coverage 92.52% 92.52%
=======================================
Files 187 187
Lines 3370 3371 +1
Branches 448 448
=======================================
+ Hits 3118 3119 +1
Misses 125 125
Partials 127 127
```
</details>
[:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/tsdat/tsdat/pull/222?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=tsdat).
:loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=tsdat).
| diff --git a/tsdat/utils/get_filename.py b/tsdat/utils/get_filename.py
index 2e017ed..6d32f2b 100644
--- a/tsdat/utils/get_filename.py
+++ b/tsdat/utils/get_filename.py
@@ -32,6 +32,7 @@ def get_filename(
str: The filename constructed from provided parameters.
---------------------------------------------------------------------------------"""
- substitutions = dict(extension=extension.lstrip("."), title=title)
+ substitutions: dict[str, str] = dict()
substitutions.update(get_fields_from_dataset(dataset))
+ substitutions.update(extension=extension.lstrip("."), title=title)
return FILENAME_TEMPLATE.substitute(substitutions)
| get_filename function replaces the title argument with the dataset's title
**Describe the bug**
Tsdat is failing to overwrite the dataset's title with the `title` value provided when saving a plot file.
example code line: plot_file = get_filename(ds, title="wave_data_plots", extension="png")
In this case, the plot_file should have the title wave_data_plots, but it's being set to "cdip.201.c1.20230706.180000.Directional wave and sea surface temperature measurements collected in situ by Datawell DWR-M3 directional buoy located near SCRIPPS NEARSHORE, CA from 2023/07/06 18:00:00 to 2024/07/29 22:56:40..png", where "Directional wave and sea surface temperature measurements collected in situ by Datawell DWR-M3 directional buoy located near SCRIPPS NEARSHORE, CA from 2023/07/06 18:00:00 to 2024/07/29 22:56:40." is the dataset's title.


FILENAME_TEMPLATE.substitute(substitutions) results in 'cdip.201.c1.20230706.180000.Directional wave and sea surface temperature measurements collected in situ by Datawell DWR-M3 directional buoy located near SCRIPPS NEARSHORE, CA from 2023/07/06 18:00:00 to 2024/07/29 22:56:40..png'
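A minimal sketch of the ordering issue (with illustrative values, not taken from the pipeline): the argument `title` was placed into the substitutions dict first, and the dataset's fields (which include its own `title` attribute) were merged in afterwards, overwriting it. The patch above reverses that order.
```python
# illustrative values only
substitutions = {"extension": "png", "title": "wave_data_plots"}
dataset_fields = {"datastream": "cdip.201.c1", "title": "Directional wave and sea surface temperature measurements ..."}

substitutions.update(dataset_fields)  # the dataset 'title' overwrites the argument
print(substitutions["title"])         # prints the long dataset title, not "wave_data_plots"
```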
Repo: Ingest-Oracle
Pipeline: CDIP
Test: test_cdip_pipeline_rt
Please include the full traceback message (if applicable):
```
E FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\kaur770\\AppData\\Local\\Temp\\tmpi4xp7qye\\cdip.201.c1.20230706.180000.Directional wave and sea surface temperature measurements collected in situ by Datawell DWR-M3 directional buoy located near SCRIPPS NEARSHORE, CA from 2023\\07\\06 18:00:00 to 2024\\07\\29 22:56:40..png'
```
**Environment**
- OS: Windows 11
- `tsdat`: 0.8.5
| tsdat/tsdat | diff --git a/test/utils/test_get_filename.py b/test/utils/test_get_filename.py
new file mode 100644
index 0000000..81eb1e5
--- /dev/null
+++ b/test/utils/test_get_filename.py
@@ -0,0 +1,29 @@
+import pandas as pd
+import pytest
+import xarray as xr
+
+from tsdat.utils.get_filename import get_filename
+
+
[email protected](
+ ("attrs", "ext", "title", "expected"),
+ (
+ (dict(datastream="test"), "nc", None, "test.20240808.000000.nc"),
+ (dict(datastream="test"), "nc", "title", "test.20240808.000000.title.nc"),
+ (dict(datastream="test", title="title"), "nc", None, "test.20240808.000000.nc"),
+ (
+ dict(datastream="test", title="ds_title"),
+ "nc",
+ "arg_title",
+ "test.20240808.000000.arg_title.nc",
+ ),
+ ),
+)
+def test_get_filename(
+ attrs: dict[str, str], ext: str, title: str | None, expected: str
+):
+ dataset = xr.Dataset(
+ coords=dict(time=pd.date_range("2024-08-08", "2024-08-09", freq="1h")),
+ attrs=attrs,
+ )
+ assert get_filename(dataset, ext, title) == expected
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.8 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"coverage",
"mypy",
"black",
"ruff",
"types-PyYAML",
"boto3-stubs[essential]",
"moto[s3,sts]==4.0.1",
"pandas-stubs",
"mkdocs-gen-files",
"mkdocs-literate-nav",
"mkdocs-material",
"mkdocstrings[python]"
],
"pre_install": [
"apt-get update",
"apt-get install -y build-essential"
],
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | act-atmos==2.1.9
adi-py @ file:///opt/conda/conda-bld/adi_py_1697562583152/work
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
arm-test-data==0.0.13
asciitree==0.3.3
async-timeout==5.0.1
attrs==25.3.0
babel==2.17.0
backrefs==5.8
beautifulsoup4==4.13.3
black==25.1.0
boto3==1.37.23
boto3-stubs==1.37.23
botocore==1.37.23
botocore-stubs==1.37.23
Bottleneck==1.4.2
build==1.2.2.post1
certifi==2025.1.31
cffi==1.17.1
cftime==1.6.4.post1
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==3.1.1
cmweather==0.3.2
colorama==0.4.6
contourpy==1.3.1
coverage==7.8.0
cryptography==44.0.2
cycler==0.12.1
Cython @ file:///home/conda/feedstock_root/build_artifacts/cython_1702977794498/work
dask==2025.3.0
distributed==2025.3.0
dunamai==1.9.0
exceptiongroup==1.2.2
fasteners==0.19
fatpack==0.7.8
flexcache==0.3
flexparser==0.4
fonttools==4.56.0
frozenlist==1.5.0
fsspec==2025.3.1
ghp-import==2.1.0
griffe==1.7.1
h5netcdf==1.6.1
h5py==3.13.0
h5pyd==0.18.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
jmespath==1.0.1
joblib==1.4.2
jplephem==2.22
jsonpointer==2.2
kiwisolver==1.4.8
lazy_loader==0.4
locket==1.0.0
lxml==5.3.1
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib==3.10.1
mdurl==0.1.2
mergedeep==1.3.4
MetPy==1.6.3
mhkit==0.8.2
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-gen-files==0.5.0
mkdocs-get-deps==0.2.0
mkdocs-literate-nav==0.6.2
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocstrings==0.29.1
mkdocstrings-python==1.16.8
moto==4.0.1
msgpack==1.1.0
multidict==6.2.0
mypy==1.15.0
mypy-boto3-cloudformation==1.37.22
mypy-boto3-dynamodb==1.37.12
mypy-boto3-ec2==1.37.16
mypy-boto3-lambda==1.37.16
mypy-boto3-rds==1.37.21
mypy-boto3-s3==1.37.0
mypy-boto3-sqs==1.37.0
mypy-extensions==1.0.0
netCDF4==1.6.5
NREL-rex==0.2.98
numcodecs==0.13.1
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1707225380409/work/dist/numpy-1.26.4-cp310-cp310-linux_x86_64.whl#sha256=51131fd8fc130cd168aecaf1bc0ea85f92e8ffebf211772ceb16ac2e7f10d7ca
packaging==24.2
paginate==0.5.7
pandas==2.2.3
pandas-stubs==2.2.3.250308
partd==1.4.2
pathspec==0.12.1
patsy==1.0.1
pecos==0.3.1
pillow==11.1.0
Pint==0.24.4
platformdirs==4.3.7
pluggy==1.5.0
pooch==1.8.2
propcache==0.3.1
psutil==7.0.0
pyarrow==19.0.1
pycparser==2.22
pydantic==1.10.21
Pygments==2.19.1
PyJWT==2.10.1
pymdown-extensions==10.14.3
pyparsing==3.2.3
pyproj==3.7.1
pyproject_hooks==1.2.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyyaml_env_tag==0.1
requests==2.32.3
requests-unixsocket==0.4.1
responses==0.25.7
rich==14.0.0
ruff==0.11.2
s3transfer==0.11.4
scikit-learn==1.6.1
scipy==1.13.1
sgp4==2.24
shellingham==1.5.4
six==1.17.0
skyfield==1.52
sortedcontainers==2.4.0
soupsieve==2.6
statsmodels==0.14.4
tblib==3.0.0
threadpoolctl==3.6.0
toml==0.10.2
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
traitlets==5.14.3
-e git+https://github.com/tsdat/tsdat.git@b2792ec429c9666daff1937a1890b1166f1ea549#egg=tsdat
typer==0.15.2
types-awscrt==0.24.2
types-pytz==2025.2.0.20250326
types-PyYAML==6.0.12.20250326
types-s3transfer==0.11.4
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
watchdog==6.0.0
Werkzeug==2.1.2
xarray==2025.3.1
xmltodict==0.14.2
yarl==1.18.3
zarr==2.18.3
zict==3.0.0
zipp==3.21.0
| name: tsdat
channels:
- arm-doe
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- adi_py=3.21.1=py310h6538b2e_0
- blosc=1.21.6=he440d0b_1
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- curl=8.12.1=h332b0f4_0
- cyrus-sasl=2.1.27=h54b06d7_7
- cython=0.29.37=py310hc6cd4ac_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.4=nompi_h2d575fe_105
- icu=75.1=he02047a_0
- keyutils=1.6.1=h166bdaf_0
- krb5=1.21.3=h659f571_0
- lapack=3.6.1=ha44fe06_2
- ld_impl_linux-64=2.43=h712a8e2_4
- libaec=1.1.3=h59595ed_0
- libarmutils=1.17.0=h9bf148f_0
- libblas=3.9.0=31_h59b9bed_openblas
- libcblas=3.9.0=31_he106b2a_openblas
- libcds3=1.27.0=h9bf148f_0
- libcurl=8.12.1=h332b0f4_0
- libdbconn=1.12.4=h9bf148f_0
- libdsdb3=1.12.1=h9bf148f_0
- libdsproc3=2.71.0=h9bf148f_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgfortran=14.2.0=h69a702a_2
- libgfortran-ng=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgomp=14.2.0=h767d61c_2
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- liblapack=3.9.0=31_h7ac8fdf_openblas
- liblzma=5.6.4=hb9d3cd8_0
- libmsngr=1.12.4=h9bf148f_0
- libncds3=1.16.1=h9bf148f_0
- libnetcdf=4.9.2=nompi_h5ddbaa4_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libntlm=1.8=hb9d3cd8_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libpq=17.4=h27ae623_0
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libtrans=2.7.0=h3218e01_1
- libudunits2=2.2.28=h40f5838_3
- libuuid=2.38.1=h0b41bf4_0
- libxcrypt=4.4.36=hd590300_1
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- lz4-c=1.10.0=h5888daf_1
- ncurses=6.5=h2d0b736_3
- numpy=1.26.4=py310hb13e2d6_0
- openldap=2.6.9=he970967_0
- openssl=3.4.1=h7b32b05_0
- pip=25.0.1=pyh8b19718_0
- postgresql=17.4=h9e3fa73_0
- python=3.10.16=he725a3c_1_cpython
- python_abi=3.10=5_cp310
- readline=8.2=h8c095d6_2
- setuptools=75.8.2=pyhff2d567_0
- snappy=1.2.1=h8bd8927_1
- tk=8.6.13=noxft_h4845f30_101
- tzcode=2025b=hb9d3cd8_0
- udunits2=2.2.28=h40f5838_3
- wheel=0.45.1=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstd=1.5.7=hb8e6e7a_2
- pip:
- act-atmos==2.1.9
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- arm-test-data==0.0.13
- asciitree==0.3.3
- async-timeout==5.0.1
- attrs==25.3.0
- babel==2.17.0
- backrefs==5.8
- beautifulsoup4==4.13.3
- black==25.1.0
- boto3==1.37.23
- boto3-stubs==1.37.23
- botocore==1.37.23
- botocore-stubs==1.37.23
- bottleneck==1.4.2
- build==1.2.2.post1
- certifi==2025.1.31
- cffi==1.17.1
- cftime==1.6.4.post1
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==3.1.1
- cmweather==0.3.2
- colorama==0.4.6
- contourpy==1.3.1
- coverage==7.8.0
- cryptography==44.0.2
- cycler==0.12.1
- dask==2025.3.0
- distributed==2025.3.0
- dunamai==1.9.0
- exceptiongroup==1.2.2
- fasteners==0.19
- fatpack==0.7.8
- flexcache==0.3
- flexparser==0.4
- fonttools==4.56.0
- frozenlist==1.5.0
- fsspec==2025.3.1
- ghp-import==2.1.0
- griffe==1.7.1
- h5netcdf==1.6.1
- h5py==3.13.0
- h5pyd==0.18.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- jmespath==1.0.1
- joblib==1.4.2
- jplephem==2.22
- jsonpointer==2.2
- kiwisolver==1.4.8
- lazy-loader==0.4
- locket==1.0.0
- lxml==5.3.1
- markdown==3.7
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib==3.10.1
- mdurl==0.1.2
- mergedeep==1.3.4
- metpy==1.6.3
- mhkit==0.8.2
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-gen-files==0.5.0
- mkdocs-get-deps==0.2.0
- mkdocs-literate-nav==0.6.2
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocstrings==0.29.1
- mkdocstrings-python==1.16.8
- moto==4.0.1
- msgpack==1.1.0
- multidict==6.2.0
- mypy==1.15.0
- mypy-boto3-cloudformation==1.37.22
- mypy-boto3-dynamodb==1.37.12
- mypy-boto3-ec2==1.37.16
- mypy-boto3-lambda==1.37.16
- mypy-boto3-rds==1.37.21
- mypy-boto3-s3==1.37.0
- mypy-boto3-sqs==1.37.0
- mypy-extensions==1.0.0
- netcdf4==1.6.5
- nrel-rex==0.2.98
- numcodecs==0.13.1
- packaging==24.2
- paginate==0.5.7
- pandas==2.2.3
- pandas-stubs==2.2.3.250308
- partd==1.4.2
- pathspec==0.12.1
- patsy==1.0.1
- pecos==0.3.1
- pillow==11.1.0
- pint==0.24.4
- platformdirs==4.3.7
- pluggy==1.5.0
- pooch==1.8.2
- propcache==0.3.1
- psutil==7.0.0
- pyarrow==19.0.1
- pycparser==2.22
- pydantic==1.10.21
- pygments==2.19.1
- pyjwt==2.10.1
- pymdown-extensions==10.14.3
- pyparsing==3.2.3
- pyproj==3.7.1
- pyproject-hooks==1.2.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- requests==2.32.3
- requests-unixsocket==0.4.1
- responses==0.25.7
- rich==14.0.0
- ruff==0.11.2
- s3transfer==0.11.4
- scikit-learn==1.6.1
- scipy==1.13.1
- sgp4==2.24
- shellingham==1.5.4
- six==1.17.0
- skyfield==1.52
- sortedcontainers==2.4.0
- soupsieve==2.6
- statsmodels==0.14.4
- tblib==3.0.0
- threadpoolctl==3.6.0
- toml==0.10.2
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- traitlets==5.14.3
- tsdat==0.8.6.dev5
- typer==0.15.2
- types-awscrt==0.24.2
- types-pytz==2025.2.0.20250326
- types-pyyaml==6.0.12.20250326
- types-s3transfer==0.11.4
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- watchdog==6.0.0
- werkzeug==2.1.2
- xarray==2025.3.1
- xmltodict==0.14.2
- yarl==1.18.3
- zarr==2.18.3
- zict==3.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/tsdat
| [
"test/utils/test_get_filename.py::test_get_filename[attrs2-nc-None-test.20240808.000000.nc]",
"test/utils/test_get_filename.py::test_get_filename[attrs3-nc-arg_title-test.20240808.000000.arg_title.nc]"
] | [] | [
"test/utils/test_get_filename.py::test_get_filename[attrs0-nc-None-test.20240808.000000.nc]",
"test/utils/test_get_filename.py::test_get_filename[attrs1-nc-title-test.20240808.000000.title.nc]"
] | [] | Simplified BSD 2-Clause License | 19,239 | 151 | [
"tsdat/utils/get_filename.py"
] |
dask__dask-11288 | 1ba9c5bf9d0b005b89a04804f29c16e5f0f2dadf | 2024-08-08 16:40:34 | b7d9bf49f682de8d2ef51f4617e3da782400c290 | GPUtester: Can one of the admins verify this patch?
Admins can comment `ok to test` to allow this one PR to run or `add to allowlist` to allow all future PRs from the same author to run.
lucascolley: cc @jsignell @jakirkham @jrbourbeau from gh-8106
lucascolley: Hmm, it looks like `from_array` does not accept Dask arrays, redirecting to `asarray`, but in `asarray` `dtype` and `order` are ignored. I suppose we want to coerce the dtype and order from `asarray` then.
Should I try using `Array.astype`? And perhaps we don't need to allow changing order?
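For illustration (not part of the PR discussion), a minimal sketch of the `Array.astype` coercion being asked about; the helper name here is made up, and the merged patch below implements essentially this as `_as_dtype`:
```python
import dask.array as da

def coerce_dtype(a, dtype):
    # leave the array untouched when no dtype is requested, otherwise cast lazily
    return a if dtype is None else a.astype(dtype)

x = da.asarray([[1, 2, 3]])
print(coerce_dtype(x, da.float64).dtype)  # float64
print(coerce_dtype(x, None).dtype)        # unchanged (int64 here)
```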
lucascolley: I've hopefully fixed the case of `Array` input with `dtype`. Otherwise, I have added the warning:
```
.. warning::
`dtype` is ignored if `a` has the attribute ``to_dask_array`` or
is a list or tuple of `Array`'s.
`order` is ignored if `a` is an `Array`, has the attribute ``to_dask_array``,
or is a list or tuple of `Array`'s.
```
lucascolley: Currently the warning is below Examples on the docs pages. Not sure if it is possible to move upwards while keeping the admonition. | diff --git a/dask/array/core.py b/dask/array/core.py
index b710fe491..3009dcb11 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -4524,6 +4524,13 @@ def retrieve_from_ooc(
return load_dsk
+def _as_dtype(a, dtype):
+ if dtype is None:
+ return a
+ else:
+ return a.astype(dtype)
+
+
def asarray(
a, allow_unknown_chunksizes=False, dtype=None, order=None, *, like=None, **kwargs
):
@@ -4573,16 +4580,22 @@ def asarray(
>>> y = [[1, 2, 3], [4, 5, 6]]
>>> da.asarray(y)
dask.array<array, shape=(2, 3), dtype=int64, chunksize=(2, 3), chunktype=numpy.ndarray>
+
+ .. warning::
+ `order` is ignored if `a` is an `Array`, has the attribute ``to_dask_array``,
+ or is a list or tuple of `Array`'s.
"""
if like is None:
if isinstance(a, Array):
- return a
+ return _as_dtype(a, dtype)
elif hasattr(a, "to_dask_array"):
- return a.to_dask_array()
+ return _as_dtype(a.to_dask_array(), dtype)
elif type(a).__module__.split(".")[0] == "xarray" and hasattr(a, "data"):
- return asarray(a.data)
+ return _as_dtype(asarray(a.data, order=order), dtype)
elif isinstance(a, (list, tuple)) and any(isinstance(i, Array) for i in a):
- return stack(a, allow_unknown_chunksizes=allow_unknown_chunksizes)
+ return _as_dtype(
+ stack(a, allow_unknown_chunksizes=allow_unknown_chunksizes), dtype
+ )
elif not isinstance(getattr(a, "shape", None), Iterable):
a = np.asarray(a, dtype=dtype, order=order)
else:
@@ -4641,16 +4654,20 @@ def asanyarray(a, dtype=None, order=None, *, like=None, inline_array=False):
>>> y = [[1, 2, 3], [4, 5, 6]]
>>> da.asanyarray(y)
dask.array<array, shape=(2, 3), dtype=int64, chunksize=(2, 3), chunktype=numpy.ndarray>
+
+ .. warning::
+ `order` is ignored if `a` is an `Array`, has the attribute ``to_dask_array``,
+ or is a list or tuple of `Array`'s.
"""
if like is None:
if isinstance(a, Array):
- return a
+ return _as_dtype(a, dtype)
elif hasattr(a, "to_dask_array"):
- return a.to_dask_array()
+ return _as_dtype(a.to_dask_array(), dtype)
elif type(a).__module__.split(".")[0] == "xarray" and hasattr(a, "data"):
- return asanyarray(a.data)
+ return _as_dtype(asarray(a.data, order=order), dtype)
elif isinstance(a, (list, tuple)) and any(isinstance(i, Array) for i in a):
- return stack(a)
+ return _as_dtype(stack(a), dtype)
elif not isinstance(getattr(a, "shape", None), Iterable):
a = np.asanyarray(a, dtype=dtype, order=order)
else:
| BUG: `array.asarray` does not respect `dtype` arg
**Describe the issue**:
`dask.array.asarray` does not respect the `dtype` argument.
**Minimal Complete Verifiable Example**:
```python
>>> import numpy as np
>>> import dask.array as da
>>> Zm = da.asarray([[1, 2, 3]])
>>> Zm
dask.array<array, shape=(1, 3), dtype=int64, chunksize=(1, 3), chunktype=numpy.ndarray>
>>> Z = da.asarray(Zm, dtype=da.float64)
>>> Z
dask.array<array, shape=(1, 3), dtype=int64, chunksize=(1, 3), chunktype=numpy.ndarray>
>>> Z.compute().dtype
dtype('int64')
# same issue is present with `np` dtypes directly
>>> Z = da.asarray(Zm, dtype=np.float64)
>>> Z
dask.array<array, shape=(1, 3), dtype=int64, chunksize=(1, 3), chunktype=numpy.ndarray>
>>> Z.compute().dtype
dtype('int64')
```
**Anything else we need to know?**:
**Environment**:
- Dask version: 2024.8.0+3.g65270980
- Python version: 3.12.4
- Operating System: Ubuntu
- Install method (conda, pip, source): `python -m pip install git+https://github.com/dask/dask.git`
| dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index 70eeb0a9f..d975cfced 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -2704,6 +2704,18 @@ def test_asarray(asarray):
assert_eq(asarray(y), x)
[email protected]("asarray", [da.asarray, da.asanyarray])
+def test_asarray_array_dtype(asarray):
+ # test array input
+ x = asarray([1, 2])
+ assert_eq(asarray(x, dtype=da.float32), np.asarray(x, dtype=np.float32))
+
+ x = asarray(x, dtype=da.float64)
+ assert x.dtype == da.float64
+ x = asarray(x, dtype=da.int32)
+ assert x.dtype == da.int32
+
+
@pytest.mark.parametrize("asarray", [da.asarray, da.asanyarray])
def test_asarray_dask_dataframe(asarray):
# https://github.com/dask/dask/issues/3885
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2024.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pip",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-rerunfailures",
"pytest-timeout",
"pytest-xdist"
],
"pre_install": null,
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bokeh==3.7.2
click==8.1.8
cloudpickle==3.1.1
contourpy==1.3.1
coverage==7.8.0
-e git+https://github.com/dask/dask.git@1ba9c5bf9d0b005b89a04804f29c16e5f0f2dadf#egg=dask
dask-expr==1.1.10
distributed==2024.8.0
exceptiongroup==1.2.2
execnet==2.1.1
fsspec==2025.3.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
locket==1.0.0
lz4==4.4.3
MarkupSafe==3.0.2
msgpack==1.1.0
narwhals==1.32.0
numpy==2.2.4
packaging==24.2
pandas==2.2.3
partd==1.4.2
pillow==11.1.0
pluggy==1.5.0
psutil==7.0.0
pyarrow==19.0.1
pyarrow-hotfix==0.6
pytest==8.3.5
pytest-cov==6.0.0
pytest-rerunfailures==15.0
pytest-timeout==2.3.1
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
six==1.17.0
sortedcontainers==2.4.0
tblib==3.1.0
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
tzdata==2025.2
urllib3==2.3.0
xyzservices==2025.1.0
zict==3.0.0
zipp==3.21.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bokeh==3.7.2
- click==8.1.8
- cloudpickle==3.1.1
- contourpy==1.3.1
- coverage==7.8.0
- dask==2024.8.0+5.g1ba9c5bf9
- dask-expr==1.1.10
- distributed==2024.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- fsspec==2025.3.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- locket==1.0.0
- lz4==4.4.3
- markupsafe==3.0.2
- msgpack==1.1.0
- narwhals==1.32.0
- numpy==2.2.4
- packaging==24.2
- pandas==2.2.3
- partd==1.4.2
- pillow==11.1.0
- pluggy==1.5.0
- psutil==7.0.0
- pyarrow==19.0.1
- pyarrow-hotfix==0.6
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-rerunfailures==15.0
- pytest-timeout==2.3.1
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- six==1.17.0
- sortedcontainers==2.4.0
- tblib==3.1.0
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- tzdata==2025.2
- urllib3==2.3.0
- xyzservices==2025.1.0
- zict==3.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_asarray_array_dtype[asarray]",
"dask/array/tests/test_array_core.py::test_asarray_array_dtype[asanyarray]"
] | [] | [
"dask/array/tests/test_array_core.py::test_graph_from_arraylike[True]",
"dask/array/tests/test_array_core.py::test_graph_from_arraylike[False]",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_top_literals",
"dask/array/tests/test_array_core.py::test_blockwise_literals",
"dask/array/tests/test_array_core.py::test_blockwise_1_in_shape_I",
"dask/array/tests/test_array_core.py::test_blockwise_1_in_shape_II",
"dask/array/tests/test_array_core.py::test_blockwise_1_in_shape_III",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_Array_numpy_gufunc_call__array_ufunc__01",
"dask/array/tests/test_array_core.py::test_Array_numpy_gufunc_call__array_ufunc__02",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_stack_zero_size",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_stack_promote_type",
"dask/array/tests/test_array_core.py::test_stack_rechunk",
"dask/array/tests/test_array_core.py::test_stack_unknown_chunksizes",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_types[dtypes0]",
"dask/array/tests/test_array_core.py::test_concatenate_types[dtypes1]",
"dask/array/tests/test_array_core.py::test_concatenate_unknown_axes",
"dask/array/tests/test_array_core.py::test_concatenate_flatten",
"dask/array/tests/test_array_core.py::test_concatenate_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_concatenate_zero_size",
"dask/array/tests/test_array_core.py::test_block_simple_row_wise",
"dask/array/tests/test_array_core.py::test_block_simple_column_wise",
"dask/array/tests/test_array_core.py::test_block_with_1d_arrays_row_wise",
"dask/array/tests/test_array_core.py::test_block_with_1d_arrays_multiple_rows",
"dask/array/tests/test_array_core.py::test_block_with_1d_arrays_column_wise",
"dask/array/tests/test_array_core.py::test_block_mixed_1d_and_2d",
"dask/array/tests/test_array_core.py::test_block_complicated",
"dask/array/tests/test_array_core.py::test_block_nested",
"dask/array/tests/test_array_core.py::test_block_3d",
"dask/array/tests/test_array_core.py::test_block_with_mismatched_shape",
"dask/array/tests/test_array_core.py::test_block_no_lists",
"dask/array/tests/test_array_core.py::test_block_invalid_nesting",
"dask/array/tests/test_array_core.py::test_block_empty_lists",
"dask/array/tests/test_array_core.py::test_block_tuple",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_elemwise_dtype",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_matmul",
"dask/array/tests/test_array_core.py::test_matmul_array_ufunc",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_broadcast_to_array",
"dask/array/tests/test_array_core.py::test_broadcast_to_scalar",
"dask/array/tests/test_array_core.py::test_broadcast_to_chunks",
"dask/array/tests/test_array_core.py::test_broadcast_arrays",
"dask/array/tests/test_array_core.py::test_broadcast_arrays_uneven_chunks",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape0-v_shape0]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape1-v_shape1]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape2-v_shape2]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape3-v_shape3]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape4-v_shape4]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape5-v_shape5]",
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape6-v_shape6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape31-new_shape31-chunks31]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape32-new_shape32-chunks32]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape33-new_shape33-chunks33]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape34-new_shape34-chunks34]",
"dask/array/tests/test_array_core.py::test_reshape_exceptions",
"dask/array/tests/test_array_core.py::test_reshape_splat",
"dask/array/tests/test_array_core.py::test_reshape_not_implemented_error",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_block_info",
"dask/array/tests/test_array_core.py::test_map_blocks_block_info_with_new_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_block_info_with_drop_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_block_info_with_broadcast",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_map_blocks_infer_chunks_broadcast",
"dask/array/tests/test_array_core.py::test_map_blocks_with_chunks",
"dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference",
"dask/array/tests/test_array_core.py::test_map_blocks_infer_newaxis",
"dask/array/tests/test_array_core.py::test_map_blocks_no_array_args",
"dask/array/tests/test_array_core.py::test_map_blocks_unique_name_chunks_dtype",
"dask/array/tests/test_array_core.py::test_map_blocks_unique_name_drop_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_unique_name_new_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_optimize_blockwise[<lambda>0]",
"dask/array/tests/test_array_core.py::test_map_blocks_optimize_blockwise[<lambda>1]",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_repr_html_array_highlevelgraph",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_slicing_flexible_type",
"dask/array/tests/test_array_core.py::test_slicing_with_object_dtype",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_bool",
"dask/array/tests/test_array_core.py::test_store_kwargs",
"dask/array/tests/test_array_core.py::test_store_delayed_target",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_regions",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_nocompute_regions",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_store_method_return",
"dask/array/tests/test_array_core.py::test_store_deterministic_keys[False-False]",
"dask/array/tests/test_array_core.py::test_store_deterministic_keys[False-True]",
"dask/array/tests/test_array_core.py::test_store_deterministic_keys[True-False]",
"dask/array/tests/test_array_core.py::test_store_deterministic_keys[True-True]",
"dask/array/tests/test_array_core.py::test_to_dask_dataframe",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_astype_gh9318",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getter",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_itemsize",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_from_array_with_lock[True]",
"dask/array/tests/test_array_core.py::test_from_array_with_lock[False]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[True-x0-chunks0]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[True-x1--1]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[True-x2-1]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[True-x3-1]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[False-x0-chunks0]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[False-x1--1]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[False-x2-1]",
"dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter[False-x3-1]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x0]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x1]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x2]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x3]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x4]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_onechunk[x5]",
"dask/array/tests/test_array_core.py::test_from_array_ndarray_getitem",
"dask/array/tests/test_array_core.py::test_from_array_list[x0]",
"dask/array/tests/test_array_core.py::test_from_array_list[x1]",
"dask/array/tests/test_array_core.py::test_from_array_list[x2]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[bool0]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[bytes]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[complex]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[float]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[int]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[bool1]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[bytes_]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[clongdouble]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[complex128]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[complex64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[datetime64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[float16]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[float32]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[float64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[int16]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[int32]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[int64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[int8]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[longdouble]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[longlong]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[object_]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[str_]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[timedelta64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[uint16]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[uint32]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[uint64]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[uint8]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[ulonglong]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[void]",
"dask/array/tests/test_array_core.py::test_from_array_scalar[str]",
"dask/array/tests/test_array_core.py::test_from_array_no_asarray[True-True-ndarray]",
"dask/array/tests/test_array_core.py::test_from_array_no_asarray[True-False-matrix]",
"dask/array/tests/test_array_core.py::test_from_array_no_asarray[False-True-ndarray]",
"dask/array/tests/test_array_core.py::test_from_array_no_asarray[False-False-matrix]",
"dask/array/tests/test_array_core.py::test_from_array_getitem[True-True]",
"dask/array/tests/test_array_core.py::test_from_array_getitem[True-False]",
"dask/array/tests/test_array_core.py::test_from_array_getitem[False-True]",
"dask/array/tests/test_array_core.py::test_from_array_getitem[False-False]",
"dask/array/tests/test_array_core.py::test_from_array_minus_one",
"dask/array/tests/test_array_core.py::test_array_copy_noop[-1]",
"dask/array/tests/test_array_core.py::test_array_copy_noop[2]",
"dask/array/tests/test_array_core.py::test_from_array_dask_array",
"dask/array/tests/test_array_core.py::test_from_array_dask_collection_warns",
"dask/array/tests/test_array_core.py::test_from_array_inline",
"dask/array/tests/test_array_core.py::test_asarray[asarray]",
"dask/array/tests/test_array_core.py::test_asarray[asanyarray]",
"dask/array/tests/test_array_core.py::test_asarray_dask_dataframe[asarray]",
"dask/array/tests/test_array_core.py::test_asarray_dask_dataframe[asanyarray]",
"dask/array/tests/test_array_core.py::test_asarray_chunks",
"dask/array/tests/test_array_core.py::test_asanyarray",
"dask/array/tests/test_array_core.py::test_asanyarray_dataframe",
"dask/array/tests/test_array_core.py::test_asanyarray_datetime64",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_align_chunks_to_previous_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_slice_with_integer_types[int32]",
"dask/array/tests/test_array_core.py::test_slice_with_integer_types[int64]",
"dask/array/tests/test_array_core.py::test_slice_with_integer_types[uint32]",
"dask/array/tests/test_array_core.py::test_slice_with_integer_types[uint64]",
"dask/array/tests/test_array_core.py::test_index_with_integer_types[int]",
"dask/array/tests/test_array_core.py::test_index_with_integer_types[int32]",
"dask/array/tests/test_array_core.py::test_index_with_integer_types[int64]",
"dask/array/tests/test_array_core.py::test_index_with_integer_types[uint32]",
"dask/array/tests/test_array_core.py::test_index_with_integer_types[uint64]",
"dask/array/tests/test_array_core.py::test_vindex_basic",
"dask/array/tests/test_array_core.py::test_vindex_nd",
"dask/array/tests/test_array_core.py::test_vindex_negative",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_vindex_identity",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_map_blocks_with_negative_drop_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_with_invalid_drop_axis",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension_and_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_to_delayed_optimize_graph",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_from_delayed_meta",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy_mutate",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_map_blocks_token_deprecated",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_array_picklable[array0]",
"dask/array/tests/test_array_core.py::test_array_picklable[array1]",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_array_core.py::test_concatenate_axes",
"dask/array/tests/test_array_core.py::test_blockwise_concatenate",
"dask/array/tests/test_array_core.py::test_common_blockdim",
"dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly",
"dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks",
"dask/array/tests/test_array_core.py::test_uneven_chunks_blockwise",
"dask/array/tests/test_array_core.py::test_warn_bad_rechunking",
"dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn",
"dask/array/tests/test_array_core.py::test_map_blocks_delayed",
"dask/array/tests/test_array_core.py::test_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_2d",
"dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks",
"dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_1d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_2d",
"dask/array/tests/test_array_core.py::test_setitem_1d",
"dask/array/tests/test_array_core.py::test_setitem_masked",
"dask/array/tests/test_array_core.py::test_setitem_hardmask",
"dask/array/tests/test_array_core.py::test_setitem_slice_twice",
"dask/array/tests/test_array_core.py::test_setitem_2d",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_0d",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index0--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index1--2]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index2--3]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index3-value3]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index4--4]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index5-value5]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index6--5]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index7--6]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index8--4]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index9--5]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index10-value10]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_1d[index11-value11]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index0--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index1--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index2--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index3--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index4--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[5--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index6-value6]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[3-value7]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index8-value8]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index9-value9]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index10-value10]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index11-value11]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index12-value12]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index13-value13]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index14--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index15--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index16--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index17--1]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index18-value18]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index19--99]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index20-value20]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index21--98]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d[index22-value22]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d_rhs_func_of_lhs",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d_mask[index0-value0]",
"dask/array/tests/test_array_core.py::test_setitem_extended_API_2d_mask[index1-value1]",
"dask/array/tests/test_array_core.py::test_setitem_on_read_only_blocks",
"dask/array/tests/test_array_core.py::test_setitem_errs",
"dask/array/tests/test_array_core.py::test_zero_slice_dtypes",
"dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk",
"dask/array/tests/test_array_core.py::test_blockwise_zero_shape",
"dask/array/tests/test_array_core.py::test_blockwise_zero_shape_new_axes",
"dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape",
"dask/array/tests/test_array_core.py::test_from_array_name",
"dask/array/tests/test_array_core.py::test_concatenate_errs",
"dask/array/tests/test_array_core.py::test_stack_errs",
"dask/array/tests/test_array_core.py::test_blockwise_with_numpy_arrays",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]",
"dask/array/tests/test_array_core.py::test_constructor_plugin",
"dask/array/tests/test_array_core.py::test_no_warnings_on_metadata",
"dask/array/tests/test_array_core.py::test_delayed_array_key_hygeine",
"dask/array/tests/test_array_core.py::test_empty_chunks_in_array_len",
"dask/array/tests/test_array_core.py::test_meta[None]",
"dask/array/tests/test_array_core.py::test_meta[dtype1]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[100-10-expected0]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[20-10-expected1]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[20-5-expected2]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[24-5-expected3]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[23-5-expected4]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_1d[1000-167-expected5]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_2d[shape0-chunks0-20-expected0]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_2d[shape1-chunks1-20-expected1]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_2d[shape2-auto-10-expected2]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_auto_3d",
"dask/array/tests/test_array_core.py::test_constructors_chunks_dict",
"dask/array/tests/test_array_core.py::test_from_array_chunks_dict",
"dask/array/tests/test_array_core.py::test_normalize_chunks_object_dtype[object]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_object_dtype[dtype1]",
"dask/array/tests/test_array_core.py::test_normalize_chunks_tuples_of_tuples",
"dask/array/tests/test_array_core.py::test_normalize_chunks_nan",
"dask/array/tests/test_array_core.py::test_pandas_from_dask_array",
"dask/array/tests/test_array_core.py::test_regular_chunks[data0]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data1]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data2]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data3]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data4]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data5]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data6]",
"dask/array/tests/test_array_core.py::test_regular_chunks[data7]",
"dask/array/tests/test_array_core.py::test_blockview",
"dask/array/tests/test_array_core.py::test_blocks_indexer",
"dask/array/tests/test_array_core.py::test_partitions_indexer",
"dask/array/tests/test_array_core.py::test_3851",
"dask/array/tests/test_array_core.py::test_3925",
"dask/array/tests/test_array_core.py::test_map_blocks_large_inputs_delayed",
"dask/array/tests/test_array_core.py::test_blockwise_large_inputs_delayed",
"dask/array/tests/test_array_core.py::test_slice_reversed",
"dask/array/tests/test_array_core.py::test_map_blocks_chunks",
"dask/array/tests/test_array_core.py::test_nbytes_auto",
"dask/array/tests/test_array_core.py::test_no_warnings_from_blockwise",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_2d_array",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_3d_array",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_rechunk",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_to_svg",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_concatenate",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_reduction",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_reshape",
"dask/array/tests/test_array_core.py::test_compute_chunk_sizes_warning_fixes_slicing",
"dask/array/tests/test_array_core.py::test_rechunk_auto",
"dask/array/tests/test_array_core.py::test_chunk_assignment_invalidates_cached_properties",
"dask/array/tests/test_array_core.py::test_dask_layers",
"dask/array/tests/test_array_core.py::test_len_object_with_unknown_size",
"dask/array/tests/test_array_core.py::test_chunk_shape_broadcast[0]",
"dask/array/tests/test_array_core.py::test_chunk_shape_broadcast[1]",
"dask/array/tests/test_array_core.py::test_chunk_shape_broadcast[3]",
"dask/array/tests/test_array_core.py::test_chunk_shape_broadcast[8]",
"dask/array/tests/test_array_core.py::test_chunk_non_array_like",
"dask/array/tests/test_array_core.py::test_to_backend",
"dask/array/tests/test_array_core.py::test_load_store_chunk"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,242 | 826 | [
"dask/array/core.py"
] |
tobymao__sqlglot-3891 | 2cac14f480dcaf458b1eb36b694770ce24f56e61 | 2024-08-09 18:37:25 | e0cd7e20298f84dc245676ecded6f174cf1c9c3e | diff --git a/sqlglot/dialects/snowflake.py b/sqlglot/dialects/snowflake.py
index 58fdb620..981dba15 100644
--- a/sqlglot/dialects/snowflake.py
+++ b/sqlglot/dialects/snowflake.py
@@ -406,6 +406,26 @@ class Snowflake(Dialect):
),
}
+ def _negate_range(
+ self, this: t.Optional[exp.Expression] = None
+ ) -> t.Optional[exp.Expression]:
+ if not this:
+ return this
+
+ query = this.args.get("query")
+ if isinstance(this, exp.In) and isinstance(query, exp.Query):
+ # Snowflake treats `value NOT IN (subquery)` as `VALUE <> ALL (subquery)`, so
+ # we do this conversion here to avoid parsing it into `NOT value IN (subquery)`
+ # which can produce different results (most likely a SnowFlake bug).
+ #
+ # https://docs.snowflake.com/en/sql-reference/functions/in
+ # Context: https://github.com/tobymao/sqlglot/issues/3890
+ return self.expression(
+ exp.NEQ, this=this.this, expression=exp.All(this=query.unnest())
+ )
+
+ return self.expression(exp.Not, this=this)
+
def _parse_with_constraint(self) -> t.Optional[exp.Expression]:
if self._prev.token_type != TokenType.WITH:
self._retreat(self._index - 1)
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index 5d69ced5..12b71a67 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -4241,13 +4241,19 @@ class Parser(metaclass=_Parser):
this = self.expression(exp.Not, this=this)
if negate:
- this = self.expression(exp.Not, this=this)
+ this = self._negate_range(this)
if self._match(TokenType.IS):
this = self._parse_is(this)
return this
+ def _negate_range(self, this: t.Optional[exp.Expression] = None) -> t.Optional[exp.Expression]:
+ if not this:
+ return this
+
+ return self.expression(exp.Not, this=this)
+
def _parse_is(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
index = self._index - 1
negate = self._match(TokenType.NOT)
| [Snowflake]: Changed place of 'NOT' in 'WHERE' statement
SQLGlot implicitly changes where 'NOT' is placed in the 'WHERE' clause. Such a change may result in a different output of the query. I would assume that with the same dialect provided on both ends, the same (logically equivalent) query would be produced.
Code to reproduce:
```
from sqlglot import parse_one
if __name__ == '__main__':
before_sql = "SELECT * FROM table WHERE column NOT IN (1, 2, 3)"
expression = parse_one(before_sql, dialect='snowflake')
after_sql = expression.sql(dialect='snowflake')
assert before_sql == after_sql, f"\n{before_sql}\nIS NOT THE SAME AS\n{after_sql}"
```
Output:
```
AssertionError:
SELECT * FROM table WHERE column NOT IN (1, 2, 3)
IS NOT THE SAME AS
SELECT * FROM table WHERE NOT column IN (1, 2, 3)
```
Screenshot:
<img width="1143" alt="Zrzut ekranu 2024-08-9 o 13 35 33" src="https://github.com/user-attachments/assets/d45472b0-68c7-4310-8a22-66eb163f1ae2">
Official Snowflake documentation does not cover that case. | tobymao/sqlglot | diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
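For context, a minimal sketch (illustrative only, not part of the recorded patch) of the round-trip behavior the fix aims for, assuming a sqlglot build that includes the `_negate_range` hook: the literal-list case stays a plain negation, while the subquery case is rewritten to `<> ALL`, matching the expectations added in the test patch below.
```
from sqlglot import parse_one

def snowflake_roundtrip(sql: str) -> str:
    # Parse and re-generate with the Snowflake dialect on both ends.
    return parse_one(sql, dialect="snowflake").sql(dialect="snowflake")

# Literal list: still a plain negation, semantically equivalent in Snowflake.
print(snowflake_roundtrip("SELECT * FROM s WHERE c NOT IN (1, 2, 3)"))
# SELECT * FROM s WHERE NOT c IN (1, 2, 3)

# Subquery: rewritten to <> ALL to preserve Snowflake's NOT IN semantics.
print(snowflake_roundtrip("SELECT * FROM s WHERE c NOT IN (SELECT * FROM t)"))
# SELECT * FROM s WHERE c <> ALL (SELECT * FROM t)
```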
index f62c72b7..e8e86a43 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -138,6 +138,14 @@ WHERE
self.validate_identity(
"SELECT * FROM DATA AS DATA_L ASOF JOIN DATA AS DATA_R MATCH_CONDITION (DATA_L.VAL > DATA_R.VAL) ON DATA_L.ID = DATA_R.ID"
)
+ self.validate_identity(
+ "SELECT * FROM s WHERE c NOT IN (1, 2, 3)",
+ "SELECT * FROM s WHERE NOT c IN (1, 2, 3)",
+ )
+ self.validate_identity(
+ "SELECT * FROM s WHERE c NOT IN (SELECT * FROM t)",
+ "SELECT * FROM s WHERE c <> ALL (SELECT * FROM t)",
+ )
self.validate_identity(
"SELECT * FROM t1 INNER JOIN t2 USING (t1.col)",
"SELECT * FROM t1 INNER JOIN t2 USING (col)",
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 25.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@2cac14f480dcaf458b1eb36b694770ce24f56e61#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_snowflake.py::TestSnowflake::test_snowflake"
] | [] | [
"tests/dialects/test_snowflake.py::TestSnowflake::test_alter_set_unset",
"tests/dialects/test_snowflake.py::TestSnowflake::test_copy",
"tests/dialects/test_snowflake.py::TestSnowflake::test_ddl",
"tests/dialects/test_snowflake.py::TestSnowflake::test_describe_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_flatten",
"tests/dialects/test_snowflake.py::TestSnowflake::test_from_changes",
"tests/dialects/test_snowflake.py::TestSnowflake::test_historical_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_match_recognize",
"tests/dialects/test_snowflake.py::TestSnowflake::test_minus",
"tests/dialects/test_snowflake.py::TestSnowflake::test_null_treatment",
"tests/dialects/test_snowflake.py::TestSnowflake::test_parse_like_any",
"tests/dialects/test_snowflake.py::TestSnowflake::test_querying_semi_structured_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_replace",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_substr",
"tests/dialects/test_snowflake.py::TestSnowflake::test_sample",
"tests/dialects/test_snowflake.py::TestSnowflake::test_semi_structured_types",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_columns",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_imported_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_objects",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_primary_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_schemas",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_sequences",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_tables",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_unique_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_users",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_views",
"tests/dialects/test_snowflake.py::TestSnowflake::test_staged_files",
"tests/dialects/test_snowflake.py::TestSnowflake::test_storage_integration",
"tests/dialects/test_snowflake.py::TestSnowflake::test_stored_procedures",
"tests/dialects/test_snowflake.py::TestSnowflake::test_swap",
"tests/dialects/test_snowflake.py::TestSnowflake::test_table_literal",
"tests/dialects/test_snowflake.py::TestSnowflake::test_timestamps",
"tests/dialects/test_snowflake.py::TestSnowflake::test_try_cast",
"tests/dialects/test_snowflake.py::TestSnowflake::test_user_defined_functions",
"tests/dialects/test_snowflake.py::TestSnowflake::test_values"
] | [] | MIT License | 19,256 | 595 | [
"sqlglot/dialects/snowflake.py",
"sqlglot/parser.py"
] |
|
psf__pyperf-199 | c6c33d9db2f298b9fde575361055643912551cf1 | 2024-08-09 19:05:45 | c6c33d9db2f298b9fde575361055643912551cf1 | diff --git a/pyperf/_formatter.py b/pyperf/_formatter.py
index 767faf2..6c3a1ee 100644
--- a/pyperf/_formatter.py
+++ b/pyperf/_formatter.py
@@ -30,9 +30,9 @@ def format_filesize(size):
return '%.0f byte' % size
if size > 10 * 1024 * 1024:
- return '%.1f MB' % (size / (1024.0 * 1024.0))
+ return '%.1f MiB' % (size / (1024.0 * 1024.0))
- return '%.1f kB' % (size / 1024.0)
+ return '%.1f KiB' % (size / 1024.0)
def format_filesizes(sizes):
| Minor issue regarding memory units (kB vs KiB)
The units in the output when `track-memory` is enabled say `kB` and `MB`. However, as far as I can tell, the correct units are KiB and MiB? It's a minor issue, and might not be worth changing due to the hassle of changing the output for downstream applications, but I found it confusing at first that the summary didn't match up with the actual numbers in the JSON file.
I wasn't sure if it was worth opening an issue for something so minor, but I thought it might be worth making a conscious decision on whether to use precise SI units.
Sorry if there's already been a discussion on this; I tried looking but couldn't find anything. | psf/pyperf
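As an aside, a short sketch (not pyperf's full implementation; the small-size branch is elided) of why the binary labels are the accurate ones: the formatter divides by powers of 1024, so KiB/MiB describe the numbers exactly.
```
def format_filesize(size: float) -> str:
    # Values are divided by powers of 1024, so KiB/MiB are the accurate labels.
    if size > 10 * 1024 * 1024:
        return '%.1f MiB' % (size / (1024.0 * 1024.0))
    return '%.1f KiB' % (size / 1024.0)

print(format_filesize(10 * 1024))           # 10.0 KiB
print(format_filesize(12.4 * 1024 * 1024))  # 12.4 MiB
```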
index 58a2481..0e08304 100644
--- a/pyperf/tests/test_metadata.py
+++ b/pyperf/tests/test_metadata.py
@@ -55,7 +55,7 @@ class CpuFunctionsTests(unittest.TestCase):
stepping\t: 9
microcode\t: 0x1c
cpu MHz\t\t: 1287.554
- cache size\t: 4096 KB
+ cache size\t: 4096 KiB
physical id\t: 0
siblings\t: 4
core id\t\t: 0
@@ -82,7 +82,7 @@ class CpuFunctionsTests(unittest.TestCase):
stepping\t: 9
microcode\t: 0x1c
cpu MHz\t\t: 1225.363
- cache size\t: 4096 KB
+ cache size\t: 4096 KiB
physical id\t: 0
siblings\t: 4
core id\t\t: 0
@@ -109,7 +109,7 @@ class CpuFunctionsTests(unittest.TestCase):
stepping\t: 9
microcode\t: 0x1c
cpu MHz\t\t: 1200.101
- cache size\t: 4096 KB
+ cache size\t: 4096 KiB
physical id\t: 0
siblings\t: 4
core id\t\t: 1
diff --git a/pyperf/tests/test_perf_cli.py b/pyperf/tests/test_perf_cli.py
index 57f1dab..6423c52 100644
--- a/pyperf/tests/test_perf_cli.py
+++ b/pyperf/tests/test_perf_cli.py
@@ -556,13 +556,13 @@ class TestPerfCLI(BaseTestCase, unittest.TestCase):
def test_dump_track_memory(self):
expected = """
Run 1: calibrate the number of loops: 2^15
- - calibrate 1: 7188.0 kB (loops: 2^15)
+ - calibrate 1: 7188.0 KiB (loops: 2^15)
Run 2: 0 warmups, 1 value, 2^15 loops
- - value 1: 7188.0 kB
+ - value 1: 7188.0 KiB
Run 3: 0 warmups, 1 value, 2^15 loops
- - value 1: 7192.0 kB
+ - value 1: 7192.0 KiB
Run 4: 0 warmups, 1 value, 2^15 loops
- - value 1: 7208.0 kB
+ - value 1: 7208.0 KiB
"""
filename = os.path.join(TESTDIR, 'track_memory.json')
stdout = self.run_command('dump', filename)
@@ -595,7 +595,7 @@ class TestPerfCLI(BaseTestCase, unittest.TestCase):
date: 2016-10-21 03:14:19.670631
duration: 338 ms
load_avg_1min: 0.29
- mem_max_rss: 13.4 MB
+ mem_max_rss: 13.4 MiB
runnable_threads: 1
uptime: 2 day 2 hour 4 min
Run 2: 1 warmup, 3 values, 8 loops
@@ -609,7 +609,7 @@ class TestPerfCLI(BaseTestCase, unittest.TestCase):
date: 2016-10-21 03:14:20.496710
duration: 723 ms
load_avg_1min: 0.29
- mem_max_rss: 13.5 MB
+ mem_max_rss: 13.5 MiB
runnable_threads: 1
uptime: 2 day 2 hour 4 min
"""
@@ -689,7 +689,7 @@ class TestPerfCLI(BaseTestCase, unittest.TestCase):
'[1,2]*1000',
'-o', tmp_name)
bench = pyperf.Benchmark.load(tmp_name)
-
+
self._check_track_memory_bench(bench, loops=5)
def test_track_memory(self):
diff --git a/pyperf/tests/test_utils.py b/pyperf/tests/test_utils.py
index 85e3afb..edf8549 100644
--- a/pyperf/tests/test_utils.py
+++ b/pyperf/tests/test_utils.py
@@ -146,9 +146,9 @@ class TestUtils(unittest.TestCase):
self.assertEqual(format_filesize(1),
'1 byte')
self.assertEqual(format_filesize(10 * 1024),
- '10.0 kB')
+ '10.0 KiB')
self.assertEqual(format_filesize(12.4 * 1024 * 1024),
- '12.4 MB')
+ '12.4 MiB')
def test_get_python_names(self):
self.assertEqual(utils.get_python_names('/usr/bin/python3.6',
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"tox",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
distlib==0.3.9
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
psutil==7.0.0
-e git+https://github.com/psf/pyperf.git@c6c33d9db2f298b9fde575361055643912551cf1#egg=pyperf
pyproject-api==1.9.0
pytest @ file:///croot/pytest_1738938843180/work
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: pyperf
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- distlib==0.3.9
- filelock==3.18.0
- platformdirs==4.3.7
- psutil==7.0.0
- pyperf==2.7.0
- pyproject-api==1.9.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/pyperf
| [
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_dump_track_memory",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_dump_verbose",
"pyperf/tests/test_utils.py::TestUtils::test_format_filesize"
] | [] | [
"pyperf/tests/test_metadata.py::TestMetadata::test_collect_cpu_affinity",
"pyperf/tests/test_metadata.py::TestMetadata::test_collect_metadata",
"pyperf/tests/test_metadata.py::CpuFunctionsTests::test_cpu_config",
"pyperf/tests/test_metadata.py::CpuFunctionsTests::test_intel_cpu_frequencies",
"pyperf/tests/test_metadata.py::CpuFunctionsTests::test_power8_cpu_frequencies",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_check_stable",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_check_unstable",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_collect_metadata",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_command",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_command_track_memory",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_cli",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_cli_min_speed",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_cli_tags",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_md_table",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_not_significant",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_not_significant_verbose",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_rest_table",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_same",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_compare_to_table_not_significant",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_dump",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_dump_quiet",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_dump_raw",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_hist",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_hook",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_metadata",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_show",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_show_common_metadata",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_slowest",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_stats",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_tracemalloc",
"pyperf/tests/test_perf_cli.py::TestPerfCLI::test_track_memory",
"pyperf/tests/test_perf_cli.py::TestConvert::test_convert",
"pyperf/tests/test_perf_cli.py::TestConvert::test_filter_benchmarks",
"pyperf/tests/test_perf_cli.py::TestConvert::test_filter_runs",
"pyperf/tests/test_perf_cli.py::TestConvert::test_indent",
"pyperf/tests/test_perf_cli.py::TestConvert::test_remove_warmups",
"pyperf/tests/test_perf_cli.py::TestConvert::test_stdout",
"pyperf/tests/test_utils.py::TestClocks::test_perf_counter",
"pyperf/tests/test_utils.py::TestStatistics::test_geometric_mean",
"pyperf/tests/test_utils.py::TestStatistics::test_is_significant",
"pyperf/tests/test_utils.py::TestStatistics::test_is_significant_FIXME",
"pyperf/tests/test_utils.py::TestStatistics::test_median_abs_dev",
"pyperf/tests/test_utils.py::TestStatistics::test_percentile",
"pyperf/tests/test_utils.py::TestUtils::test_format_number",
"pyperf/tests/test_utils.py::TestUtils::test_format_seconds",
"pyperf/tests/test_utils.py::TestUtils::test_format_timedelta",
"pyperf/tests/test_utils.py::TestUtils::test_get_python_names",
"pyperf/tests/test_utils.py::TestUtils::test_timedelta_stdev",
"pyperf/tests/test_utils.py::CPUToolsTests::test_format_cpu_list",
"pyperf/tests/test_utils.py::CPUToolsTests::test_format_cpu_mask",
"pyperf/tests/test_utils.py::CPUToolsTests::test_format_cpus_as_mask",
"pyperf/tests/test_utils.py::CPUToolsTests::test_get_isolated_cpus",
"pyperf/tests/test_utils.py::CPUToolsTests::test_parse_cpu_list",
"pyperf/tests/test_utils.py::CPUToolsTests::test_parse_cpu_mask"
] | [] | MIT License | 19,257 | 214 | [
"pyperf/_formatter.py"
] |
|
kevinzakka__mink-19 | 8f99665834553175bb5e7dfc973a0260ceff695e | 2024-08-09 21:30:39 | 8f99665834553175bb5e7dfc973a0260ceff695e | diff --git a/mink/tasks/posture_task.py b/mink/tasks/posture_task.py
index fef3074..3a8da48 100644
--- a/mink/tasks/posture_task.py
+++ b/mink/tasks/posture_task.py
@@ -55,8 +55,8 @@ class PostureTask(Task):
cost = np.atleast_1d(cost)
if cost.ndim != 1 or cost.shape[0] not in (1, self.k):
raise TaskDefinitionError(
- f"{self.__class__.__name__} cost should be a vector of shape 1"
- "(aka identical cost for all dofs) or ({self.k},) but got {cost.shape}"
+ f"{self.__class__.__name__} cost must be a vector of shape (1,) "
+ f"(aka identical cost for all dofs) or ({self.k},). Got {cost.shape}"
)
if not np.all(cost >= 0.0):
raise TaskDefinitionError(f"{self.__class__.__name__} cost should be >= 0")
| Allow posture task to take in a cost array.
Example use case: assigning different costs to an arm/hand combination. | kevinzakka/mink | diff --git a/tests/test_posture_task.py b/tests/test_posture_task.py
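A brief sketch of the shape rule the request implies (the helper below is hypothetical, not mink API): the cost may be a scalar or length-1 value applied to every DoF, or a full per-DoF vector, e.g. one weight for the arm joints and another for the hand joints.
```
import numpy as np

def validate_posture_cost(cost, nv: int) -> np.ndarray:
    # Hypothetical helper mirroring PostureTask's check: accepted shapes are
    # (1,) (identical cost for all DoFs) or (nv,) (one cost per DoF).
    cost = np.atleast_1d(np.asarray(cost, dtype=float))
    if cost.ndim != 1 or cost.shape[0] not in (1, nv):
        raise ValueError(f"cost must have shape (1,) or ({nv},), got {cost.shape}")
    if not np.all(cost >= 0.0):
        raise ValueError("cost must be >= 0")
    return np.broadcast_to(cost, (nv,))

print(validate_posture_cost(5.0, 4))                    # [5. 5. 5. 5.]
print(validate_posture_cost([1.0, 1.0, 0.1, 0.1], 4))   # arm DoFs vs hand DoFs
```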
index 3428691..59b2fd5 100644
--- a/tests/test_posture_task.py
+++ b/tests/test_posture_task.py
@@ -28,6 +28,24 @@ class TestPostureTask(absltest.TestCase):
PostureTask(model=self.model, cost=(-1))
self.assertEqual(str(cm.exception), "PostureTask cost should be >= 0")
+ def test_cost_correctly_broadcast(self):
+ task = PostureTask(model=self.model, cost=5.0)
+ np.testing.assert_array_equal(task.cost, np.ones((self.model.nv,)) * 5.0)
+ task = PostureTask(model=self.model, cost=[5.0])
+ np.testing.assert_array_equal(task.cost, np.ones((self.model.nv,)) * 5.0)
+ cost = np.random.random(size=(self.model.nv,))
+ task = PostureTask(model=self.model, cost=cost)
+ np.testing.assert_array_equal(task.cost, cost)
+
+ def test_cost_invalid_shape(self):
+ with self.assertRaises(TaskDefinitionError) as cm:
+ PostureTask(model=self.model, cost=(0.5, 2.0))
+ expected_error_message = (
+ "PostureTask cost must be a vector of shape (1,) (aka identical cost for "
+ f"all dofs) or ({self.model.nv},). Got (2,)"
+ )
+ self.assertEqual(str(cm.exception), expected_error_message)
+
def test_task_raises_error_if_target_is_invalid(self):
task = PostureTask(model=self.model, cost=1.0)
with self.assertRaises(InvalidTarget) as cm:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | absl-py==2.2.1
black==25.1.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
etils==1.5.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
fsspec==2025.3.1
gitdb==4.0.12
GitPython==3.1.44
glfw==2.8.0
idna==3.10
importlib_resources==6.5.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/kevinzakka/mink.git@8f99665834553175bb5e7dfc973a0260ceff695e#egg=mink
mujoco==3.3.0
mypy==1.15.0
mypy-extensions==1.0.0
numpy==1.26.4
packaging @ file:///croot/packaging_1734472117206/work
pathspec==0.12.1
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
PyOpenGL==3.1.9
pytest @ file:///croot/pytest_1738938843180/work
qpsolvers==4.5.0
quadprog==0.1.13
requests==2.32.3
robot_descriptions==1.15.0
ruff==0.11.2
scipy==1.13.1
smmap==5.0.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tqdm==4.67.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: mink
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- absl-py==2.2.1
- black==25.1.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- etils==1.5.2
- fsspec==2025.3.1
- gitdb==4.0.12
- gitpython==3.1.44
- glfw==2.8.0
- idna==3.10
- importlib-resources==6.5.2
- mink==0.0.2
- mujoco==3.3.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- numpy==1.26.4
- pathspec==0.12.1
- platformdirs==4.3.7
- pyopengl==3.1.9
- qpsolvers==4.5.0
- quadprog==0.1.13
- requests==2.32.3
- robot-descriptions==1.15.0
- ruff==0.11.2
- scipy==1.13.1
- smmap==5.0.2
- tqdm==4.67.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/mink
| [
"tests/test_posture_task.py::TestPostureTask::test_cost_invalid_shape"
] | [] | [
"tests/test_posture_task.py::TestPostureTask::test_cost_correctly_broadcast",
"tests/test_posture_task.py::TestPostureTask::test_error_without_target",
"tests/test_posture_task.py::TestPostureTask::test_jacobian_without_target",
"tests/test_posture_task.py::TestPostureTask::test_set_target_from_configuration",
"tests/test_posture_task.py::TestPostureTask::test_target_is_a_copy",
"tests/test_posture_task.py::TestPostureTask::test_task_raises_error_if_cost_negative",
"tests/test_posture_task.py::TestPostureTask::test_task_raises_error_if_target_is_invalid",
"tests/test_posture_task.py::TestPostureTask::test_unit_cost_qp_objective",
"tests/test_posture_task.py::TestPostureTask::test_zero_cost_same_as_disabling_task",
"tests/test_posture_task.py::TestPostureTask::test_zero_error_when_target_is_current_configuration"
] | [] | Apache License 2.0 | 19,261 | 242 | [
"mink/tasks/posture_task.py"
] |
|
feder-observatory__stellarphot-423 | 0a08565dc0929a181cf62ca80ef8b3157705985f | 2024-08-10 14:32:03 | adc4858fef2a9b3f828903752b8f846da16e81b0 | diff --git a/stellarphot/differential_photometry/aij_rel_fluxes.py b/stellarphot/differential_photometry/aij_rel_fluxes.py
index 730479d..cf6a37c 100644
--- a/stellarphot/differential_photometry/aij_rel_fluxes.py
+++ b/stellarphot/differential_photometry/aij_rel_fluxes.py
@@ -1,7 +1,7 @@
import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord
-from astropy.table import Table
+from astropy.table import QTable, Table
__all__ = ["add_in_quadrature", "calc_aij_relative_flux"]
@@ -125,9 +125,11 @@ def calc_aij_relative_flux(
# stars.
comp_fluxes = star_data["date-obs", counts_column_name, error_column_name][good]
- # print(np.isnan(comp_fluxes[flux_column_name]).sum(),
- # np.isnan(comp_fluxes[error_column_name]).sum())
- # print(star_data[good][flux_column_name][np.isnan(comp_fluxes[flux_column_name])])
+ # Convert comp_fluxes to a regular Table, not a QTable, to work around
+ # https://github.com/astropy/astropy/issues/10944
+ # in which it was reported that QTable columns with units cannot be aggregated.
+
+ comp_fluxes = Table(comp_fluxes)
# Check whether any of the columns are masked, but with no masked values,
# and convert to regular column...eventually
@@ -138,7 +140,7 @@ def calc_aij_relative_flux(
comp_errors = comp_fluxes.groups.aggregate(add_in_quadrature)[error_column_name]
comp_total_vector = np.ones_like(star_data[counts_column_name])
- comp_error_vector = np.ones_like(star_data[counts_column_name])
+ comp_error_vector = np.ones_like(star_data[error_column_name])
if len(set(comp_num_stars)) > 1:
raise RuntimeError("Different number of stars in comparison sets")
@@ -147,17 +149,22 @@ def calc_aij_relative_flux(
# Have to remove the flux of the star if the star is a comparison
# star.
- is_comp = np.zeros_like(star_data[counts_column_name])
+ # Use the .value below so that we can set the array to 1 and multiply
+ # by it without affecting units of the result.
+ is_comp = np.zeros_like(star_data[counts_column_name]).value
is_comp[good] = 1
flux_offset = -star_data[counts_column_name] * is_comp
+ # Convert comp_fluxes back to a QTable and redo groups
+ comp_fluxes = QTable(comp_fluxes)
+ comp_fluxes = comp_fluxes.group_by("date-obs")
# This seems a little hacky; there must be a better way
for date_obs, comp_total, comp_error in zip(
comp_fluxes.groups.keys, comp_totals, comp_errors, strict=True
):
this_time = star_data["date-obs"] == date_obs[0]
comp_total_vector[this_time] *= comp_total
- comp_error_vector[this_time] = comp_error
+ comp_error_vector[this_time] = comp_error * comp_fluxes[error_column_name].unit
relative_flux = star_data[counts_column_name] / (comp_total_vector + flux_offset)
relative_flux = relative_flux.flatten()
| Calculation of AIJ relative fluxes fails
A consequence of #421 is that the calculation of `aij_relative_flux` fails, because it relies on aggregation to calculate the relative flux. | feder-observatory/stellarphot
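A hedged illustration of the underlying failure (column names and values here are made up for the example): grouped QTable columns that carry units cannot be aggregated (astropy issue #10944), so the workaround is to round-trip through a plain Table before aggregating, which is what the fix does.
```
import astropy.units as u
import numpy as np
from astropy.table import QTable, Table

comp_fluxes = QTable({
    "date-obs": ["t1", "t1", "t2", "t2"],
    "aperture_net_cnts": [1.0, 2.0, 3.0, 4.0] * u.adu,
})

# Aggregating QTable groups directly can fail because the column is a Quantity,
# so convert to a plain Table first, aggregate, then convert back.
plain = Table(comp_fluxes).group_by("date-obs")
comp_totals = plain.groups.aggregate(np.sum)["aperture_net_cnts"]
print(comp_totals)  # per-date sums: 3.0 and 7.0 (adu)

comp_fluxes = QTable(plain)  # back to a QTable; unit info is preserved
```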
index 6afd173..fb8dd39 100644
--- a/stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py
+++ b/stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py
@@ -29,8 +29,8 @@ def _raw_photometry_table():
# and four stars
star_ra = 250.0 * u.degree + np.arange(n_stars) * 10 * u.arcmin
star_dec = np.array([45.0] * n_stars) * u.degree
- fluxes = np.array([10000.0, 20000, 30000, 40000])
- errors = np.sqrt(fluxes) + 50
+ fluxes = np.array([10000.0, 20000, 30000, 40000]) * u.adu
+ errors = (np.sqrt(fluxes.value) + 50) * u.electron
star_ids = np.arange(1, 5, dtype="int")
# Stars 2, 3 and 4 will be the comparison stars
@@ -65,10 +65,21 @@ def _raw_photometry_table():
"noise_electrons",
"star_id",
],
+ units=[
+ None,
+ u.degree,
+ u.degree,
+ u.adu,
+ u.electron,
+ None,
+ ],
)
- _ = PhotometryData(raw_table)
- return expected_flux_ratios, expected_flux_error, raw_table, raw_table[1:4]
+ photom = PhotometryData(raw_table)
+ # MAKE SURE to return photom, not raw_table, below to trigger the bug
+ # https://github.com/feder-observatory/stellarphot/issues/421
+ # in which, it turns out, QTable columns with units cannot be aggregated.
+ return expected_flux_ratios, expected_flux_error, photom, photom[1:4]
@pytest.mark.parametrize("comp_ra_dec_have_units", [True, False])
@@ -77,6 +88,8 @@ def _raw_photometry_table():
def test_relative_flux_calculation(
in_place, star_ra_dec_have_units, comp_ra_dec_have_units
):
+ # In addition to checking the flux calculation values, this is also a regression
+ # test for #421.
expected_flux, expected_error, input_table, comp_star = _raw_photometry_table()
# Try doing it all at once
@@ -95,7 +108,7 @@ def test_relative_flux_calculation(
output_table = calc_aij_relative_flux(input_table, comp_star, in_place=in_place)
output_flux = output_table["relative_flux"]
output_error = output_table["relative_flux_error"]
- print(all_expected_flux - output_flux)
+
np.testing.assert_allclose(output_flux, all_expected_flux)
np.testing.assert_allclose(output_error, all_expected_error)
if in_place:
@@ -122,8 +135,7 @@ def test_bad_comp_star(bad_thing):
ra=last_one["ra"][0], dec=last_one["dec"][0], unit=u.degree
)
coord_bad_ra = coord_inp.ra + 3 * u.arcsecond
- print(len(last_one), coord_inp)
- input_table["ra"][-1] = coord_bad_ra.degree
+ input_table["ra"][-1] = coord_bad_ra
elif bad_thing == "NaN":
input_table["aperture_net_cnts"][-1] = np.nan
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aggdraw==1.3.19
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
annotated-types==0.7.0
ansicolors==1.1.8
anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asciitree==0.3.3
astropy==6.1.7
astropy-iers-data==0.2025.3.31.0.36.18
astropy_healpix==1.1.2
astroquery==0.4.10
astroscrappy==1.2.0
astrowidgets==0.3.0
asttokens==3.0.0
async-lru==2.0.5
async-timeout==5.0.1
attrs==25.3.0
babel==2.17.0
backports.tarfile==1.2.0
beautifulsoup4==4.13.3
black==25.1.0
bleach==6.2.0
Bottleneck==1.4.2
bqplot==0.12.44
bracex==2.5.post1
cachetools==5.5.2
camel-converter==4.0.1
ccdproc==2.4.3
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==3.1.1
colorama==0.4.6
comm==0.2.2
contourpy==1.3.1
coverage==7.8.0
cryptography==44.0.2
cycler==0.12.1
dask==2025.3.0
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
entrypoints==0.4
et_xmlfile==2.0.0
exceptiongroup==1.2.2
executing==2.2.0
fasteners==0.19
fastjsonschema==2.21.1
filelock==3.18.0
fonttools==4.56.0
fqdn==1.5.1
frozenlist==1.5.0
fsspec==2025.3.1
gast==0.4.0
ginga==5.2.0
h11==0.14.0
html5lib==1.1
httpcore==1.0.7
httpx==0.28.1
hypothesis==6.130.5
identify==2.6.9
idna==3.10
imageio==2.37.0
immutables==0.21
importlib_metadata==8.6.1
iniconfig==2.1.0
ipyautoui==0.7.24
ipydatagrid==1.4.0
ipyevents==2.0.2
ipyfilechooser==0.6.0
ipykernel==6.29.5
ipython==8.34.0
ipyvue==1.11.2
ipyvuetify==1.11.1
ipywidgets==8.1.5
isoduration==20.11.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.2
jeepney==0.9.0
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonref==1.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_app_launcher==0.3.2
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_proxy==4.4.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
keyring==25.6.0
kiwisolver==1.4.8
lazy_loader==0.4
locket==1.0.0
Markdown==3.7
MarkupSafe==3.0.2
matplotlib==3.10.1
matplotlib-inline==0.1.7
mistune==3.1.3
more-itertools==10.6.0
multidict==6.2.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.4.2
nodeenv==1.9.1
notebook_shim==0.2.4
numcodecs==0.13.1
numpy==2.2.4
openpyxl==3.1.5
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
papermill==2.6.0
parso==0.8.4
partd==1.4.2
pathspec==0.12.1
pexpect==4.9.0
photutils==2.0.2
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
propcache==0.3.1
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
puremagic==1.28
py2vega==0.6.1
pycparser==2.22
pydantic==2.11.1
pydantic-extra-types==2.10.3
pydantic-settings==2.8.1
pydantic_core==2.33.0
pyerfa==2.0.1.5
Pygments==2.19.1
pyparsing==3.2.3
pyproject-api==1.9.0
pytest==8.3.5
pytest-arraydiff==0.6.1
pytest-astropy==0.11.0
pytest-astropy-header==0.2.2
pytest-cov==6.0.0
pytest-doctestplus==1.4.0
pytest-filter-subpackage==0.2.0
pytest-mock==3.14.0
pytest-remotedata==0.4.1
python-dateutil==2.9.0.post0
python-dotenv==1.1.0
python-json-logger==3.3.0
pytz==2025.2
pyvo==1.6.1
PyYAML==6.0.2
pyzmq==26.3.0
QtPy==2.4.3
referencing==0.36.2
reproject==0.14.1
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
ruff==0.11.2
scikit-image==0.25.2
scipy==1.15.2
SecretStorage==3.3.3
Send2Trash==1.8.3
simpervisor==1.0.0
six==1.17.0
sniffio==1.3.1
sortedcontainers==2.4.0
soupsieve==2.6
stack-data==0.6.3
-e git+https://github.com/feder-observatory/stellarphot.git@0a08565dc0929a181cf62ca80ef8b3157705985f#egg=stellarphot
stringcase==1.2.0
tenacity==9.0.0
terminado==0.18.1
tifffile==2025.3.30
tinycss2==1.4.0
tomli==2.2.1
toolz==1.0.0
tornado==6.4.2
tox==4.25.0
tqdm==4.67.1
traitlets==5.14.3
traittypes==0.2.1
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
virtualenv==20.29.3
wcmatch==10.0
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
yarl==1.18.3
zarr==2.18.3
zipp==3.21.0
| name: stellarphot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aggdraw==1.3.19
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- annotated-types==0.7.0
- ansicolors==1.1.8
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- asciitree==0.3.3
- astropy==6.1.7
- astropy-healpix==1.1.2
- astropy-iers-data==0.2025.3.31.0.36.18
- astroquery==0.4.10
- astroscrappy==1.2.0
- astrowidgets==0.3.0
- asttokens==3.0.0
- async-lru==2.0.5
- async-timeout==5.0.1
- attrs==25.3.0
- babel==2.17.0
- backports-tarfile==1.2.0
- beautifulsoup4==4.13.3
- black==25.1.0
- bleach==6.2.0
- bottleneck==1.4.2
- bqplot==0.12.44
- bracex==2.5.post1
- cachetools==5.5.2
- camel-converter==4.0.1
- ccdproc==2.4.3
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==3.1.1
- colorama==0.4.6
- comm==0.2.2
- contourpy==1.3.1
- coverage==7.8.0
- cryptography==44.0.2
- cycler==0.12.1
- dask==2025.3.0
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- entrypoints==0.4
- et-xmlfile==2.0.0
- exceptiongroup==1.2.2
- executing==2.2.0
- fasteners==0.19
- fastjsonschema==2.21.1
- filelock==3.18.0
- fonttools==4.56.0
- fqdn==1.5.1
- frozenlist==1.5.0
- fsspec==2025.3.1
- gast==0.4.0
- ginga==5.2.0
- h11==0.14.0
- html5lib==1.1
- httpcore==1.0.7
- httpx==0.28.1
- hypothesis==6.130.5
- identify==2.6.9
- idna==3.10
- imageio==2.37.0
- immutables==0.21
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipyautoui==0.7.24
- ipydatagrid==1.4.0
- ipyevents==2.0.2
- ipyfilechooser==0.6.0
- ipykernel==6.29.5
- ipython==8.34.0
- ipyvue==1.11.2
- ipyvuetify==1.11.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jedi==0.19.2
- jeepney==0.9.0
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonref==1.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-app-launcher==0.3.2
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-proxy==4.4.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- keyring==25.6.0
- kiwisolver==1.4.8
- lazy-loader==0.4
- locket==1.0.0
- markdown==3.7
- markupsafe==3.0.2
- matplotlib==3.10.1
- matplotlib-inline==0.1.7
- mistune==3.1.3
- more-itertools==10.6.0
- multidict==6.2.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- networkx==3.4.2
- nodeenv==1.9.1
- notebook-shim==0.2.4
- numcodecs==0.13.1
- numpy==2.2.4
- openpyxl==3.1.5
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- papermill==2.6.0
- parso==0.8.4
- partd==1.4.2
- pathspec==0.12.1
- pexpect==4.9.0
- photutils==2.0.2
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- propcache==0.3.1
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- puremagic==1.28
- py2vega==0.6.1
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pydantic-extra-types==2.10.3
- pydantic-settings==2.8.1
- pyerfa==2.0.1.5
- pygments==2.19.1
- pyparsing==3.2.3
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-arraydiff==0.6.1
- pytest-astropy==0.11.0
- pytest-astropy-header==0.2.2
- pytest-cov==6.0.0
- pytest-doctestplus==1.4.0
- pytest-filter-subpackage==0.2.0
- pytest-mock==3.14.0
- pytest-remotedata==0.4.1
- python-dateutil==2.9.0.post0
- python-dotenv==1.1.0
- python-json-logger==3.3.0
- pytz==2025.2
- pyvo==1.6.1
- pyyaml==6.0.2
- pyzmq==26.3.0
- qtpy==2.4.3
- referencing==0.36.2
- reproject==0.14.1
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- ruff==0.11.2
- scikit-image==0.25.2
- scipy==1.15.2
- secretstorage==3.3.3
- send2trash==1.8.3
- simpervisor==1.0.0
- six==1.17.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- soupsieve==2.6
- stack-data==0.6.3
- stellarphot==2.0.0a2.dev13+g0a08565
- stringcase==1.2.0
- tenacity==9.0.0
- terminado==0.18.1
- tifffile==2025.3.30
- tinycss2==1.4.0
- tomli==2.2.1
- toolz==1.0.0
- tornado==6.4.2
- tox==4.25.0
- tqdm==4.67.1
- traitlets==5.14.3
- traittypes==0.2.1
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcmatch==10.0
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- yarl==1.18.3
- zarr==2.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/stellarphot
| [
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[True-True-True]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[True-True-False]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[True-False-True]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[True-False-False]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[False-True-True]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[False-True-False]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[False-False-True]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_relative_flux_calculation[False-False-False]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_bad_comp_star[RA]",
"stellarphot/differential_photometry/tests/test_aij_rel_fluxes.py::test_bad_comp_star[NaN]"
] | [] | [] | [] | BSD 3-Clause "New" or "Revised" License | 19,263 | 806 | [
"stellarphot/differential_photometry/aij_rel_fluxes.py"
] |
|
danielgtaylor__python-betterproto-594 | 5fdd0bb24fc43091599a2284a948fc06e1a55edb | 2024-08-11 15:14:36 | ed7eefac6fbb563e462d7ab1e0611627d66c4f76 | diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py
index 050f414..cbafc83 100644
--- a/src/betterproto/__init__.py
+++ b/src/betterproto/__init__.py
@@ -62,6 +62,13 @@ if TYPE_CHECKING:
SupportsWrite,
)
+if sys.version_info >= (3, 10):
+ from types import UnionType as _types_UnionType
+else:
+
+ class _types_UnionType:
+ ...
+
# Proto 3 data types
TYPE_ENUM = "enum"
@@ -148,6 +155,7 @@ def datetime_default_gen() -> datetime:
DATETIME_ZERO = datetime_default_gen()
+
# Special protobuf json doubles
INFINITY = "Infinity"
NEG_INFINITY = "-Infinity"
@@ -1166,30 +1174,29 @@ class Message(ABC):
def _get_field_default_gen(cls, field: dataclasses.Field) -> Any:
t = cls._type_hint(field.name)
- if hasattr(t, "__origin__"):
- if t.__origin__ is dict:
- # This is some kind of map (dict in Python).
- return dict
- elif t.__origin__ is list:
- # This is some kind of list (repeated) field.
- return list
- elif t.__origin__ is Union and t.__args__[1] is type(None):
+ is_310_union = isinstance(t, _types_UnionType)
+ if hasattr(t, "__origin__") or is_310_union:
+ if is_310_union or t.__origin__ is Union:
# This is an optional field (either wrapped, or using proto3
# field presence). For setting the default we really don't care
# what kind of field it is.
return type(None)
- else:
- return t
- elif issubclass(t, Enum):
+ if t.__origin__ is list:
+ # This is some kind of list (repeated) field.
+ return list
+ if t.__origin__ is dict:
+ # This is some kind of map (dict in Python).
+ return dict
+ return t
+ if issubclass(t, Enum):
# Enums always default to zero.
return t.try_value
- elif t is datetime:
+ if t is datetime:
# Offsets are relative to 1970-01-01T00:00:00Z
return datetime_default_gen
- else:
- # This is either a primitive scalar or another message type. Calling
- # it should result in its zero value.
- return t
+ # This is either a primitive scalar or another message type. Calling
+ # it should result in its zero value.
+ return t
def _postprocess_single(
self, wire_type: int, meta: FieldMetadata, field_name: str, value: Any
diff --git a/src/betterproto/compile/importing.py b/src/betterproto/compile/importing.py
index 4221122..b216dfc 100644
--- a/src/betterproto/compile/importing.py
+++ b/src/betterproto/compile/importing.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
import os
import re
from typing import (
+ TYPE_CHECKING,
Dict,
List,
Set,
@@ -13,6 +16,9 @@ from ..lib.google import protobuf as google_protobuf
from .naming import pythonize_class_name
+if TYPE_CHECKING:
+ from ..plugin.typing_compiler import TypingCompiler
+
WRAPPER_TYPES: Dict[str, Type] = {
".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
".google.protobuf.FloatValue": google_protobuf.FloatValue,
@@ -47,7 +53,7 @@ def get_type_reference(
package: str,
imports: set,
source_type: str,
- typing_compiler: "TypingCompiler",
+ typing_compiler: TypingCompiler,
unwrap: bool = True,
pydantic: bool = False,
) -> str:
diff --git a/src/betterproto/plugin/typing_compiler.py b/src/betterproto/plugin/typing_compiler.py
index 937c7bf..eca3691 100644
--- a/src/betterproto/plugin/typing_compiler.py
+++ b/src/betterproto/plugin/typing_compiler.py
@@ -139,29 +139,35 @@ class TypingImportTypingCompiler(TypingCompiler):
class NoTyping310TypingCompiler(TypingCompiler):
_imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))
+ @staticmethod
+ def _fmt(type: str) -> str: # for now this is necessary till 3.14
+ if type.startswith('"'):
+ return type[1:-1]
+ return type
+
def optional(self, type: str) -> str:
- return f"{type} | None"
+ return f'"{self._fmt(type)} | None"'
def list(self, type: str) -> str:
- return f"list[{type}]"
+ return f'"list[{self._fmt(type)}]"'
def dict(self, key: str, value: str) -> str:
- return f"dict[{key}, {value}]"
+ return f'"dict[{key}, {self._fmt(value)}]"'
def union(self, *types: str) -> str:
- return " | ".join(types)
+ return f'"{" | ".join(map(self._fmt, types))}"'
def iterable(self, type: str) -> str:
- self._imports["typing"].add("Iterable")
- return f"Iterable[{type}]"
+ self._imports["collections.abc"].add("Iterable")
+ return f'"Iterable[{type}]"'
def async_iterable(self, type: str) -> str:
- self._imports["typing"].add("AsyncIterable")
- return f"AsyncIterable[{type}]"
+ self._imports["collections.abc"].add("AsyncIterable")
+ return f'"AsyncIterable[{type}]"'
def async_iterator(self, type: str) -> str:
- self._imports["typing"].add("AsyncIterator")
- return f"AsyncIterator[{type}]"
+ self._imports["collections.abc"].add("AsyncIterator")
+ return f'"AsyncIterator[{type}]"'
def imports(self) -> Dict[str, Optional[Set[str]]]:
return {k: v if v else None for k, v in self._imports.items()}
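The `_types_UnionType` shim and the reordered checks above exist because PEP 604 optionals (`X | None`) are instances of `types.UnionType` on Python 3.10+ rather than classes, so the old `issubclass(t, Enum)` branch raises the `TypeError` shown in the issue below. A minimal illustrative sketch (Python 3.10+):

```python
import sys
from enum import Enum

if sys.version_info >= (3, 10):
    from types import UnionType

    t = int | None                    # what evaluating an `X | None` annotation yields
    print(isinstance(t, UnionType))   # True -> treat the field as optional
    # issubclass(t, Enum)             # would raise: issubclass() arg 1 must be a class
```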
| pydantic option broken
### Summary
Instantiating messages generated with the pydantic option throws exceptions.
### Protobuf schema
```proto
syntax = "proto3";
package prototest.v1;
message One {
uint32 message = 1;
}
message Two {
string message = 1;
}
message Message {
oneof message {
One one = 1;
Two two = 2;
}
}
```
### Reproduction Steps
- install main branch betterproto
- generate code with the `pydantic_dataclasses` option enabled
- import your generated code
- instantiate a message object
### Expected Results
No exception
### Actual Results
```python
>>> Message()
Traceback (most recent call last):
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 901, in _betterproto
return cls._betterproto_meta
^^^^^^^^^^^^^^^^^^^^^
AttributeError: type object 'Message' has no attribute '_betterproto_meta'. Did you mean: '_betterproto'?
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py", line 141, in __init__
s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 747, in __post_init__
for field_name, meta in self._betterproto.meta_by_field_name.items():
^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 838, in __getattribute__
value = super().__getattribute__(name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/utils.py", line 56, in __get__
return self.__func__(type)
^^^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 903, in _betterproto
cls._betterproto_meta = meta = ProtoClassMetadata(cls)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 683, in __init__
self.default_gen = self._get_default_gen(cls, fields)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 690, in _get_default_gen
return {field.name: cls._get_field_default_gen(field) for field in fields}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages/betterproto/__init__.py", line 1183, in _get_field_default_gen
elif issubclass(t, Enum):
^^^^^^^^^^^^^^^^^^^
TypeError: issubclass() arg 1 must be a class
```
### System Information
pydantic 2.8.2
libprotoc 27.2
Python 3.12.4
Name: betterproto
Version: 2.0.0b6
Summary: A better Protobuf / gRPC generator & library
Home-page: https://github.com/danielgtaylor/python-betterproto
Author: Daniel G. Taylor
Author-email: [email protected]
License: MIT
Location: /home/bb/.cache/pypoetry/virtualenvs/testproto-ZoH_3K0Y-py3.12/lib/python3.12/site-packages
Requires: grpclib, python-dateutil, typing-extensions
Required-by: testproto
### Checklist
- [X] I have searched the issues for duplicates.
- [X] I have shown the entire traceback, if possible.
- [X] I have verified this issue occurs on the latest prerelease of betterproto which can be installed using `pip install -U --pre betterproto`, if possible. | danielgtaylor/python-betterproto | diff --git a/tests/test_typing_compiler.py b/tests/test_typing_compiler.py
index 3d1083c..ee17449 100644
--- a/tests/test_typing_compiler.py
+++ b/tests/test_typing_compiler.py
@@ -62,19 +62,17 @@ def test_typing_import_typing_compiler():
def test_no_typing_311_typing_compiler():
compiler = NoTyping310TypingCompiler()
assert compiler.imports() == {}
- assert compiler.optional("str") == "str | None"
+ assert compiler.optional("str") == '"str | None"'
assert compiler.imports() == {}
- assert compiler.list("str") == "list[str]"
+ assert compiler.list("str") == '"list[str]"'
assert compiler.imports() == {}
- assert compiler.dict("str", "int") == "dict[str, int]"
+ assert compiler.dict("str", "int") == '"dict[str, int]"'
assert compiler.imports() == {}
- assert compiler.union("str", "int") == "str | int"
+ assert compiler.union("str", "int") == '"str | int"'
assert compiler.imports() == {}
- assert compiler.iterable("str") == "Iterable[str]"
- assert compiler.imports() == {"typing": {"Iterable"}}
- assert compiler.async_iterable("str") == "AsyncIterable[str]"
- assert compiler.imports() == {"typing": {"Iterable", "AsyncIterable"}}
- assert compiler.async_iterator("str") == "AsyncIterator[str]"
+ assert compiler.iterable("str") == '"Iterable[str]"'
+ assert compiler.async_iterable("str") == '"AsyncIterable[str]"'
+ assert compiler.async_iterator("str") == '"AsyncIterator[str]"'
assert compiler.imports() == {
- "typing": {"Iterable", "AsyncIterable", "AsyncIterator"}
+ "collections.abc": {"Iterable", "AsyncIterable", "AsyncIterator"}
}
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | .2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[compiler]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-asyncio",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/danielgtaylor/python-betterproto.git@5fdd0bb24fc43091599a2284a948fc06e1a55edb#egg=betterproto
black==25.1.0
click==8.1.8
coverage==7.8.0
exceptiongroup==1.2.2
grpclib==0.4.7
h2==4.2.0
hpack==4.1.0
hyperframe==6.1.0
iniconfig==2.1.0
isort==5.13.2
Jinja2==3.1.6
MarkupSafe==3.0.2
multidict==6.2.0
mypy-extensions==1.0.0
packaging==24.2
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
| name: python-betterproto
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- betterproto==2.0.0b6
- black==25.1.0
- click==8.1.8
- coverage==7.8.0
- exceptiongroup==1.2.2
- grpclib==0.4.7
- h2==4.2.0
- hpack==4.1.0
- hyperframe==6.1.0
- iniconfig==2.1.0
- isort==5.13.2
- jinja2==3.1.6
- markupsafe==3.0.2
- multidict==6.2.0
- mypy-extensions==1.0.0
- packaging==24.2
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/python-betterproto
| [
"tests/test_typing_compiler.py::test_no_typing_311_typing_compiler"
] | [] | [
"tests/test_typing_compiler.py::test_direct_import_typing_compiler",
"tests/test_typing_compiler.py::test_typing_import_typing_compiler"
] | [] | MIT License | 19,267 | 1,544 | [
"src/betterproto/__init__.py",
"src/betterproto/compile/importing.py",
"src/betterproto/plugin/typing_compiler.py"
] |
|
getsentry__sentry-python-3438 | 275c63efe9959dac68cc6ab3019545d74ea85ea8 | 2024-08-12 13:48:29 | 275c63efe9959dac68cc6ab3019545d74ea85ea8 | codecov[bot]: **Test Failures Detected**: Due to failing tests, we cannot provide coverage reports at this time.
### :x: Failed Test Results:
Completed 1770 tests with **`5 failed`**, 1574 passed and 191 skipped.
<details><summary>View the full list of failed tests</summary>
## py3.11-common
- **Class name:** tests.test_exceptiongroup<br>**Test name:** test_exception_chain_cause<br><br>
<pre>tests/test_exceptiongroup.py:206: in test_exception_chain_cause<br> (event, _) = event_from_exception(<br>sentry_sdk/utils.py:1050: in event_from_exception<br> "values": exceptions_from_error_tuple(<br>sentry_sdk/utils.py:918: in exceptions_from_error_tuple<br> single_exception_from_error_tuple(<br>sentry_sdk/utils.py:734: in single_exception_from_error_tuple<br> custom_repr = client_options["custom_repr"]<br>E KeyError: 'custom_repr'</pre>
- **Class name:** tests.test_exceptiongroup<br>**Test name:** test_exception_chain_context<br><br>
<pre>tests/test_exceptiongroup.py:246: in test_exception_chain_context<br> (event, _) = event_from_exception(<br>sentry_sdk/utils.py:1050: in event_from_exception<br> "values": exceptions_from_error_tuple(<br>sentry_sdk/utils.py:918: in exceptions_from_error_tuple<br> single_exception_from_error_tuple(<br>sentry_sdk/utils.py:734: in single_exception_from_error_tuple<br> custom_repr = client_options["custom_repr"]<br>E KeyError: 'custom_repr'</pre>
- **Class name:** tests.test_exceptiongroup<br>**Test name:** test_exceptiongroup<br><br>
<pre>tests/test_exceptiongroup.py:45: in test_exceptiongroup<br> (event, _) = event_from_exception(<br>sentry_sdk/utils.py:1050: in event_from_exception<br> "values": exceptions_from_error_tuple(<br>sentry_sdk/utils.py:904: in exceptions_from_error_tuple<br> (_, exceptions) = exceptions_from_error(<br>sentry_sdk/utils.py:814: in exceptions_from_error<br> parent = single_exception_from_error_tuple(<br>sentry_sdk/utils.py:734: in single_exception_from_error_tuple<br> custom_repr = client_options["custom_repr"]<br>E KeyError: 'custom_repr'</pre>
- **Class name:** tests.test_exceptiongroup<br>**Test name:** test_exceptiongroup_simple<br><br>
<pre>tests/test_exceptiongroup.py:161: in test_exceptiongroup_simple<br> (event, _) = event_from_exception(<br>sentry_sdk/utils.py:1050: in event_from_exception<br> "values": exceptions_from_error_tuple(<br>sentry_sdk/utils.py:904: in exceptions_from_error_tuple<br> (_, exceptions) = exceptions_from_error(<br>sentry_sdk/utils.py:814: in exceptions_from_error<br> parent = single_exception_from_error_tuple(<br>sentry_sdk/utils.py:734: in single_exception_from_error_tuple<br> custom_repr = client_options["custom_repr"]<br>E KeyError: 'custom_repr'</pre>
- **Class name:** tests.test_exceptiongroup<br>**Test name:** test_simple_exception<br><br>
<pre>tests/test_exceptiongroup.py:285: in test_simple_exception<br> (event, _) = event_from_exception(<br>sentry_sdk/utils.py:1050: in event_from_exception<br> "values": exceptions_from_error_tuple(<br>sentry_sdk/utils.py:918: in exceptions_from_error_tuple<br> single_exception_from_error_tuple(<br>sentry_sdk/utils.py:734: in single_exception_from_error_tuple<br> custom_repr = client_options["custom_repr"]<br>E KeyError: 'custom_repr'</pre>
</details>
sl0thentr0py: docs here
https://github.com/getsentry/sentry-docs/pull/11059
| diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index d22dd1c0..8a3cd715 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -531,6 +531,7 @@ class _Client(BaseClient):
cast("Dict[str, Any]", event),
max_request_body_size=self.options.get("max_request_body_size"),
max_value_length=self.options.get("max_value_length"),
+ custom_repr=self.options.get("custom_repr"),
),
)
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index b50a2843..ca805d3a 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -539,6 +539,7 @@ class ClientConstructor:
spotlight=None, # type: Optional[Union[bool, str]]
cert_file=None, # type: Optional[str]
key_file=None, # type: Optional[str]
+ custom_repr=None, # type: Optional[Callable[..., Optional[str]]]
):
# type: (...) -> None
pass
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index 010c1a96..7171885f 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -112,6 +112,7 @@ def serialize(event, **kwargs):
:param max_request_body_size: If set to "always", will never trim request bodies.
:param max_value_length: The max length to strip strings to, defaults to sentry_sdk.consts.DEFAULT_MAX_VALUE_LENGTH
:param is_vars: If we're serializing vars early, we want to repr() things that are JSON-serializable to make their type more apparent. For example, it's useful to see the difference between a unicode-string and a bytestring when viewing a stacktrace.
+ :param custom_repr: A custom repr function that runs before safe_repr on the object to be serialized. If it returns None or throws internally, we will fallback to safe_repr.
"""
memo = Memo()
@@ -123,6 +124,17 @@ def serialize(event, **kwargs):
) # type: bool
max_value_length = kwargs.pop("max_value_length", None) # type: Optional[int]
is_vars = kwargs.pop("is_vars", False)
+ custom_repr = kwargs.pop("custom_repr", None) # type: Callable[..., Optional[str]]
+
+ def _safe_repr_wrapper(value):
+ # type: (Any) -> str
+ try:
+ repr_value = None
+ if custom_repr is not None:
+ repr_value = custom_repr(value)
+ return repr_value or safe_repr(value)
+ except Exception:
+ return safe_repr(value)
def _annotate(**meta):
# type: (**Any) -> None
@@ -257,7 +269,7 @@ def serialize(event, **kwargs):
_annotate(rem=[["!limit", "x"]])
if is_databag:
return _flatten_annotated(
- strip_string(safe_repr(obj), max_length=max_value_length)
+ strip_string(_safe_repr_wrapper(obj), max_length=max_value_length)
)
return None
@@ -274,7 +286,7 @@ def serialize(event, **kwargs):
if should_repr_strings or (
isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
):
- return safe_repr(obj)
+ return _safe_repr_wrapper(obj)
else:
return obj
@@ -285,7 +297,7 @@ def serialize(event, **kwargs):
return (
str(format_timestamp(obj))
if not should_repr_strings
- else safe_repr(obj)
+ else _safe_repr_wrapper(obj)
)
elif isinstance(obj, Mapping):
@@ -345,13 +357,13 @@ def serialize(event, **kwargs):
return rv_list
if should_repr_strings:
- obj = safe_repr(obj)
+ obj = _safe_repr_wrapper(obj)
else:
if isinstance(obj, bytes) or isinstance(obj, bytearray):
obj = obj.decode("utf-8", "replace")
if not isinstance(obj, str):
- obj = safe_repr(obj)
+ obj = _safe_repr_wrapper(obj)
is_span_description = (
len(path) == 3 and path[0] == "spans" and path[-1] == "description"
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 8b718a1f..d731fa22 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -585,8 +585,9 @@ def serialize_frame(
include_local_variables=True,
include_source_context=True,
max_value_length=None,
+ custom_repr=None,
):
- # type: (FrameType, Optional[int], bool, bool, Optional[int]) -> Dict[str, Any]
+ # type: (FrameType, Optional[int], bool, bool, Optional[int], Optional[Callable[..., Optional[str]]]) -> Dict[str, Any]
f_code = getattr(frame, "f_code", None)
if not f_code:
abs_path = None
@@ -618,7 +619,9 @@ def serialize_frame(
if include_local_variables:
from sentry_sdk.serializer import serialize
- rv["vars"] = serialize(dict(frame.f_locals), is_vars=True)
+ rv["vars"] = serialize(
+ dict(frame.f_locals), is_vars=True, custom_repr=custom_repr
+ )
return rv
@@ -723,10 +726,12 @@ def single_exception_from_error_tuple(
include_local_variables = True
include_source_context = True
max_value_length = DEFAULT_MAX_VALUE_LENGTH # fallback
+ custom_repr = None
else:
include_local_variables = client_options["include_local_variables"]
include_source_context = client_options["include_source_context"]
max_value_length = client_options["max_value_length"]
+ custom_repr = client_options.get("custom_repr")
frames = [
serialize_frame(
@@ -735,6 +740,7 @@ def single_exception_from_error_tuple(
include_local_variables=include_local_variables,
include_source_context=include_source_context,
max_value_length=max_value_length,
+ custom_repr=custom_repr,
)
for tb in iter_stacks(tb)
]
| Expose custom safe_repr config
@sl0thentr0py Thanks, yes, I think we prefer option 2 - the option for a custom `repr` function to be tried in `safe_repr` (my current understanding of how everything works is that this will both solve our needs and reduce the likelihood of this bug creeping back in).
Thanks for giving us the choice 🙂
_Originally posted by @philipstarkey in https://github.com/getsentry/sentry-python/issues/3409#issuecomment-2277748660_
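Based on the option added in the patch above and the tests added below, usage looks roughly like this — the custom function is tried first, and the SDK falls back to `safe_repr` when it returns `None` or raises. The `Money` class here is a hypothetical example, not part of the SDK:

```python
import sentry_sdk


class Money:
    def __init__(self, amount):
        self.amount = amount


def money_repr(value):
    # Return a string to override the default repr, or None to fall back to safe_repr.
    if isinstance(value, Money):
        return f"<Money {value.amount}>"
    return None


sentry_sdk.init(custom_repr=money_repr)
```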
| getsentry/sentry-python | diff --git a/tests/test_client.py b/tests/test_client.py
index f6c2cec0..d56bab0b 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -944,6 +944,39 @@ def test_dict_changed_during_iteration(sentry_init, capture_events):
assert frame["vars"]["environ"] == {"a": "<This is me>"}
+def test_custom_repr_on_vars(sentry_init, capture_events):
+ class Foo:
+ pass
+
+ class Fail:
+ pass
+
+ def custom_repr(value):
+ if isinstance(value, Foo):
+ return "custom repr"
+ elif isinstance(value, Fail):
+ raise ValueError("oops")
+ else:
+ return None
+
+ sentry_init(custom_repr=custom_repr)
+ events = capture_events()
+
+ try:
+ my_vars = {"foo": Foo(), "fail": Fail(), "normal": 42}
+ 1 / 0
+ except ZeroDivisionError:
+ capture_exception()
+
+ (event,) = events
+ (exception,) = event["exception"]["values"]
+ (frame,) = exception["stacktrace"]["frames"]
+ my_vars = frame["vars"]["my_vars"]
+ assert my_vars["foo"] == "custom repr"
+ assert my_vars["normal"] == "42"
+ assert "Fail object" in my_vars["fail"]
+
+
@pytest.mark.parametrize(
"dsn",
[
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index a3ead112..2f158097 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -114,6 +114,31 @@ def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
assert len(m.mock_calls) == 0
+def test_custom_repr(extra_normalizer):
+ class Foo:
+ pass
+
+ def custom_repr(value):
+ if isinstance(value, Foo):
+ return "custom"
+ else:
+ return value
+
+ result = extra_normalizer({"foo": Foo(), "string": "abc"}, custom_repr=custom_repr)
+ assert result == {"foo": "custom", "string": "abc"}
+
+
+def test_custom_repr_graceful_fallback_to_safe_repr(extra_normalizer):
+ class Foo:
+ pass
+
+ def custom_repr(value):
+ raise ValueError("oops")
+
+ result = extra_normalizer({"foo": Foo()}, custom_repr=custom_repr)
+ assert "Foo object" in result["foo"]
+
+
def test_trim_databag_breadth(body_normalizer):
data = {
"key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 4
} | 2.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-devenv.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asttokens==3.0.0
attrs==25.3.0
black==25.1.0
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
Deprecated==1.2.18
distlib==0.3.9
dnspython==2.7.0
docopt==0.6.2
executing==2.2.0
filelock==3.18.0
flake8==5.0.4
flake8-bugbear==23.3.12
identify==2.6.9
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
loguru==0.7.3
MarkupSafe==3.0.2
mccabe==0.7.0
mockupdb==1.8.1
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
opentelemetry-api==1.31.1
opentelemetry-distro==0.52b1
opentelemetry-instrumentation==0.52b1
opentelemetry-sdk==1.31.1
opentelemetry-semantic-conventions==0.52b1
packaging==24.2
pathspec==0.12.1
pep8-naming==0.14.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
py==1.11.0
pycodestyle==2.9.1
pycparser==2.22
pyflakes==2.5.0
pymongo==4.11.3
pyrsistent==0.20.0
PySocks==1.7.1
pytest==6.2.5
pytest-asyncio==0.20.3
pytest-cov==6.0.0
pytest-forked==1.6.0
pytest-localserver==0.9.0.post0
pytest-watch==4.2.0
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.3
responses==0.25.7
rpds-py==0.24.0
-e git+https://github.com/getsentry/sentry-python.git@275c63efe9959dac68cc6ab3019545d74ea85ea8#egg=sentry_sdk
toml==0.10.2
tomli==2.2.1
types-certifi==2021.10.8.3
types-cffi==1.17.0.20250326
types-gevent==24.11.0.20250305
types-greenlet==3.1.0.20250318
types-protobuf==5.29.1.20250315
types-psutil==7.0.0.20250218
types-pyOpenSSL==24.1.0.20240722
types-redis==4.6.0.20241004
types-setuptools==78.1.0.20250329
types-WebOb==1.8.0.20250319
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
watchdog==6.0.0
Werkzeug==3.1.3
wrapt==1.17.2
zipp==3.21.0
| name: sentry-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asttokens==3.0.0
- attrs==25.3.0
- black==25.1.0
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- deprecated==1.2.18
- distlib==0.3.9
- dnspython==2.7.0
- docopt==0.6.2
- executing==2.2.0
- filelock==3.18.0
- flake8==5.0.4
- flake8-bugbear==23.3.12
- identify==2.6.9
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- loguru==0.7.3
- markupsafe==3.0.2
- mccabe==0.7.0
- mockupdb==1.8.1
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- opentelemetry-api==1.31.1
- opentelemetry-distro==0.52b1
- opentelemetry-instrumentation==0.52b1
- opentelemetry-sdk==1.31.1
- opentelemetry-semantic-conventions==0.52b1
- packaging==24.2
- pathspec==0.12.1
- pep8-naming==0.14.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- py==1.11.0
- pycodestyle==2.9.1
- pycparser==2.22
- pyflakes==2.5.0
- pymongo==4.11.3
- pyrsistent==0.20.0
- pysocks==1.7.1
- pytest==6.2.5
- pytest-asyncio==0.20.3
- pytest-cov==6.0.0
- pytest-forked==1.6.0
- pytest-localserver==0.9.0.post0
- pytest-watch==4.2.0
- pyyaml==6.0.2
- referencing==0.36.2
- requests==2.32.3
- responses==0.25.7
- rpds-py==0.24.0
- sentry-sdk==2.12.0
- toml==0.10.2
- tomli==2.2.1
- types-certifi==2021.10.8.3
- types-cffi==1.17.0.20250326
- types-gevent==24.11.0.20250305
- types-greenlet==3.1.0.20250318
- types-protobuf==5.29.1.20250315
- types-psutil==7.0.0.20250218
- types-pyopenssl==24.1.0.20240722
- types-redis==4.6.0.20241004
- types-setuptools==78.1.0.20250329
- types-webob==1.8.0.20250319
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- watchdog==6.0.0
- werkzeug==3.1.3
- wrapt==1.17.2
- zipp==3.21.0
prefix: /opt/conda/envs/sentry-python
| [
"tests/test_client.py::test_custom_repr_on_vars",
"tests/test_serializer.py::test_custom_repr",
"tests/test_serializer.py::test_custom_repr_graceful_fallback_to_safe_repr"
] | [] | [
"tests/test_client.py::test_transport_option",
"tests/test_client.py::test_proxy[testcase0]",
"tests/test_client.py::test_proxy[testcase1]",
"tests/test_client.py::test_proxy[testcase2]",
"tests/test_client.py::test_proxy[testcase3]",
"tests/test_client.py::test_proxy[testcase4]",
"tests/test_client.py::test_proxy[testcase5]",
"tests/test_client.py::test_proxy[testcase6]",
"tests/test_client.py::test_proxy[testcase7]",
"tests/test_client.py::test_proxy[testcase8]",
"tests/test_client.py::test_proxy[testcase9]",
"tests/test_client.py::test_proxy[testcase10]",
"tests/test_client.py::test_proxy[testcase11]",
"tests/test_client.py::test_proxy[testcase12]",
"tests/test_client.py::test_proxy[testcase13]",
"tests/test_client.py::test_proxy[testcase14]",
"tests/test_client.py::test_proxy[testcase15]",
"tests/test_client.py::test_proxy[testcase16]",
"tests/test_client.py::test_proxy[testcase17]",
"tests/test_client.py::test_proxy[testcase18]",
"tests/test_client.py::test_proxy[testcase19]",
"tests/test_client.py::test_proxy[testcase20]",
"tests/test_client.py::test_socks_proxy[testcase0]",
"tests/test_client.py::test_socks_proxy[testcase1]",
"tests/test_client.py::test_socks_proxy[testcase2]",
"tests/test_client.py::test_socks_proxy[testcase3]",
"tests/test_client.py::test_socks_proxy[testcase4]",
"tests/test_client.py::test_socks_proxy[testcase5]",
"tests/test_client.py::test_socks_proxy[testcase6]",
"tests/test_client.py::test_socks_proxy[testcase7]",
"tests/test_client.py::test_socks_proxy[testcase8]",
"tests/test_client.py::test_simple_transport",
"tests/test_client.py::test_ignore_errors",
"tests/test_client.py::test_include_local_variables_enabled",
"tests/test_client.py::test_include_local_variables_disabled",
"tests/test_client.py::test_include_source_context_enabled",
"tests/test_client.py::test_include_source_context_disabled",
"tests/test_client.py::test_function_names[integrations0]",
"tests/test_client.py::test_function_names[integrations1]",
"tests/test_client.py::test_attach_stacktrace_enabled",
"tests/test_client.py::test_attach_stacktrace_enabled_no_locals",
"tests/test_client.py::test_attach_stacktrace_in_app",
"tests/test_client.py::test_attach_stacktrace_disabled",
"tests/test_client.py::test_capture_event_works",
"tests/test_client.py::test_atexit[10]",
"tests/test_client.py::test_atexit[20]",
"tests/test_client.py::test_configure_scope_available",
"tests/test_client.py::test_client_debug_option_enabled",
"tests/test_client.py::test_client_debug_option_disabled[True]",
"tests/test_client.py::test_client_debug_option_disabled[False]",
"tests/test_client.py::test_weird_chars",
"tests/test_client.py::test_nan",
"tests/test_client.py::test_cyclic_frame_vars",
"tests/test_client.py::test_cyclic_data",
"tests/test_client.py::test_databag_depth_stripping",
"tests/test_client.py::test_databag_string_stripping",
"tests/test_client.py::test_databag_breadth_stripping",
"tests/test_client.py::test_chained_exceptions",
"tests/test_client.py::test_broken_mapping",
"tests/test_client.py::test_mapping_sends_exception",
"tests/test_client.py::test_object_sends_exception",
"tests/test_client.py::test_errno_errors",
"tests/test_client.py::test_non_string_variables",
"tests/test_client.py::test_dict_changed_during_iteration",
"tests/test_client.py::test_init_string_types[http://894b7d594095440f8dfea9b300e6f572@localhost:8000/20]",
"tests/test_client.py::test_init_string_types[http://894b7d594095440f8dfea9b300e6f572@localhost:8000/21]",
"tests/test_client.py::test_max_breadcrumbs_option[sdk_options0-100]",
"tests/test_client.py::test_max_breadcrumbs_option[sdk_options1-50]",
"tests/test_client.py::test_multiple_positional_args",
"tests/test_client.py::test_max_value_length_option[sdk_options0-1024]",
"tests/test_client.py::test_max_value_length_option[sdk_options1-1800]",
"tests/test_client.py::test_debug_option[None--False]",
"tests/test_client.py::test_debug_option[None-t-True]",
"tests/test_client.py::test_debug_option[None-1-True]",
"tests/test_client.py::test_debug_option[None-True-True]",
"tests/test_client.py::test_debug_option[None-true-True]",
"tests/test_client.py::test_debug_option[None-f-False]",
"tests/test_client.py::test_debug_option[None-0-False]",
"tests/test_client.py::test_debug_option[None-False-False]",
"tests/test_client.py::test_debug_option[None-false-False]",
"tests/test_client.py::test_debug_option[None-xxx-False]",
"tests/test_client.py::test_debug_option[True--True]",
"tests/test_client.py::test_debug_option[True-t-True]",
"tests/test_client.py::test_debug_option[True-1-True]",
"tests/test_client.py::test_debug_option[True-True-True]",
"tests/test_client.py::test_debug_option[True-true-True]",
"tests/test_client.py::test_debug_option[True-f-True]",
"tests/test_client.py::test_debug_option[True-0-True]",
"tests/test_client.py::test_debug_option[True-False-True]",
"tests/test_client.py::test_debug_option[True-false-True]",
"tests/test_client.py::test_debug_option[True-xxx-True]",
"tests/test_client.py::test_debug_option[False--False]",
"tests/test_client.py::test_debug_option[False-t-False]",
"tests/test_client.py::test_debug_option[False-1-False]",
"tests/test_client.py::test_debug_option[False-True-False]",
"tests/test_client.py::test_debug_option[False-true-False]",
"tests/test_client.py::test_debug_option[False-f-False]",
"tests/test_client.py::test_debug_option[False-0-False]",
"tests/test_client.py::test_debug_option[False-False-False]",
"tests/test_client.py::test_debug_option[False-false-False]",
"tests/test_client.py::test_debug_option[False-xxx-False]",
"tests/test_client.py::test_error_sampler[test_config0]",
"tests/test_client.py::test_error_sampler[test_config1]",
"tests/test_client.py::test_error_sampler[test_config2]",
"tests/test_client.py::test_error_sampler[test_config3]",
"tests/test_client.py::test_error_sampler[test_config4]",
"tests/test_client.py::test_error_sampler[test_config5]",
"tests/test_client.py::test_error_sampler[test_config6]",
"tests/test_client.py::test_error_sampler[test_config7]",
"tests/test_client.py::test_error_sampler[test_config8]",
"tests/test_client.py::test_error_sampler[test_config9]",
"tests/test_client.py::test_error_sampler[test_config10]",
"tests/test_client.py::test_error_sampler[test_config11]",
"tests/test_client.py::test_error_sampler[test_config12]",
"tests/test_client.py::test_error_sampler[test_config13]",
"tests/test_client.py::test_error_sampler[test_config14]",
"tests/test_client.py::test_error_sampler[test_config15]",
"tests/test_client.py::test_error_sampler[test_config16]",
"tests/test_client.py::test_uwsgi_warnings[opt0-missing_flags0]",
"tests/test_client.py::test_uwsgi_warnings[opt1-missing_flags1]",
"tests/test_client.py::test_uwsgi_warnings[opt2-missing_flags2]",
"tests/test_client.py::test_uwsgi_warnings[opt3-missing_flags3]",
"tests/test_client.py::test_uwsgi_warnings[opt4-missing_flags4]",
"tests/test_client.py::test_uwsgi_warnings[opt5-missing_flags5]",
"tests/test_client.py::test_uwsgi_warnings[opt6-missing_flags6]",
"tests/test_client.py::test_uwsgi_warnings[opt7-missing_flags7]",
"tests/test_client.py::test_uwsgi_warnings[opt8-missing_flags8]",
"tests/test_client.py::test_uwsgi_warnings[opt9-missing_flags9]",
"tests/test_client.py::test_uwsgi_warnings[opt10-missing_flags10]",
"tests/test_client.py::test_uwsgi_warnings[opt11-missing_flags11]",
"tests/test_client.py::test_uwsgi_warnings[opt12-missing_flags12]",
"tests/test_client.py::test_dropped_transaction[test_config0]",
"tests/test_client.py::test_dropped_transaction[test_config1]",
"tests/test_client.py::test_dropped_transaction[test_config2]",
"tests/test_client.py::test_dropped_transaction[test_config3]",
"tests/test_client.py::test_dropped_transaction[test_config4]",
"tests/test_client.py::test_dropped_transaction[test_config5]",
"tests/test_client.py::test_dropped_transaction[test_config6]",
"tests/test_client.py::test_dropped_transaction[test_config7]",
"tests/test_client.py::test_dropped_transaction[test_config8]",
"tests/test_client.py::test_dropped_transaction[test_config9]",
"tests/test_serializer.py::test_bytes_serialization_decode",
"tests/test_serializer.py::test_bytes_serialization_repr",
"tests/test_serializer.py::test_bytearray_serialization_decode",
"tests/test_serializer.py::test_bytearray_serialization_repr",
"tests/test_serializer.py::test_memoryview_serialization_repr",
"tests/test_serializer.py::test_serialize_sets",
"tests/test_serializer.py::test_serialize_custom_mapping",
"tests/test_serializer.py::test_custom_mapping_doesnt_mess_with_mock",
"tests/test_serializer.py::test_trim_databag_breadth",
"tests/test_serializer.py::test_no_trimming_if_max_request_body_size_is_always",
"tests/test_serializer.py::test_max_value_length_default",
"tests/test_serializer.py::test_max_value_length"
] | [] | MIT License | 19,275 | 1,503 | [
"sentry_sdk/client.py",
"sentry_sdk/consts.py",
"sentry_sdk/serializer.py",
"sentry_sdk/utils.py"
] |
python-wheel-build__fromager-320 | 8645730ea616c967a7927db5038c1ce394d61c9e | 2024-08-12 18:30:59 | 3302edce40a7cb10c57cde21a61bb67d985630e0 | diff --git a/src/fromager/settings.py b/src/fromager/settings.py
index c90bc91..5fc5aa1 100644
--- a/src/fromager/settings.py
+++ b/src/fromager/settings.py
@@ -96,6 +96,11 @@ class Settings:
return sdist_root_dir / relative_build_dir
return sdist_root_dir
+ def build_tag(self, pkg: str, version: Version | str) -> int:
+ p = self.get_package_settings(pkg)
+ changelog = p.get("changelog", {}).get(str(version), [])
+ return len(changelog)
+
def get_package_settings(self, pkg: str) -> dict[str, dict[str, str]]:
p = self.packages()
return self._return_value_or_default(
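The new `Settings.build_tag` helper boils down to counting the changelog entries recorded for a package at a given version. A standalone sketch of the same rule (the package data here is illustrative):

```python
from packaging.version import Version

package_settings = {"changelog": {"2.3.1": ["rebuild for xyz"]}}


def build_tag(version):
    # One changelog entry per rebuild; the count becomes the build tag.
    return len(package_settings.get("changelog", {}).get(str(version), []))


assert build_tag(Version("2.3.1")) == 1
assert build_tag("1.0.0") == 0
```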
| Add package-version-specific changelog to settings
- Add a new field `changelog` to the per-package settings
- Include a way to specify versions
- We just care about the length of the changelog (the ordering of the entries doesn't matter to us; it is on the user to maintain the ordering)
Agreed upon template:
```yaml
torch:
changelog:
"2.3.1":
- rebuild for xyz
``` | python-wheel-build/fromager | diff --git a/tests/test_settings.py b/tests/test_settings.py
index 7949083..49e12b7 100644
--- a/tests/test_settings.py
+++ b/tests/test_settings.py
@@ -3,6 +3,7 @@ import textwrap
import pytest
from packaging.requirements import Requirement
+from packaging.version import Version
from fromager import settings
@@ -141,6 +142,23 @@ def test_escape_sdist_root_build_dir():
str(s.build_dir("foo", sdist_root_dir)).startswith("/foo/bar")
+def test_changelog():
+ s = settings._parse(
+ textwrap.dedent("""
+ packages:
+ foo:
+ changelog:
+ "2.1.0":
+ - "rebuild abc"
+ - "rebuild xyz"
+ """)
+ )
+ assert s.build_tag("foo", Version("2.1.0")) == 2
+ assert s.build_tag("foo", "2.1.0") == 2
+ assert s.build_tag("foo", "3.1.0") == 0
+ assert s.build_tag("bar", "2.1.0") == 0
+
+
def test_resolve_template_with_no_template():
req = Requirement("foo==1.0")
assert settings._resolve_template(None, req, "1.0") is None
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.27 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.11",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
Deprecated==1.2.18
distlib==0.3.9
elfdeps==0.2.0
filelock==3.18.0
-e git+https://github.com/python-wheel-build/fromager.git@8645730ea616c967a7927db5038c1ce394d61c9e#egg=fromager
html5lib==1.1
idna==3.10
iniconfig==2.1.0
packaging==24.2
pbr==6.1.1
pkginfo==1.12.1.2
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
pyelftools==0.32
PyGithub==2.6.1
PyJWT==2.10.1
PyNaCl==1.5.0
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-pypi-mirror==5.2.1
PyYAML==6.0.2
requests==2.32.3
resolvelib==1.1.0
six==1.17.0
stevedore==5.4.1
tomlkit==0.13.2
tqdm==4.67.1
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
webencodings==0.5.1
wrapt==1.17.2
| name: fromager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py311h06a4308_0
- python=3.11.11=he870216_0
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py311h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- deprecated==1.2.18
- distlib==0.3.9
- elfdeps==0.2.0
- filelock==3.18.0
- fromager==0.27.1.dev11+g8645730
- html5lib==1.1
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pbr==6.1.1
- pkginfo==1.12.1.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pyelftools==0.32
- pygithub==2.6.1
- pyjwt==2.10.1
- pynacl==1.5.0
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-pypi-mirror==5.2.1
- pyyaml==6.0.2
- requests==2.32.3
- resolvelib==1.1.0
- setuptools==71.1.0
- six==1.17.0
- stevedore==5.4.1
- tomlkit==0.13.2
- tqdm==4.67.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- webencodings==0.5.1
- wrapt==1.17.2
prefix: /opt/conda/envs/fromager
| [
"tests/test_settings.py::test_changelog"
] | [] | [
"tests/test_settings.py::test_empty",
"tests/test_settings.py::test_no_pre_built",
"tests/test_settings.py::test_with_pre_built",
"tests/test_settings.py::test_with_download_source",
"tests/test_settings.py::test_no_download_source",
"tests/test_settings.py::test_with_resolver_dist",
"tests/test_settings.py::test_no_resolver_dist",
"tests/test_settings.py::test_relative_path_build_dir",
"tests/test_settings.py::test_only_name_build_dir",
"tests/test_settings.py::test_absolute_path_build_dir",
"tests/test_settings.py::test_escape_sdist_root_build_dir",
"tests/test_settings.py::test_resolve_template_with_no_template",
"tests/test_settings.py::test_resolve_template_with_version",
"tests/test_settings.py::test_resolve_template_with_no_matching_template"
] | [] | Apache License 2.0 | 19,277 | 178 | [
"src/fromager/settings.py"
] |
|
mmcdermott__MEDS_transforms-154 | a27d361221689bde213c0665216ec3a662d7e935 | 2024-08-13 15:19:37 | 15ffb9ee27d5cd19f2c44c13a3cf4de5c00e08bb | diff --git a/src/MEDS_transforms/extract/extract_code_metadata.py b/src/MEDS_transforms/extract/extract_code_metadata.py
index 1b8b394..e9133eb 100644
--- a/src/MEDS_transforms/extract/extract_code_metadata.py
+++ b/src/MEDS_transforms/extract/extract_code_metadata.py
@@ -364,6 +364,10 @@ def main(cfg: DictConfig):
OmegaConf.save(event_conversion_cfg, partial_metadata_dir / "event_conversion_config.yaml")
events_and_metadata_by_metadata_fp = get_events_and_metadata_by_metadata_fp(event_conversion_cfg)
+ if not events_and_metadata_by_metadata_fp:
+ logger.info("No _metadata blocks in the event_conversion_config.yaml found. Exiting...")
+ return
+
event_metadata_configs = list(events_and_metadata_by_metadata_fp.items())
random.shuffle(event_metadata_configs)
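The merged change is just the early-exit guard above; the issue below additionally suggests that, ideally, the stage would pass any pre-existing `metadata/codes.parquet` through before terminating. A hedged sketch of that fuller behavior — the helper name and paths are assumptions, not the actual implementation:

```python
import shutil
from pathlib import Path


def pass_through_codes_if_no_metadata(metadata_blocks, input_dir: Path, output_dir: Path) -> bool:
    """Hypothetical helper: early-exit when the event conversion config has no _metadata blocks."""
    if metadata_blocks:
        return False  # there is metadata to extract; run the stage normally
    existing = input_dir / "metadata" / "codes.parquet"
    if existing.exists():
        output_dir.mkdir(parents=True, exist_ok=True)
        shutil.copy(existing, output_dir / "codes.parquet")  # or symlink, per the issue
    return True
```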
| Extraction ETL crashes if you include the `extract_metadata` stage but you don't have any `_metadata` blocks in your configs.
Instead, this stage should just copy or symlink over any existing `metadata/codes.parquet` and terminate in this case.
**In case this issue is impacting anybody**, before it gets formally fixed you can solve this by just removing this stage from your extraction pipeline. You can do this on the command line on the fly by just overwriting the `stages` parameter in the normal Hydra manner to include all the other stages, but not this stage (and then also skip the stage-specific script as well, naturally). See https://github.com/mmcdermott/MEDS_transforms?tab=readme-ov-file#notes for an example of this overwrite syntax. | mmcdermott/MEDS_transforms | diff --git a/tests/test_extract_no_metadata.py b/tests/test_extract_no_metadata.py
new file mode 100644
index 0000000..f1945af
--- /dev/null
+++ b/tests/test_extract_no_metadata.py
@@ -0,0 +1,631 @@
+"""Tests the full end-to-end extraction process.
+
+Set the bash env variable `DO_USE_LOCAL_SCRIPTS=1` to use the local py files, rather than the installed
+scripts.
+"""
+
+import os
+
+import rootutils
+
+root = rootutils.setup_root(__file__, dotenv=True, pythonpath=True, cwd=True)
+
+code_root = root / "src" / "MEDS_transforms"
+extraction_root = code_root / "extract"
+
+if os.environ.get("DO_USE_LOCAL_SCRIPTS", "0") == "1":
+ SHARD_EVENTS_SCRIPT = extraction_root / "shard_events.py"
+ SPLIT_AND_SHARD_SCRIPT = extraction_root / "split_and_shard_patients.py"
+ CONVERT_TO_SHARDED_EVENTS_SCRIPT = extraction_root / "convert_to_sharded_events.py"
+ MERGE_TO_MEDS_COHORT_SCRIPT = extraction_root / "merge_to_MEDS_cohort.py"
+ EXTRACT_CODE_METADATA_SCRIPT = extraction_root / "extract_code_metadata.py"
+ FINALIZE_DATA_SCRIPT = extraction_root / "finalize_MEDS_data.py"
+ FINALIZE_METADATA_SCRIPT = extraction_root / "finalize_MEDS_metadata.py"
+else:
+ SHARD_EVENTS_SCRIPT = "MEDS_extract-shard_events"
+ SPLIT_AND_SHARD_SCRIPT = "MEDS_extract-split_and_shard_patients"
+ CONVERT_TO_SHARDED_EVENTS_SCRIPT = "MEDS_extract-convert_to_sharded_events"
+ MERGE_TO_MEDS_COHORT_SCRIPT = "MEDS_extract-merge_to_MEDS_cohort"
+ EXTRACT_CODE_METADATA_SCRIPT = "MEDS_extract-extract_code_metadata"
+ FINALIZE_DATA_SCRIPT = "MEDS_extract-finalize_MEDS_data"
+ FINALIZE_METADATA_SCRIPT = "MEDS_extract-finalize_MEDS_metadata"
+
+import json
+import tempfile
+from io import StringIO
+from pathlib import Path
+
+import polars as pl
+from meds import __version__ as MEDS_VERSION
+
+from .utils import assert_df_equal, run_command
+
+# Test data (inputs)
+
+SUBJECTS_CSV = """
+MRN,dob,eye_color,height
+1195293,06/20/1978,BLUE,164.6868838269085
+239684,12/28/1980,BROWN,175.271115221764
+1500733,07/20/1986,BROWN,158.60131573580904
+814703,03/28/1976,HAZEL,156.48559093209357
+754281,12/19/1988,BROWN,166.22261567137025
+68729,03/09/1978,HAZEL,160.3953106166676
+"""
+
+ADMIT_VITALS_CSV = """
+patient_id,admit_date,disch_date,department,vitals_date,HR,temp
+239684,"05/11/2010, 17:41:51","05/11/2010, 19:27:19",CARDIAC,"05/11/2010, 18:57:18",112.6,95.5
+754281,"01/03/2010, 06:27:59","01/03/2010, 08:22:13",PULMONARY,"01/03/2010, 06:27:59",142.0,99.8
+814703,"02/05/2010, 05:55:39","02/05/2010, 07:02:30",ORTHOPEDIC,"02/05/2010, 05:55:39",170.2,100.1
+239684,"05/11/2010, 17:41:51","05/11/2010, 19:27:19",CARDIAC,"05/11/2010, 18:25:35",113.4,95.8
+68729,"05/26/2010, 02:30:56","05/26/2010, 04:51:52",PULMONARY,"05/26/2010, 02:30:56",86.0,97.8
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 20:12:31",112.5,99.8
+1500733,"06/03/2010, 14:54:38","06/03/2010, 16:44:26",ORTHOPEDIC,"06/03/2010, 16:20:49",90.1,100.1
+239684,"05/11/2010, 17:41:51","05/11/2010, 19:27:19",CARDIAC,"05/11/2010, 17:48:48",105.1,96.2
+239684,"05/11/2010, 17:41:51","05/11/2010, 19:27:19",CARDIAC,"05/11/2010, 17:41:51",102.6,96.0
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 19:25:32",114.1,100.0
+1500733,"06/03/2010, 14:54:38","06/03/2010, 16:44:26",ORTHOPEDIC,"06/03/2010, 14:54:38",91.4,100.0
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 20:41:33",107.5,100.4
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 20:24:44",107.7,100.0
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 19:45:19",119.8,99.9
+1195293,"06/20/2010, 19:23:52","06/20/2010, 20:50:04",CARDIAC,"06/20/2010, 19:23:52",109.0,100.0
+1500733,"06/03/2010, 14:54:38","06/03/2010, 16:44:26",ORTHOPEDIC,"06/03/2010, 15:39:49",84.4,100.3
+"""
+
+INPUT_METADATA_FILE = """
+lab_code,title,loinc
+HR,Heart Rate,8867-4
+temp,Body Temperature,8310-5
+"""
+
+DEMO_METADATA_FILE = """
+eye_color,description
+BROWN,"Brown Eyes. The most common eye color."
+BLUE,"Blue Eyes. Less common than brown."
+HAZEL,"Hazel eyes. These are uncommon"
+GREEN,"Green eyes. These are rare."
+"""
+
+EVENT_CFGS_YAML = """
+subjects:
+ patient_id_col: MRN
+ eye_color:
+ code:
+ - EYE_COLOR
+ - col(eye_color)
+ time: null
+ height:
+ code: HEIGHT
+ time: null
+ numeric_value: height
+ dob:
+ code: DOB
+ time: col(dob)
+ time_format: "%m/%d/%Y"
+admit_vitals:
+ admissions:
+ code:
+ - ADMISSION
+ - col(department)
+ time: col(admit_date)
+ time_format: "%m/%d/%Y, %H:%M:%S"
+ discharge:
+ code: DISCHARGE
+ time: col(disch_date)
+ time_format: "%m/%d/%Y, %H:%M:%S"
+ HR:
+ code: HR
+ time: col(vitals_date)
+ time_format: "%m/%d/%Y, %H:%M:%S"
+ numeric_value: HR
+ temp:
+ code: TEMP
+ time: col(vitals_date)
+ time_format: "%m/%d/%Y, %H:%M:%S"
+ numeric_value: temp
+"""
+
+# Test data (expected outputs) -- ALL OF THIS MAY CHANGE IF THE SEED OR DATA CHANGES
+EXPECTED_SPLITS = {
+ "train/0": [239684, 1195293],
+ "train/1": [68729, 814703],
+ "tuning/0": [754281],
+ "held_out/0": [1500733],
+}
+
+PATIENT_SPLITS_DF = pl.DataFrame(
+ {
+ "patient_id": [239684, 1195293, 68729, 814703, 754281, 1500733],
+ "split": ["train", "train", "train", "train", "tuning", "held_out"],
+ }
+)
+
+
+def get_expected_output(df: str) -> pl.DataFrame:
+ return (
+ pl.read_csv(source=StringIO(df))
+ .select(
+ "patient_id",
+ pl.col("time").str.strptime(pl.Datetime, "%m/%d/%Y, %H:%M:%S").alias("time"),
+ pl.col("code"),
+ "numeric_value",
+ )
+ .sort(by=["patient_id", "time"])
+ )
+
+
+MEDS_OUTPUT_TRAIN_0_SUBJECTS = """
+patient_id,time,code,numeric_value
+239684,,EYE_COLOR//BROWN,
+239684,,HEIGHT,175.271115221764
+239684,"12/28/1980, 00:00:00",DOB,
+1195293,,EYE_COLOR//BLUE,
+1195293,,HEIGHT,164.6868838269085
+1195293,"06/20/1978, 00:00:00",DOB,
+"""
+
+MEDS_OUTPUT_TRAIN_0_ADMIT_VITALS = """
+patient_id,time,code,numeric_value
+239684,"05/11/2010, 17:41:51",ADMISSION//CARDIAC,
+239684,"05/11/2010, 17:41:51",HR,102.6
+239684,"05/11/2010, 17:41:51",TEMP,96.0
+239684,"05/11/2010, 17:48:48",HR,105.1
+239684,"05/11/2010, 17:48:48",TEMP,96.2
+239684,"05/11/2010, 18:25:35",HR,113.4
+239684,"05/11/2010, 18:25:35",TEMP,95.8
+239684,"05/11/2010, 18:57:18",HR,112.6
+239684,"05/11/2010, 18:57:18",TEMP,95.5
+239684,"05/11/2010, 19:27:19",DISCHARGE,
+1195293,"06/20/2010, 19:23:52",ADMISSION//CARDIAC,
+1195293,"06/20/2010, 19:23:52",HR,109.0
+1195293,"06/20/2010, 19:23:52",TEMP,100.0
+1195293,"06/20/2010, 19:25:32",HR,114.1
+1195293,"06/20/2010, 19:25:32",TEMP,100.0
+1195293,"06/20/2010, 19:45:19",HR,119.8
+1195293,"06/20/2010, 19:45:19",TEMP,99.9
+1195293,"06/20/2010, 20:12:31",HR,112.5
+1195293,"06/20/2010, 20:12:31",TEMP,99.8
+1195293,"06/20/2010, 20:24:44",HR,107.7
+1195293,"06/20/2010, 20:24:44",TEMP,100.0
+1195293,"06/20/2010, 20:41:33",HR,107.5
+1195293,"06/20/2010, 20:41:33",TEMP,100.4
+1195293,"06/20/2010, 20:50:04",DISCHARGE,
+"""
+
+MEDS_OUTPUT_TRAIN_1_SUBJECTS = """
+patient_id,time,code,numeric_value
+68729,,EYE_COLOR//HAZEL,
+68729,,HEIGHT,160.3953106166676
+68729,"03/09/1978, 00:00:00",DOB,
+814703,,EYE_COLOR//HAZEL,
+814703,,HEIGHT,156.48559093209357
+814703,"03/28/1976, 00:00:00",DOB,
+"""
+
+MEDS_OUTPUT_TRAIN_1_ADMIT_VITALS = """
+patient_id,time,code,numeric_value
+68729,"05/26/2010, 02:30:56",ADMISSION//PULMONARY,
+68729,"05/26/2010, 02:30:56",HR,86.0
+68729,"05/26/2010, 02:30:56",TEMP,97.8
+68729,"05/26/2010, 04:51:52",DISCHARGE,
+814703,"02/05/2010, 05:55:39",ADMISSION//ORTHOPEDIC,
+814703,"02/05/2010, 05:55:39",HR,170.2
+814703,"02/05/2010, 05:55:39",TEMP,100.1
+814703,"02/05/2010, 07:02:30",DISCHARGE,
+"""
+
+MEDS_OUTPUT_TUNING_0_SUBJECTS = """
+patient_id,time,code,numeric_value
+754281,,EYE_COLOR//BROWN,
+754281,,HEIGHT,166.22261567137025
+754281,"12/19/1988, 00:00:00",DOB,
+"""
+
+MEDS_OUTPUT_TUNING_0_ADMIT_VITALS = """
+patient_id,time,code,numeric_value
+754281,"01/03/2010, 06:27:59",ADMISSION//PULMONARY,
+754281,"01/03/2010, 06:27:59",HR,142.0
+754281,"01/03/2010, 06:27:59",TEMP,99.8
+754281,"01/03/2010, 08:22:13",DISCHARGE,
+"""
+
+MEDS_OUTPUT_HELD_OUT_0_SUBJECTS = """
+patient_id,time,code,numeric_value
+1500733,,EYE_COLOR//BROWN,
+1500733,,HEIGHT,158.60131573580904
+1500733,"07/20/1986, 00:00:00",DOB,
+"""
+
+MEDS_OUTPUT_HELD_OUT_0_ADMIT_VITALS = """
+patient_id,time,code,numeric_value
+1500733,"06/03/2010, 14:54:38",ADMISSION//ORTHOPEDIC,
+1500733,"06/03/2010, 14:54:38",HR,91.4
+1500733,"06/03/2010, 14:54:38",TEMP,100.0
+1500733,"06/03/2010, 15:39:49",HR,84.4
+1500733,"06/03/2010, 15:39:49",TEMP,100.3
+1500733,"06/03/2010, 16:20:49",HR,90.1
+1500733,"06/03/2010, 16:20:49",TEMP,100.1
+1500733,"06/03/2010, 16:44:26",DISCHARGE,
+"""
+
+MEDS_OUTPUT_CODE_METADATA_FILE = """
+code,description,parent_codes
+"""
+
+MEDS_OUTPUT_DATASET_METADATA_JSON = {
+ "dataset_name": "TEST",
+ "dataset_version": "1.0",
+ "etl_name": "MEDS_transforms",
+ # "etl_version": None, # We don't test this as it changes with the commits.
+ "meds_version": MEDS_VERSION,
+}
+
+SUB_SHARDED_OUTPUTS = {
+ "train/0": {
+ "subjects": MEDS_OUTPUT_TRAIN_0_SUBJECTS,
+ "admit_vitals": MEDS_OUTPUT_TRAIN_0_ADMIT_VITALS,
+ },
+ "train/1": {
+ "subjects": MEDS_OUTPUT_TRAIN_1_SUBJECTS,
+ "admit_vitals": MEDS_OUTPUT_TRAIN_1_ADMIT_VITALS,
+ },
+ "tuning/0": {
+ "subjects": MEDS_OUTPUT_TUNING_0_SUBJECTS,
+ "admit_vitals": MEDS_OUTPUT_TUNING_0_ADMIT_VITALS,
+ },
+ "held_out/0": {
+ "subjects": MEDS_OUTPUT_HELD_OUT_0_SUBJECTS,
+ "admit_vitals": MEDS_OUTPUT_HELD_OUT_0_ADMIT_VITALS,
+ },
+}
+
+
+MEDS_OUTPUTS = {
+ "train/0": [MEDS_OUTPUT_TRAIN_0_SUBJECTS, MEDS_OUTPUT_TRAIN_0_ADMIT_VITALS],
+ "train/1": [MEDS_OUTPUT_TRAIN_1_SUBJECTS, MEDS_OUTPUT_TRAIN_1_ADMIT_VITALS],
+ "tuning/0": [MEDS_OUTPUT_TUNING_0_SUBJECTS, MEDS_OUTPUT_TUNING_0_ADMIT_VITALS],
+ "held_out/0": [MEDS_OUTPUT_HELD_OUT_0_SUBJECTS, MEDS_OUTPUT_HELD_OUT_0_ADMIT_VITALS],
+}
+
+
+def test_extraction():
+ with tempfile.TemporaryDirectory() as d:
+ raw_cohort_dir = Path(d) / "raw_cohort"
+ MEDS_cohort_dir = Path(d) / "MEDS_cohort"
+
+ # Create the directories
+ raw_cohort_dir.mkdir()
+ MEDS_cohort_dir.mkdir()
+
+ subjects_csv = raw_cohort_dir / "subjects.csv"
+ admit_vitals_csv = raw_cohort_dir / "admit_vitals.csv"
+ event_cfgs_yaml = raw_cohort_dir / "event_cfgs.yaml"
+
+ demo_metadata_csv = raw_cohort_dir / "demo_metadata.csv"
+ input_metadata_csv = raw_cohort_dir / "input_metadata.csv"
+
+ # Write the CSV files
+ subjects_csv.write_text(SUBJECTS_CSV.strip())
+ admit_vitals_csv.write_text(ADMIT_VITALS_CSV.strip())
+ demo_metadata_csv.write_text(DEMO_METADATA_FILE.strip())
+ input_metadata_csv.write_text(INPUT_METADATA_FILE.strip())
+
+ # Mix things up -- have one CSV be also in parquet format.
+ admit_vitals_parquet = raw_cohort_dir / "admit_vitals.parquet"
+ df = pl.read_csv(admit_vitals_csv)
+
+ df.write_parquet(admit_vitals_parquet, use_pyarrow=True)
+
+ # Write the event config YAML
+ event_cfgs_yaml.write_text(EVENT_CFGS_YAML)
+
+ # Run the extraction script
+ # 1. Sub-shard the data (this will be a null operation in this case, but it is worth doing just in
+ # case.
+ # 2. Collect the patient splits.
+ # 3. Extract the events and sub-shard by patient.
+ # 4. Merge to the final output.
+
+ extraction_config_kwargs = {
+ "input_dir": str(raw_cohort_dir.resolve()),
+ "cohort_dir": str(MEDS_cohort_dir.resolve()),
+ "event_conversion_config_fp": str(event_cfgs_yaml.resolve()),
+ "stage_configs.split_and_shard_patients.split_fracs.train": 4 / 6,
+ "stage_configs.split_and_shard_patients.split_fracs.tuning": 1 / 6,
+ "stage_configs.split_and_shard_patients.split_fracs.held_out": 1 / 6,
+ "stage_configs.shard_events.row_chunksize": 10,
+ "stage_configs.split_and_shard_patients.n_patients_per_shard": 2,
+ "hydra.verbose": True,
+ "etl_metadata.dataset_name": "TEST",
+ "etl_metadata.dataset_version": "1.0",
+ }
+
+ all_stderrs = []
+ all_stdouts = []
+
+ # Stage 1: Sub-shard the data
+ stderr, stdout = run_command(SHARD_EVENTS_SCRIPT, extraction_config_kwargs, "shard_events")
+
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ subsharded_dir = MEDS_cohort_dir / "shard_events"
+
+ try:
+ out_files = list(subsharded_dir.glob("**/*.parquet"))
+ assert len(out_files) == 3, f"Expected 3 output files, got {len(out_files)}."
+
+ # Checking specific out files:
+ # 1. subjects.parquet
+ subjects_out = subsharded_dir / "subjects" / "[0-6).parquet"
+ assert subjects_out.is_file(), f"Expected {subjects_out} to exist. Files include {out_files}."
+
+ assert_df_equal(
+ pl.read_csv(subjects_csv),
+ pl.read_parquet(subjects_out, glob=False),
+ "Subjects should be equal after sub-sharding",
+ check_column_order=False,
+ check_row_order=False,
+ )
+ except AssertionError as e:
+ full_stderr = "\n".join(all_stderrs)
+ print("Sub-sharding failed")
+ print(f"stderr:\n{full_stderr}")
+ raise e
+
+ # 2. admit_vitals.parquet
+ df_chunks = []
+ for chunk in ["[0-10)", "[10-16)"]:
+ admit_vitals_chunk_fp = subsharded_dir / "admit_vitals" / f"{chunk}.parquet"
+ assert admit_vitals_chunk_fp.is_file(), f"Expected {admit_vitals_chunk_fp} to exist."
+
+ df_chunks.append(pl.read_parquet(admit_vitals_chunk_fp, glob=False))
+
+ assert_df_equal(
+ pl.read_csv(admit_vitals_csv),
+ pl.concat(df_chunks),
+ "Admit vitals should be equal after sub-sharding",
+ check_column_order=False,
+ check_row_order=False,
+ )
+
+ # Stage 2: Collect the patient splits
+ stderr, stdout = run_command(
+ SPLIT_AND_SHARD_SCRIPT,
+ extraction_config_kwargs,
+ "split_and_shard_patients",
+ )
+
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ try:
+ shards_fp = MEDS_cohort_dir / "metadata" / ".shards.json"
+ assert shards_fp.is_file(), f"Expected splits @ {str(shards_fp.resolve())} to exist."
+
+ splits = json.loads(shards_fp.read_text())
+ expected_keys = ["train/0", "train/1", "tuning/0", "held_out/0"]
+
+ expected_keys_str = ", ".join(f"'{k}'" for k in expected_keys)
+ got_keys_str = ", ".join(f"'{k}'" for k in splits.keys())
+
+ assert set(splits.keys()) == set(expected_keys), (
+ f"Expected splits to have keys {expected_keys_str}.\n" f"Got keys: {got_keys_str}"
+ )
+
+ assert splits == EXPECTED_SPLITS, (
+ f"Expected splits to be {EXPECTED_SPLITS}, got {splits}. NOTE THIS MAY CHANGE IF THE SEED OR "
+ "DATA CHANGES -- FAILURE HERE MAY BE JUST DUE TO A NON-DETERMINISTIC SPLIT AND THE TEST "
+ "NEEDING TO BE UPDATED."
+ )
+ except AssertionError as e:
+ print("Failed to split patients")
+ print(f"stderr:\n{stderr}")
+ print(f"stdout:\n{stdout}")
+ raise e
+
+ # Stage 3: Extract the events and sub-shard by patient
+ stderr, stdout = run_command(
+ CONVERT_TO_SHARDED_EVENTS_SCRIPT,
+ extraction_config_kwargs,
+ "convert_events",
+ )
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ patient_subsharded_folder = MEDS_cohort_dir / "convert_to_sharded_events"
+ assert patient_subsharded_folder.is_dir(), f"Expected {patient_subsharded_folder} to be a directory."
+
+ for split, expected_outputs in SUB_SHARDED_OUTPUTS.items():
+ for prefix, expected_df_L in expected_outputs.items():
+ if not isinstance(expected_df_L, list):
+ expected_df_L = [expected_df_L]
+
+ expected_df = pl.concat([get_expected_output(df) for df in expected_df_L])
+
+ fps = list((patient_subsharded_folder / split / prefix).glob("*.parquet"))
+ assert len(fps) > 0
+
+ # We add a "unique" here as there may be some duplicates across the row-group sub-shards.
+ got_df = pl.concat([pl.read_parquet(fp, glob=False) for fp in fps]).unique()
+ try:
+ assert_df_equal(
+ expected_df,
+ got_df,
+ f"Expected output for split {split}/{prefix} to be equal to the expected output.",
+ check_column_order=False,
+ check_row_order=False,
+ )
+ except AssertionError as e:
+ print(f"Failed on split {split}/{prefix}")
+ print(f"stderr:\n{stderr}")
+ print(f"stdout:\n{stdout}")
+ raise e
+
+ # Stage 4: Merge to the final output
+ stderr, stdout = run_command(
+ MERGE_TO_MEDS_COHORT_SCRIPT,
+ extraction_config_kwargs,
+ "merge_to_MEDS_cohort",
+ )
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ full_stderr = "\n".join(all_stderrs)
+ full_stdout = "\n".join(all_stdouts)
+
+ # Check the final output
+ output_folder = MEDS_cohort_dir / "merge_to_MEDS_cohort"
+ try:
+ for split, expected_df_L in MEDS_OUTPUTS.items():
+ if not isinstance(expected_df_L, list):
+ expected_df_L = [expected_df_L]
+
+ expected_df = pl.concat([get_expected_output(df) for df in expected_df_L])
+
+ fp = output_folder / f"{split}.parquet"
+ assert fp.is_file(), f"Expected {fp} to exist.\nstderr:\n{stderr}\nstdout:\n{stdout}"
+
+ got_df = pl.read_parquet(fp, glob=False)
+ assert_df_equal(
+ expected_df,
+ got_df,
+ f"Expected output for split {split} to be equal to the expected output.",
+ check_column_order=False,
+ check_row_order=False,
+ )
+
+ assert got_df["patient_id"].is_sorted(), f"Patient IDs should be sorted for split {split}."
+ for subj in splits[split]:
+ got_df_subj = got_df.filter(pl.col("patient_id") == subj)
+ assert got_df_subj[
+ "time"
+ ].is_sorted(), f"Times should be sorted for patient {subj} in split {split}."
+
+ except AssertionError as e:
+ print(f"Failed on split {split}")
+ print(f"stderr:\n{full_stderr}")
+ print(f"stdout:\n{full_stdout}")
+ raise e
+
+ # Stage 6: Extract code metadata
+ stderr, stdout = run_command(
+ EXTRACT_CODE_METADATA_SCRIPT,
+ extraction_config_kwargs,
+ "extract_code_metadata",
+ )
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ full_stderr = "\n".join(all_stderrs)
+ full_stdout = "\n".join(all_stdouts)
+
+ output_file = MEDS_cohort_dir / "extract_code_metadata" / "codes.parquet"
+ assert (
+ not output_file.is_file()
+ ), f"Expected {output_file} to not exist: stderr:\n{stderr}\nstdout:\n{stdout}"
+
+ # Stage 7: Finalize the MEDS data
+ stderr, stdout = run_command(
+ FINALIZE_DATA_SCRIPT,
+ extraction_config_kwargs,
+ "finalize_MEDS_data",
+ )
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ full_stderr = "\n".join(all_stderrs)
+ full_stdout = "\n".join(all_stdouts)
+
+ # Check the final output
+ output_folder = MEDS_cohort_dir / "data"
+ try:
+ for split, expected_df_L in MEDS_OUTPUTS.items():
+ if not isinstance(expected_df_L, list):
+ expected_df_L = [expected_df_L]
+
+ expected_df = pl.concat([get_expected_output(df) for df in expected_df_L]).with_columns(
+ pl.col("numeric_value").cast(pl.Float32)
+ )
+
+ fp = output_folder / f"{split}.parquet"
+ assert fp.is_file(), f"Expected {fp} to exist."
+
+ got_df = pl.read_parquet(fp, glob=False)
+ assert_df_equal(
+ expected_df,
+ got_df,
+ f"Expected output for split {split} to be equal to the expected output.",
+ check_column_order=False,
+ check_row_order=False,
+ )
+
+ assert got_df["patient_id"].is_sorted(), f"Patient IDs should be sorted for split {split}."
+ for subj in splits[split]:
+ got_df_subj = got_df.filter(pl.col("patient_id") == subj)
+ assert got_df_subj[
+ "time"
+ ].is_sorted(), f"Times should be sorted for patient {subj} in split {split}."
+
+ except AssertionError as e:
+ print(f"Failed on split {split}")
+ print(f"stderr:\n{full_stderr}")
+ print(f"stdout:\n{full_stdout}")
+ raise e
+
+ # Stage 8: Finalize the metadata
+ stderr, stdout = run_command(
+ FINALIZE_METADATA_SCRIPT,
+ extraction_config_kwargs,
+ "finalize_metadata",
+ )
+ all_stderrs.append(stderr)
+ all_stdouts.append(stdout)
+
+ full_stderr = "\n".join(all_stderrs)
+ full_stdout = "\n".join(all_stdouts)
+
+ # Check code metadata
+ output_file = MEDS_cohort_dir / "metadata" / "codes.parquet"
+ assert output_file.is_file(), f"Expected {output_file} to exist: stderr:\n{stderr}\nstdout:\n{stdout}"
+
+ got_df = pl.read_parquet(output_file, glob=False, use_pyarrow=True)
+
+ want_df = pl.read_csv(source=StringIO(MEDS_OUTPUT_CODE_METADATA_FILE)).with_columns(
+ pl.col("code"),
+ pl.col("parent_codes").cast(pl.List(pl.Utf8)),
+ )
+
+ # We collapse the list type as it throws an error in the assert_df_equal otherwise
+ got_df = got_df.with_columns(pl.col("parent_codes").list.join("||"))
+ want_df = want_df.with_columns(pl.col("parent_codes").list.join("||"))
+
+ assert_df_equal(
+ want=want_df,
+ got=got_df,
+ msg=f"Finalized code metadata differs:\nstderr:\n{stderr}\nstdout:\n{stdout}",
+ check_column_order=False,
+ check_row_order=False,
+ )
+
+ # Check dataset metadata
+ output_file = MEDS_cohort_dir / "metadata" / "dataset.json"
+ assert output_file.is_file(), f"Expected {output_file} to exist: stderr:\n{stderr}\nstdout:\n{stdout}"
+
+ got_json = json.loads(output_file.read_text())
+ assert "etl_version" in got_json, "Expected 'etl_version' to be in the dataset metadata."
+ got_json.pop("etl_version") # We don't test this as it changes with the commits.
+ assert got_json == MEDS_OUTPUT_DATASET_METADATA_JSON, f"Dataset metadata differs: {got_json}"
+
+ # Check the splits parquet
+ output_file = MEDS_cohort_dir / "metadata" / "patient_splits.parquet"
+ assert output_file.is_file(), f"Expected {output_file} to exist: stderr:\n{stderr}\nstdout:\n{stdout}"
+
+ got_df = pl.read_parquet(output_file, glob=False, use_pyarrow=True)
+ assert_df_equal(
+ PATIENT_SPLITS_DF,
+ got_df,
+ "Patient splits should be equal to the expected splits.",
+ check_column_order=False,
+ check_row_order=False,
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev,tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.12",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | antlr4-python3-runtime==4.9.3
attrs==25.3.0
cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
filelock==3.18.0
hydra-core==1.3.2
identify==2.6.9
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
loguru==0.7.3
meds==0.3.0
-e git+https://github.com/mmcdermott/MEDS_transforms.git@a27d361221689bde213c0665216ec3a662d7e935#egg=MEDS_transforms
nested_ragged_tensors==0.1
nodeenv==1.9.1
numpy==2.2.4
omegaconf==2.3.0
packaging @ file:///croot/packaging_1734472117206/work
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
polars==1.1.0
pre_commit==4.2.0
pyarrow==19.0.1
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
python-dotenv==1.1.0
PyYAML==6.0.2
referencing==0.36.2
rootutils==1.0.7
rpds-py==0.24.0
safetensors==0.5.3
setuptools==75.8.0
typing_extensions==4.13.0
virtualenv==20.29.3
wheel==0.45.1
| name: MEDS_transforms
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- expat=2.6.4=h6a678d5_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py312h06a4308_0
- pip=25.0=py312h06a4308_0
- pluggy=1.5.0=py312h06a4308_0
- pytest=8.3.4=py312h06a4308_0
- python=3.12.9=h5148396_0
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py312h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py312h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- antlr4-python3-runtime==4.9.3
- attrs==25.3.0
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- filelock==3.18.0
- hydra-core==1.3.2
- identify==2.6.9
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- loguru==0.7.3
- meds==0.3.0
- meds-transforms==0.0.6.dev33+ga27d361
- nested-ragged-tensors==0.1
- nodeenv==1.9.1
- numpy==2.2.4
- omegaconf==2.3.0
- platformdirs==4.3.7
- polars==1.1.0
- pre-commit==4.2.0
- pyarrow==19.0.1
- pytest-cov==6.0.0
- python-dotenv==1.1.0
- pyyaml==6.0.2
- referencing==0.36.2
- rootutils==1.0.7
- rpds-py==0.24.0
- safetensors==0.5.3
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/MEDS_transforms
| [
"tests/test_extract_no_metadata.py::test_extraction"
] | [] | [] | [] | MIT License | 19,288 | 205 | [
"src/MEDS_transforms/extract/extract_code_metadata.py"
] |
|
lincc-frameworks__nested-pandas-136 | 975fbc88de84b6d8d9ae309f71ae3f0b96831818 | 2024-08-13 20:55:21 | 975fbc88de84b6d8d9ae309f71ae3f0b96831818 | diff --git a/src/nested_pandas/nestedframe/core.py b/src/nested_pandas/nestedframe/core.py
index a7ec7dc..cb743fc 100644
--- a/src/nested_pandas/nestedframe/core.py
+++ b/src/nested_pandas/nestedframe/core.py
@@ -214,6 +214,69 @@ class NestedFrame(pd.DataFrame):
nested_columns = [col for col in df.columns if col not in base_columns]
return out_df.add_nested(df[nested_columns], name=name)
+ @classmethod
+ def from_lists(cls, df, base_columns=None, list_columns=None, name="nested"):
+ """Creates a NestedFrame with base and nested columns from a flat
+ dataframe.
+
+ Parameters
+ ----------
+ df: pd.DataFrame or NestedFrame
+ A dataframe with list columns.
+ base_columns: list-like, or None
+ Any columns that have non-list values in the input df. These will
+ simply be kept as identical columns in the result
+ list_columns: list-like, or None
+ The list-value columns that should be packed into a nested column.
+ All columns in the list will attempt to be packed into a single
+ nested column with the name provided in `nested_name`. If None, is
+ defined as all columns not in `base_columns`.
+ name:
+ The name of the output column the `nested_columns` are packed into.
+
+ Returns
+ -------
+ NestedFrame
+ A NestedFrame with the specified nesting structure.
+
+ Examples
+ --------
+
+ >>> nf = NestedFrame({"c":[1,2,3], "d":[2,4,6],
+ ... "e":[[1,2,3], [4,5,6], [7,8,9]]},
+ ... index=[0,1,2])
+
+
+ >>> NestedFrame.from_lists(nf, base_columns=["c","d"])
+ """
+
+ # Resolve base and list columns
+ if base_columns is None:
+ if list_columns is None:
+ # with no inputs, assume all columns are list-valued
+ list_columns = df.columns
+ else:
+ # if list_columns are defined, assume everything else is base
+ base_columns = [col for col in df.columns if col not in list_columns]
+ else:
+ if list_columns is None:
+ # with defined base_columns, assume everything else is list
+ list_columns = [col for col in df.columns if col not in base_columns]
+
+ if len(list_columns) == 0:
+ raise ValueError("No columns were assigned as list columns.")
+
+ # Pack list columns into a nested column
+ packed_df = packer.pack_lists(df[list_columns])
+ packed_df.name = name
+
+ # join the nested column to the base_column df
+ if base_columns is not None:
+ return df[base_columns].join(packed_df)
+ # or just return the packed_df as a nestedframe if no base cols
+ else:
+ return NestedFrame(packed_df.to_frame())
+
def _split_query(self, expr) -> dict:
"""Splits a pandas query into multiple subqueries for nested and base layers"""
# Ensure query has needed spacing for upcoming split
| Add a shorthand `from_lists` NestedFrame generator function
**Feature request**
In the wild, users will frequently encounter data that is already stored as a dataframe with list columns or a mix of scalar and list columns. For example:
```
ndf = npd.NestedFrame({"a":[1,2,3], "b":[[1,1,1],[2,2,2],[3,3,3]]})
a b
0 1 [1, 1, 1]
1 2 [2, 2, 2]
2 3 [3, 3, 3]
```
In these cases, a user who wants to nest column "b" will have to find our `pack_lists` function:
```
npd.series.packer.pack_lists(ndf[["b"]])
```
Furthermore, as shown in the example, they can supply a dataframe comprising only columns of equal-length lists. It would be good to add a function like `NestedFrame.from_flat` (`NestedFrame.from_lists`) that would be easier for the user to find within the API and would also have the additional flexibility to specify base columns.
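For reference, here is a sketch of how the requested helper might be called, mirroring the signature implemented in the accompanying patch (`from_lists(df, base_columns=None, list_columns=None, name="nested")`); the specific column and name choices here are illustrative:
```
import nested_pandas as npd

ndf = npd.NestedFrame({"a": [1, 2, 3], "b": [[1, 1, 1], [2, 2, 2], [3, 3, 3]]})

# Pack the list column "b" into a single nested column, keeping "a" as a base column.
nested = npd.NestedFrame.from_lists(ndf, base_columns=["a"], list_columns=["b"], name="b_nested")
```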
**Before submitting**
Please check the following:
- [x] I have described the purpose of the suggested change, specifying what I need the enhancement to accomplish, i.e. what problem it solves.
- [x] I have included any relevant links, screenshots, environment information, and data relevant to implementing the requested feature, as well as pseudocode for how I want to access the new functionality.
- [x] If I have ideas for how the new feature could be implemented, I have provided explanations and/or pseudocode and/or task lists for the steps.
| lincc-frameworks/nested-pandas | diff --git a/tests/nested_pandas/nestedframe/test_nestedframe.py b/tests/nested_pandas/nestedframe/test_nestedframe.py
index 3dbf34b..7c4d2fc 100644
--- a/tests/nested_pandas/nestedframe/test_nestedframe.py
+++ b/tests/nested_pandas/nestedframe/test_nestedframe.py
@@ -320,6 +320,59 @@ def test_recover_from_flat():
assert nf2.equals(nf)
+def test_from_lists():
+ """Test NestedFrame.from_lists behavior"""
+ nf = NestedFrame(
+ {"c": [1, 2, 3], "d": [2, 4, 6], "e": [[1, 2, 3], [4, 5, 6], [7, 8, 9]]}, index=[0, 1, 2]
+ )
+
+ # Test a few combinations
+ res = NestedFrame.from_lists(nf, base_columns=["c", "d"], name="nested_e")
+ assert list(res.columns) == ["c", "d", "nested_e"]
+ assert list(res.nested_columns) == ["nested_e"]
+
+ res = NestedFrame.from_lists(nf, base_columns=["c", "d"], list_columns=["e"])
+ assert list(res.columns) == ["c", "d", "nested"]
+ assert list(res.nested_columns) == ["nested"]
+
+ res = NestedFrame.from_lists(nf, list_columns=["e"])
+ assert list(res.columns) == ["c", "d", "nested"]
+ assert list(res.nested_columns) == ["nested"]
+
+ # Check for the no list columns error
+ with pytest.raises(ValueError):
+ res = NestedFrame.from_lists(nf, base_columns=["c", "d", "e"])
+
+ # Multiple list columns (of uneven length)
+ nf2 = NestedFrame(
+ {
+ "c": [1, 2, 3],
+ "d": [2, 4, 6],
+ "e": [[1, 2, 3], [4, 5, 6, 7], [8, 9]],
+ "f": [[10, 20, 30], [40, 50, 60, 70], [80, 90]],
+ },
+ index=[0, 1, 2],
+ )
+
+ res = NestedFrame.from_lists(nf2, list_columns=["e", "f"])
+ assert list(res.columns) == ["c", "d", "nested"]
+ assert list(res.nested_columns) == ["nested"]
+ assert list(res.nested.nest.fields) == ["e", "f"]
+
+ # Check for subsetting
+ res = NestedFrame.from_lists(nf, base_columns=["c"], list_columns=["e"])
+ assert list(res.columns) == ["c", "nested"]
+ assert list(res.nested_columns) == ["nested"]
+
+ res = NestedFrame.from_lists(nf, base_columns=[], list_columns=["e"])
+ assert list(res.columns) == ["nested"]
+ assert list(res.nested_columns) == ["nested"]
+
+ res = NestedFrame.from_lists(nf[["e"]], base_columns=None, list_columns=None)
+ assert list(res.columns) == ["nested"]
+ assert list(res.nested_columns) == ["nested"]
+
+
def test_query():
"""Test that NestedFrame.query handles nested queries correctly"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accessible-pygments==0.0.5
alabaster==0.7.16
anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
astroid==3.3.9
astropy==6.0.1
astropy-iers-data==0.2025.3.31.0.36.18
astroquery==0.4.10
asttokens==3.0.0
asv==0.6.3
asv_runner==0.2.1
async-lru==2.0.5
attrs==25.3.0
babel==2.17.0
backports.tarfile==1.2.0
beautifulsoup4==4.13.3
bleach==6.2.0
build==1.2.2.post1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
comm==0.2.2
contourpy==1.3.0
coverage==7.8.0
cryptography==44.0.2
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
fonttools==4.56.0
fqdn==1.5.1
h11==0.14.0
html5lib==1.1
httpcore==1.0.7
httpx==0.28.1
identify==2.6.9
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
ipywidgets==8.1.5
isoduration==20.11.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.2
jeepney==0.9.0
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
jupytext==1.16.7
keyring==25.6.0
kiwisolver==1.4.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mdit-py-plugins==0.4.2
mdurl==0.1.2
mistune==3.1.3
more-itertools==10.6.0
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbsphinx==0.9.7
nest-asyncio==1.6.0
-e git+https://github.com/lincc-frameworks/nested-pandas.git@975fbc88de84b6d8d9ae309f71ae3f0b96831818#egg=nested_pandas
nodeenv==1.9.1
notebook==7.3.3
notebook_shim==0.2.4
numpy==1.26.4
overrides==7.7.0
packaging==24.2
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==19.0.1
pycparser==2.22
pydata-sphinx-theme==0.15.4
pyerfa==2.0.1.5
Pygments==2.19.1
Pympler==1.1
pyparsing==3.2.3
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
pyvo==1.6.1
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
ruff==0.11.2
SecretStorage==3.3.3
Send2Trash==1.8.3
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinx-autoapi==3.6.0
sphinx-book-theme==1.1.4
sphinx-copybutton==0.5.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
stdlib-list==0.11.1
tabulate==0.9.0
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: nested-pandas
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- accessible-pygments==0.0.5
- alabaster==0.7.16
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- astroid==3.3.9
- astropy==6.0.1
- astropy-iers-data==0.2025.3.31.0.36.18
- astroquery==0.4.10
- asttokens==3.0.0
- asv==0.6.3
- asv-runner==0.2.1
- async-lru==2.0.5
- attrs==25.3.0
- babel==2.17.0
- backports-tarfile==1.2.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- build==1.2.2.post1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- comm==0.2.2
- contourpy==1.3.0
- coverage==7.8.0
- cryptography==44.0.2
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- fonttools==4.56.0
- fqdn==1.5.1
- h11==0.14.0
- html5lib==1.1
- httpcore==1.0.7
- httpx==0.28.1
- identify==2.6.9
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- ipywidgets==8.1.5
- isoduration==20.11.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jedi==0.19.2
- jeepney==0.9.0
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- jupytext==1.16.7
- keyring==25.6.0
- kiwisolver==1.4.7
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mdit-py-plugins==0.4.2
- mdurl==0.1.2
- mistune==3.1.3
- more-itertools==10.6.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- nested-pandas==0.1.4.dev15+g975fbc8
- nodeenv==1.9.1
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==1.26.4
- overrides==7.7.0
- packaging==24.2
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pyarrow==19.0.1
- pycparser==2.22
- pydata-sphinx-theme==0.15.4
- pyerfa==2.0.1.5
- pygments==2.19.1
- pympler==1.1
- pyparsing==3.2.3
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyvo==1.6.1
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- ruff==0.11.2
- secretstorage==3.3.3
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinx-autoapi==3.6.0
- sphinx-book-theme==1.1.4
- sphinx-copybutton==0.5.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- stdlib-list==0.11.1
- tabulate==0.9.0
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/nested-pandas
| [
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_lists"
] | [] | [
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_nestedframe_construction",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_all_columns",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_nested_columns",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_is_known_hierarchical_column",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_get_nested_column",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_set_or_replace_nested_col",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_set_new_nested_col",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_flat_df",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_flat_df_and_mismatched_index",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_series",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_with_series_and_mismatched_index",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_add_nested_for_empty_df",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[None]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[a]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_from_flat[c]",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_recover_from_flat",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_query",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_layer_as_base_column",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_inplace_base",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_inplace_nested",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_dropna_errors",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_reduce",
"tests/nested_pandas/nestedframe/test_nestedframe.py::test_reduce_duplicated_cols"
] | [] | MIT License | 19,292 | 752 | [
"src/nested_pandas/nestedframe/core.py"
] |
|
mmcdermott__MEDS_transforms-166 | 3cab51260c104867fc026535ceb66c6ac3959454 | 2024-08-14 13:32:37 | 15ffb9ee27d5cd19f2c44c13a3cf4de5c00e08bb | diff --git a/src/MEDS_transforms/aggregate_code_metadata.py b/src/MEDS_transforms/aggregate_code_metadata.py
index bcc056a..13e9b34 100755
--- a/src/MEDS_transforms/aggregate_code_metadata.py
+++ b/src/MEDS_transforms/aggregate_code_metadata.py
@@ -406,7 +406,9 @@ def mapper_fntr(
│ C ┆ 1 ┆ 81.25 ┆ 5.0 ┆ 7.5 │
│ D ┆ null ┆ 0.0 ┆ null ┆ null │
└──────┴───────────┴────────────────┴────────────┴────────────┘
- >>> stage_cfg = DictConfig({"aggregations": ["values/quantiles"]})
+ >>> stage_cfg = DictConfig({
+ ... "aggregations": [{"name": "values/quantiles", "quantiles": [0.25, 0.5, 0.75]}]
+ ... })
>>> mapper = mapper_fntr(stage_cfg, code_modifiers)
>>> mapper(df.lazy()).collect().select("code", "modifier1", pl.col("values/quantiles"))
shape: (5, 3)
@@ -421,6 +423,25 @@ def mapper_fntr(
│ C ┆ 1 ┆ [5.0, 7.5] │
│ D ┆ null ┆ [] │
└──────┴───────────┴──────────────────┘
+ >>> stage_cfg = DictConfig({
+ ... "aggregations": [{"name": "values/quantiles", "quantiles": [0.25, 0.5, 0.75]}],
+ ... "do_summarize_over_all_codes": True,
+ ... })
+ >>> mapper = mapper_fntr(stage_cfg, code_modifiers)
+ >>> mapper(df.lazy()).collect().select("code", "modifier1", pl.col("values/quantiles"))
+ shape: (6, 3)
+ ┌──────┬───────────┬───────────────────┐
+ │ code ┆ modifier1 ┆ values/quantiles │
+ │ --- ┆ --- ┆ --- │
+ │ str ┆ i64 ┆ list[f64] │
+ ╞══════╪═══════════╪═══════════════════╡
+ │ null ┆ null ┆ [1.1, 2.0, … 7.5] │
+ │ A ┆ 1 ┆ [1.1, 1.1] │
+ │ A ┆ 2 ┆ [6.0] │
+ │ B ┆ 2 ┆ [2.0, 4.0] │
+ │ C ┆ 1 ┆ [5.0, 7.5] │
+ │ D ┆ null ┆ [] │
+ └──────┴───────────┴───────────────────┘
"""
code_key_columns = validate_args_and_get_code_cols(stage_cfg, code_modifiers)
@@ -435,7 +456,12 @@ def mapper_fntr(
return df.group_by(code_key_columns).agg(**agg_operations).sort(code_key_columns)
def all_patients_mapper(df: pl.LazyFrame) -> pl.LazyFrame:
- return df.select(**agg_operations)
+ local_agg_operations = agg_operations.copy()
+ if METADATA_FN.VALUES_QUANTILES in agg_operations:
+ local_agg_operations[METADATA_FN.VALUES_QUANTILES] = agg_operations[
+ METADATA_FN.VALUES_QUANTILES
+ ].implode()
+ return df.select(**local_agg_operations)
if stage_cfg.get("do_summarize_over_all_codes", False):
@@ -443,7 +469,7 @@ def mapper_fntr(
by_code = by_code_mapper(df)
all_patients = all_patients_mapper(df)
return pl.concat([all_patients, by_code], how="diagonal_relaxed").select(
- *code_key_columns, *aggregations
+ *code_key_columns, *agg_operations.keys()
)
else:
| Aggregation integration test should cover all integrations | mmcdermott/MEDS_transforms | diff --git a/tests/test_aggregate_code_metadata.py b/tests/test_aggregate_code_metadata.py
index 2cdc02a..2a8f78c 100644
--- a/tests/test_aggregate_code_metadata.py
+++ b/tests/test_aggregate_code_metadata.py
@@ -4,28 +4,154 @@ Set the bash env variable `DO_USE_LOCAL_SCRIPTS=1` to use the local py files, ra
scripts.
"""
+import polars as pl
+
from .transform_tester_base import (
AGGREGATE_CODE_METADATA_SCRIPT,
- parse_code_metadata_csv,
+ MEDS_CODE_METADATA_SCHEMA,
single_stage_transform_tester,
)
WANT_OUTPUT_CODE_METADATA_FILE = """
-code,code/n_occurrences,code/n_patients,values/n_occurrences,values/sum,values/sum_sqd,description,parent_codes
-,44,4,28,3198.8389005974336,382968.28937288234,,
-ADMISSION//CARDIAC,2,2,0,0,0,,
-ADMISSION//ORTHOPEDIC,1,1,0,0,0,,
-ADMISSION//PULMONARY,1,1,0,0,0,,
-DISCHARGE,4,4,0,0,0,,
-DOB,4,4,0,0,0,,
-EYE_COLOR//BLUE,1,1,0,0,0,"Blue Eyes. Less common than brown.",
-EYE_COLOR//BROWN,1,1,0,0,0,"Brown Eyes. The most common eye color.",
-EYE_COLOR//HAZEL,2,2,0,0,0,"Hazel eyes. These are uncommon",
-HEIGHT,4,4,4,656.8389005974336,108056.12937288235,,
-HR,12,4,12,1360.5000000000002,158538.77,"Heart Rate",LOINC/8867-4
-TEMP,12,4,12,1181.4999999999998,116373.38999999998,"Body Temperature",LOINC/8310-5
+code,code/n_occurrences,code/n_patients,values/n_occurrences,values/n_patients,values/sum,values/sum_sqd,values/n_ints,values/min,values/max,description,parent_codes
+,44,4,28,4,3198.8389005974336,382968.28937288234,6,86.0,175.271118,,
+ADMISSION//CARDIAC,2,2,0,0,0,0,0,,,,
+ADMISSION//ORTHOPEDIC,1,1,0,0,0,0,0,,,,
+ADMISSION//PULMONARY,1,1,0,0,0,0,0,,,,
+DISCHARGE,4,4,0,0,0,0,0,,,,
+DOB,4,4,0,0,0,0,0,,,,
+EYE_COLOR//BLUE,1,1,0,0,0,0,0,,,"Blue Eyes. Less common than brown.",
+EYE_COLOR//BROWN,1,1,0,0,0,0,0,,,"Brown Eyes. The most common eye color.",
+EYE_COLOR//HAZEL,2,2,0,0,0,0,0,,,"Hazel eyes. These are uncommon",
+HEIGHT,4,4,4,4,656.8389005974336,108056.12937288235,0,156.485596,175.271118,,
+HR,12,4,12,4,1360.5000000000002,158538.77,2,86.0,170.199997,"Heart Rate",LOINC/8867-4
+TEMP,12,4,12,4,1181.4999999999998,116373.38999999998,4,95.5,100.400002,"Body Temperature",LOINC/8310-5
"""
+WANT_OUTPUT_CODE_METADATA_FILE = pl.DataFrame(
+ {
+ "code": [
+ None,
+ "ADMISSION//CARDIAC",
+ "ADMISSION//ORTHOPEDIC",
+ "ADMISSION//PULMONARY",
+ "DISCHARGE",
+ "DOB",
+ "EYE_COLOR//BLUE",
+ "EYE_COLOR//BROWN",
+ "EYE_COLOR//HAZEL",
+ "HEIGHT",
+ "HR",
+ "TEMP",
+ ],
+ "code/n_occurrences": [44, 2, 1, 1, 4, 4, 1, 1, 2, 4, 12, 12],
+ "code/n_patients": [4, 2, 1, 1, 4, 4, 1, 1, 2, 4, 4, 4],
+ "values/n_occurrences": [28, 0, 0, 0, 0, 0, 0, 0, 0, 4, 12, 12],
+ "values/n_patients": [4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4],
+ "values/sum": [
+ 3198.8389005974336,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 656.8389005974336,
+ 1360.5000000000002,
+ 1181.4999999999998,
+ ],
+ "values/sum_sqd": [
+ 382968.28937288234,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 108056.12937288235,
+ 158538.77,
+ 116373.38999999998,
+ ],
+ "values/n_ints": [6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 4],
+ "values/min": [86.0, None, None, None, None, None, None, None, None, 156.485596, 86.0, 95.5],
+ "values/max": [
+ 175.271118,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ 175.271118,
+ 170.199997,
+ 100.400002,
+ ],
+ "values/quantiles": [
+ {"values/quantile/0.25": 99.9, "values/quantile/0.5": 105.1, "values/quantile/0.75": 113.4},
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ {
+ "values/quantile/0.25": 160.395311,
+ "values/quantile/0.5": 164.686884,
+ "values/quantile/0.75": 164.686884,
+ },
+ {"values/quantile/0.25": 107.5, "values/quantile/0.5": 112.5, "values/quantile/0.75": 113.4},
+ {"values/quantile/0.25": 96.2, "values/quantile/0.5": 99.9, "values/quantile/0.75": 100.0},
+ ],
+ "description": [
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ "Blue Eyes. Less common than brown.",
+ "Brown Eyes. The most common eye color.",
+ "Hazel eyes. These are uncommon",
+ None,
+ "Heart Rate",
+ "Body Temperature",
+ ],
+ "parent_codes": [
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ["LOINC/8867-4"],
+ ["LOINC/8310-5"],
+ ],
+ },
+ schema={
+ **{k: v for k, v in MEDS_CODE_METADATA_SCHEMA.items() if k != "code/vocab_index"},
+ "parent_codes": pl.List(pl.String),
+ "values/quantiles": pl.Struct(
+ {
+ "values/quantile/0.25": pl.Float32,
+ "values/quantile/0.5": pl.Float32,
+ "values/quantile/0.75": pl.Float32,
+ }
+ ),
+ },
+)
+
MEDS_CODE_METADATA_FILE = """
code,description,parent_codes
EYE_COLOR//BLUE,"Blue Eyes. Less common than brown.",
@@ -39,8 +165,13 @@ AGGREGATIONS = [
"code/n_occurrences",
"code/n_patients",
"values/n_occurrences",
+ "values/n_patients",
"values/sum",
"values/sum_sqd",
+ "values/n_ints",
+ "values/min",
+ "values/max",
+ {"name": "values/quantiles", "quantiles": [0.25, 0.5, 0.75]},
]
@@ -49,7 +180,7 @@ def test_aggregate_code_metadata():
transform_script=AGGREGATE_CODE_METADATA_SCRIPT,
stage_name="aggregate_code_metadata",
transform_stage_kwargs={"aggregations": AGGREGATIONS, "do_summarize_over_all_codes": True},
- want_outputs=parse_code_metadata_csv(WANT_OUTPUT_CODE_METADATA_FILE),
+ want_outputs=WANT_OUTPUT_CODE_METADATA_FILE,
code_metadata=MEDS_CODE_METADATA_FILE,
do_use_config_yaml=True,
)
diff --git a/tests/transform_tester_base.py b/tests/transform_tester_base.py
index 2efeb02..8dbd637 100644
--- a/tests/transform_tester_base.py
+++ b/tests/transform_tester_base.py
@@ -183,8 +183,12 @@ MEDS_CODE_METADATA_SCHEMA = {
"code/n_occurrences": pl.UInt8,
"code/n_patients": pl.UInt8,
"values/n_occurrences": pl.UInt8,
+ "values/n_patients": pl.UInt8,
"values/sum": pl.Float32,
"values/sum_sqd": pl.Float32,
+ "values/n_ints": pl.UInt8,
+ "values/min": pl.Float32,
+ "values/max": pl.Float32,
"description": pl.Utf8,
"parent_codes": pl.Utf8,
"code/vocab_index": pl.UInt8,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev,tests]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.12",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest -v --doctest-modules -s --ignore=docs"
} | antlr4-python3-runtime==4.9.3
attrs==25.3.0
cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
filelock==3.18.0
hydra-core==1.3.2
identify==2.6.9
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
loguru==0.7.3
meds==0.3.0
-e git+https://github.com/mmcdermott/MEDS_transforms.git@3cab51260c104867fc026535ceb66c6ac3959454#egg=MEDS_transforms
nested_ragged_tensors==0.1
nodeenv==1.9.1
numpy==2.2.4
omegaconf==2.3.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
polars==1.1.0
pre_commit==4.2.0
pyarrow==19.0.1
pytest==8.3.5
pytest-cov==6.0.0
python-dotenv==1.1.0
PyYAML==6.0.2
referencing==0.36.2
rootutils==1.0.7
rpds-py==0.24.0
safetensors==0.5.3
setuptools==75.8.0
typing_extensions==4.13.0
virtualenv==20.29.3
wheel==0.45.1
| name: MEDS_transforms
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- expat=2.6.4=h6a678d5_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py312h06a4308_0
- python=3.12.9=h5148396_0
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py312h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py312h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- antlr4-python3-runtime==4.9.3
- attrs==25.3.0
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- filelock==3.18.0
- hydra-core==1.3.2
- identify==2.6.9
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- loguru==0.7.3
- meds==0.3.0
- meds-transforms==0.0.6.dev8+g3cab512
- nested-ragged-tensors==0.1
- nodeenv==1.9.1
- numpy==2.2.4
- omegaconf==2.3.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- polars==1.1.0
- pre-commit==4.2.0
- pyarrow==19.0.1
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dotenv==1.1.0
- pyyaml==6.0.2
- referencing==0.36.2
- rootutils==1.0.7
- rpds-py==0.24.0
- safetensors==0.5.3
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/MEDS_transforms
| [
"tests/test_aggregate_code_metadata.py::test_aggregate_code_metadata"
] | [] | [] | [] | MIT License | 19,297 | 1,007 | [
"src/MEDS_transforms/aggregate_code_metadata.py"
] |
|
Chilipp__autodocsumm-101 | 354e67b443fe212401b1fc0f9e2f0d669c5852c4 | 2024-08-14 22:05:27 | 811352b20750366151bd705ce8d2081e3adda07e | diff --git a/autodocsumm/__init__.py b/autodocsumm/__init__.py
index 5808308..9727ec1 100755
--- a/autodocsumm/__init__.py
+++ b/autodocsumm/__init__.py
@@ -414,7 +414,11 @@ class AutoSummClassDocumenter(ClassDocumenter, AutosummaryDocumenter):
def add_content(self, *args, **kwargs):
super().add_content(*args, **kwargs)
- self.add_autosummary(relative_ref_paths=True)
+ # If the class is already documented under another name, Sphinx
+ # documents it as data/attribute. In this case, we do not want to
+ # generate an autosummary of the class for the attribute (see #69).
+ if not self.doc_as_attr:
+ self.add_autosummary(relative_ref_paths=True)
class CallableDataDocumenter(DataDocumenter):
| Alias Expands Nested Class Documentation
Assigning a class to a class attribute is documented as an alias, but the aliased class's nested members are still being expanded.
```py
class NestedClass:
"""Parent class"""
class Foo:
"""Nested class"""
def foo(self):
"""Nested method"""
pass
def bar(self):
"""Nested method"""
pass
class Attribute:
"""Attribute"""
#: Alias
foo = NestedClass.Foo
#: Attribute
bar = 'bar'
```
<img width="721" alt="Screen Shot 2022-02-25 at 6 30 14 PM" src="https://user-images.githubusercontent.com/3108007/155820699-26a82d09-3591-4b47-9a8c-0047f1c1d11d.png">
Without autosummary enabled, autodoc does not recurse into aliases:
<img width="326" alt="Screen Shot 2022-02-25 at 6 28 57 PM" src="https://user-images.githubusercontent.com/3108007/155820704-ea14cb71-ecc7-4b6a-95aa-c3f1415fdeda.png">
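For context, a minimal sketch of the Sphinx directive that produces the rendering above (the module name `mymodule` is an assumption; `:autosummary:` is the class option provided by autodocsumm, as also exercised in the test added below):
```rst
.. autoclass:: mymodule.Attribute
   :members:
   :autosummary:
```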
| Chilipp/autodocsumm | diff --git a/tests/test-root/dummy.py b/tests/test-root/dummy.py
index 36be801..f980498 100644
--- a/tests/test-root/dummy.py
+++ b/tests/test-root/dummy.py
@@ -84,6 +84,14 @@ class TestClassWithInlineAutoClassSumm:
pass
+class TestClassWithRefToOtherClass:
+ """Class test for the autodocsummary when a class attribute is a reference
+ to another class. No autosummary of the class should be generated for
+ the attribute. See also issue #69"""
+
+ foo = TestClass
+
+
#: data to be skipped
large_data = 'Should also be skipped'
diff --git a/tests/test-root/test_class_with_ref_to_other_class.rst b/tests/test-root/test_class_with_ref_to_other_class.rst
new file mode 100644
index 0000000..4cbb5cf
--- /dev/null
+++ b/tests/test-root/test_class_with_ref_to_other_class.rst
@@ -0,0 +1,6 @@
+Autoclasssumm of Dummy Class
+============================
+
+.. autoclass:: dummy.TestClassWithRefToOtherClass
+ :members:
+ :autosummary:
diff --git a/tests/test_autodocsumm.py b/tests/test_autodocsumm.py
index 1fdcb59..fd16399 100644
--- a/tests/test_autodocsumm.py
+++ b/tests/test_autodocsumm.py
@@ -322,6 +322,22 @@ class TestAutosummaryDocumenter:
assert '()' not in html
+ def test_class_no_summary_for_reference_to_class(self, app):
+ # see also: issue #69
+ app.build()
+
+ html = get_html(app, 'test_class_with_ref_to_other_class.html')
+
+ # assert that the class itself has an autosummary that contains its
+ # attributes
+ assert in_autosummary("foo", html)
+
+ # Assert that there is no autosummary of the attribute that is an alias
+ # of another class. This autosummary would contain attrs/methods/...
+ # of the referenced class.
+ assert not in_autosummary("test_method", html)
+ assert not in_autosummary("test_attr", html)
+
def test_inherited(self, app):
app.build()
html = get_html(app, 'test_inherited.html')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"codecov",
"beautifulsoup4"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
-e git+https://github.com/Chilipp/autodocsumm.git@354e67b443fe212401b1fc0f9e2f0d669c5852c4#egg=autodocsumm
babel==2.17.0
beautifulsoup4==4.13.3
certifi==2025.1.31
charset-normalizer==3.4.1
codecov==2.1.13
coverage==7.8.0
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: autodocsumm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- autodocsumm==0.2.13
- babel==2.17.0
- beautifulsoup4==4.13.3
- certifi==2025.1.31
- charset-normalizer==3.4.1
- codecov==2.1.13
- coverage==7.8.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/autodocsumm
| [
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class_no_summary_for_reference_to_class"
] | [] | [
"tests/test-root/dummy.py::test_func",
"tests/test-root/dummy.py::TestClassWithInlineAutoClassSumm::test_method_of_inline_test",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module_no_nesting",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module_summary_only",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module_with_title",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module_nosignatures",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class_order",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class_summary_only",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class_nosignatures",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_inherited",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_autoclasssumm_inline",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_class_submodule",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_module_submodule",
"tests/test_autodocsumm.py::TestAutosummaryDocumenter::test_sorted_sections",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_autoclasssumm",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_autoclasssumm_no_titles",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_autoclasssumm_some_sections",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_autoclasssumm_nosignatures",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_automodulesumm",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_automodulesumm_some_sections",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_automodulesumm_nosignatures",
"tests/test_autodocsumm.py::TestAutoDocSummDirective::test_empty"
] | [] | Apache License 2.0 | 19,306 | 228 | [
"autodocsumm/__init__.py"
] |
|
karllark__dust_extinction-229 | d0d94cffef380516afe536e3301bd5906f134148 | 2024-08-15 20:44:37 | 6601d6a8c45aa4c497fc6da1476866aae1169b3a | diff --git a/dust_extinction/shapes.py b/dust_extinction/shapes.py
index 3ae5b8e..33f3cee 100644
--- a/dust_extinction/shapes.py
+++ b/dust_extinction/shapes.py
@@ -233,12 +233,13 @@ class FM90(Fittable1DModel):
n_inputs = 1
n_outputs = 1
- C1 = Parameter(description="linear term: y-intercept", default=0.10)
- C2 = Parameter(description="linear term: slope", default=0.70)
- C3 = Parameter(description="bump: amplitude", default=3.23)
- C4 = Parameter(description="FUV rise: amplitude", default=0.41)
- xo = Parameter(description="bump: centroid", default=4.60, min=0.0)
- gamma = Parameter(description="bump: width", default=0.99, min=0.0)
+ # bounds based on Gordon et al. (2024) results
+ C1 = Parameter(description="linear term: y-intercept", default=0.10, bounds=(-10.0, 5.0))
+ C2 = Parameter(description="linear term: slope", default=0.70, bounds=(-0.1, 5.0))
+ C3 = Parameter(description="bump: amplitude", default=3.23, bounds=(-1.0, 6.0))
+ C4 = Parameter(description="FUV rise: amplitude", default=0.41, bounds=(-0.5, 1.5))
+ xo = Parameter(description="bump: centroid", default=4.59, bounds=(4.5, 4.9))
+ gamma = Parameter(description="bump: width", default=0.95, bounds=(0.6, 1.7))
x_range = x_range_FM90
@@ -400,12 +401,13 @@ class FM90_B3(Fittable1DModel):
n_inputs = 1
n_outputs = 1
- C1 = Parameter(description="linear term: y-intercept", default=0.10)
- C2 = Parameter(description="linear term: slope", default=0.70)
- B3 = Parameter(description="bump: amplitude", default=3.23)
- C4 = Parameter(description="FUV rise: amplitude", default=0.41)
- xo = Parameter(description="bump: centroid", default=4.60, min=0.0)
- gamma = Parameter(description="bump: width", default=0.99, min=0.0)
+ # bounds based on Gordon et al. (2024) results
+ C1 = Parameter(description="linear term: y-intercept", default=0.10, bounds=(-10.0, 5.0))
+ C2 = Parameter(description="linear term: slope", default=0.70, bounds=(-0.1, 5.0))
+ B3 = Parameter(description="bump: amplitude", default=3.23, bounds=(-1.0, 6.0))
+ C4 = Parameter(description="FUV rise: amplitude", default=0.41, bounds=(-0.5, 1.5))
+ xo = Parameter(description="bump: centroid", default=4.59, bounds=(4.5, 4.9))
+ gamma = Parameter(description="bump: width", default=0.95, bounds=(0.6, 1.7))
x_range = x_range_FM90
| Add reasonable bounds for FM90
Work on the MW and SMC has resulted in reasonable default bounds for the FM90 shape model. These should be added to the model definition and can always be overridden.
Code snippet with values:
```
fm90_init.C1.bounds = (-2.0, 3.0)
fm90_init.C2.bounds = (-0.1, 1.0)
fm90_init.C3.bounds = (0.0, 2.5)
fm90_init.C4.bounds = (0.0, 1.0)
fm90_init.xo.bounds = (4.5, 4.9)
fm90_init.gamma.bounds = (0.6, 1.5)
``` | karllark/dust_extinction | diff --git a/dust_extinction/tests/test_fm90.py b/dust_extinction/tests/test_fm90.py
index a7c04b2..30dd842 100644
--- a/dust_extinction/tests/test_fm90.py
+++ b/dust_extinction/tests/test_fm90.py
@@ -16,18 +16,10 @@ def get_elvebv_cor_vals():
x = x / u.micron
# correct values
- cor_vals = np.array(
- [
- 2.9829317,
- 4.1215415,
- 6.4135842,
- 5.6574243,
- 4.7573250,
- 5.4905843,
- 9.2853567,
- 12.462238,
- ]
- )
+ # fmt: off
+ cor_vals = np.array([2.995507, 4.187955, 6.70251, 5.723752, 4.757428,
+ 5.490276, 9.285265, 12.462183])
+ # fmt: on
return (x, cor_vals)
@@ -93,15 +85,9 @@ def test_FM90_fitting():
g03_fit.gamma.value,
]
- good_vals = np.array(
- [
- -0.958016797002,
- 1.0109751831,
- 2.96430606652,
- 0.313137860902,
- 4.59996300532,
- 0.99000982258,
- ]
- )
+ # fmt: off
+ good_vals = np.array([-0.941674, 1.013711, 2.725373, 0.301217,
+ 4.589078, 0.948576])
+ # fmt: on
- np.testing.assert_allclose(good_vals, fit_vals)
+ np.testing.assert_allclose(good_vals, fit_vals, rtol=1e-5)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest-astropy",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astropy==6.0.1
astropy-iers-data==0.2025.3.31.0.36.18
attrs==25.3.0
coverage==7.8.0
-e git+https://github.com/karllark/dust_extinction.git@d0d94cffef380516afe536e3301bd5906f134148#egg=dust_extinction
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
hypothesis==6.130.5
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
numpy==1.26.4
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pyerfa==2.0.1.5
pytest @ file:///croot/pytest_1738938843180/work
pytest-arraydiff==0.6.1
pytest-astropy==0.11.0
pytest-astropy-header==0.2.2
pytest-cov==6.0.0
pytest-doctestplus==1.4.0
pytest-filter-subpackage==0.2.0
pytest-mock==3.14.0
pytest-remotedata==0.4.1
PyYAML==6.0.2
scipy==1.13.1
sortedcontainers==2.4.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: dust_extinction
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astropy==6.0.1
- astropy-iers-data==0.2025.3.31.0.36.18
- attrs==25.3.0
- coverage==7.8.0
- dust-extinction==1.4.2.dev58+gd0d94cf
- hypothesis==6.130.5
- numpy==1.26.4
- pyerfa==2.0.1.5
- pytest-arraydiff==0.6.1
- pytest-astropy==0.11.0
- pytest-astropy-header==0.2.2
- pytest-cov==6.0.0
- pytest-doctestplus==1.4.0
- pytest-filter-subpackage==0.2.0
- pytest-mock==3.14.0
- pytest-remotedata==0.4.1
- pyyaml==6.0.2
- scipy==1.13.1
- sortedcontainers==2.4.0
prefix: /opt/conda/envs/dust_extinction
| [
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_values",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals0]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals1]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals2]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals3]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals4]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals5]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals6]",
"dust_extinction/tests/test_fm90.py::test_extinction_FM90_single_values[xtest_vals7]",
"dust_extinction/tests/test_fm90.py::test_FM90_fitting"
] | [] | [] | [] | BSD 3-Clause "New" or "Revised" License | 19,319 | 846 | [
"dust_extinction/shapes.py"
] |
|
canonical__operator-1317 | 449793f9867eba11dc2e7e1c28ea3dca1e1da231 | 2024-08-15 23:13:08 | 3677507a2fd94e45eeb2e88e79e217490ae2ed51 | diff --git a/ops/model.py b/ops/model.py
index 3a9a1cf..f801a5b 100644
--- a/ops/model.py
+++ b/ops/model.py
@@ -1182,7 +1182,7 @@ class SecretInfo:
@classmethod
def from_dict(cls, id: str, d: Dict[str, Any]) -> 'SecretInfo':
"""Create new SecretInfo object from ID and dict parsed from JSON."""
- expires = typing.cast(Optional[str], d.get('expires'))
+ expires = typing.cast(Optional[str], d.get('expiry'))
try:
rotation = SecretRotate(typing.cast(Optional[str], d.get('rotation')))
except ValueError:
| Juju secret expiry is null
## Description
We have a charm that creates a secret with an expiry date on a given event and reads that secret on another event. During that second event, the expiry shows up as None when we would have expected it to be the value we initially set it to.
### Example charm
In this example charm we would expect the charm status to be `Secret expires on: ...`. Instead, we see that the expiry date is not set; the secret content, however, is set correctly.
```python
#!/usr/bin/env python3
# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.
"""My charm."""
import datetime
import logging
from typing import Optional
from ops.charm import CharmBase, CollectStatusEvent
from ops.framework import EventBase
from ops.main import main
from ops.model import ActiveStatus, BlockedStatus, SecretNotFoundError, WaitingStatus
logger = logging.getLogger(__name__)
SECRET_LABEL = "food"
class MyCharm(CharmBase):
"""Main class to handle Juju events."""
def __init__(self, *args):
super().__init__(*args)
self.framework.observe(self.on.collect_unit_status, self._on_collect_unit_status)
self.framework.observe(self.on.update_status, self._configure)
self.framework.observe(self.on.config_changed, self._configure)
def _on_collect_unit_status(self, event: CollectStatusEvent):
"""Centralized status management for the charm."""
if not self.unit.is_leader():
event.add_status(BlockedStatus("Scaling is not implemented for this charm"))
return
if not self._secret_is_created():
event.add_status(WaitingStatus("Waiting for the secret to be created"))
return
secret_expiry = self._get_secret_expiry()
if not secret_expiry:
event.add_status(ActiveStatus("Error: Secret expiry not found"))
return
event.add_status(ActiveStatus("Secret expires on: {}".format(secret_expiry)))
def _configure(self, event: EventBase) -> None:
if not self._secret_is_created():
self._create_secret()
def _create_secret(self):
"""Create a secret."""
secret_content = {
"food": "apple",
}
self.app.add_secret(
content=secret_content,
label=SECRET_LABEL,
expire=datetime.timedelta(days=20),
)
def _secret_is_created(self) -> bool:
"""Check if the secret is created."""
try:
self.model.get_secret(label=SECRET_LABEL)
return True
except SecretNotFoundError:
return False
def _get_secret_expiry(self) -> Optional[datetime.datetime]:
"""Get the expiry date of the secret."""
secret = self.model.get_secret(label=SECRET_LABEL)
secret_info = secret.get_info()
secret_content = secret.get_content()
logger.info("Secret info: %s", secret_info)
logger.info("Secret content: %s", secret_content)
return secret_info.expires
if __name__ == "__main__":
main(MyCharm)
```
### Logs
```
unit-self-signed-certificates-0: 16:42:26 INFO unit.self-signed-certificates/0.juju-log Secret info: SecretInfo(id='secret:cqv6gcfmp25c77ug4n5g', label='food', revision=1, expires=None, rotation=SecretRotate.NEVER, rotates=None)
unit-self-signed-certificates-0: 16:42:26 INFO unit.self-signed-certificates/0.juju-log Secret content: {'food': 'apple'}
```
```logs
guillaume@thinkpad:~/code/self-signed-certificates-operator$ juju status
Model Controller Cloud/Region Version SLA Timestamp
ddd microk8s-localhost microk8s/localhost 3.5.1 unsupported 16:36:48-04:00
App Version Status Scale Charm Channel Rev Address Exposed Message
self-signed-certificates active 1 self-signed-certificates 0 10.152.183.137 no Error: Secret expiry not found
Unit Workload Agent Address Ports Message
self-signed-certificates/0* active idle 10.1.19.182 Error: Secret expiry not found
```
## Is the issue in Juju?
At this point I don't know whether the issue is with `ops`, with Juju, or with me.
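Judging from the fix in the patch above (and the accompanying test change), the mismatch is on the `ops` side: `SecretInfo.from_dict` read the key `expires`, while the dict parsed from the `secret-info-get` JSON carries the key `expiry`. A minimal sketch of the mismatch follows; the dict is illustrative (modelled on the test data), not captured hook-tool output:
```python
# illustrative dict as parsed from secret-info-get JSON; values are assumptions
info = {
    "label": "food",
    "revision": 1,
    "expiry": "2024-07-01T00:00:00Z",  # Juju reports the key as 'expiry'
    "rotation": "never",
}

info.get("expires")  # -> None, which is why SecretInfo.expires came back empty
info.get("expiry")   # -> the timestamp; this is what the fixed lookup reads
```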
## Environment
- Juju: `3.5.1`
- Ops: `2.15.0`
- MicroK8s: `1.29-strict/stable` | canonical/operator | diff --git a/test/test_model.py b/test/test_model.py
index 0100570..e41ff03 100644
--- a/test/test_model.py
+++ b/test/test_model.py
@@ -3568,7 +3568,7 @@ class TestSecretInfo:
{
'label': 'fromdict',
'revision': 8,
- 'expires': '2022-12-09T14:10:00Z',
+ 'expiry': '2022-12-09T14:10:00Z',
'rotation': 'yearly',
'rotates': '2023-01-09T14:10:00Z',
},
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 2.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libyaml-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
-e git+https://github.com/canonical/operator.git@449793f9867eba11dc2e7e1c28ea3dca1e1da231#egg=ops
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
PyYAML==6.0.2
tomli==2.2.1
typing_extensions==4.13.0
websocket-client==1.8.0
| name: operator
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- ops==2.16.0.dev0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- tomli==2.2.1
- typing-extensions==4.13.0
- websocket-client==1.8.0
prefix: /opt/conda/envs/operator
| [
"test/test_model.py::TestSecretInfo::test_from_dict"
] | [
"test/test_model.py::TestContainerPebble::test_exec",
"test/test_model.py::TestContainerPebble::test_exec_service_context_not_supported"
] | [
"test/test_model.py::TestModel::test_model_attributes",
"test/test_model.py::TestModel::test_unit_immutable",
"test/test_model.py::TestModel::test_app_immutable",
"test/test_model.py::TestModel::test_model_name_from_backend",
"test/test_model.py::TestModel::test_relations_keys",
"test/test_model.py::TestModel::test_relations_immutable",
"test/test_model.py::TestModel::test_get_relation",
"test/test_model.py::TestModel::test_peer_relation_app",
"test/test_model.py::TestModel::test_remote_units_is_our",
"test/test_model.py::TestModel::test_our_unit_is_our",
"test/test_model.py::TestModel::test_invalid_type_relation_data",
"test/test_model.py::TestModel::test_get_app_relation_data",
"test/test_model.py::TestModel::test_update_app_relation_data[args0-kwargs0]",
"test/test_model.py::TestModel::test_update_app_relation_data[args1-kwargs1]",
"test/test_model.py::TestModel::test_update_app_relation_data[args2-kwargs2]",
"test/test_model.py::TestModel::test_unit_relation_data",
"test/test_model.py::TestModel::test_remote_app_relation_data",
"test/test_model.py::TestModel::test_relation_data_modify_remote",
"test/test_model.py::TestModel::test_relation_data_modify_our",
"test/test_model.py::TestModel::test_app_relation_data_modify_local_as_leader",
"test/test_model.py::TestModel::test_app_relation_data_modify_local_as_minion",
"test/test_model.py::TestModel::test_relation_data_access_peer_leader",
"test/test_model.py::TestModel::test_relation_data_access_peer_minion",
"test/test_model.py::TestModel::test_relation_data_del_key",
"test/test_model.py::TestModel::test_relation_data_del_missing_key",
"test/test_model.py::TestModel::test_relation_set_fail",
"test/test_model.py::TestModel::test_relation_data_type_check",
"test/test_model.py::TestModel::test_relation_local_app_data_readability_leader",
"test/test_model.py::TestModel::test_relation_local_app_data_readability_follower",
"test/test_model.py::TestModel::test_relation_no_units",
"test/test_model.py::TestModel::test_config",
"test/test_model.py::TestModel::test_config_immutable",
"test/test_model.py::TestModel::test_is_leader",
"test/test_model.py::TestModel::test_workload_version",
"test/test_model.py::TestModel::test_workload_version_invalid",
"test/test_model.py::TestModel::test_resources",
"test/test_model.py::TestModel::test_resources_immutable",
"test/test_model.py::TestModel::test_pod_spec",
"test/test_model.py::TestModel::test_pod_immutable",
"test/test_model.py::TestModel::test_base_status_instance_raises",
"test/test_model.py::TestModel::test_status_repr",
"test/test_model.py::TestModel::test_status_eq",
"test/test_model.py::TestModel::test_active_message_default",
"test/test_model.py::TestModel::test_local_set_valid_unit_status[target_status0-backend_call0]",
"test/test_model.py::TestModel::test_local_set_valid_unit_status[target_status1-backend_call1]",
"test/test_model.py::TestModel::test_local_set_valid_unit_status[target_status2-backend_call2]",
"test/test_model.py::TestModel::test_local_set_valid_unit_status[target_status3-backend_call3]",
"test/test_model.py::TestModel::test_local_set_valid_app_status[target_status0-backend_call0]",
"test/test_model.py::TestModel::test_local_set_valid_app_status[target_status1-backend_call1]",
"test/test_model.py::TestModel::test_local_set_valid_app_status[target_status2-backend_call2]",
"test/test_model.py::TestModel::test_local_set_valid_app_status[target_status3-backend_call3]",
"test/test_model.py::TestModel::test_set_app_status_non_leader_raises",
"test/test_model.py::TestModel::test_set_unit_status_invalid",
"test/test_model.py::TestModel::test_set_app_status_invalid",
"test/test_model.py::TestModel::test_remote_unit_status[target_status0]",
"test/test_model.py::TestModel::test_remote_unit_status[target_status1]",
"test/test_model.py::TestModel::test_remote_unit_status[target_status2]",
"test/test_model.py::TestModel::test_remote_unit_status[target_status3]",
"test/test_model.py::TestModel::test_remote_unit_status[target_status4]",
"test/test_model.py::TestModel::test_remote_app_status[target_status0]",
"test/test_model.py::TestModel::test_remote_app_status[target_status1]",
"test/test_model.py::TestModel::test_remote_app_status[target_status2]",
"test/test_model.py::TestModel::test_remote_app_status[target_status3]",
"test/test_model.py::TestModel::test_remote_app_status[target_status4]",
"test/test_model.py::TestModel::test_storage",
"test/test_model.py::TestModel::test_storages_immutable",
"test/test_model.py::TestModel::test_run_error",
"test/test_model.py::TestModel::test_push_path_unnamed",
"test/test_model.py::test_recursive_list[case0]",
"test/test_model.py::test_recursive_list[case1]",
"test/test_model.py::test_recursive_list[case2]",
"test/test_model.py::test_recursive_push_and_pull[case0]",
"test/test_model.py::test_recursive_push_and_pull[case1]",
"test/test_model.py::test_recursive_push_and_pull[case2]",
"test/test_model.py::test_recursive_push_and_pull[case3]",
"test/test_model.py::test_recursive_push_and_pull[case4]",
"test/test_model.py::test_recursive_push_and_pull[case5]",
"test/test_model.py::test_recursive_push_and_pull[case6]",
"test/test_model.py::test_recursive_push_and_pull[case7]",
"test/test_model.py::test_recursive_push_and_pull[case8]",
"test/test_model.py::test_recursive_push_and_pull[case9]",
"test/test_model.py::test_push_path_relative[case0]",
"test/test_model.py::test_push_path_relative[case1]",
"test/test_model.py::test_push_path_relative[case2]",
"test/test_model.py::TestApplication::test_mocked_get_services",
"test/test_model.py::TestApplication::test_planned_units",
"test/test_model.py::TestApplication::test_planned_units_user_set",
"test/test_model.py::TestApplication::test_planned_units_garbage_values",
"test/test_model.py::TestApplication::test_planned_units_override",
"test/test_model.py::TestContainers::test_unit_containers",
"test/test_model.py::TestContainers::test_unit_get_container",
"test/test_model.py::TestContainerPebble::test_socket_path",
"test/test_model.py::TestContainerPebble::test_autostart",
"test/test_model.py::TestContainerPebble::test_replan",
"test/test_model.py::TestContainerPebble::test_can_connect",
"test/test_model.py::TestContainerPebble::test_start",
"test/test_model.py::TestContainerPebble::test_start_no_arguments",
"test/test_model.py::TestContainerPebble::test_stop",
"test/test_model.py::TestContainerPebble::test_stop_no_arguments",
"test/test_model.py::TestContainerPebble::test_restart",
"test/test_model.py::TestContainerPebble::test_restart_fallback",
"test/test_model.py::TestContainerPebble::test_restart_fallback_non_400_error",
"test/test_model.py::TestContainerPebble::test_restart_no_arguments",
"test/test_model.py::TestContainerPebble::test_type_errors",
"test/test_model.py::TestContainerPebble::test_add_layer",
"test/test_model.py::TestContainerPebble::test_get_plan",
"test/test_model.py::TestContainerPebble::test_get_services",
"test/test_model.py::TestContainerPebble::test_get_service",
"test/test_model.py::TestContainerPebble::test_get_checks",
"test/test_model.py::TestContainerPebble::test_get_check",
"test/test_model.py::TestContainerPebble::test_pull",
"test/test_model.py::TestContainerPebble::test_push",
"test/test_model.py::TestContainerPebble::test_list_files",
"test/test_model.py::TestContainerPebble::test_make_dir",
"test/test_model.py::TestContainerPebble::test_remove_path",
"test/test_model.py::TestContainerPebble::test_can_connect_simple",
"test/test_model.py::TestContainerPebble::test_can_connect_connection_error",
"test/test_model.py::TestContainerPebble::test_can_connect_file_not_found_error",
"test/test_model.py::TestContainerPebble::test_can_connect_api_error",
"test/test_model.py::TestContainerPebble::test_send_signal",
"test/test_model.py::TestContainerPebble::test_get_notice",
"test/test_model.py::TestContainerPebble::test_get_notice_not_found",
"test/test_model.py::TestContainerPebble::test_get_notices",
"test/test_model.py::TestModelBindings::test_invalid_keys",
"test/test_model.py::TestModelBindings::test_dead_relations",
"test/test_model.py::TestModelBindings::test_broken_relations",
"test/test_model.py::TestModelBindings::test_binding_by_relation_name",
"test/test_model.py::TestModelBindings::test_binding_by_relation",
"test/test_model.py::TestModelBindings::test_binding_no_iface_name",
"test/test_model.py::TestModelBindings::test_missing_bind_addresses",
"test/test_model.py::TestModelBindings::test_empty_bind_addresses",
"test/test_model.py::TestModelBindings::test_no_bind_addresses",
"test/test_model.py::TestModelBindings::test_empty_interface_info",
"test/test_model.py::TestModelBindings::test_missing_ingress_addresses",
"test/test_model.py::TestModelBindings::test_missing_egress_subnets",
"test/test_model.py::TestModelBindings::test_unresolved_ingress_addresses",
"test/test_model.py::TestModelBackend::test_relation_get_set_is_app_arg",
"test/test_model.py::TestModelBackend::test_is_leader_refresh",
"test/test_model.py::TestModelBackend::test_relation_tool_errors",
"test/test_model.py::TestModelBackend::test_relation_get_juju_version_quirks[2.8.0]",
"test/test_model.py::TestModelBackend::test_relation_get_juju_version_quirks[2.7.0]",
"test/test_model.py::TestModelBackend::test_relation_set_juju_version_quirks[2.8.0]",
"test/test_model.py::TestModelBackend::test_relation_set_juju_version_quirks[2.7.0]",
"test/test_model.py::TestModelBackend::test_status_get",
"test/test_model.py::TestModelBackend::test_status_is_app_forced_kwargs",
"test/test_model.py::TestModelBackend::test_local_set_invalid_status",
"test/test_model.py::TestModelBackend::test_local_get_status[active]",
"test/test_model.py::TestModelBackend::test_local_get_status[waiting]",
"test/test_model.py::TestModelBackend::test_local_get_status[blocked]",
"test/test_model.py::TestModelBackend::test_local_get_status[maintenance]",
"test/test_model.py::TestModelBackend::test_local_get_status[error]",
"test/test_model.py::TestModelBackend::test_status_set_is_app_not_bool_raises",
"test/test_model.py::TestModelBackend::test_storage_tool_errors",
"test/test_model.py::TestModelBackend::test_network_get",
"test/test_model.py::TestModelBackend::test_network_get_errors",
"test/test_model.py::TestModelBackend::test_action_get_error",
"test/test_model.py::TestModelBackend::test_action_set_error",
"test/test_model.py::TestModelBackend::test_action_log_error",
"test/test_model.py::TestModelBackend::test_action_get",
"test/test_model.py::TestModelBackend::test_action_set",
"test/test_model.py::TestModelBackend::test_action_set_key_validation",
"test/test_model.py::TestModelBackend::test_action_set_nested",
"test/test_model.py::TestModelBackend::test_action_set_more_nested",
"test/test_model.py::TestModelBackend::test_action_set_dotted_dict",
"test/test_model.py::TestModelBackend::test_action_set_duplicated_keys",
"test/test_model.py::TestModelBackend::test_action_fail",
"test/test_model.py::TestModelBackend::test_action_log",
"test/test_model.py::TestModelBackend::test_application_version_set",
"test/test_model.py::TestModelBackend::test_application_version_set_invalid",
"test/test_model.py::TestModelBackend::test_juju_log",
"test/test_model.py::TestModelBackend::test_valid_metrics",
"test/test_model.py::TestModelBackend::test_invalid_metric_names",
"test/test_model.py::TestModelBackend::test_invalid_metric_values",
"test/test_model.py::TestModelBackend::test_invalid_metric_labels",
"test/test_model.py::TestModelBackend::test_invalid_metric_label_values",
"test/test_model.py::TestModelBackend::test_relation_remote_app_name_env",
"test/test_model.py::TestModelBackend::test_relation_remote_app_name_script_success",
"test/test_model.py::TestModelBackend::test_relation_remote_app_name_script_errors",
"test/test_model.py::TestModelBackend::test_planned_units",
"test/test_model.py::TestLazyMapping::test_invalidate",
"test/test_model.py::TestSecrets::test_app_add_secret_simple",
"test/test_model.py::TestSecrets::test_app_add_secret_args",
"test/test_model.py::TestSecrets::test_unit_add_secret_simple",
"test/test_model.py::TestSecrets::test_unit_add_secret_args",
"test/test_model.py::TestSecrets::test_unit_add_secret_errors",
"test/test_model.py::TestSecrets::test_add_secret_errors",
"test/test_model.py::TestSecrets::test_get_secret_id",
"test/test_model.py::TestSecrets::test_get_secret_label",
"test/test_model.py::TestSecrets::test_get_secret_id_and_label",
"test/test_model.py::TestSecrets::test_get_secret_no_args",
"test/test_model.py::TestSecrets::test_get_secret_not_found",
"test/test_model.py::TestSecrets::test_get_secret_other_error",
"test/test_model.py::TestSecrets::test_secret_unique_identifier",
"test/test_model.py::TestSecretInfo::test_init",
"test/test_model.py::TestSecretClass::test_id_and_label",
"test/test_model.py::TestSecretClass::test_get_content_cached",
"test/test_model.py::TestSecretClass::test_get_content_refresh",
"test/test_model.py::TestSecretClass::test_get_content_uncached",
"test/test_model.py::TestSecretClass::test_get_content_copies_dict",
"test/test_model.py::TestSecretClass::test_peek_content",
"test/test_model.py::TestSecretClass::test_get_info",
"test/test_model.py::TestSecretClass::test_set_content",
"test/test_model.py::TestSecretClass::test_set_info",
"test/test_model.py::TestSecretClass::test_grant",
"test/test_model.py::TestSecretClass::test_revoke",
"test/test_model.py::TestSecretClass::test_remove_revision",
"test/test_model.py::TestSecretClass::test_remove_all_revisions",
"test/test_model.py::TestPorts::test_open_port",
"test/test_model.py::TestPorts::test_open_port_error",
"test/test_model.py::TestPorts::test_close_port",
"test/test_model.py::TestPorts::test_close_port_error",
"test/test_model.py::TestPorts::test_opened_ports",
"test/test_model.py::TestPorts::test_opened_ports_warnings",
"test/test_model.py::TestPorts::test_set_ports_all_open",
"test/test_model.py::TestPorts::test_set_ports_mixed",
"test/test_model.py::TestPorts::test_set_ports_replace",
"test/test_model.py::TestPorts::test_set_ports_close_all",
"test/test_model.py::TestPorts::test_set_ports_noop",
"test/test_model.py::TestUnit::test_reboot",
"test/test_model.py::TestLazyNotice::test_lazy_notice",
"test/test_model.py::TestLazyNotice::test_repr",
"test/test_model.py::TestCloudCredential::test_from_dict",
"test/test_model.py::TestCloudCredential::test_from_dict_full",
"test/test_model.py::TestCloudSpec::test_from_dict",
"test/test_model.py::TestCloudSpec::test_from_dict_full",
"test/test_model.py::TestCloudSpec::test_from_dict_no_credential",
"test/test_model.py::TestGetCloudSpec::test_success",
"test/test_model.py::TestGetCloudSpec::test_error"
] | [] | Apache License 2.0 | 19,322 | 164 | [
"ops/model.py"
] |
|
pyvista__pyvista-6528 | adc548ab5bd7e38447b1524302f7534d515b8456 | 2024-08-17 20:25:49 | 1402ec78e18e58c3f1f512739d5bce6a2ef58a09 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pyvista/pyvista/pull/6528?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 91.50%. Comparing base [(`99a71b5`)](https://app.codecov.io/gh/pyvista/pyvista/commit/99a71b5096d44299a78fba7ef428b0f9e14fa04b?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) to head [(`8f69d82`)](https://app.codecov.io/gh/pyvista/pyvista/commit/8f69d82aa2c351a601d2fb71a80e0099fd688745?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista).
> Report is 1 commits behind head on main.
> :exclamation: There is a different number of reports uploaded between BASE (99a71b5) and HEAD (8f69d82). Click for more details.
>
> <details><summary>HEAD has 5 uploads less than BASE</summary>
>
>| Flag | BASE (99a71b5) | HEAD (8f69d82) |
>|------|------|------|
>||6|1|
></details>
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #6528 +/- ##
==========================================
- Coverage 97.38% 91.50% -5.89%
==========================================
Files 143 143
Lines 27386 27388 +2
==========================================
- Hits 26671 25062 -1609
- Misses 715 2326 +1611
```
</details>
pyvista-bot: <!-- NETLIFY DEPLOY COMMENT GENERATED BY ACTIONS_NETLIFY - APP ID SHA256: d3574f413fe51079937885070831e027ca5b10f7facb9e609494e0a481a31f79 -->
🚀 Deployed on https://66c18db03f837a5464c50e35--pyvista-dev.netlify.app | diff --git a/pyvista/core/utilities/points.py b/pyvista/core/utilities/points.py
index 57e4428e..a07daf29 100644
--- a/pyvista/core/utilities/points.py
+++ b/pyvista/core/utilities/points.py
@@ -185,7 +185,7 @@ def lines_from_points(points, close=False):
return poly
-def fit_plane_to_points(points, return_meta=False):
+def fit_plane_to_points(points, return_meta=False, resolution=10):
"""Fit a plane to a set of points using the SVD algorithm.
The plane is automatically sized and oriented to fit the extents of
@@ -208,6 +208,13 @@ def fit_plane_to_points(points, return_meta=False):
If ``True``, also returns the center and normal of the
generated plane.
+ resolution : int, default: 10
+ Number of points on the plane mesh along its edges. Specify two numbers to
+ set the resolution along the plane's long and short edge (respectively) or
+ a single number to set both edges to have the same resolution.
+
+ .. versionadded:: 0.45.0
+
Returns
-------
pyvista.PolyData
@@ -225,22 +232,18 @@ def fit_plane_to_points(points, return_meta=False):
>>> import pyvista as pv
>>> import numpy as np
+ >>> from pyvista import examples
>>>
- >>> # Create point cloud
>>> rng = np.random.default_rng(seed=0)
>>> cloud = rng.random((10, 3))
>>> cloud[:, 2] *= 0.1
>>>
- >>> # Fit plane
- >>> plane, center, normal = pv.fit_plane_to_points(
- ... cloud, return_meta=True
- ... )
- >>>
- >>> # Plot the fitted plane
+ >>> plane = pv.fit_plane_to_points(cloud)
+
+ Plot the point cloud and fitted plane.
+
>>> pl = pv.Plotter()
- >>> _ = pl.add_mesh(
- ... plane, color='lightblue', style='wireframe', line_width=4
- ... )
+ >>> _ = pl.add_mesh(plane, style='wireframe', line_width=4)
>>> _ = pl.add_points(
... cloud,
... render_points_as_spheres=True,
@@ -249,22 +252,18 @@ def fit_plane_to_points(points, return_meta=False):
... )
>>> pl.show()
- Fit a plane to a mesh.
+ Fit a plane to a mesh and return its meta-data. Set the plane resolution to 1
+ so that the plane has no internal points or edges.
- >>> import pyvista as pv
- >>> from pyvista import examples
- >>>
- >>> # Create mesh
>>> mesh = examples.download_shark()
- >>>
- >>> # Fit plane
- >>> plane = pv.fit_plane_to_points(mesh.points)
- >>>
- >>> # Plot the fitted plane
- >>> pl = pv.Plotter()
- >>> _ = pl.add_mesh(
- ... plane, show_edges=True, color='lightblue', opacity=0.25
+ >>> plane, center, normal = pv.fit_plane_to_points(
+ ... mesh.points, return_meta=True, resolution=1
... )
+
+ Plot the mesh and fitted plane.
+
+ >>> pl = pv.Plotter()
+ >>> _ = pl.add_mesh(plane, show_edges=True, opacity=0.25)
>>> _ = pl.add_mesh(mesh, color='gray')
>>> pl.camera_position = [
... (-117, 76, 235),
@@ -273,7 +272,34 @@ def fit_plane_to_points(points, return_meta=False):
... ]
>>> pl.show()
+ Use the meta data with :meth:`pyvista.DataSetFilter.clip` to split the mesh into
+ two.
+
+ >>> first_half, second_half = mesh.clip(
+ ... origin=center, normal=normal, return_clipped=True
+ ... )
+
+ Plot the two halves of the clipped mesh.
+
+ >>> pl = pv.Plotter()
+ >>> _ = pl.add_mesh(first_half, color='red')
+ >>> _ = pl.add_mesh(second_half, color='blue')
+ >>> pl.camera_position = [
+ ... (-143, 43, 40),
+ ... (-8.7, -11, -14),
+ ... (0.25, 0.92, -0.29),
+ ... ]
+ >>> pl.show()
"""
+ valid_resolution = _validation.validate_array(
+ resolution,
+ must_have_shape=[(), (2,)],
+ must_be_integer=True,
+ broadcast_to=(2,),
+ dtype_out=int,
+ )
+ i_resolution, j_resolution = valid_resolution
+
# Apply SVD to get orthogonal basis vectors to define the plane
data = np.array(points)
data_center = data.mean(axis=0)
@@ -304,7 +330,14 @@ def fit_plane_to_points(points, return_meta=False):
center = rotate_transform_inv[:3, :3] @ projected.center + data_center
# Initialize plane then move to final position
- plane = pyvista.Plane(center=(0, 0, 0), direction=(0, 0, 1), i_size=i_size, j_size=j_size)
+ plane = pyvista.Plane(
+ center=(0, 0, 0),
+ direction=(0, 0, 1),
+ i_size=i_size,
+ j_size=j_size,
+ i_resolution=i_resolution,
+ j_resolution=j_resolution,
+ )
plane.transform(rotate_transform_inv)
plane.points += center
@@ -503,6 +536,9 @@ def principal_axes(points: MatrixLike[float], *, return_std: bool = False):
See Also
--------
+ fit_plane_to_points
+ Fit a plane to points using the first two principal axes.
+
pyvista.DataSetFilters.align_xyz
Filter which aligns principal axes to the x-y-z axes.
| Add `covariance` method to `fit_plane_to_points` and expose `Plane` arguments
### Describe the feature you would like to be added.
This is a follow-up to #4650.
### Links to VTK Documentation, Examples, or Class Definitions.
Currently, `fit_plane_to_points` uses SVD. However, calculating the fit using covariance may be more "VTK-like", since
[`vtkPlane`](https://vtk.org/doc/nightly/html/classvtkPlane.html#a7b88495d93a5aa5053b1044965df67bd) has a fitting method that uses covariance, and [`vtkOBBTree`](https://vtk.org/doc/nightly/html/classvtkOBBTree.html) also computes the fit using covariance. I provided a graphical example of the different fit methods [here](https://github.com/pyvista/pyvista/issues/555#issuecomment-1646108382).
### Pseudocode or Screenshots
I propose adding `method='covariance'` as the default arg for `fit_plane_to_points` and having `method='svd'` as a non-default alternative.
In addition, sometimes it's useful to extract only the corners of the best-fit plane. In this case, we want to set `i_resolution=1` and `j_resolution=1` in the `Plane` constructor so that the plane mesh only has four points. However, we don't have direct access to these parameters in `fit_plane_to_points`. So I think we should expose these parameters directly.
This would look something like:
``` python
def fit_plane_to_points(
points,
method='covariance',
return_meta=False,
i_resolution=10,
j_resolution=10):
```
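For reference, the change that eventually landed (see the patch above, marked `versionadded 0.45.0`) exposes a single `resolution` argument rather than separate `i_resolution`/`j_resolution` keywords. A minimal sketch of the corner-extraction use case with that API; the point cloud is illustrative:
``` python
import numpy as np
import pyvista as pv

rng = np.random.default_rng(seed=0)
cloud = rng.random((10, 3))
cloud[:, 2] *= 0.1

# resolution=1 makes the fitted plane a single quad, so its points are
# exactly the four corners of the best-fit plane
plane = pv.fit_plane_to_points(cloud, resolution=1)
corners = plane.points  # (4, 3) array
```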
Any thoughts on this or suggestions for an alternative API (e.g. use a `plane_parameters` dict instead)? Should the default resolution be 1 instead of 10? | pyvista/pyvista | diff --git a/tests/core/test_utilities.py b/tests/core/test_utilities.py
index c2dbd292..6ce35dea 100644
--- a/tests/core/test_utilities.py
+++ b/tests/core/test_utilities.py
@@ -902,6 +902,16 @@ def test_has_module():
assert not has_module('not_a_module')
+def test_fit_plane_to_points_resolution(airplane):
+ DEFAULT_RESOLUTION = 10
+ plane = fit_plane_to_points(airplane.points)
+ assert plane.n_points == (DEFAULT_RESOLUTION + 1) ** 2
+
+ resolution = (1.0, 2.0) # Test with integer-valued floats
+ plane = fit_plane_to_points(airplane.points, resolution=resolution)
+ assert plane.n_points == (resolution[0] + 1) * (resolution[1] + 1)
+
+
def test_fit_plane_to_points():
points = ex.load_airplane().points
plane, center, normal = fit_plane_to_points(points, return_meta=True)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.44 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgl1-mesa-glx xvfb"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs @ file:///home/conda/feedstock_root/build_artifacts/aiohappyeyeballs_1741775197943/work
aiohttp @ file:///home/conda/feedstock_root/build_artifacts/aiohttp_1742268596946/work
aiosignal @ file:///home/conda/feedstock_root/build_artifacts/aiosignal_1734342155601/work
anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
argon2-cffi @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi_1733311059102/work
argon2-cffi-bindings @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi-bindings_1725356560642/work
arrow @ file:///home/conda/feedstock_root/build_artifacts/arrow_1733584251875/work
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
async-lru @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_async-lru_1742153708/work
async-timeout @ file:///home/conda/feedstock_root/build_artifacts/async-timeout_1733235340728/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
cached-property @ file:///home/conda/feedstock_root/build_artifacts/cached_property_1615209429212/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725571112467/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400455427/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
cmocean @ file:///home/conda/feedstock_root/build_artifacts/cmocean_1734343940570/work
codecov @ file:///home/conda/feedstock_root/build_artifacts/codecov_1734975286850/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
colorcet @ file:///home/conda/feedstock_root/build_artifacts/colorcet_1734007314889/work
colorspacious @ file:///home/conda/feedstock_root/build_artifacts/colorspacious_1734341944815/work
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1727293517607/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381224823/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1741148409996/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
execnet @ file:///home/conda/feedstock_root/build_artifacts/execnet_1733230988954/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1733569351617/work
fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fqdn @ file:///home/conda/feedstock_root/build_artifacts/fqdn_1733327382592/work/dist
frozenlist @ file:///home/conda/feedstock_root/build_artifacts/frozenlist_1737645236190/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
h5py @ file:///home/conda/feedstock_root/build_artifacts/h5py_1739952287730/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1731707562/work
httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
hypothesis @ file:///home/conda/feedstock_root/build_artifacts/hypothesis_1742900877539/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imageio @ file:///home/conda/feedstock_root/build_artifacts/imageio_1738273805233/work
imageio-ffmpeg @ file:///home/conda/feedstock_root/build_artifacts/imageio-ffmpeg_1737102630951/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1701831663892/work
ipywidgets==8.1.5
isoduration @ file:///home/conda/feedstock_root/build_artifacts/isoduration_1733493628631/work/dist
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
json5 @ file:///home/conda/feedstock_root/build_artifacts/json5_1733272076743/work
jsonpointer @ file:///home/conda/feedstock_root/build_artifacts/jsonpointer_1725302957584/work
jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
jsonschema-specifications @ file:///tmp/tmpk0f344m9/src
jupyter-events @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyter_events_1738765986/work
jupyter-lsp @ file:///home/conda/feedstock_root/build_artifacts/jupyter-lsp-meta_1733492907176/work/jupyter-lsp
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
jupyter_server @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_1734702637701/work
jupyter_server_proxy==4.4.0
jupyter_server_terminals @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_terminals_1733427956852/work
jupyterlab @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_1741964057182/work
jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
jupyterlab_server @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_server_1733599573484/work
jupyterlab_widgets==3.0.13
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1725459266648/work
loguru @ file:///home/conda/feedstock_root/build_artifacts/loguru_1725349754278/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.9.4
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
meshio @ file:///home/conda/feedstock_root/build_artifacts/meshio_1706720595231/work
mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
more-itertools @ file:///home/conda/feedstock_root/build_artifacts/more-itertools_1736883817510/work
msgpack @ file:///home/conda/feedstock_root/build_artifacts/msgpack-python_1725975012026/work
multidict @ file:///home/conda/feedstock_root/build_artifacts/multidict_1742308123521/work
munkres==1.1.4
nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253338730/work
notebook_shim @ file:///home/conda/feedstock_root/build_artifacts/notebook-shim_1733408315203/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1707225342954/work/dist/numpy-1.26.4-cp39-cp39-linux_x86_64.whl#sha256=c799942b5898f6e6c60264d1663a6469a475290e758c654aeeb78e2596463abd
overrides @ file:///home/conda/feedstock_root/build_artifacts/overrides_1734587627321/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929703139/work
pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pooch @ file:///home/conda/feedstock_root/build_artifacts/pooch_1733421311631/work
prometheus_client @ file:///home/conda/feedstock_root/build_artifacts/prometheus_client_1733327310477/work
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1737453357274/work
propcache @ file:///home/conda/feedstock_root/build_artifacts/propcache_1737635528546/work
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663125313/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pyembree @ file:///home/conda/feedstock_root/build_artifacts/pyembree_1718702851992/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
PySide6==6.8.3
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
pytest-memprof==0.2.0
pytest-mock==3.14.0
pytest-xdist @ file:///home/conda/feedstock_root/build_artifacts/pytest-xdist_1733240780199/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
python-json-logger @ file:///home/conda/feedstock_root/build_artifacts/python-json-logger_1677079630776/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1742920838005/work
-e git+https://github.com/pyvista/pyvista.git@adc548ab5bd7e38447b1524302f7534d515b8456#egg=pyvista
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1741805177758/work
referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
rfc3339_validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3339-validator_1733599910982/work
rfc3986-validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3986-validator_1598024191506/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037693/work
rtree @ file:///home/conda/feedstock_root/build_artifacts/rtree_1741378561624/work
scooby @ file:///home/conda/feedstock_root/build_artifacts/scooby_1734299019922/work
Send2Trash @ file:///home/conda/feedstock_root/build_artifacts/send2trash_1733322040660/work
shiboken6==6.8.3
simpervisor==1.0.0
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
sortedcontainers @ file:///home/conda/feedstock_root/build_artifacts/sortedcontainers_1738440353519/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
terminado @ file:///home/conda/feedstock_root/build_artifacts/terminado_1710262609923/work
tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615921868/work
tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
trame @ file:///home/conda/feedstock_root/build_artifacts/trame_1741413642518/work
trame-client @ file:///home/conda/feedstock_root/build_artifacts/trame-client_1742874331306/work
trame-server @ file:///home/conda/feedstock_root/build_artifacts/trame-server_1741638011360/work
trame-vtk @ file:///home/conda/feedstock_root/build_artifacts/trame-vtk_1739145199105/work
trame-vuetify @ file:///home/conda/feedstock_root/build_artifacts/trame-vuetify_1743263303319/work
trimesh @ file:///home/conda/feedstock_root/build_artifacts/trimesh_1743289679380/work
types-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/types-python-dateutil_1733612335562/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
typing_utils @ file:///home/conda/feedstock_root/build_artifacts/typing_utils_1733331286120/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692503055/work
uri-template @ file:///home/conda/feedstock_root/build_artifacts/uri-template_1733323593477/work/dist
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
vtk==9.3.1
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
webcolors @ file:///home/conda/feedstock_root/build_artifacts/webcolors_1733359735138/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
widgetsnbextension==4.0.13
wslink @ file:///home/conda/feedstock_root/build_artifacts/wslink_1742768409749/work
yarl @ file:///home/conda/feedstock_root/build_artifacts/yarl_1737575777699/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: pyvista
channels:
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- aiohappyeyeballs=2.6.1=pyhd8ed1ab_0
- aiohttp=3.11.14=py39h9399b63_0
- aiosignal=1.3.2=pyhd8ed1ab_0
- alsa-lib=1.2.13=hb9d3cd8_0
- anyio=4.9.0=pyh29332c3_0
- aom=3.9.1=hac33072_0
- argon2-cffi=23.1.0=pyhd8ed1ab_1
- argon2-cffi-bindings=21.2.0=py39h8cd3c5a_5
- arrow=1.3.0=pyhd8ed1ab_1
- asttokens=3.0.0=pyhd8ed1ab_1
- async-lru=2.0.5=pyh29332c3_0
- async-timeout=5.0.1=pyhd8ed1ab_1
- attr=2.5.1=h166bdaf_1
- attrs=25.3.0=pyh71513ae_0
- babel=2.17.0=pyhd8ed1ab_0
- beautifulsoup4=4.13.3=pyha770c72_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=he440d0b_1
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py39hf88036b_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cairo=1.18.4=h3394656_0
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.17.1=py39h15c3d72_0
- cftime=1.6.4=py39hf3d9206_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- cmocean=4.0.3=pyhd8ed1ab_1
- codecov=2.1.13=pyhd8ed1ab_1
- colorama=0.4.6=pyhd8ed1ab_1
- colorcet=3.1.0=pyhd8ed1ab_1
- colorspacious=1.1.2=pyhecae5ae_1
- comm=0.2.2=pyhd8ed1ab_1
- contourpy=1.3.0=py39h74842e3_2
- coverage=7.8.0=py39h9399b63_0
- cycler=0.12.1=pyhd8ed1ab_1
- cyrus-sasl=2.1.27=h54b06d7_7
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- debugpy=1.8.13=py39hf88036b_0
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- double-conversion=3.3.1=h5888daf_0
- embree=2.17.7=ha770c72_3
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- execnet=2.1.1=pyhd8ed1ab_1
- executing=2.1.0=pyhd8ed1ab_1
- expat=2.6.4=h5888daf_0
- ffmpeg=7.1.1=gpl_h0b79d52_704
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py39h9399b63_0
- fqdn=1.5.1=pyhd8ed1ab_1
- freetype=2.13.3=h48d6fc4_0
- fribidi=1.0.10=h36c2ea0_0
- frozenlist=1.5.0=py39h9399b63_1
- gdk-pixbuf=2.42.12=hb9ae30d_0
- gettext=0.23.1=h5888daf_0
- gettext-tools=0.23.1=h5888daf_0
- gl2ps=1.4.2=hae5d5c5_1
- glew=2.1.0=h9c3ff4c_2
- gmp=6.3.0=hac33072_2
- graphite2=1.3.13=h59595ed_1003
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- h5py=3.13.0=nompi_py39h30a5a8d_100
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.3=nompi_h2d575fe_109
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.7=pyh29332c3_1
- httpx=0.28.1=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- hypothesis=6.130.4=pyha770c72_0
- icu=75.1=he02047a_0
- idna=3.10=pyhd8ed1ab_1
- imageio=2.37.0=pyhfb79c49_0
- imageio-ffmpeg=0.6.0=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib-resources=6.5.2=pyhd8ed1ab_0
- importlib_metadata=8.6.1=hd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- ipykernel=6.29.5=pyh3099207_0
- ipython=8.18.1=pyh707e725_3
- isoduration=20.11.0=pyhd8ed1ab_1
- jack=1.9.22=h7c63dc7_2
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- json5=0.10.0=pyhd8ed1ab_1
- jsoncpp=1.9.6=hf42df4d_1
- jsonpointer=3.0.0=py39hf3d152e_1
- jsonschema=4.23.0=pyhd8ed1ab_1
- jsonschema-specifications=2024.10.1=pyhd8ed1ab_1
- jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_1
- jupyter-lsp=2.2.5=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_core=5.7.2=pyh31011fe_1
- jupyter_events=0.12.0=pyh29332c3_0
- jupyter_server=2.15.0=pyhd8ed1ab_0
- jupyter_server_terminals=0.5.3=pyhd8ed1ab_1
- jupyterlab=4.3.6=pyhd8ed1ab_0
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- jupyterlab_server=2.27.3=pyhd8ed1ab_1
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.7=py39h74842e3_0
- krb5=1.21.3=h659f571_0
- lame=3.100=h166bdaf_1003
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- level-zero=1.21.6=h84d6215_0
- libabseil=20250127.1=cxx17_hbbce691_0
- libaec=1.1.3=h59595ed_0
- libasprintf=0.23.1=h8e693c7_0
- libasprintf-devel=0.23.1=h8e693c7_0
- libass=0.17.3=h52826cd_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcap=2.75=h39aace5_0
- libcblas=3.9.0=31_he106b2a_openblas
- libclang-cpp20.1=20.1.1=default_hb5137d0_0
- libclang13=20.1.1=default_h9c6a7e4_0
- libcups=2.3.3=h4637d8d_4
- libcurl=8.12.1=h332b0f4_0
- libdb=6.2.32=h9c3ff4c_0
- libdeflate=1.23=h4ddbbb0_0
- libdrm=2.4.124=hb9d3cd8_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libegl=1.7.0=ha4b6fd6_2
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libflac=1.4.3=h59595ed_0
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgcrypt-lib=1.11.0=hb9d3cd8_2
- libgettextpo=0.23.1=h5888daf_0
- libgettextpo-devel=0.23.1=h5888daf_0
- libgfortran=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgl=1.7.0=ha4b6fd6_2
- libglib=2.84.0=h2ff4ddf_0
- libglu=9.0.3=h03adeef_0
- libglvnd=1.7.0=ha4b6fd6_2
- libglx=1.7.0=ha4b6fd6_2
- libgomp=14.2.0=h767d61c_2
- libgpg-error=1.51=hbd13f7d_1
- libhwloc=2.11.2=default_h0d58e46_1001
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- liblapack=3.9.0=31_h7ac8fdf_openblas
- libllvm20=20.1.1=ha7bfdaf_0
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h00e09a9_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libntlm=1.8=hb9d3cd8_0
- libogg=1.3.5=h4ab18f5_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libopengl=1.7.0=ha4b6fd6_2
- libopenvino=2025.0.0=hdc3f47d_3
- libopenvino-auto-batch-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-auto-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-hetero-plugin=2025.0.0=h981d57b_3
- libopenvino-intel-cpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-gpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-npu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-ir-frontend=2025.0.0=h981d57b_3
- libopenvino-onnx-frontend=2025.0.0=h0e684df_3
- libopenvino-paddle-frontend=2025.0.0=h0e684df_3
- libopenvino-pytorch-frontend=2025.0.0=h5888daf_3
- libopenvino-tensorflow-frontend=2025.0.0=h684f15b_3
- libopenvino-tensorflow-lite-frontend=2025.0.0=h5888daf_3
- libopus=1.3.1=h7f98852_1
- libpciaccess=0.18=hd590300_0
- libpng=1.6.47=h943b412_0
- libpq=17.4=h27ae623_0
- libprotobuf=5.29.3=h501fc15_0
- librsvg=2.58.4=he92a37e_3
- libsndfile=1.2.2=hc60ed4a_1
- libsodium=1.0.20=h4ab18f5_0
- libspatialindex=2.1.0=he57a185_0
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libsystemd0=257.4=h4e0b6ca_1
- libtheora=1.1.1=h4ab18f5_1006
- libtiff=4.7.0=hd9ff511_3
- libudev1=257.4=hbe16f8c_1
- libunwind=1.6.2=h9c3ff4c_0
- liburing=2.9=h84d6215_0
- libusb=1.0.28=hb9d3cd8_0
- libuuid=2.38.1=h0b41bf4_0
- libva=2.22.0=h4f16b4b_2
- libvorbis=1.3.7=h9c3ff4c_0
- libvpx=1.14.1=hac33072_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- loguru=0.7.2=py39hf3d152e_2
- lz4-c=1.10.0=h5888daf_1
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py39h9399b63_1
- matplotlib=3.9.4=py39hf3d152e_0
- matplotlib-base=3.9.4=py39h16632d1_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- meshio=5.3.5=pyhd8ed1ab_0
- mistune=3.1.3=pyh29332c3_0
- more-itertools=10.6.0=pyhd8ed1ab_0
- mpg123=1.32.9=hc50e24c_0
- msgpack-python=1.1.0=py39h74842e3_0
- multidict=6.2.0=py39h9399b63_0
- munkres=1.1.4=pyh9f0ad1d_0
- mysql-common=9.0.1=h266115a_5
- mysql-libs=9.0.1=he0572af_5
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbformat=5.10.4=pyhd8ed1ab_1
- ncurses=6.5=h2d0b736_3
- nest-asyncio=1.6.0=pyhd8ed1ab_1
- netcdf4=1.7.2=nompi_py39h9d02bfe_101
- nlohmann_json=3.11.3=he02047a_1
- notebook-shim=0.2.4=pyhd8ed1ab_1
- numpy=1.26.4=py39h474f0d3_0
- ocl-icd=2.3.2=hb9d3cd8_2
- opencl-headers=2024.10.24=h5888daf_0
- openh264=2.6.0=hc22cd8d_0
- openjpeg=2.5.3=h5fbd93e_0
- openldap=2.6.9=he970967_0
- openssl=3.4.1=h7b32b05_0
- overrides=7.7.0=pyhd8ed1ab_1
- packaging=24.2=pyhd8ed1ab_2
- pandocfilters=1.5.0=pyhd8ed1ab_0
- pango=1.56.3=h9ac818e_1
- parso=0.8.4=pyhd8ed1ab_1
- pcre2=10.44=hba22ea6_2
- pexpect=4.9.0=pyhd8ed1ab_1
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.1.0=py39h15c0740_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pooch=1.8.2=pyhd8ed1ab_1
- proj=9.5.1=h0054346_0
- prometheus_client=0.21.1=pyhd8ed1ab_0
- prompt-toolkit=3.0.50=pyha770c72_0
- propcache=0.2.1=py39h9399b63_1
- psutil=7.0.0=py39h8cd3c5a_0
- pthread-stubs=0.4=hb9d3cd8_1002
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pugixml=1.15=h3f63f65_0
- pulseaudio-client=17.0=hac146a9_1
- pure_eval=0.2.3=pyhd8ed1ab_1
- pycparser=2.22=pyh29332c3_1
- pyembree=0.1.6=py39hddac248_4
- pygments=2.19.1=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyside6=6.8.3=py39h0383914_0
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- pytest-xdist=3.6.1=pyhd8ed1ab_1
- python=3.9.21=h9c0c6dc_1_cpython
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python_abi=3.9=5_cp39
- pytz=2025.2=pyhd8ed1ab_0
- pyyaml=6.0.2=py39h9399b63_2
- pyzmq=26.3.0=py39h4e4fb57_0
- qhull=2020.2=h434a139_5
- qt6-main=6.8.3=h6441bc3_1
- readline=8.2=h8c095d6_2
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.3=pyhd8ed1ab_1
- rfc3339-validator=0.1.4=pyhd8ed1ab_1
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rich=14.0.0=pyh29332c3_0
- rpds-py=0.24.0=py39h3506688_0
- rtree=1.4.0=pyh11ca60a_1
- scooby=0.10.0=pyhd8ed1ab_1
- sdl2=2.32.50=h9b8e6db_1
- sdl3=3.2.8=h3083f51_0
- send2trash=1.8.3=pyh0d859eb_1
- setuptools=75.8.2=pyhff2d567_0
- six=1.17.0=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- sniffio=1.3.1=pyhd8ed1ab_1
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- soupsieve=2.5=pyhd8ed1ab_1
- sqlite=3.49.1=h9eae976_2
- stack_data=0.6.3=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tbb=2022.0.0=hceb3a55_0
- terminado=0.18.1=pyh0d859eb_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tornado=6.4.2=py39h8cd3c5a_0
- tqdm=4.67.1=pyhd8ed1ab_1
- traitlets=5.14.3=pyhd8ed1ab_1
- trame=3.8.1=pyhd8ed1ab_0
- trame-client=3.6.1=pyhd8ed1ab_0
- trame-server=3.4.0=pyhd8ed1ab_0
- trame-vtk=2.8.15=pyhb419c8b_0
- trame-vuetify=2.9.0=pyhd8ed1ab_0
- trimesh=4.6.6=pyh7b2049a_0
- types-python-dateutil=2.9.0.20241206=pyhd8ed1ab_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- typing_utils=0.1.0=pyhd8ed1ab_1
- tzdata=2025b=h78e105d_0
- unicodedata2=16.0.0=py39h8cd3c5a_0
- uri-template=1.3.0=pyhd8ed1ab_1
- urllib3=2.3.0=pyhd8ed1ab_0
- utfcpp=4.0.6=h005c6e1_0
- vtk=9.3.1=qt_py39h71fb23e_216
- vtk-base=9.3.1=qt_py39habd23de_216
- vtk-io-ffmpeg=9.3.1=qt_py39h71fb23e_216
- wayland=1.23.1=h3e06ad9_0
- wayland-protocols=1.42=hd8ed1ab_0
- wcwidth=0.2.13=pyhd8ed1ab_1
- webcolors=24.11.1=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- wslink=2.3.3=pyhd8ed1ab_0
- x264=1!164.3095=h166bdaf_2
- x265=3.5=h924138e_3
- xcb-util=0.4.1=hb711507_2
- xcb-util-cursor=0.1.5=hb9d3cd8_0
- xcb-util-image=0.4.0=hb711507_2
- xcb-util-keysyms=0.4.1=hb711507_0
- xcb-util-renderutil=0.3.10=hb711507_0
- xcb-util-wm=0.4.2=hb711507_0
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxscrnsaver=1.2.4=hb9d3cd8_0
- xorg-libxt=1.3.1=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xorg-libxxf86vm=1.1.6=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- yarl=1.18.3=py39h9399b63_1
- zeromq=4.3.5=h3b0a872_7
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py39h8cd3c5a_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- ipywidgets==8.1.5
- jupyter-server-proxy==4.4.0
- jupyterlab-widgets==3.0.13
- pytest-memprof==0.2.0
- pytest-mock==3.14.0
- pyvista==0.45.dev0
- simpervisor==1.0.0
- widgetsnbextension==4.0.13
prefix: /opt/conda/envs/pyvista
| [
"tests/core/test_utilities.py::test_fit_plane_to_points_resolution"
] | [] | [
"tests/core/test_utilities.py::test_version",
"tests/core/test_utilities.py::test_createvectorpolydata_error",
"tests/core/test_utilities.py::test_createvectorpolydata_1D",
"tests/core/test_utilities.py::test_createvectorpolydata",
"tests/core/test_utilities.py::test_get_ext[/data/mesh.stl-.stl]",
"tests/core/test_utilities.py::test_get_ext[/data/image.nii.gz-.nii.gz]",
"tests/core/test_utilities.py::test_get_ext[/data/other.gz-.gz]",
"tests/core/test_utilities.py::test_read[True]",
"tests/core/test_utilities.py::test_read[False]",
"tests/core/test_utilities.py::test_read_force_ext",
"tests/core/test_utilities.py::test_read_progress_bar",
"tests/core/test_utilities.py::test_read_force_ext_wrong_extension",
"tests/core/test_utilities.py::test_pyvista_read_exodus",
"tests/core/test_utilities.py::test_get_array_cell",
"tests/core/test_utilities.py::test_get_array_point",
"tests/core/test_utilities.py::test_get_array_field",
"tests/core/test_utilities.py::test_get_array_error",
"tests/core/test_utilities.py::test_get_array_none",
"tests/core/test_utilities.py::test_is_inside_bounds",
"tests/core/test_utilities.py::test_voxelize",
"tests/core/test_utilities.py::test_voxelize_non_uniform_density",
"tests/core/test_utilities.py::test_voxelize_invalid_density",
"tests/core/test_utilities.py::test_voxelize_throws_point_cloud",
"tests/core/test_utilities.py::test_voxelize_volume_default_density",
"tests/core/test_utilities.py::test_voxelize_volume_invalid_density",
"tests/core/test_utilities.py::test_voxelize_volume_no_face_mesh",
"tests/core/test_utilities.py::test_report",
"tests/core/test_utilities.py::test_line_segments_from_points",
"tests/core/test_utilities.py::test_lines_from_points",
"tests/core/test_utilities.py::test_grid_from_sph_coords",
"tests/core/test_utilities.py::test_transform_vectors_sph_to_cart",
"tests/core/test_utilities.py::test_vtkmatrix_to_from_array",
"tests/core/test_utilities.py::test_assert_empty_kwargs",
"tests/core/test_utilities.py::test_convert_id_list",
"tests/core/test_utilities.py::test_progress_monitor",
"tests/core/test_utilities.py::test_observer",
"tests/core/test_utilities.py::test_check_valid_vector",
"tests/core/test_utilities.py::test_cells_dict_utils",
"tests/core/test_utilities.py::test_apply_transformation_to_points",
"tests/core/test_utilities.py::test_vtk_error_catcher",
"tests/core/test_utilities.py::test_axis_angle_rotation",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis0-90-4]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis1-180-2]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis2-270-4]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis3-90-4]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis4-180-2]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis5-270-4]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis6-90-4]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis7-180-2]",
"tests/core/test_utilities.py::test_axis_angle_rotation_many_times[axis8-270-4]",
"tests/core/test_utilities.py::test_reflection",
"tests/core/test_utilities.py::test_merge",
"tests/core/test_utilities.py::test_convert_array",
"tests/core/test_utilities.py::test_has_duplicates",
"tests/core/test_utilities.py::test_copy_vtk_array",
"tests/core/test_utilities.py::test_cartesian_to_spherical",
"tests/core/test_utilities.py::test_spherical_to_cartesian",
"tests/core/test_utilities.py::test_set_pickle_format",
"tests/core/test_utilities.py::test_linkcode_resolve",
"tests/core/test_utilities.py::test_coerce_point_like_arg",
"tests/core/test_utilities.py::test_coerce_point_like_arg_copy",
"tests/core/test_utilities.py::test_coerce_point_like_arg_errors",
"tests/core/test_utilities.py::test_coerce_points_like_args_does_not_copy",
"tests/core/test_utilities.py::test_has_module",
"tests/core/test_utilities.py::test_fit_plane_to_points",
"tests/core/test_utilities.py::test_principal_axes[case0]",
"tests/core/test_utilities.py::test_principal_axes[case1]",
"tests/core/test_utilities.py::test_principal_axes[case2]",
"tests/core/test_utilities.py::test_principal_axes[case3]",
"tests/core/test_utilities.py::test_principal_axes_return_std",
"tests/core/test_utilities.py::test_principal_axes_empty",
"tests/core/test_utilities.py::test_principal_axes_single_point",
"tests/core/test_utilities.py::test_principal_axes_success_with_many_points",
"tests/core/test_utilities.py::test_no_new_attr_subclass",
"tests/core/test_utilities.py::test_serial_dict_init",
"tests/core/test_utilities.py::test_serial_dict_as_dict",
"tests/core/test_utilities.py::test_serial_dict_overrides__setitem__",
"tests/core/test_utilities.py::test_serial_dict_overrides__delitem__",
"tests/core/test_utilities.py::test_serial_dict_overrides__setattr__",
"tests/core/test_utilities.py::test_serial_dict_overrides_popitem",
"tests/core/test_utilities.py::test_serial_dict_overrides_pop",
"tests/core/test_utilities.py::test_serial_dict_overrides_update",
"tests/core/test_utilities.py::test_serial_dict_overrides_clear",
"tests/core/test_utilities.py::test_serial_dict_overrides_setdefault",
"tests/core/test_utilities.py::test_transform_scale[scale_args0]",
"tests/core/test_utilities.py::test_transform_scale[scale_args1]",
"tests/core/test_utilities.py::test_transform_scale[scale_args2]",
"tests/core/test_utilities.py::test_transform_translate[translate_args0]",
"tests/core/test_utilities.py::test_transform_translate[translate_args1]",
"tests/core/test_utilities.py::test_transform_reflect[reflect_args0]",
"tests/core/test_utilities.py::test_transform_reflect[reflect_args1]",
"tests/core/test_utilities.py::test_transform_rotate",
"tests/core/test_utilities.py::test_transform_with_point[scale-args0-post]",
"tests/core/test_utilities.py::test_transform_with_point[scale-args0-pre]",
"tests/core/test_utilities.py::test_transform_with_point[reflect-args1-post]",
"tests/core/test_utilities.py::test_transform_with_point[reflect-args1-pre]",
"tests/core/test_utilities.py::test_transform_with_point[rotate-args2-post]",
"tests/core/test_utilities.py::test_transform_with_point[rotate-args2-pre]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_x-args3-post]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_x-args3-pre]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_y-args4-post]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_y-args4-pre]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_z-args5-post]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_z-args5-pre]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_vector-args6-post]",
"tests/core/test_utilities.py::test_transform_with_point[rotate_vector-args6-pre]",
"tests/core/test_utilities.py::test_transform_rotate_x",
"tests/core/test_utilities.py::test_transform_rotate_y",
"tests/core/test_utilities.py::test_transform_rotate_z",
"tests/core/test_utilities.py::test_transform_rotate_vector",
"tests/core/test_utilities.py::test_transform_concatenate_vtkmatrix",
"tests/core/test_utilities.py::test_transform_invert",
"tests/core/test_utilities.py::test_transform_matrix_list[matrix_list]",
"tests/core/test_utilities.py::test_transform_matrix_list[inverse_matrix_list]",
"tests/core/test_utilities.py::test_transform_multiply_mode_override[pre-pre]",
"tests/core/test_utilities.py::test_transform_multiply_mode_override[pre-post]",
"tests/core/test_utilities.py::test_transform_multiply_mode_override[post-pre]",
"tests/core/test_utilities.py::test_transform_multiply_mode_override[post-post]",
"tests/core/test_utilities.py::test_transform_multiply_mode",
"tests/core/test_utilities.py::test_transform_identity",
"tests/core/test_utilities.py::test_transform_init",
"tests/core/test_utilities.py::test_transform_chain_methods",
"tests/core/test_utilities.py::test_transform_add",
"tests/core/test_utilities.py::test_transform_add_other[other0]",
"tests/core/test_utilities.py::test_transform_add_other[other1]",
"tests/core/test_utilities.py::test_transform_add_other[other2]",
"tests/core/test_utilities.py::test_transform_radd",
"tests/core/test_utilities.py::test_transform_mul[2]",
"tests/core/test_utilities.py::test_transform_mul[scale_factor1]",
"tests/core/test_utilities.py::test_transform_rmul[2]",
"tests/core/test_utilities.py::test_transform_rmul[scale_factor1]",
"tests/core/test_utilities.py::test_transform_matmul",
"tests/core/test_utilities.py::test_transform_add_raises",
"tests/core/test_utilities.py::test_transform_radd_raises",
"tests/core/test_utilities.py::test_transform_rmul_raises",
"tests/core/test_utilities.py::test_transform_mul_raises",
"tests/core/test_utilities.py::test_transform_matmul_raises",
"tests/core/test_utilities.py::test_transform_copy[pre]",
"tests/core/test_utilities.py::test_transform_copy[post]",
"tests/core/test_utilities.py::test_transform_repr"
] | [] | MIT License | 19,336 | 1,450 | [
"pyvista/core/utilities/points.py"
] |
iterative__datachain-314 | 61aeed4f77a087ecab0ae0ddc56ad0a76e5422ac | 2024-08-18 12:35:18 | cd6c831a79f2956f6de48eaf4c253b607f158439 | EdwardLi-coder: > Thanks @EdwardLi-coder ! It would be great to add a test for this.
Thank you @shcheklein for pointing that out. I apologize for forgetting to add a test. I will add one now.
shcheklein: Looks good, but we also need to implement the ClickHouse part (which is used in the private Studio product). At the moment I'm getting:
```python
NotImplementedError: Compiler not implemented for the SQLAlchemy function, string.regexp_replace, with dialect, clickhouse. For information on adding dialect-specific compilers, see https://docs.sqlalchemy.org/en/14/core/compiler.html
```
Let me try to help you here and create a companion PR if possible.
shcheklein: I've added a companion PR to the Studio (private) repo. Made small changes here (@EdwardLi-coder please review). It should be good to go @iterative/datachain | diff --git a/src/datachain/sql/functions/string.py b/src/datachain/sql/functions/string.py
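For reference, a companion ClickHouse compiler would follow the same pattern as the SQLite registration in the patch below. A minimal sketch, assuming the call can be mapped onto ClickHouse's `replaceRegexpAll(haystack, pattern, replacement)`; the dialect name and the target function are assumptions here, not part of this PR:
```python
# Sketch only: mirrors the sqlite registration added in this PR.
# "clickhouse" as the dialect name and replaceRegexpAll as the target function
# are assumptions; backreference syntax may also differ between engines.
from sqlalchemy.ext.compiler import compiles

from datachain.sql.functions import string


def compile_regexp_replace_ch(element, compiler, **kwargs):
    return f"replaceRegexpAll({compiler.process(element.clauses, **kwargs)})"


compiles(string.regexp_replace, "clickhouse")(compile_regexp_replace_ch)
```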
index 25b7b2c..7d8c3c9 100644
--- a/src/datachain/sql/functions/string.py
+++ b/src/datachain/sql/functions/string.py
@@ -26,5 +26,17 @@ class split(GenericFunction): # noqa: N801
inherit_cache = True
+class regexp_replace(GenericFunction): # noqa: N801
+ """
+ Replaces substring that match a regular expression.
+ """
+
+ type = String()
+ package = "string"
+ name = "regexp_replace"
+ inherit_cache = True
+
+
compiler_not_implemented(length)
compiler_not_implemented(split)
+compiler_not_implemented(regexp_replace)
diff --git a/src/datachain/sql/sqlite/base.py b/src/datachain/sql/sqlite/base.py
index 43ef7a3..fe8d6ec 100644
--- a/src/datachain/sql/sqlite/base.py
+++ b/src/datachain/sql/sqlite/base.py
@@ -1,4 +1,5 @@
import logging
+import re
import sqlite3
from collections.abc import Iterable
from datetime import MAXYEAR, MINYEAR, datetime, timezone
@@ -77,6 +78,7 @@ def setup():
compiles(array.length, "sqlite")(compile_array_length)
compiles(string.length, "sqlite")(compile_string_length)
compiles(string.split, "sqlite")(compile_string_split)
+ compiles(string.regexp_replace, "sqlite")(compile_regexp_replace)
compiles(conditional.greatest, "sqlite")(compile_greatest)
compiles(conditional.least, "sqlite")(compile_least)
compiles(Values, "sqlite")(compile_values)
@@ -178,9 +180,15 @@ def register_user_defined_sql_functions() -> None:
_registered_function_creators["vector_functions"] = create_vector_functions
+ def sqlite_regexp_replace(string: str, pattern: str, replacement: str) -> str:
+ return re.sub(pattern, replacement, string)
+
def create_string_functions(conn):
conn.create_function("split", 2, sqlite_string_split, deterministic=True)
conn.create_function("split", 3, sqlite_string_split, deterministic=True)
+ conn.create_function(
+ "regexp_replace", 3, sqlite_regexp_replace, deterministic=True
+ )
_registered_function_creators["string_functions"] = create_string_functions
@@ -265,6 +273,10 @@ def path_file_ext(path):
return func.substr(path, func.length(path) - path_file_ext_length(path) + 1)
+def compile_regexp_replace(element, compiler, **kwargs):
+ return f"regexp_replace({compiler.process(element.clauses, **kwargs)})"
+
+
def compile_path_parent(element, compiler, **kwargs):
return compiler.process(path_parent(*element.clauses.clauses), **kwargs)
| Need regexp_replace() function
### Description
It is common to transform text columns in datasets.
For example, if the class is encoded as part of the filename, one currently needs to write a mapper to extract the label "cat" from the name "cat.1001.jpg":
```
import re

def extract_label(filepath: str) -> str:
    # NOTE: Python's built-in `re` does not support \pL; use a character class
    # (or the third-party `regex` module with \p{L}) instead.
    match = re.search(r"\.([A-Za-z]+)\.", filepath)
    if match:
        return match.group(1)
    else:
        return None

map(label=lambda file: extract_label(file.path))
```
It should be straightforward in many SQL engines to use regexp_replace() for this part:
```
mutate(label=regexp_replace(Column("file.path"), r'\.(\pL+)\.', r'\1'))
``` | iterative/datachain | diff --git a/tests/unit/sql/test_string.py b/tests/unit/sql/test_string.py
index 0842614..e4a0674 100644
--- a/tests/unit/sql/test_string.py
+++ b/tests/unit/sql/test_string.py
@@ -21,3 +21,28 @@ def test_split(warehouse, args, expected):
query = select(string.split(*args))
result = tuple(warehouse.dataset_rows_select(query))
assert result == ((expected,),)
+
+
[email protected](
+ "input_string,pattern,replacement,expected",
+ [
+ ("hello world", "world", "universe", "hello universe"),
+ ("abc123def456", r"\d+", "X", "abcXdefX"),
+ ("cat.1001.jpg", r"\.(\w+)\.", r"_\1_", "cat_1001_jpg"),
+ (
+ "dog_photo.jpg",
+ r"(\w+)\.(jpg|jpeg|png|gif)$",
+ r"\1_thumb.\2",
+ "dog_photo_thumb.jpg",
+ ),
+ ("file.with...dots.txt", r"\.+", ".", "file.with.dots.txt"),
+ ],
+)
+def test_regexp_replace(warehouse, input_string, pattern, replacement, expected):
+ query = select(
+ string.regexp_replace(
+ literal(input_string), literal(pattern), literal(replacement)
+ )
+ )
+ result = tuple(warehouse.db.execute(query))
+ assert result == ((expected,),)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_media",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adlfs==2024.12.0
aiobotocore==2.21.1
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aioitertools==0.12.0
aiosignal==1.3.2
aiotools==1.8.2
annotated-types==0.7.0
antlr4-python3-runtime==4.13.2
argcomplete==3.6.1
async-timeout==5.0.1
attrs==25.3.0
aws-sam-translator==1.95.0
aws-xray-sdk==2.14.0
azure-core==1.32.0
azure-datalake-store==0.0.53
azure-identity==1.21.0
azure-storage-blob==12.25.1
babel==2.17.0
backrefs==5.8
black==25.1.0
blinker==1.9.0
boto3==1.37.1
botocore==1.37.1
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
cfn-lint==1.32.1
charset-normalizer==3.4.1
click==8.1.8
cloudpickle==3.1.1
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
-e git+https://github.com/iterative/datachain.git@61aeed4f77a087ecab0ae0ddc56ad0a76e5422ac#egg=datachain
datamodel-code-generator==0.28.5
decorator==5.2.1
dictdiffer==0.9.0
dill==0.3.8
diskcache==5.6.3
distlib==0.3.9
docker==7.1.0
dulwich==0.22.8
dvc-data==3.16.9
dvc-objects==5.1.0
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
Flask==3.1.0
flask-cors==5.0.1
frozenlist==1.5.0
fsspec==2025.3.1
ftfy==6.3.1
funcy==2.0
gcsfs==2025.3.1
genson==1.3.0
ghp-import==2.1.0
google-api-core==2.24.2
google-auth==2.38.0
google-auth-oauthlib==1.2.1
google-cloud-core==2.4.3
google-cloud-storage==3.1.0
google-crc32c==1.7.1
google-resumable-media==2.7.2
googleapis-common-protos==1.69.2
graphql-core==3.2.6
greenlet==3.1.1
griffe==1.7.1
huggingface-hub==0.30.0
hypothesis==6.130.5
idna==3.10
importlib_metadata==8.6.1
inflect==5.6.2
iniconfig==2.1.0
isodate==0.7.2
isort==6.0.1
itsdangerous==2.2.0
Jinja2==3.1.6
jmespath==1.0.1
joserfc==1.0.4
jsonpatch==1.33
jsonpath-ng==1.7.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-path==0.3.4
jsonschema-specifications==2024.10.1
lazy-object-proxy==1.10.0
lz4==4.4.3
Markdown==3.7
MarkupSafe==3.0.2
mergedeep==1.3.4
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-gen-files==0.5.0
mkdocs-get-deps==0.2.0
mkdocs-literate-nav==0.6.2
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocs-section-index==0.3.9
mkdocstrings==0.29.1
mkdocstrings-python==1.16.8
moto==5.1.2
mpmath==1.3.0
msal==1.32.0
msal-extensions==1.3.1
msgpack==1.1.0
multidict==6.2.0
multiprocess==0.70.16
mypy==1.11.1
mypy-extensions==1.0.0
networkx==3.2.1
numpy==2.0.2
nvidia-cublas-cu12==12.4.5.8
nvidia-cuda-cupti-cu12==12.4.127
nvidia-cuda-nvrtc-cu12==12.4.127
nvidia-cuda-runtime-cu12==12.4.127
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.2.1.3
nvidia-curand-cu12==10.3.5.147
nvidia-cusolver-cu12==11.6.1.9
nvidia-cusparse-cu12==12.3.1.170
nvidia-cusparselt-cu12==0.6.2
nvidia-nccl-cu12==2.21.5
nvidia-nvjitlink-cu12==12.4.127
nvidia-nvtx-cu12==12.4.127
oauthlib==3.2.2
open_clip_torch==2.31.0
openapi-schema-validator==0.6.3
openapi-spec-validator==0.7.1
orjson==3.10.16
packaging==24.2
paginate==0.5.7
pandas==2.2.3
pathable==0.4.4
pathspec==0.12.1
pillow==10.4.0
platformdirs==4.3.7
pluggy==1.5.0
ply==3.11
propcache==0.3.1
proto-plus==1.26.1
protobuf==6.30.2
py-cpuinfo==9.0.0
py-partiql-parser==0.6.1
pyarrow==19.0.1
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pygal==3.0.5
pygaljs==1.0.2
Pygments==2.19.1
pygtrie==2.5.0
PyJWT==2.10.1
pymdown-extensions==10.14.3
pyparsing==3.2.3
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-servers==0.5.10
pytest-sugar==1.0.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyyaml_env_tag==0.1
referencing==0.36.2
regex==2024.11.6
requests==2.32.3
requests-mock==1.12.1
requests-oauthlib==2.0.0
responses==0.25.7
rfc3339-validator==0.1.4
rpds-py==0.24.0
rsa==4.9
s3fs==2025.3.1
s3transfer==0.11.3
safetensors==0.5.3
shtab==1.7.1
simsimd==6.2.1
six==1.17.0
sortedcontainers==2.4.0
SQLAlchemy==2.0.40
sqltrie==0.11.2
sympy==1.13.1
termcolor==3.0.0
timm==1.0.15
tokenizers==0.21.1
tomli==2.2.1
tomlkit==0.13.2
torch==2.6.0
torchvision==0.21.0
tqdm==4.67.1
transformers==4.50.3
triton==3.2.0
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
types-PyYAML==6.0.12.20250326
types-requests==2.31.0.6
types-urllib3==1.26.25.14
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
universal_pathlib==0.2.6
urllib3==1.26.20
usearch==2.16.9
virtualenv==20.29.3
watchdog==6.0.0
wcwidth==0.2.13
Werkzeug==3.1.3
wrapt==1.17.2
xmltodict==0.14.2
yarl==1.18.3
zipp==3.21.0
| name: datachain
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adlfs==2024.12.0
- aiobotocore==2.21.1
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aioitertools==0.12.0
- aiosignal==1.3.2
- aiotools==1.8.2
- annotated-types==0.7.0
- antlr4-python3-runtime==4.13.2
- argcomplete==3.6.1
- async-timeout==5.0.1
- attrs==25.3.0
- aws-sam-translator==1.95.0
- aws-xray-sdk==2.14.0
- azure-core==1.32.0
- azure-datalake-store==0.0.53
- azure-identity==1.21.0
- azure-storage-blob==12.25.1
- babel==2.17.0
- backrefs==5.8
- black==25.1.0
- blinker==1.9.0
- boto3==1.37.1
- botocore==1.37.1
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- cfn-lint==1.32.1
- charset-normalizer==3.4.1
- click==8.1.8
- cloudpickle==3.1.1
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- datachain==0.3.3
- datamodel-code-generator==0.28.5
- decorator==5.2.1
- dictdiffer==0.9.0
- dill==0.3.8
- diskcache==5.6.3
- distlib==0.3.9
- docker==7.1.0
- dulwich==0.22.8
- dvc-data==3.16.9
- dvc-objects==5.1.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- flask==3.1.0
- flask-cors==5.0.1
- frozenlist==1.5.0
- fsspec==2025.3.1
- ftfy==6.3.1
- funcy==2.0
- gcsfs==2025.3.1
- genson==1.3.0
- ghp-import==2.1.0
- google-api-core==2.24.2
- google-auth==2.38.0
- google-auth-oauthlib==1.2.1
- google-cloud-core==2.4.3
- google-cloud-storage==3.1.0
- google-crc32c==1.7.1
- google-resumable-media==2.7.2
- googleapis-common-protos==1.69.2
- graphql-core==3.2.6
- greenlet==3.1.1
- griffe==1.7.1
- huggingface-hub==0.30.0
- hypothesis==6.130.5
- idna==3.10
- importlib-metadata==8.6.1
- inflect==5.6.2
- iniconfig==2.1.0
- isodate==0.7.2
- isort==6.0.1
- itsdangerous==2.2.0
- jinja2==3.1.6
- jmespath==1.0.1
- joserfc==1.0.4
- jsonpatch==1.33
- jsonpath-ng==1.7.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-path==0.3.4
- jsonschema-specifications==2024.10.1
- lazy-object-proxy==1.10.0
- lz4==4.4.3
- markdown==3.7
- markupsafe==3.0.2
- mergedeep==1.3.4
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-gen-files==0.5.0
- mkdocs-get-deps==0.2.0
- mkdocs-literate-nav==0.6.2
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocs-section-index==0.3.9
- mkdocstrings==0.29.1
- mkdocstrings-python==1.16.8
- moto==5.1.2
- mpmath==1.3.0
- msal==1.32.0
- msal-extensions==1.3.1
- msgpack==1.1.0
- multidict==6.2.0
- multiprocess==0.70.16
- mypy==1.11.1
- mypy-extensions==1.0.0
- networkx==3.2.1
- numpy==2.0.2
- nvidia-cublas-cu12==12.4.5.8
- nvidia-cuda-cupti-cu12==12.4.127
- nvidia-cuda-nvrtc-cu12==12.4.127
- nvidia-cuda-runtime-cu12==12.4.127
- nvidia-cudnn-cu12==9.1.0.70
- nvidia-cufft-cu12==11.2.1.3
- nvidia-curand-cu12==10.3.5.147
- nvidia-cusolver-cu12==11.6.1.9
- nvidia-cusparse-cu12==12.3.1.170
- nvidia-cusparselt-cu12==0.6.2
- nvidia-nccl-cu12==2.21.5
- nvidia-nvjitlink-cu12==12.4.127
- nvidia-nvtx-cu12==12.4.127
- oauthlib==3.2.2
- open-clip-torch==2.31.0
- openapi-schema-validator==0.6.3
- openapi-spec-validator==0.7.1
- orjson==3.10.16
- packaging==24.2
- paginate==0.5.7
- pandas==2.2.3
- pathable==0.4.4
- pathspec==0.12.1
- pillow==10.4.0
- platformdirs==4.3.7
- pluggy==1.5.0
- ply==3.11
- propcache==0.3.1
- proto-plus==1.26.1
- protobuf==6.30.2
- py-cpuinfo==9.0.0
- py-partiql-parser==0.6.1
- pyarrow==19.0.1
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pygal==3.0.5
- pygaljs==1.0.2
- pygments==2.19.1
- pygtrie==2.5.0
- pyjwt==2.10.1
- pymdown-extensions==10.14.3
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-servers==0.5.10
- pytest-sugar==1.0.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- referencing==0.36.2
- regex==2024.11.6
- requests==2.32.3
- requests-mock==1.12.1
- requests-oauthlib==2.0.0
- responses==0.25.7
- rfc3339-validator==0.1.4
- rpds-py==0.24.0
- rsa==4.9
- s3fs==2025.3.1
- s3transfer==0.11.3
- safetensors==0.5.3
- shtab==1.7.1
- simsimd==6.2.1
- six==1.17.0
- sortedcontainers==2.4.0
- sqlalchemy==2.0.40
- sqltrie==0.11.2
- sympy==1.13.1
- termcolor==3.0.0
- timm==1.0.15
- tokenizers==0.21.1
- tomli==2.2.1
- tomlkit==0.13.2
- torch==2.6.0
- torchvision==0.21.0
- tqdm==4.67.1
- transformers==4.50.3
- triton==3.2.0
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- types-pyyaml==6.0.12.20250326
- types-requests==2.31.0.6
- types-urllib3==1.26.25.14
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- universal-pathlib==0.2.6
- urllib3==1.26.20
- usearch==2.16.9
- virtualenv==20.29.3
- watchdog==6.0.0
- wcwidth==0.2.13
- werkzeug==3.1.3
- wrapt==1.17.2
- xmltodict==0.14.2
- yarl==1.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/datachain
| [
"tests/unit/sql/test_string.py::test_regexp_replace[hello",
"tests/unit/sql/test_string.py::test_regexp_replace[abc123def456-\\\\d+-X-abcXdefX]",
"tests/unit/sql/test_string.py::test_regexp_replace[cat.1001.jpg-\\\\.(\\\\w+)\\\\.-_\\\\1_-cat_1001_jpg]",
"tests/unit/sql/test_string.py::test_regexp_replace[dog_photo.jpg-(\\\\w+)\\\\.(jpg|jpeg|png|gif)$-\\\\1_thumb.\\\\2-dog_photo_thumb.jpg]",
"tests/unit/sql/test_string.py::test_regexp_replace[file.with...dots.txt-\\\\.+-.-file.with.dots.txt]"
] | [] | [
"tests/unit/sql/test_string.py::test_length",
"tests/unit/sql/test_string.py::test_split[args0-expected0]",
"tests/unit/sql/test_string.py::test_split[args1-expected1]"
] | [] | Apache License 2.0 | 19,338 | 670 | [
"src/datachain/sql/functions/string.py",
"src/datachain/sql/sqlite/base.py"
] |
tobymao__sqlglot-3935 | f9a1efd42361767bb2f867f827048c3a059b3188 | 2024-08-19 16:23:23 | f4c34d37c5773c37a13437c7e0e7eb27b4e98877 | diff --git a/sqlglot/dialects/oracle.py b/sqlglot/dialects/oracle.py
index 3acabb94..2562422c 100644
--- a/sqlglot/dialects/oracle.py
+++ b/sqlglot/dialects/oracle.py
@@ -90,7 +90,6 @@ class Oracle(Dialect):
"ORDER SIBLINGS BY": TokenType.ORDER_SIBLINGS_BY,
"SAMPLE": TokenType.TABLE_SAMPLE,
"START": TokenType.BEGIN,
- "SYSDATE": TokenType.CURRENT_TIMESTAMP,
"TOP": TokenType.TOP,
"VARCHAR2": TokenType.VARCHAR,
}
@@ -109,6 +108,11 @@ class Oracle(Dialect):
}
FUNCTIONS.pop("NVL")
+ NO_PAREN_FUNCTION_PARSERS = {
+ **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
+ "SYSDATE": lambda self: self.expression(exp.CurrentTimestamp, sysdate=True),
+ }
+
FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
**parser.Parser.FUNCTION_PARSERS,
"JSON_ARRAY": lambda self: self._parse_json_array(
@@ -275,6 +279,9 @@ class Oracle(Dialect):
}
def currenttimestamp_sql(self, expression: exp.CurrentTimestamp) -> str:
+ if expression.args.get("sysdate"):
+ return "SYSDATE"
+
this = expression.this
return self.func("CURRENT_TIMESTAMP", this) if this else "CURRENT_TIMESTAMP"
diff --git a/sqlglot/dialects/redshift.py b/sqlglot/dialects/redshift.py
index 0b12df12..694af28a 100644
--- a/sqlglot/dialects/redshift.py
+++ b/sqlglot/dialects/redshift.py
@@ -80,7 +80,7 @@ class Redshift(Postgres):
NO_PAREN_FUNCTION_PARSERS = {
**Postgres.Parser.NO_PAREN_FUNCTION_PARSERS,
"APPROXIMATE": lambda self: self._parse_approximate_count(),
- "SYSDATE": lambda self: self.expression(exp.CurrentTimestamp, transaction=True),
+ "SYSDATE": lambda self: self.expression(exp.CurrentTimestamp, sysdate=True),
}
SUPPORTS_IMPLICIT_UNNEST = True
@@ -180,7 +180,7 @@ class Redshift(Postgres):
exp.ApproxDistinct: lambda self,
e: f"APPROXIMATE COUNT(DISTINCT {self.sql(e, 'this')})",
exp.CurrentTimestamp: lambda self, e: (
- "SYSDATE" if e.args.get("transaction") else "GETDATE()"
+ "SYSDATE" if e.args.get("sysdate") else "GETDATE()"
),
exp.DateAdd: date_delta_sql("DATEADD"),
exp.DateDiff: date_delta_sql("DATEDIFF"),
diff --git a/sqlglot/expressions.py b/sqlglot/expressions.py
index 404162d4..8a720486 100644
--- a/sqlglot/expressions.py
+++ b/sqlglot/expressions.py
@@ -5133,7 +5133,7 @@ class CurrentTime(Func):
class CurrentTimestamp(Func):
- arg_types = {"this": False, "transaction": False}
+ arg_types = {"this": False, "sysdate": False}
class CurrentUser(Func):
| in ORACLE queries, SYSDATE is replaced by CURRENT_TIMESTAMP. These 2 functions are not the same.
SQLGlot version: 25.14.1.dev1
**Fully reproducible code snippet**
```python
import sqlglot
sql = sqlglot.parse_one(sql='SELECT SYSDATE', read='oracle')
print(sql.sql(dialect='oracle'))
```
--> SELECT CURRENT_TIMESTAMP
These two functions do not return the same thing and, more importantly, they do not return the same datatype. SYSDATE returns a DATE and can therefore be used directly in date comparisons; that is not the case for CURRENT_TIMESTAMP.
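For reference, the expected behaviour is for the Oracle round-trip to preserve the function. A minimal check; the printed output reflects the fix in the patch above rather than the released version:
```python
import sqlglot

# With SYSDATE parsed as its own no-paren function, the Oracle round-trip keeps it intact.
print(sqlglot.parse_one("SELECT SYSDATE", read="oracle").sql(dialect="oracle"))
# Expected after the fix: SELECT SYSDATE
```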
**Official Documentation**
https://docs.oracle.com/cd/E11882_01/server.112/e41084/functions191.htm
https://docs.oracle.com/cd/B19306_01/server.102/b14200/functions037.htm | tobymao/sqlglot | diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 7f743d39..79b8a024 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -15,6 +15,7 @@ class TestOracle(Validator):
)
self.parse_one("ALTER TABLE tbl_name DROP FOREIGN KEY fk_symbol").assert_is(exp.Alter)
+ self.validate_identity("SYSDATE")
self.validate_identity("CREATE GLOBAL TEMPORARY TABLE t AS SELECT * FROM orders")
self.validate_identity("CREATE PRIVATE TEMPORARY TABLE t AS SELECT * FROM orders")
self.validate_identity("REGEXP_REPLACE('source', 'search')")
@@ -72,10 +73,6 @@ class TestOracle(Validator):
"SELECT JSON_OBJECTAGG(KEY department_name VALUE department_id) FROM dep WHERE id <= 30",
"SELECT JSON_OBJECTAGG(department_name: department_id) FROM dep WHERE id <= 30",
)
- self.validate_identity(
- "SYSDATE",
- "CURRENT_TIMESTAMP",
- )
self.validate_identity(
"SELECT last_name, department_id, salary, MIN(salary) KEEP (DENSE_RANK FIRST ORDER BY commission_pct) "
'OVER (PARTITION BY department_id) AS "Worst", MAX(salary) KEEP (DENSE_RANK LAST ORDER BY commission_pct) '
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 3
} | 25.14 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
identify==2.6.9
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre_commit==4.2.0
Pygments==2.19.1
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@f9a1efd42361767bb2f867f827048c3a059b3188#egg=sqlglot
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- distlib==0.3.9
- duckdb==1.2.1
- filelock==3.18.0
- identify==2.6.9
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pre-commit==4.2.0
- pygments==2.19.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_oracle.py::TestOracle::test_oracle"
] | [] | [
"tests/dialects/test_oracle.py::TestOracle::test_connect_by",
"tests/dialects/test_oracle.py::TestOracle::test_hints",
"tests/dialects/test_oracle.py::TestOracle::test_join_marker",
"tests/dialects/test_oracle.py::TestOracle::test_json_table",
"tests/dialects/test_oracle.py::TestOracle::test_match_recognize",
"tests/dialects/test_oracle.py::TestOracle::test_query_restrictions",
"tests/dialects/test_oracle.py::TestOracle::test_xml_table"
] | [] | MIT License | 19,344 | 796 | [
"sqlglot/dialects/oracle.py",
"sqlglot/dialects/redshift.py",
"sqlglot/expressions.py"
] |
|
pyvista__pyvista-6541 | 89b855e37b5971ee728bf5150fae8f118997c068 | 2024-08-19 18:19:12 | 1402ec78e18e58c3f1f512739d5bce6a2ef58a09 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pyvista/pyvista/pull/6541?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 91.45%. Comparing base [(`010b33a`)](https://app.codecov.io/gh/pyvista/pyvista/commit/010b33a3d420222c2223293a48110e8f699cc597?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista) to head [(`dbe8907`)](https://app.codecov.io/gh/pyvista/pyvista/commit/dbe8907bb7af92d139c30b0ebbfe5ba2b7147fcb?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pyvista).
> :exclamation: There is a different number of reports uploaded between BASE (010b33a) and HEAD (dbe8907). Click for more details.
>
> <details><summary>HEAD has 11 uploads less than BASE</summary>
>
>| Flag | BASE (010b33a) | HEAD (dbe8907) |
>|------|------|------|
>||12|1|
></details>
<details><summary>Additional details and impacted files</summary>
```diff
@@ Coverage Diff @@
## main #6541 +/- ##
==========================================
- Coverage 97.34% 91.45% -5.90%
==========================================
Files 143 143
Lines 27358 27360 +2
==========================================
- Hits 26632 25022 -1610
- Misses 726 2338 +1612
```
</details>
pyvista-bot: <!-- NETLIFY DEPLOY COMMENT GENERATED BY ACTIONS_NETLIFY - APP ID SHA256: d3574f413fe51079937885070831e027ca5b10f7facb9e609494e0a481a31f79 -->
🚀 Deployed on https://66c3a4b560349130f4d4eec8--pyvista-dev.netlify.app | diff --git a/pyvista/core/dataset.py b/pyvista/core/dataset.py
index 4b61fe46..606bb01d 100644
--- a/pyvista/core/dataset.py
+++ b/pyvista/core/dataset.py
@@ -2240,6 +2240,10 @@ class DataSet(DataSetFilters, DataObject):
def format_array(name, arr, field):
"""Format array information for printing (internal helper)."""
+ if isinstance(arr, str):
+ # Convert string scalar into a numpy array. Otherwise, get_data_range
+ # will treat the string as an array name, not an array value.
+ arr = np.array(arr)
dl, dh = self.get_data_range(arr)
dl = pyvista.FLOAT_FORMAT.format(dl)
dh = pyvista.FLOAT_FORMAT.format(dh)
| `add_field_data` breaks `_repr_html_`
### Describe the bug, what's wrong, and what you expected.
Adding field data (e.g., through ``user_dict`` in my case) breaks the ``_repr_html_()`` method.
### Steps to reproduce the bug.
```python
import pyvista
grid = pyvista.Dodecahedron()
grid._repr_html_() # this works
grid.add_field_data("foo", "bar")
grid._repr_html_()
```
The last line gives the error:
```python
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File c:\Users\keurf\Git\pyvista\bug.py:12
     10 grid._repr_html_()  # this works
     11 grid.add_field_data("foo", "bar")
---> 12 grid._repr_html_()

File c:\Users\keurf\Git\pyvista\pyvista\core\dataset.py:2268, in DataSet._repr_html_(self)
   2266     fmt += format_array(key, arr, 'Cells')
   2267 for key, arr in self.field_data.items():
-> 2268     fmt += format_array(key, arr, 'Fields')
   2270 fmt += "</table>\n"
   2271 fmt += "\n"

File c:\Users\keurf\Git\pyvista\pyvista\core\dataset.py:2255, in DataSet._repr_html_.<locals>.format_array(name, arr, field)
   2253 def format_array(name, arr, field):
   2254     """Format array information for printing (internal helper)."""
-> 2255     dl, dh = self.get_data_range(arr)
   2256     dl = pyvista.FLOAT_FORMAT.format(dl)
   2257     dh = pyvista.FLOAT_FORMAT.format(dh)

File c:\Users\keurf\Git\pyvista\pyvista\core\dataset.py:888, in DataSet.get_data_range(self, arr_var, preference)
    886 if isinstance(arr_var, str):
    887     name = arr_var
--> 888     arr = get_array(self, name, preference=preference, err=True)
    889 else:
    890     arr = arr_var

File c:\Users\keurf\Git\pyvista\pyvista\core\utilities\arrays.py:309, in get_array(mesh, name, preference, err)
    307     return farr
    308 elif err:
--> 309     raise KeyError(f'Data array ({name}) not present in this dataset.')
    310 return None
KeyError: 'Data array (foo) not present in this dataset.'
```
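For context, ``format_array`` hands the raw field value to ``get_data_range``, which treats any ``str`` argument as an array *name* (see the frame at ``dataset.py:888`` above). A simplified sketch of that dispatch, not a verbatim copy of the pyvista source:
```python
# Simplified sketch of DataSet.get_data_range's type dispatch (not the actual implementation).
def get_data_range_sketch(mesh, arr_var):
    if isinstance(arr_var, str):
        # Interpreted as an array *name*, so a string field value such as "foo"
        # triggers a lookup for an array called "foo" and raises KeyError.
        arr = mesh.get_array(arr_var)
    else:
        arr = arr_var
    return arr.min(), arr.max()
```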
### System Information
```shell
--------------------------------------------------------------------------------
Date: Mon Aug 19 18:51:12 2024 Romance Daylight Time
OS : Windows (11 10.0.26100 SP0 Multiprocessor Free)
CPU(s) : 16
Machine : AMD64
Architecture : 64bit
RAM : 15.7 GiB
Environment : Jupyter
GPU Vendor : Intel
GPU Renderer : Intel(R) Iris(R) Xe Graphics
GPU Version : 4.5.0 - Build 31.0.101.5592
MathText Support : False
Python 3.12.4 | packaged by Anaconda, Inc. | (main, Jun 18 2024, 15:03:56)
[MSC v.1929 64 bit (AMD64)]
pyvista : 0.45.dev0
vtk : 9.3.1
numpy : 1.26.4
matplotlib : 3.9.1.post1
scooby : 0.10.0
pooch : 1.8.2
pillow : 10.4.0
imageio : 2.34.2
PyQt5 : 5.15.10
IPython : 8.26.0
colorcet : 3.1.0
cmocean : 4.0.3
ipywidgets : 8.1.3
scipy : 1.14.0
tqdm : 4.66.5
meshio : 5.3.5
jupyterlab : 4.0.11
pytest_pyvista : 0.1.8
trame : 3.6.3
trame_client : 3.2.1
trame_server : 3.0.3
trame_vtk : 2.8.10
trame_vuetify : 2.6.2
nest_asyncio : 1.6.0
Intel(R) oneAPI Math Kernel Library Version 2023.1-Product Build 20230303
for Intel(R) 64 architecture applications
--------------------------------------------------------------------------------
```
### Screenshots
_No response_ | pyvista/pyvista | diff --git a/tests/core/test_dataset.py b/tests/core/test_dataset.py
index b08059a0..8a25d141 100644
--- a/tests/core/test_dataset.py
+++ b/tests/core/test_dataset.py
@@ -527,6 +527,13 @@ def test_html_repr(grid):
assert grid._repr_html_() is not None
+def test_html_repr_string_scalar(grid):
+ array_data = "data"
+ array_name = "name"
+ grid.add_field_data(array_data, array_name)
+ assert grid._repr_html_() is not None
+
+
@pytest.mark.parametrize('html', [True, False])
@pytest.mark.parametrize('display', [True, False])
def test_print_repr(grid, display, html):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.44 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "environment.yml",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs @ file:///home/conda/feedstock_root/build_artifacts/aiohappyeyeballs_1741775197943/work
aiohttp @ file:///home/conda/feedstock_root/build_artifacts/aiohttp_1742268596946/work
aiosignal @ file:///home/conda/feedstock_root/build_artifacts/aiosignal_1734342155601/work
anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
argon2-cffi @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi_1733311059102/work
argon2-cffi-bindings @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi-bindings_1725356560642/work
arrow @ file:///home/conda/feedstock_root/build_artifacts/arrow_1733584251875/work
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
async-lru @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_async-lru_1742153708/work
async-timeout @ file:///home/conda/feedstock_root/build_artifacts/async-timeout_1733235340728/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
cached-property @ file:///home/conda/feedstock_root/build_artifacts/cached_property_1615209429212/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725571112467/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400455427/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
cmocean @ file:///home/conda/feedstock_root/build_artifacts/cmocean_1734343940570/work
codecov @ file:///home/conda/feedstock_root/build_artifacts/codecov_1734975286850/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
colorcet @ file:///home/conda/feedstock_root/build_artifacts/colorcet_1734007314889/work
colorspacious @ file:///home/conda/feedstock_root/build_artifacts/colorspacious_1734341944815/work
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1727293517607/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381224823/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1741148409996/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
execnet @ file:///home/conda/feedstock_root/build_artifacts/execnet_1733230988954/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1733569351617/work
fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fqdn @ file:///home/conda/feedstock_root/build_artifacts/fqdn_1733327382592/work/dist
frozenlist @ file:///home/conda/feedstock_root/build_artifacts/frozenlist_1737645236190/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
h5py @ file:///home/conda/feedstock_root/build_artifacts/h5py_1739952287730/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1731707562/work
httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
hypothesis @ file:///home/conda/feedstock_root/build_artifacts/hypothesis_1742900877539/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imageio @ file:///home/conda/feedstock_root/build_artifacts/imageio_1738273805233/work
imageio-ffmpeg @ file:///home/conda/feedstock_root/build_artifacts/imageio-ffmpeg_1737102630951/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1701831663892/work
isoduration @ file:///home/conda/feedstock_root/build_artifacts/isoduration_1733493628631/work/dist
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
json5 @ file:///home/conda/feedstock_root/build_artifacts/json5_1733272076743/work
jsonpointer @ file:///home/conda/feedstock_root/build_artifacts/jsonpointer_1725302957584/work
jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
jsonschema-specifications @ file:///tmp/tmpk0f344m9/src
jupyter-events @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyter_events_1738765986/work
jupyter-lsp @ file:///home/conda/feedstock_root/build_artifacts/jupyter-lsp-meta_1733492907176/work/jupyter-lsp
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
jupyter_server @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_1734702637701/work
jupyter_server_terminals @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_terminals_1733427956852/work
jupyterlab @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_1741964057182/work
jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
jupyterlab_server @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_server_1733599573484/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1725459266648/work
loguru @ file:///home/conda/feedstock_root/build_artifacts/loguru_1725349754278/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.9.4
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
meshio @ file:///home/conda/feedstock_root/build_artifacts/meshio_1706720595231/work
mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
more-itertools @ file:///home/conda/feedstock_root/build_artifacts/more-itertools_1736883817510/work
msgpack @ file:///home/conda/feedstock_root/build_artifacts/msgpack-python_1725975012026/work
multidict @ file:///home/conda/feedstock_root/build_artifacts/multidict_1742308123521/work
munkres==1.1.4
nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253338730/work
notebook_shim @ file:///home/conda/feedstock_root/build_artifacts/notebook-shim_1733408315203/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1707225342954/work/dist/numpy-1.26.4-cp39-cp39-linux_x86_64.whl#sha256=c799942b5898f6e6c60264d1663a6469a475290e758c654aeeb78e2596463abd
overrides @ file:///home/conda/feedstock_root/build_artifacts/overrides_1734587627321/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929703139/work
pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pooch @ file:///home/conda/feedstock_root/build_artifacts/pooch_1733421311631/work
prometheus_client @ file:///home/conda/feedstock_root/build_artifacts/prometheus_client_1733327310477/work
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1737453357274/work
propcache @ file:///home/conda/feedstock_root/build_artifacts/propcache_1737635528546/work
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663125313/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pyembree @ file:///home/conda/feedstock_root/build_artifacts/pyembree_1718702851992/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
PySide6==6.8.3
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-asyncio==0.26.0
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
pytest-memprof==0.2.0
pytest-mock==3.14.0
pytest-xdist @ file:///home/conda/feedstock_root/build_artifacts/pytest-xdist_1733240780199/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
python-json-logger @ file:///home/conda/feedstock_root/build_artifacts/python-json-logger_1677079630776/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1742920838005/work
-e git+https://github.com/pyvista/pyvista.git@89b855e37b5971ee728bf5150fae8f118997c068#egg=pyvista
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1741805177758/work
referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
rfc3339_validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3339-validator_1733599910982/work
rfc3986-validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3986-validator_1598024191506/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037693/work
rtree @ file:///home/conda/feedstock_root/build_artifacts/rtree_1741378561624/work
scooby @ file:///home/conda/feedstock_root/build_artifacts/scooby_1734299019922/work
Send2Trash @ file:///home/conda/feedstock_root/build_artifacts/send2trash_1733322040660/work
shiboken6==6.8.3
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
sortedcontainers @ file:///home/conda/feedstock_root/build_artifacts/sortedcontainers_1738440353519/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
terminado @ file:///home/conda/feedstock_root/build_artifacts/terminado_1710262609923/work
tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615921868/work
tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
trame @ file:///home/conda/feedstock_root/build_artifacts/trame_1741413642518/work
trame-client @ file:///home/conda/feedstock_root/build_artifacts/trame-client_1742874331306/work
trame-server @ file:///home/conda/feedstock_root/build_artifacts/trame-server_1741638011360/work
trame-vtk @ file:///home/conda/feedstock_root/build_artifacts/trame-vtk_1739145199105/work
trame-vuetify @ file:///home/conda/feedstock_root/build_artifacts/trame-vuetify_1743263303319/work
trimesh @ file:///home/conda/feedstock_root/build_artifacts/trimesh_1743289679380/work
types-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/types-python-dateutil_1733612335562/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
typing_utils @ file:///home/conda/feedstock_root/build_artifacts/typing_utils_1733331286120/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692503055/work
uri-template @ file:///home/conda/feedstock_root/build_artifacts/uri-template_1733323593477/work/dist
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
vtk==9.3.1
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
webcolors @ file:///home/conda/feedstock_root/build_artifacts/webcolors_1733359735138/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
wslink @ file:///home/conda/feedstock_root/build_artifacts/wslink_1742768409749/work
yarl @ file:///home/conda/feedstock_root/build_artifacts/yarl_1737575777699/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: pyvista
channels:
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- aiohappyeyeballs=2.6.1=pyhd8ed1ab_0
- aiohttp=3.11.14=py39h9399b63_0
- aiosignal=1.3.2=pyhd8ed1ab_0
- alsa-lib=1.2.13=hb9d3cd8_0
- anyio=4.9.0=pyh29332c3_0
- aom=3.9.1=hac33072_0
- argon2-cffi=23.1.0=pyhd8ed1ab_1
- argon2-cffi-bindings=21.2.0=py39h8cd3c5a_5
- arrow=1.3.0=pyhd8ed1ab_1
- asttokens=3.0.0=pyhd8ed1ab_1
- async-lru=2.0.5=pyh29332c3_0
- async-timeout=5.0.1=pyhd8ed1ab_1
- attr=2.5.1=h166bdaf_1
- attrs=25.3.0=pyh71513ae_0
- babel=2.17.0=pyhd8ed1ab_0
- beautifulsoup4=4.13.3=pyha770c72_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=he440d0b_1
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py39hf88036b_2
- bzip2=1.0.8=h4bc722e_7
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.1.31=hbcca054_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cairo=1.18.4=h3394656_0
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.17.1=py39h15c3d72_0
- cftime=1.6.4=py39hf3d9206_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- cmocean=4.0.3=pyhd8ed1ab_1
- codecov=2.1.13=pyhd8ed1ab_1
- colorama=0.4.6=pyhd8ed1ab_1
- colorcet=3.1.0=pyhd8ed1ab_1
- colorspacious=1.1.2=pyhecae5ae_1
- comm=0.2.2=pyhd8ed1ab_1
- contourpy=1.3.0=py39h74842e3_2
- coverage=7.8.0=py39h9399b63_0
- cycler=0.12.1=pyhd8ed1ab_1
- cyrus-sasl=2.1.27=h54b06d7_7
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- debugpy=1.8.13=py39hf88036b_0
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- double-conversion=3.3.1=h5888daf_0
- embree=2.17.7=ha770c72_3
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- execnet=2.1.1=pyhd8ed1ab_1
- executing=2.1.0=pyhd8ed1ab_1
- expat=2.6.4=h5888daf_0
- ffmpeg=7.1.1=gpl_h0b79d52_704
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py39h9399b63_0
- fqdn=1.5.1=pyhd8ed1ab_1
- freetype=2.13.3=h48d6fc4_0
- fribidi=1.0.10=h36c2ea0_0
- frozenlist=1.5.0=py39h9399b63_1
- gdk-pixbuf=2.42.12=hb9ae30d_0
- gettext=0.23.1=h5888daf_0
- gettext-tools=0.23.1=h5888daf_0
- gl2ps=1.4.2=hae5d5c5_1
- glew=2.1.0=h9c3ff4c_2
- gmp=6.3.0=hac33072_2
- graphite2=1.3.13=h59595ed_1003
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- h5py=3.13.0=nompi_py39h30a5a8d_100
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.3=nompi_h2d575fe_109
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.7=pyh29332c3_1
- httpx=0.28.1=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- hypothesis=6.130.4=pyha770c72_0
- icu=75.1=he02047a_0
- idna=3.10=pyhd8ed1ab_1
- imageio=2.37.0=pyhfb79c49_0
- imageio-ffmpeg=0.6.0=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib-resources=6.5.2=pyhd8ed1ab_0
- importlib_metadata=8.6.1=hd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- ipykernel=6.29.5=pyh3099207_0
- ipython=8.18.1=pyh707e725_3
- isoduration=20.11.0=pyhd8ed1ab_1
- jack=1.9.22=h7c63dc7_2
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- json5=0.10.0=pyhd8ed1ab_1
- jsoncpp=1.9.6=hf42df4d_1
- jsonpointer=3.0.0=py39hf3d152e_1
- jsonschema=4.23.0=pyhd8ed1ab_1
- jsonschema-specifications=2024.10.1=pyhd8ed1ab_1
- jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_1
- jupyter-lsp=2.2.5=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_core=5.7.2=pyh31011fe_1
- jupyter_events=0.12.0=pyh29332c3_0
- jupyter_server=2.15.0=pyhd8ed1ab_0
- jupyter_server_terminals=0.5.3=pyhd8ed1ab_1
- jupyterlab=4.3.6=pyhd8ed1ab_0
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- jupyterlab_server=2.27.3=pyhd8ed1ab_1
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.7=py39h74842e3_0
- krb5=1.21.3=h659f571_0
- lame=3.100=h166bdaf_1003
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.43=h712a8e2_4
- lerc=4.0.0=h27087fc_0
- level-zero=1.21.6=h84d6215_0
- libabseil=20250127.1=cxx17_hbbce691_0
- libaec=1.1.3=h59595ed_0
- libasprintf=0.23.1=h8e693c7_0
- libasprintf-devel=0.23.1=h8e693c7_0
- libass=0.17.3=h52826cd_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcap=2.75=h39aace5_0
- libcblas=3.9.0=31_he106b2a_openblas
- libclang-cpp20.1=20.1.1=default_hb5137d0_0
- libclang13=20.1.1=default_h9c6a7e4_0
- libcups=2.3.3=h4637d8d_4
- libcurl=8.12.1=h332b0f4_0
- libdb=6.2.32=h9c3ff4c_0
- libdeflate=1.23=h4ddbbb0_0
- libdrm=2.4.124=hb9d3cd8_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libegl=1.7.0=ha4b6fd6_2
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.6=h2dba641_0
- libflac=1.4.3=h59595ed_0
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgcrypt-lib=1.11.0=hb9d3cd8_2
- libgettextpo=0.23.1=h5888daf_0
- libgettextpo-devel=0.23.1=h5888daf_0
- libgfortran=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgl=1.7.0=ha4b6fd6_2
- libglib=2.84.0=h2ff4ddf_0
- libglu=9.0.3=h03adeef_0
- libglvnd=1.7.0=ha4b6fd6_2
- libglx=1.7.0=ha4b6fd6_2
- libgomp=14.2.0=h767d61c_2
- libgpg-error=1.51=hbd13f7d_1
- libhwloc=2.11.2=default_h0d58e46_1001
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- liblapack=3.9.0=31_h7ac8fdf_openblas
- libllvm20=20.1.1=ha7bfdaf_0
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h00e09a9_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libntlm=1.8=hb9d3cd8_0
- libogg=1.3.5=h4ab18f5_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libopengl=1.7.0=ha4b6fd6_2
- libopenvino=2025.0.0=hdc3f47d_3
- libopenvino-auto-batch-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-auto-plugin=2025.0.0=h4d9b6c2_3
- libopenvino-hetero-plugin=2025.0.0=h981d57b_3
- libopenvino-intel-cpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-gpu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-intel-npu-plugin=2025.0.0=hdc3f47d_3
- libopenvino-ir-frontend=2025.0.0=h981d57b_3
- libopenvino-onnx-frontend=2025.0.0=h0e684df_3
- libopenvino-paddle-frontend=2025.0.0=h0e684df_3
- libopenvino-pytorch-frontend=2025.0.0=h5888daf_3
- libopenvino-tensorflow-frontend=2025.0.0=h684f15b_3
- libopenvino-tensorflow-lite-frontend=2025.0.0=h5888daf_3
- libopus=1.3.1=h7f98852_1
- libpciaccess=0.18=hd590300_0
- libpng=1.6.47=h943b412_0
- libpq=17.4=h27ae623_0
- libprotobuf=5.29.3=h501fc15_0
- librsvg=2.58.4=he92a37e_3
- libsndfile=1.2.2=hc60ed4a_1
- libsodium=1.0.20=h4ab18f5_0
- libspatialindex=2.1.0=he57a185_0
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libsystemd0=257.4=h4e0b6ca_1
- libtheora=1.1.1=h4ab18f5_1006
- libtiff=4.7.0=hd9ff511_3
- libudev1=257.4=hbe16f8c_1
- libunwind=1.6.2=h9c3ff4c_0
- liburing=2.9=h84d6215_0
- libusb=1.0.28=hb9d3cd8_0
- libuuid=2.38.1=h0b41bf4_0
- libva=2.22.0=h4f16b4b_2
- libvorbis=1.3.7=h9c3ff4c_0
- libvpx=1.14.1=hac33072_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libxslt=1.1.39=h76b75d6_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- loguru=0.7.2=py39hf3d152e_2
- lz4-c=1.10.0=h5888daf_1
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py39h9399b63_1
- matplotlib=3.9.4=py39hf3d152e_0
- matplotlib-base=3.9.4=py39h16632d1_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- meshio=5.3.5=pyhd8ed1ab_0
- mistune=3.1.3=pyh29332c3_0
- more-itertools=10.6.0=pyhd8ed1ab_0
- mpg123=1.32.9=hc50e24c_0
- msgpack-python=1.1.0=py39h74842e3_0
- multidict=6.2.0=py39h9399b63_0
- munkres=1.1.4=pyh9f0ad1d_0
- mysql-common=9.0.1=h266115a_5
- mysql-libs=9.0.1=he0572af_5
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbformat=5.10.4=pyhd8ed1ab_1
- ncurses=6.5=h2d0b736_3
- nest-asyncio=1.6.0=pyhd8ed1ab_1
- netcdf4=1.7.2=nompi_py39h9d02bfe_101
- nlohmann_json=3.11.3=he02047a_1
- notebook-shim=0.2.4=pyhd8ed1ab_1
- numpy=1.26.4=py39h474f0d3_0
- ocl-icd=2.3.2=hb9d3cd8_2
- opencl-headers=2024.10.24=h5888daf_0
- openh264=2.6.0=hc22cd8d_0
- openjpeg=2.5.3=h5fbd93e_0
- openldap=2.6.9=he970967_0
- openssl=3.4.1=h7b32b05_0
- overrides=7.7.0=pyhd8ed1ab_1
- packaging=24.2=pyhd8ed1ab_2
- pandocfilters=1.5.0=pyhd8ed1ab_0
- pango=1.56.3=h9ac818e_1
- parso=0.8.4=pyhd8ed1ab_1
- pcre2=10.44=hba22ea6_2
- pexpect=4.9.0=pyhd8ed1ab_1
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.1.0=py39h15c0740_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pooch=1.8.2=pyhd8ed1ab_1
- proj=9.5.1=h0054346_0
- prometheus_client=0.21.1=pyhd8ed1ab_0
- prompt-toolkit=3.0.50=pyha770c72_0
- propcache=0.2.1=py39h9399b63_1
- psutil=7.0.0=py39h8cd3c5a_0
- pthread-stubs=0.4=hb9d3cd8_1002
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pugixml=1.15=h3f63f65_0
- pulseaudio-client=17.0=hac146a9_1
- pure_eval=0.2.3=pyhd8ed1ab_1
- pycparser=2.22=pyh29332c3_1
- pyembree=0.1.6=py39hddac248_4
- pygments=2.19.1=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyside6=6.8.3=py39h0383914_0
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- pytest-xdist=3.6.1=pyhd8ed1ab_1
- python=3.9.21=h9c0c6dc_1_cpython
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python_abi=3.9=5_cp39
- pytz=2025.2=pyhd8ed1ab_0
- pyyaml=6.0.2=py39h9399b63_2
- pyzmq=26.3.0=py39h4e4fb57_0
- qhull=2020.2=h434a139_5
- qt6-main=6.8.3=h6441bc3_1
- readline=8.2=h8c095d6_2
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.3=pyhd8ed1ab_1
- rfc3339-validator=0.1.4=pyhd8ed1ab_1
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rich=14.0.0=pyh29332c3_0
- rpds-py=0.24.0=py39h3506688_0
- rtree=1.4.0=pyh11ca60a_1
- scooby=0.10.0=pyhd8ed1ab_1
- sdl2=2.32.50=h9b8e6db_1
- sdl3=3.2.8=h3083f51_0
- send2trash=1.8.3=pyh0d859eb_1
- setuptools=75.8.2=pyhff2d567_0
- six=1.17.0=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- sniffio=1.3.1=pyhd8ed1ab_1
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- soupsieve=2.5=pyhd8ed1ab_1
- sqlite=3.49.1=h9eae976_2
- stack_data=0.6.3=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tbb=2022.0.0=hceb3a55_0
- terminado=0.18.1=pyh0d859eb_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tornado=6.4.2=py39h8cd3c5a_0
- tqdm=4.67.1=pyhd8ed1ab_1
- traitlets=5.14.3=pyhd8ed1ab_1
- trame=3.8.1=pyhd8ed1ab_0
- trame-client=3.6.1=pyhd8ed1ab_0
- trame-server=3.4.0=pyhd8ed1ab_0
- trame-vtk=2.8.15=pyhb419c8b_0
- trame-vuetify=2.9.0=pyhd8ed1ab_0
- trimesh=4.6.6=pyh7b2049a_0
- types-python-dateutil=2.9.0.20241206=pyhd8ed1ab_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing_extensions=4.13.0=pyh29332c3_1
- typing_utils=0.1.0=pyhd8ed1ab_1
- tzdata=2025b=h78e105d_0
- unicodedata2=16.0.0=py39h8cd3c5a_0
- uri-template=1.3.0=pyhd8ed1ab_1
- urllib3=2.3.0=pyhd8ed1ab_0
- utfcpp=4.0.6=h005c6e1_0
- vtk=9.3.1=qt_py39h71fb23e_216
- vtk-base=9.3.1=qt_py39habd23de_216
- vtk-io-ffmpeg=9.3.1=qt_py39h71fb23e_216
- wayland=1.23.1=h3e06ad9_0
- wayland-protocols=1.42=hd8ed1ab_0
- wcwidth=0.2.13=pyhd8ed1ab_1
- webcolors=24.11.1=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- wslink=2.3.3=pyhd8ed1ab_0
- x264=1!164.3095=h166bdaf_2
- x265=3.5=h924138e_3
- xcb-util=0.4.1=hb711507_2
- xcb-util-cursor=0.1.5=hb9d3cd8_0
- xcb-util-image=0.4.0=hb711507_2
- xcb-util-keysyms=0.4.1=hb711507_0
- xcb-util-renderutil=0.3.10=hb711507_0
- xcb-util-wm=0.4.2=hb711507_0
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxscrnsaver=1.2.4=hb9d3cd8_0
- xorg-libxt=1.3.1=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xorg-libxxf86vm=1.1.6=hb9d3cd8_0
- yaml=0.2.5=h7f98852_2
- yarl=1.18.3=py39h9399b63_1
- zeromq=4.3.5=h3b0a872_7
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py39h8cd3c5a_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- pytest-asyncio==0.26.0
- pytest-memprof==0.2.0
- pytest-mock==3.14.0
- pyvista==0.45.dev0
prefix: /opt/conda/envs/pyvista
| [
"tests/core/test_dataset.py::test_html_repr_string_scalar"
] | [] | [
"tests/core/test_dataset.py::test_invalid_copy_from",
"tests/core/test_dataset.py::test_memory_address",
"tests/core/test_dataset.py::test_point_data",
"tests/core/test_dataset.py::test_point_data_bad_value",
"tests/core/test_dataset.py::test_ipython_key_completions",
"tests/core/test_dataset.py::test_cell_data",
"tests/core/test_dataset.py::test_cell_array_range",
"tests/core/test_dataset.py::test_cell_data_bad_value",
"tests/core/test_dataset.py::test_point_cell_data_single_scalar_no_exception_raised",
"tests/core/test_dataset.py::test_field_data",
"tests/core/test_dataset.py::test_field_data_string",
"tests/core/test_dataset.py::test_add_field_data[field0]",
"tests/core/test_dataset.py::test_add_field_data[field1]",
"tests/core/test_dataset.py::test_modify_field_data",
"tests/core/test_dataset.py::test_active_scalars_cell",
"tests/core/test_dataset.py::test_field_data_bad_value",
"tests/core/test_dataset.py::test_copy",
"tests/core/test_dataset.py::test_copy_metadata",
"tests/core/test_dataset.py::test_transform_should_match_vtk_transformation",
"tests/core/test_dataset.py::test_transform_should_match_vtk_transformation_non_homogeneous",
"tests/core/test_dataset.py::test_translate_should_not_fail_given_none",
"tests/core/test_dataset.py::test_set_points",
"tests/core/test_dataset.py::test_translate_should_fail_bad_points_or_transform",
"tests/core/test_dataset.py::test_transform_should_fail_given_wrong_numpy_shape",
"tests/core/test_dataset.py::test_translate_should_translate_grid[axis_amounts0]",
"tests/core/test_dataset.py::test_translate_should_translate_grid[axis_amounts1]",
"tests/core/test_dataset.py::test_translate_should_translate_grid[axis_amounts2]",
"tests/core/test_dataset.py::test_rotate_should_match_vtk_rotation[x]",
"tests/core/test_dataset.py::test_rotate_should_match_vtk_rotation[y]",
"tests/core/test_dataset.py::test_rotate_should_match_vtk_rotation[z]",
"tests/core/test_dataset.py::test_rotate_90_degrees_four_times_should_return_original_geometry",
"tests/core/test_dataset.py::test_rotate_180_degrees_two_times_should_return_original_geometry",
"tests/core/test_dataset.py::test_rotate_vector_90_degrees_should_not_distort_geometry",
"tests/core/test_dataset.py::test_make_points_double",
"tests/core/test_dataset.py::test_invalid_points",
"tests/core/test_dataset.py::test_points_np_bool",
"tests/core/test_dataset.py::test_cells_np_bool",
"tests/core/test_dataset.py::test_field_np_bool",
"tests/core/test_dataset.py::test_cells_uint8",
"tests/core/test_dataset.py::test_points_uint8",
"tests/core/test_dataset.py::test_field_uint8",
"tests/core/test_dataset.py::test_bitarray_points",
"tests/core/test_dataset.py::test_bitarray_cells",
"tests/core/test_dataset.py::test_bitarray_field",
"tests/core/test_dataset.py::test_html_repr",
"tests/core/test_dataset.py::test_print_repr[True-True]",
"tests/core/test_dataset.py::test_print_repr[True-False]",
"tests/core/test_dataset.py::test_print_repr[False-True]",
"tests/core/test_dataset.py::test_print_repr[False-False]",
"tests/core/test_dataset.py::test_invalid_vector",
"tests/core/test_dataset.py::test_no_texture_coordinates",
"tests/core/test_dataset.py::test_no_arrows",
"tests/core/test_dataset.py::test_arrows",
"tests/core/test_dataset.py::test_set_active_vectors",
"tests/core/test_dataset.py::test_set_active_tensors",
"tests/core/test_dataset.py::test_set_texture_coordinates",
"tests/core/test_dataset.py::test_set_active_vectors_fail",
"tests/core/test_dataset.py::test_set_active_tensors_fail",
"tests/core/test_dataset.py::test_set_active_scalars",
"tests/core/test_dataset.py::test_set_active_scalars_name",
"tests/core/test_dataset.py::test_rename_array_point",
"tests/core/test_dataset.py::test_rename_array_cell",
"tests/core/test_dataset.py::test_rename_array_field",
"tests/core/test_dataset.py::test_rename_array_doesnt_delete",
"tests/core/test_dataset.py::test_change_name_fail",
"tests/core/test_dataset.py::test_get_cell_array_fail",
"tests/core/test_dataset.py::test_get_item",
"tests/core/test_dataset.py::test_set_item",
"tests/core/test_dataset.py::test_set_item_range",
"tests/core/test_dataset.py::test_str",
"tests/core/test_dataset.py::test_set_cell_vectors",
"tests/core/test_dataset.py::test_axis_rotation_invalid",
"tests/core/test_dataset.py::test_axis_rotation_not_inplace",
"tests/core/test_dataset.py::test_bad_instantiation",
"tests/core/test_dataset.py::test_string_arrays",
"tests/core/test_dataset.py::test_clear_data",
"tests/core/test_dataset.py::test_scalars_dict_update",
"tests/core/test_dataset.py::test_handle_array_with_null_name",
"tests/core/test_dataset.py::test_add_point_array_list",
"tests/core/test_dataset.py::test_shallow_copy_back_propagation",
"tests/core/test_dataset.py::test_find_closest_point",
"tests/core/test_dataset.py::test_find_closest_cell",
"tests/core/test_dataset.py::test_find_closest_cells",
"tests/core/test_dataset.py::test_find_closest_cell_surface_point",
"tests/core/test_dataset.py::test_find_containing_cell",
"tests/core/test_dataset.py::test_find_containing_cells",
"tests/core/test_dataset.py::test_find_cells_along_line",
"tests/core/test_dataset.py::test_find_cells_intersecting_line",
"tests/core/test_dataset.py::test_find_cells_within_bounds",
"tests/core/test_dataset.py::test_setting_points_by_different_types",
"tests/core/test_dataset.py::test_empty_points",
"tests/core/test_dataset.py::test_no_active",
"tests/core/test_dataset.py::test_get_data_range",
"tests/core/test_dataset.py::test_actual_memory_size",
"tests/core/test_dataset.py::test_copy_structure",
"tests/core/test_dataset.py::test_copy_attributes",
"tests/core/test_dataset.py::test_point_is_inside_cell",
"tests/core/test_dataset.py::test_serialize_deserialize[xml]",
"tests/core/test_dataset.py::test_serialize_deserialize[legacy]",
"tests/core/test_dataset.py::test_multiprocessing[xml]",
"tests/core/test_dataset.py::test_multiprocessing[legacy]",
"tests/core/test_dataset.py::test_rotations_should_match_by_a_360_degree_difference",
"tests/core/test_dataset.py::test_rotate_x",
"tests/core/test_dataset.py::test_rotate_y",
"tests/core/test_dataset.py::test_rotate_z",
"tests/core/test_dataset.py::test_rotate_vector",
"tests/core/test_dataset.py::test_rotate",
"tests/core/test_dataset.py::test_transform_integers",
"tests/core/test_dataset.py::test_scale",
"tests/core/test_dataset.py::test_flip_x",
"tests/core/test_dataset.py::test_flip_y",
"tests/core/test_dataset.py::test_flip_z",
"tests/core/test_dataset.py::test_flip_normal",
"tests/core/test_dataset.py::test_active_normals",
"tests/core/test_dataset.py::test_cast_to_pointset",
"tests/core/test_dataset.py::test_cast_to_pointset_implicit",
"tests/core/test_dataset.py::test_cast_to_poly_points_implicit",
"tests/core/test_dataset.py::test_partition",
"tests/core/test_dataset.py::test_explode",
"tests/core/test_dataset.py::test_separate_cells",
"tests/core/test_dataset.py::test_volume_area",
"tests/core/test_dataset.py::test_raises_cell_neighbors_ExplicitStructuredGrid",
"tests/core/test_dataset.py::test_raises_point_neighbors_ind_overflow",
"tests/core/test_dataset.py::test_raises_cell_neighbors_connections",
"tests/core/test_dataset.py::test_point_cell_ids[0-PolyData]",
"tests/core/test_dataset.py::test_point_cell_ids[0-StructuredGrid]",
"tests/core/test_dataset.py::test_point_cell_ids[0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_cell_ids[0-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_cell_ids[0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_cell_ids[0-ImageData]",
"tests/core/test_dataset.py::test_point_cell_ids[0-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_point_cell_ids[1-PolyData]",
"tests/core/test_dataset.py::test_point_cell_ids[1-StructuredGrid]",
"tests/core/test_dataset.py::test_point_cell_ids[1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_cell_ids[1-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_cell_ids[1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_cell_ids[1-ImageData]",
"tests/core/test_dataset.py::test_point_cell_ids[1-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-PolyData]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[0-ImageData]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-PolyData]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_point_neighbors_ids[1-ImageData]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-PolyData]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[0-ImageData]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-PolyData]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_edge_neighbors_ids[1-ImageData]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[0-ImageData]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_face_neighbors_ids[1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=1-i0=1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=points-n_levels=3-i0=1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=1-i0=1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-PolyData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-StructuredGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=edges-n_levels=3-i0=1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=1-i0=1-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=0-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=0-ImageData]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=1-RectilinearGrid]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_cell_neighbors_levels[connections=faces-n_levels=3-i0=1-ImageData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-PolyData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-StructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-ImageData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-0-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-PolyData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-StructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-ImageData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[1-1-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-PolyData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-StructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-ImageData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-0-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-PolyData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-StructuredGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-UnstructuredGrid0]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-RectilinearGrid]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-UnstructuredGrid1]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-ImageData]",
"tests/core/test_dataset.py::test_point_neighbors_levels[3-1-ExplicitStructuredGrid]",
"tests/core/test_dataset.py::test_active_t_coords_deprecated"
] | [] | MIT License | 19,347 | 198 | [
"pyvista/core/dataset.py"
] |
tobymao__sqlglot-3937 | f4c34d37c5773c37a13437c7e0e7eb27b4e98877 | 2024-08-19 20:15:25 | f4c34d37c5773c37a13437c7e0e7eb27b4e98877 | diff --git a/sqlglot/generator.py b/sqlglot/generator.py
index acd4c096..67400514 100644
--- a/sqlglot/generator.py
+++ b/sqlglot/generator.py
@@ -2909,11 +2909,13 @@ class Generator(metaclass=_Generator):
def pivotalias_sql(self, expression: exp.PivotAlias) -> str:
alias = expression.args["alias"]
+
identifier_alias = isinstance(alias, exp.Identifier)
+ literal_alias = isinstance(alias, exp.Literal)
if identifier_alias and not self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
alias.replace(exp.Literal.string(alias.output_name))
- elif not identifier_alias and self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
+ elif not identifier_alias and literal_alias and self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
alias.replace(exp.to_identifier(alias.output_name))
return self.alias_sql(expression)
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index 6171580c..fb78d1ab 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -3776,8 +3776,10 @@ class Parser(metaclass=_Parser):
this = self._parse_select_or_expression()
self._match(TokenType.ALIAS)
- alias = self._parse_field()
+ alias = self._parse_bitwise()
if alias:
+ if isinstance(alias, exp.Column) and not alias.db:
+ alias = alias.this
return self.expression(exp.PivotAlias, this=this, alias=alias)
return this
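In short, the alias after `AS` inside the `IN (...)` clause of a PIVOT/UNPIVOT is now parsed as a full expression (`_parse_bitwise`), with a bare column unwrapped back to its identifier, and the generator only coerces identifier or literal aliases. A quick way to see the net effect is the round-trip below — a sketch that mirrors the regression test added further down:

```python
from sqlglot import parse_one

sql = (
    "SELECT * FROM test UNPIVOT INCLUDE NULLS "
    "(value FOR Description IN (col AS 'PREFIX ' || CHR(38) || ' SUFFIX'))"
)

# With this patch applied, the statement parses and regenerating Oracle SQL
# reproduces the input verbatim.
assert parse_one(sql, read="oracle").sql(dialect="oracle") == sql
```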
| Failure to parse complex UNPIVOT statement with Oracle Query
**Before you file an issue**
- Make sure you specify the "read" dialect eg. `parse_one(sql, read="spark")`
- Make sure you specify the "write" dialect eg. `ast.sql(dialect="duckdb")`
- Check if the issue still exists on main
**Fully reproducible code snippet**
Please include a fully reproducible code snippet or the input sql, dialect, and expected output.
**Official Documentation**
Please include links to official SQL documentation related to your issue.
I have a very complex query that a client wrote. I am using sqlglot to parse the sql and to traverse the lineage. I cannot post the exact query as it has sensitive information. However, I was able to reproduce it with a simplified sample.
I am using sqlglot 25.14.0, which was released 5 hours before this post; however, the issue also existed in prior versions of sqlglot.
Here is the test query:
```sql
SELECT
    *
FROM
    test
UNPIVOT INCLUDE NULLS(
    value FOR Description IN (
        col AS 'PREFIX ' || CHR(38) || ' SUFFIX'
    )
)
```
I am calling:
```python
ast = parse_one(sql_stmt, read='oracle', dialect='oracle')
```
where sql_stmt is the sql I posted.
sqlglot is failing on the concatenation of strings using CHR(38).
It works if I change the SQL to the following (removing the concatenation):
```sql
SELECT
    *
FROM
    test
UNPIVOT INCLUDE NULLS(
    value FOR Description IN (
        col AS 'PREFIX & SUFFIX'
    )
)
```
Let me know if you need more information. | tobymao/sqlglot | diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 79b8a024..4a570721 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -44,6 +44,9 @@ class TestOracle(Validator):
self.validate_identity("SELECT COUNT(*) * 10 FROM orders SAMPLE (10) SEED (1)")
self.validate_identity("SELECT * FROM V$SESSION")
self.validate_identity("SELECT TO_DATE('January 15, 1989, 11:00 A.M.')")
+ self.validate_identity(
+ "SELECT * FROM test UNPIVOT INCLUDE NULLS (value FOR Description IN (col AS 'PREFIX ' || CHR(38) || ' SUFFIX'))"
+ )
self.validate_identity(
"SELECT last_name, employee_id, manager_id, LEVEL FROM employees START WITH employee_id = 100 CONNECT BY PRIOR employee_id = manager_id ORDER SIBLINGS BY last_name"
)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 25.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@f4c34d37c5773c37a13437c7e0e7eb27b4e98877#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_oracle.py::TestOracle::test_oracle"
] | [] | [
"tests/dialects/test_oracle.py::TestOracle::test_connect_by",
"tests/dialects/test_oracle.py::TestOracle::test_hints",
"tests/dialects/test_oracle.py::TestOracle::test_join_marker",
"tests/dialects/test_oracle.py::TestOracle::test_json_table",
"tests/dialects/test_oracle.py::TestOracle::test_match_recognize",
"tests/dialects/test_oracle.py::TestOracle::test_query_restrictions",
"tests/dialects/test_oracle.py::TestOracle::test_xml_table"
] | [] | MIT License | 19,349 | 401 | [
"sqlglot/generator.py",
"sqlglot/parser.py"
] |
|
enthought__okonomiyaki-472 | 46fbff1efdf5e6daea887e7fcc2faa9aff1c5a36 | 2024-08-21 11:38:36 | 46fbff1efdf5e6daea887e7fcc2faa9aff1c5a36 | diff --git a/okonomiyaki/platforms/_platform.py b/okonomiyaki/platforms/_platform.py
index f5f6e55..21a12ac 100644
--- a/okonomiyaki/platforms/_platform.py
+++ b/okonomiyaki/platforms/_platform.py
@@ -75,9 +75,10 @@ class Platform(object):
machine = attr(validator=instance_of(Arch))
"""
- The machine. This is the CPU architecture (e.g. for a 32 bits python
- running on 64 bits Intel OS, machine will be an x86_64 arch, whereas arch
- will be an 'x86' arch)
+ This is the CPU architecture of the host machine (e.g. for a
+ 32 bits python running on 64 bits Intel OS, machine will be an
+ x86_64 arch, whereas arch will be an 'x86' arch)
+
"""
@classmethod
@@ -103,6 +104,53 @@ class Platform(object):
arch = Arch.from_name(arch_string)
return _guess_platform(arch)
+ @classmethod
+ def from_dict(cls, **kw):
+ """ Create a Platfrom instace from dictionary values.
+
+ Parameters
+ ----------
+ os_kind: str, OSKind
+ The most generic OS description.
+
+ name_kind: str, NameKind
+ The most specific platform description.
+
+ family_kind: str, FamilyKind
+ The 'family' of platforms. For example, both debian and
+ ubuntu distributions share the same kind, 'debian'.
+
+ release: str
+ The release string. May be an empty string.
+ arch: str, Arch
+ Actual architecture of the Python runtime
+ machine: str, Arch
+ This is the CPU architecture of the host machine
+ (e.g. for a 32 bits python running on 64 bits Intel OS,
+ machine will be an x86_64 arch, whereas arch will be an 'x86'
+ arch)
+
+ """
+ os_kind, name_kind, family_kind, arch, machine = (
+ kw['os_kind'], kw['name_kind'], kw['family_kind'], kw['arch'], kw['machine'])
+ if isinstance(os_kind, str):
+ os_kind = OSKind[os_kind]
+ if isinstance(name_kind, str):
+ name_kind = NameKind[name_kind]
+ if isinstance(family_kind, str):
+ family_kind = FamilyKind[family_kind]
+ if isinstance(arch, str):
+ arch = Arch.from_name(arch)
+ if isinstance(machine, str):
+ machine = Arch.from_name(machine)
+ return cls(
+ os_kind=os_kind,
+ family_kind=family_kind,
+ name_kind=name_kind,
+ release=kw['release'],
+ arch=arch,
+ machine=machine)
+
@property
def family(self):
return self.family_kind.name
diff --git a/okonomiyaki/utils/misc.py b/okonomiyaki/utils/misc.py
index 89452d4..81199d4 100644
--- a/okonomiyaki/utils/misc.py
+++ b/okonomiyaki/utils/misc.py
@@ -20,7 +20,7 @@ class _AssignmentParser(ast.NodeVisitor):
return self._data
def generic_visit(self, node):
- if type(node) != ast.Module:
+ if type(node) is not ast.Module:
raise OkonomiyakiError("Unexpected expression @ line {0}".
format(node.lineno))
super(_AssignmentParser, self).generic_visit(node)
diff --git a/okonomiyaki/versions/pep440.py b/okonomiyaki/versions/pep440.py
index 8481b2c..9badf45 100644
--- a/okonomiyaki/versions/pep440.py
+++ b/okonomiyaki/versions/pep440.py
@@ -147,7 +147,7 @@ class PEP440Version(object):
return hash(self._parts)
def _ensure_compatible(self, other):
- if type(self) != type(other):
+ if type(self) != type(other): # noqa
raise TypeError('cannot compare %r and %r' % (self, other))
def _compute_string(self, epoch, nums, pre, post, dev, local):
| Make it easier to create a Platform instance from a JSON dictionary of values.
We want to make it easier to create a new Platform instance from a JSON dictionary.
At the moment, the only way to perform the operation is:
```
Platform(
os_kind=OSKind.__members__[data['os_kind']],
family_kind=FamilyKind.__members__[data['family_kind']],
name_kind=NameKind.__members__[data['name_kind']],
release=data['release'],
arch=Arch.from_name(data['arch']),
machine=Arch.from_name(data['machine']))
```
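The patch recorded alongside this issue adds a `Platform.from_dict` classmethod that accepts either the enum/`Arch` objects or their plain string names. A minimal sketch of the intended call site, with values borrowed from the new tests (the top-level import path is an assumption; the class itself lives in `okonomiyaki/platforms/_platform.py`):
```
from okonomiyaki.platforms import Platform  # import path is an assumption

data = {
    'os_kind': 'linux',
    'name_kind': 'rocky',
    'family_kind': 'rhel',
    'release': '8.9',
    'arch': 'x86',
    'machine': 'x86_64',
}
# String values are coerced to OSKind / NameKind / FamilyKind / Arch internally.
platform = Platform.from_dict(**data)
print(platform)  # "Rocky Linux 8.9 on x86_64 using x86 arch" per the new tests
```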
We can add a new constructor or update the Platform class to coerce the string values to the necessary namedtuples and classes. | enthought/okonomiyaki | diff --git a/okonomiyaki/platforms/tests/test_platform.py b/okonomiyaki/platforms/tests/test_platform.py
index 52b83c9..fb6e808 100644
--- a/okonomiyaki/platforms/tests/test_platform.py
+++ b/okonomiyaki/platforms/tests/test_platform.py
@@ -4,7 +4,7 @@ from parameterized import parameterized
from okonomiyaki.errors import OkonomiyakiError
from .._arch import X86, X86_64, ARM, ARM64
-from .._platform import Platform
+from .._platform import Platform, OSKind, NameKind, FamilyKind
from .common import (
mock_x86, mock_x86_64,
@@ -273,3 +273,32 @@ class TestPlatform(unittest.TestCase):
self.assertFalse(win32_1 == win64)
self.assertNotEqual(win32_1, None)
+
+ @parameterized.expand([
+ (OSKind.linux, NameKind.rocky, FamilyKind.rhel, X86, X86_64),
+ (OSKind.linux, NameKind.rocky, FamilyKind.rhel, X86, X86),
+ (OSKind.linux, NameKind.rocky, FamilyKind.rhel, X86, ARM64),
+ (OSKind.linux, NameKind.rocky, FamilyKind.rhel, ARM64, ARM64),
+ ('linux', 'rocky', 'rhel', 'x86', 'x86_64'),
+ ('linux', 'rocky', 'rhel', 'x86', 'x86'),
+ ('linux', 'rocky', 'rhel', 'x86', 'arm64'),
+ ('linux', 'rocky', 'rhel', 'arm64', 'arm64')])
+ def test_from_dict(self, os, name, family, arch, machine):
+ # Given
+ dictionary = {
+ 'os_kind': os,
+ 'name_kind': name,
+ 'family_kind': family,
+ 'release': '8.9',
+ 'arch': arch,
+ 'machine': machine}
+
+ # When
+ platform = Platform.from_dict(**dictionary)
+
+ # Then
+ self.assertEqual(platform.os, 'linux')
+ self.assertEqual(platform.name, 'rocky')
+ self.assertEqual(platform.family, 'rhel')
+ self.assertEqual(platform.release, '8.9')
+ self.assertEqual(str(platform), f'Rocky Linux 8.9 on {machine} using {arch} arch')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"haas",
"coverage",
"flake8",
"testfixtures",
"packaging>=20.0",
"distro",
"parameterized",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
coverage==7.8.0
distro==1.9.0
docutils==0.21.2
enum34==1.1.10
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
haas==0.9.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
-e git+https://github.com/enthought/okonomiyaki.git@46fbff1efdf5e6daea887e7fcc2faa9aff1c5a36#egg=okonomiyaki
packaging @ file:///croot/packaging_1734472117206/work
parameterized==0.9.0
pbr==6.1.1
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.13.0
pyflakes==3.3.2
pytest @ file:///croot/pytest_1738938843180/work
referencing==0.36.2
rpds-py==0.24.0
statistics==1.0.3.5
stevedore==4.1.1
testfixtures==8.3.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions==4.13.0
zipfile2==0.0.12
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- coverage==7.8.0
- distro==1.9.0
- docutils==0.21.2
- enum34==1.1.10
- flake8==7.2.0
- haas==0.9.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- parameterized==0.9.0
- pbr==6.1.1
- pycodestyle==2.13.0
- pyflakes==3.3.2
- referencing==0.36.2
- rpds-py==0.24.0
- statistics==1.0.3.5
- stevedore==4.1.1
- testfixtures==8.3.0
- typing-extensions==4.13.0
- zipfile2==0.0.12
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_4_linux",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_5_linux",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_6_linux",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_from_dict_7_linux"
] | [] | [
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_3_5_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_3_5_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_5_8_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_5_8_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_6_3_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_6_3_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_centos_6_3_invalid",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_mydistro_2_8_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_mydistro_2_8_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_10_7_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_10_7_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_12_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_12_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_12_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_osx_12_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_rocky_8_8_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_rocky_8_8_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_rocky_8_8_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_rocky_8_8_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_solaris",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_ubuntu_raring",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows10_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows10_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows10_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows10_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows11_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows11_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows11_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows11_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows7_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningPython::test_windows7_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows10_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows10_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows10_2",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows10_3",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows7_0",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatformRunningSystem::test_windows7_1",
"okonomiyaki/platforms/tests/test_platform.py::TestPlatform::test_hashing"
] | [] | BSD License | 19,361 | 1,049 | [
"okonomiyaki/platforms/_platform.py",
"okonomiyaki/utils/misc.py",
"okonomiyaki/versions/pep440.py"
] |
|
CrossGL__crosstl-63 | 49a1c56a2b186090a472094a5156129495e11e27 | 2024-08-21 11:49:10 | 36bed5871a8d102f73cfebf82c8d8495aaa89e87 | Raghav-2903: @samthakur587 I have made the necessary changes to the translator in test_lexer.py
samthakur587: Hi @Raghav-2903, which PR are you working on? 😄 I have seen your 2 PRs with the same changes. If you're not working on one of them, please close that PR.
Raghav-2903: Hi @samthakur587, yes, I was assigned issues 42-47. I see the mistake and I will close this PR.
Raghav-2903: @samthakur587 the task to add the tests for the changes in test_lexer.py is still assigned to me, right? I am working on them and will complete them soon.
| diff --git a/crosstl/src/translator/lexer.py b/crosstl/src/translator/lexer.py
index f613e17..f8e77d5 100644
--- a/crosstl/src/translator/lexer.py
+++ b/crosstl/src/translator/lexer.py
@@ -37,6 +37,8 @@ TOKENS = [
("ELSE", r"\belse\b"),
("FOR", r"\bfor\b"),
("RETURN", r"\breturn\b"),
+ ("BITWISE_SHIFT_LEFT", r"<<"),
+ ("BITWISE_SHIFT_RIGHT", r">>"),
("LESS_EQUAL", r"<="),
("GREATER_EQUAL", r">="),
("GREATER_THAN", r">"),
@@ -64,6 +66,10 @@ TOKENS = [
("EQUALS", r"="),
("QUESTION", r"\?"),
("COLON", r":"),
+ ("BITWISE_AND", r"&"),
+ ("BITWISE_OR", r"\|"),
+ ("BITWISE_XOR", r"\^"),
+ ("BITWISE_NOT", r"~"),
]
KEYWORDS = {
| Add `BITWISE_AND` operator support at the `translator` frontend
Implement the BITWISE_AND token to recognize the & operator for performing bitwise AND operations.
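As a self-contained illustration of why the entry's position matters, the sketch below uses a tiny regex matcher (illustrative only, not the project's `Lexer`); the token names and patterns mirror the patch, in which `("AND", r"&&")` is tried before `("BITWISE_AND", r"&")`:
```
import re

# Order matters: logical "&&" must be matched before bitwise "&".
TOKENS = [("AND", r"&&"), ("BITWISE_AND", r"&"),
          ("IDENTIFIER", r"[a-zA-Z_][a-zA-Z_0-9]*"), ("WHITESPACE", r"\s+")]

def tokenize(src):
    pos, out = 0, []
    while pos < len(src):
        for name, pattern in TOKENS:
            match = re.match(pattern, src[pos:])
            if match:
                if name != "WHITESPACE":
                    out.append((name, match.group(0)))
                pos += match.end()
                break
        else:
            raise SyntaxError(f"unexpected character {src[pos]!r}")
    return out

print(tokenize("a & b"))   # [('IDENTIFIER', 'a'), ('BITWISE_AND', '&'), ('IDENTIFIER', 'b')]
print(tokenize("a && b"))  # [('IDENTIFIER', 'a'), ('AND', '&&'), ('IDENTIFIER', 'b')]
```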
| CrossGL/crosstl | diff --git a/tests/test_translator/test_lexer.py b/tests/test_translator/test_lexer.py
index 83cdda1..3de1a3a 100644
--- a/tests/test_translator/test_lexer.py
+++ b/tests/test_translator/test_lexer.py
@@ -6,7 +6,7 @@ from typing import List
def tokenize_code(code: str) -> List:
"""Helper function to tokenize code."""
lexer = Lexer(code)
- return lexer.tokenize()
+ return lexer.tokens
def test_input_output_tokenization():
@@ -101,6 +101,24 @@ def test_function_call_tokenization():
pytest.fail("Function call tokenization not implemented.")
+def test_bitwise_operator_tokenization():
+ code = """
+ int a = 60; // 60 = 0011 1100
+ int b = 13; // 13 = 0000 1101
+ int c = 0;
+ c = a & b; // 12 = 0000 1100
+ c = a | b; // 61 = 0011 1101
+ c = a ^ b; // 49 = 0011 0001
+ c = ~a; // -61 = 1100 0011
+ c = a << 2; // 240 = 1111 0000
+ c = a >> 2; // 15 = 0000 1111
+ """
+ try:
+ tokenize_code(code)
+ except SyntaxError:
+ pytest.fail("Bitwise operator tokenization not implemented.")
+
+
def test_data_types_tokenization():
code = """
int a;
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/CrossGL/crosstl.git@49a1c56a2b186090a472094a5156129495e11e27#egg=crosstl
exceptiongroup==1.2.2
gast==0.6.0
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: crosstl
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- gast==0.6.0
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/crosstl
| [
"tests/test_translator/test_lexer.py::test_bitwise_operator_tokenization"
] | [] | [
"tests/test_translator/test_lexer.py::test_input_output_tokenization",
"tests/test_translator/test_lexer.py::test_if_statement_tokenization",
"tests/test_translator/test_lexer.py::test_for_statement_tokenization",
"tests/test_translator/test_lexer.py::test_else_statement_tokenization",
"tests/test_translator/test_lexer.py::test_function_call_tokenization",
"tests/test_translator/test_lexer.py::test_data_types_tokenization",
"tests/test_translator/test_lexer.py::test_logical_operators_tokenization"
] | [] | Apache License 2.0 | 19,362 | 265 | [
"crosstl/src/translator/lexer.py"
] |
CrossGL__crosstl-68 | d456c2b5f509104dd97b74ce40c597531784e2ca | 2024-08-21 19:33:50 | 36bed5871a8d102f73cfebf82c8d8495aaa89e87 | diff --git a/crosstl/src/backend/DirectX/DirectxCrossGLCodeGen.py b/crosstl/src/backend/DirectX/DirectxCrossGLCodeGen.py
index 910eec2..e936097 100644
--- a/crosstl/src/backend/DirectX/DirectxCrossGLCodeGen.py
+++ b/crosstl/src/backend/DirectX/DirectxCrossGLCodeGen.py
@@ -157,9 +157,13 @@ class HLSLToCrossGLConverter:
code += " " * indent + "}"
if node.else_body:
- code += " else {\n"
- code += self.generate_function_body(node.else_body, indent + 1, is_main)
- code += " " * indent + "}"
+ if isinstance(node.else_body, IfNode):
+ code += " else "
+ code += self.generate_if_statement(node.else_body, indent, is_main)
+ else:
+ code += " else {\n"
+ code += self.generate_function_body(node.else_body, indent + 1, is_main)
+ code += " " * indent + "}"
code += "\n"
return code
diff --git a/crosstl/src/backend/DirectX/DirectxLexer.py b/crosstl/src/backend/DirectX/DirectxLexer.py
index 70e72f9..6583503 100644
--- a/crosstl/src/backend/DirectX/DirectxLexer.py
+++ b/crosstl/src/backend/DirectX/DirectxLexer.py
@@ -16,6 +16,7 @@ TOKENS = [
("VOID", r"\bvoid\b"),
("RETURN", r"\breturn\b"),
("IF", r"\bif\b"),
+ ("ELSE_IF", r"\belse\sif\b"),
("ELSE", r"\belse\b"),
("FOR", r"\bfor\b"),
("REGISTER", r"\bregister\b"),
diff --git a/crosstl/src/backend/DirectX/DirectxParser.py b/crosstl/src/backend/DirectX/DirectxParser.py
index 1ebe60a..ea4f434 100644
--- a/crosstl/src/backend/DirectX/DirectxParser.py
+++ b/crosstl/src/backend/DirectX/DirectxParser.py
@@ -215,6 +215,22 @@ class HLSLParser:
if self.current_token[0] == "ELSE":
self.eat("ELSE")
else_body = self.parse_block()
+ elif self.current_token[0] == "ELSE_IF":
+ else_body = self.parse_else_if_statement()
+ return IfNode(condition, if_body, else_body)
+
+ def parse_else_if_statement(self):
+ self.eat("ELSE_IF")
+ self.eat("LPAREN")
+ condition = self.parse_expression()
+ self.eat("RPAREN")
+ if_body = self.parse_block()
+ else_body = None
+ if self.current_token[0] == "ELSE":
+ self.eat("ELSE")
+ else_body = self.parse_block()
+ elif self.current_token[0] == "ELSE_IF":
+ else_body = self.parse_else_if_statement()
return IfNode(condition, if_body, else_body)
def parse_for_statement(self):
diff --git a/crosstl/src/translator/lexer.py b/crosstl/src/translator/lexer.py
index 0a0ca33..8037759 100644
--- a/crosstl/src/translator/lexer.py
+++ b/crosstl/src/translator/lexer.py
@@ -17,6 +17,8 @@ TOKENS = [
("FLOAT_NUMBER", r"\d*\.\d+|\d+\.\d*"),
("FLOAT", r"\bfloat\b"),
("INT", r"\bint\b"),
+ ("UINT", r"\buint\b"),
+ ("DOUBLE", r"\bdouble\b"),
("SAMPLER2D", r"\bsampler2D\b"),
("IDENTIFIER", r"[a-zA-Z_][a-zA-Z_0-9]*"),
("NUMBER", r"\d+(\.\d+)?"),
@@ -43,9 +45,15 @@ TOKENS = [
("DECREMENT", r"--"),
("EQUAL", r"=="),
("NOT_EQUAL", r"!="),
+ ("ASSIGN_AND", r"&="),
+ ("ASSIGN_OR", r"\|="),
+ ("ASSIGN_XOR", r"\^="),
("AND", r"&&"),
("OR", r"\|\|"),
+ ("XOR", r"\^"),
("NOT", r"!"),
+ ("ASSIGN_MOD", r"%="),
+ ("MOD", r"%"),
("INCREMENT", r"\+\+"),
("DECREMENT", r"\-\-"),
("PLUS", r"\+"),
| Add `Double` Data Type Token at translator frontend
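The request below boils down to one ordered entry in the translator's regex token table. A hedged sketch of the key detail — the `\b` word boundaries keep `double` from swallowing identifiers that merely start with it (the two-entry table is illustrative, not the project's full token list):
```
import re

TOKENS = [("DOUBLE", r"\bdouble\b"), ("IDENTIFIER", r"[a-zA-Z_][a-zA-Z_0-9]*")]

def first_token(text):
    # Return the name of the first token pattern that matches at the start of text.
    for name, pattern in TOKENS:
        if re.match(pattern, text):
            return name

print(first_token("double d;"))        # DOUBLE
print(first_token("double_buffer d;")) # IDENTIFIER
```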
Implement the DOUBLE token to recognize the double data type for representing double-precision floating-point numbers. | CrossGL/crosstl | diff --git a/tests/test_backend/test_directx/test_codegen.py b/tests/test_backend/test_directx/test_codegen.py
index c72432c..8e7e3a8 100644
--- a/tests/test_backend/test_directx/test_codegen.py
+++ b/tests/test_backend/test_directx/test_codegen.py
@@ -262,6 +262,59 @@ def test_function_call_codegen():
pytest.fail("Function call parsing or code generation not implemented.")
+def test_else_if_codegen():
+ code = """
+ struct VSInput {
+ float4 position : POSITION;
+ float4 color : TEXCOORD0;
+ };
+
+ struct VSOutput {
+ float4 out_position : TEXCOORD0;
+ };
+
+ VSOutput VSMain(VSInput input) {
+ VSOutput output;
+ output.out_position = input.position;
+ if (input.color.r > 0.5) {
+ output.out_position = input.color;
+ }
+ else {
+ output.out_position = float4(0.0, 0.0, 0.0, 1.0);
+ }
+ return output;
+ }
+
+ struct PSInput {
+ float4 in_position : TEXCOORD0;
+ };
+
+ struct PSOutput {
+ float4 out_color : SV_TARGET0;
+ };
+
+ PSOutput PSMain(PSInput input) {
+ PSOutput output;
+ if (input.in_position.r > 0.5) {
+ output.out_color = input.in_position;
+ } else if (input.in_position.r == 0.5){
+ output.out_color = float4(1.0, 1.0, 1.0, 1.0);
+ } else {
+ output.out_color = float4(0.0, 0.0, 0.0, 1.0);
+ }
+ return output;
+ }
+ """
+ try:
+ tokens = tokenize_code(code)
+ ast = parse_code(tokens)
+ generated_code = generate_code(ast)
+ print("############## else if code ##############")
+ print(generated_code)
+ except SyntaxError:
+ pytest.fail("Else_if statement parsing or code generation not implemented.")
+
+
# Run all tests
if __name__ == "__main__":
pytest.main()
diff --git a/tests/test_backend/test_directx/test_lexer.py b/tests/test_backend/test_directx/test_lexer.py
index 3565f97..18f7464 100644
--- a/tests/test_backend/test_directx/test_lexer.py
+++ b/tests/test_backend/test_directx/test_lexer.py
@@ -20,8 +20,10 @@ def test_struct_tokenization():
float4 out_position : TEXCOORD0;
};
"""
- tokens = tokenize_code(code)
- print(tokens)
+ try:
+ tokenize_code(code)
+ except SyntaxError:
+ pytest.fail("struct tokenization not implemented.")
def test_if_tokenization():
@@ -38,7 +40,7 @@ def test_if_tokenization():
try:
tokenize_code(code)
except SyntaxError:
- pytest.fail("Function call tokenization not implemented.")
+ pytest.fail("if tokenization not implemented.")
def test_for_tokenization():
@@ -54,7 +56,7 @@ def test_for_tokenization():
try:
tokenize_code(code)
except SyntaxError:
- pytest.fail("Function call tokenization not implemented.")
+ pytest.fail("for tokenization not implemented.")
def test_else_tokenization():
@@ -72,7 +74,7 @@ def test_else_tokenization():
try:
tokenize_code(code)
except SyntaxError:
- pytest.fail("Function call tokenization not implemented.")
+ pytest.fail("else tokenization not implemented.")
def test_function_call_tokenization():
@@ -89,6 +91,26 @@ def test_function_call_tokenization():
pytest.fail("Function call tokenization not implemented.")
+def test_else_if_tokenization():
+ code = """
+ PSOutput PSMain(PSInput input) {
+ PSOutput output;
+ if (input.in_position.r > 0.5) {
+ output.out_color = input.in_position;
+ } else if (input.in_position.r == 0.5){
+ output.out_color = float4(1.0, 1.0, 1.0, 1.0);
+ } else {
+ output.out_color = float4(0.0, 0.0, 0.0, 1.0);
+ }
+ return output;
+ }
+ """
+ try:
+ tokenize_code(code)
+ except SyntaxError:
+ pytest.fail("else_if tokenization not implemented.")
+
+
if __name__ == "__main__":
pytest.main()
diff --git a/tests/test_backend/test_directx/test_parser.py b/tests/test_backend/test_directx/test_parser.py
index 5e6f875..564351f 100644
--- a/tests/test_backend/test_directx/test_parser.py
+++ b/tests/test_backend/test_directx/test_parser.py
@@ -55,7 +55,7 @@ def test_if_parsing():
tokens = tokenize_code(code)
parse_code(tokens)
except SyntaxError:
- pytest.fail("Struct parsing not implemented.")
+ pytest.fail("if parsing not implemented.")
def test_for_parsing():
@@ -72,7 +72,7 @@ def test_for_parsing():
tokens = tokenize_code(code)
parse_code(tokens)
except SyntaxError:
- pytest.fail("Struct parsing not implemented.")
+ pytest.fail("for parsing not implemented.")
def test_else_parsing():
@@ -91,7 +91,7 @@ def test_else_parsing():
tokens = tokenize_code(code)
parse_code(tokens)
except SyntaxError:
- pytest.fail("Struct parsing not implemented.")
+ pytest.fail("else parsing not implemented.")
def test_function_call_parsing():
@@ -106,7 +106,28 @@ def test_function_call_parsing():
tokens = tokenize_code(code)
parse_code(tokens)
except SyntaxError:
- pytest.fail("Struct parsing not implemented.")
+ pytest.fail("function call parsing not implemented.")
+
+
+def test_else_if_parsing():
+ code = """
+ PSOutput PSMain(PSInput input) {
+ PSOutput output;
+ if (input.in_position.r > 0.5) {
+ output.out_color = input.in_position;
+ } else if (input.in_position.r == 0.5){
+ output.out_color = float4(1.0, 1.0, 1.0, 1.0);
+ } else {
+ output.out_color = float4(0.0, 0.0, 0.0, 1.0);
+ }
+ return output;
+ }
+ """
+ try:
+ tokens = tokenize_code(code)
+ parse_code(tokens)
+ except SyntaxError:
+ pytest.fail("else_if parsing not implemented.")
# Run all tests
diff --git a/tests/test_translator/test_lexer.py b/tests/test_translator/test_lexer.py
index 0fa3f2c..fa21744 100644
--- a/tests/test_translator/test_lexer.py
+++ b/tests/test_translator/test_lexer.py
@@ -99,3 +99,17 @@ def test_function_call_tokenization():
tokenize_code(code)
except SyntaxError:
pytest.fail("Function call tokenization not implemented.")
+
+
+def test_data_types_tokenization():
+ code = """
+ int a;
+ uint b;
+ float c;
+ double d;
+ bool e;
+ """
+ try:
+ tokenize_code(code)
+ except SyntaxError:
+ pytest.fail("Data types tokenization not implemented.")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/CrossGL/crosstl.git@d456c2b5f509104dd97b74ce40c597531784e2ca#egg=crosstl
exceptiongroup==1.2.2
gast==0.6.0
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: crosstl
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- gast==0.6.0
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/crosstl
| [
"tests/test_backend/test_directx/test_codegen.py::test_else_if_codegen",
"tests/test_backend/test_directx/test_parser.py::test_else_if_parsing"
] | [] | [
"tests/test_backend/test_directx/test_codegen.py::test_struct_codegen",
"tests/test_backend/test_directx/test_codegen.py::test_if_codegen",
"tests/test_backend/test_directx/test_codegen.py::test_for_codegen",
"tests/test_backend/test_directx/test_codegen.py::test_else_codegen",
"tests/test_backend/test_directx/test_codegen.py::test_function_call_codegen",
"tests/test_backend/test_directx/test_lexer.py::test_struct_tokenization",
"tests/test_backend/test_directx/test_lexer.py::test_if_tokenization",
"tests/test_backend/test_directx/test_lexer.py::test_for_tokenization",
"tests/test_backend/test_directx/test_lexer.py::test_else_tokenization",
"tests/test_backend/test_directx/test_lexer.py::test_function_call_tokenization",
"tests/test_backend/test_directx/test_lexer.py::test_else_if_tokenization",
"tests/test_backend/test_directx/test_parser.py::test_struct_parsing",
"tests/test_backend/test_directx/test_parser.py::test_if_parsing",
"tests/test_backend/test_directx/test_parser.py::test_for_parsing",
"tests/test_backend/test_directx/test_parser.py::test_else_parsing",
"tests/test_backend/test_directx/test_parser.py::test_function_call_parsing",
"tests/test_translator/test_lexer.py::test_input_output_tokenization",
"tests/test_translator/test_lexer.py::test_if_statement_tokenization",
"tests/test_translator/test_lexer.py::test_for_statement_tokenization",
"tests/test_translator/test_lexer.py::test_else_statement_tokenization",
"tests/test_translator/test_lexer.py::test_function_call_tokenization",
"tests/test_translator/test_lexer.py::test_data_types_tokenization"
] | [] | Apache License 2.0 | 19,365 | 1,134 | [
"crosstl/src/backend/DirectX/DirectxCrossGLCodeGen.py",
"crosstl/src/backend/DirectX/DirectxLexer.py",
"crosstl/src/backend/DirectX/DirectxParser.py",
"crosstl/src/translator/lexer.py"
] |
|
NREL__bifacial_radiance-527 | 2742b883b07c4a70e6bc737289734d7e624b1ec1 | 2024-08-21 21:07:44 | dbf20626ba436315336a54731f77b578615313c8 | diff --git a/bifacial_radiance/main.py b/bifacial_radiance/main.py
index 574ca27..aa58a58 100644
--- a/bifacial_radiance/main.py
+++ b/bifacial_radiance/main.py
@@ -3002,11 +3002,11 @@ class SceneObj:
f'{nMods}modsx{nRows}rows_origin{originx},{originy}.rad' )
if self.hpc:
- text += f'"{os.path.join(os.getcwd(), self.modulefile)}"'
- radfile = os.path.join(os.getcwd(), 'objects', filename)
+ text += f'"{os.path.join(os.getcwd(), self.modulefile)}"'
+ radfile = os.path.join(os.getcwd(), 'objects', filename)
else:
- text += os.path.join(self.modulefile)
- radfile = os.path.join('objects',filename )
+ text += f'"{os.path.join(self.modulefile)}"'
+ radfile = os.path.join('objects',filename)
# py2 and 3 compatible: binary write, encode text first
with open(radfile, 'wb') as f:
@@ -3214,10 +3214,10 @@ class MetObj:
sunup['minutedelta']= int(interval.seconds/2/60) # default sun angle 30 minutes before timestamp
# vector update of minutedelta at sunrise
sunrisemask = sunup.index.hour-1==sunup['sunrise'].dt.hour
- sunup['minutedelta'].mask(sunrisemask,np.floor((60-(sunup['sunrise'].dt.minute))/2),inplace=True)
+ sunup['minutedelta'] = sunup['minutedelta'].mask(sunrisemask,np.floor((60-(sunup['sunrise'].dt.minute))/2))
# vector update of minutedelta at sunset
sunsetmask = sunup.index.hour-1==sunup['sunset'].dt.hour
- sunup['minutedelta'].mask(sunsetmask,np.floor((60-(sunup['sunset'].dt.minute))/2),inplace=True)
+ sunup['minutedelta'] = sunup['minutedelta'].mask(sunsetmask,np.floor((60-(sunup['sunset'].dt.minute))/2))
# save corrected timestamp
sunup['corrected_timestamp'] = sunup.index-pd.to_timedelta(sunup['minutedelta'], unit='m')
@@ -3228,10 +3228,10 @@ class MetObj:
sunup['minutedelta']= int(interval.seconds/2/60) # default sun angle 30 minutes after timestamp
# vector update of minutedelta at sunrise
sunrisemask = sunup.index.hour==sunup['sunrise'].dt.hour
- sunup['minutedelta'].mask(sunrisemask,np.ceil((60+sunup['sunrise'].dt.minute)/2),inplace=True)
+ sunup['minutedelta'] = sunup['minutedelta'].mask(sunrisemask,np.ceil((60+sunup['sunrise'].dt.minute)/2))
# vector update of minutedelta at sunset
sunsetmask = sunup.index.hour==sunup['sunset'].dt.hour
- sunup['minutedelta'].mask(sunsetmask,np.ceil((60+sunup['sunset'].dt.minute)/2),inplace=True)
+ sunup['minutedelta'] = sunup['minutedelta'].mask(sunsetmask,np.ceil((60+sunup['sunset'].dt.minute)/2))
# save corrected timestamp
sunup['corrected_timestamp'] = sunup.index+pd.to_timedelta(sunup['minutedelta'], unit='m')
else: raise ValueError('Error: invalid weather label passed. Valid inputs: right, left or center')
diff --git a/bifacial_radiance/performance.py b/bifacial_radiance/performance.py
index d1e0d14..d7d41fc 100644
--- a/bifacial_radiance/performance.py
+++ b/bifacial_radiance/performance.py
@@ -6,7 +6,7 @@ Created on Tue April 27 06:29:02 2021
"""
import pvlib
-
+import pandas as pd
def calculatePerformance(effective_irradiance, CECMod, temp_air=None, wind_speed=1, temp_cell=None, glassglass=False):
'''
@@ -50,17 +50,27 @@ def calculatePerformance(effective_irradiance, CECMod, temp_air=None, wind_speed
temp_cell = pvlib.temperature.sapm_cell(effective_irradiance, temp_air, wind_speed,
temp_model_params['a'], temp_model_params['b'], temp_model_params['deltaT'])
-
+
+ if isinstance(CECMod, pd.DataFrame):
+ #CECMod.to_pickle("CECMod.pkl")
+ if len(CECMod) == 1:
+ CECMod1 = CECMod.iloc[0]
+ else:
+ print("More than one Module passed. Error, using 1st one")
+ CECMod1 = CECMod.iloc[0]
+ else:
+ CECMod1 = CECMod
+
IL, I0, Rs, Rsh, nNsVth = pvlib.pvsystem.calcparams_cec(
effective_irradiance=effective_irradiance,
temp_cell=temp_cell,
- alpha_sc=float(CECMod.alpha_sc),
- a_ref=float(CECMod.a_ref),
- I_L_ref=float(CECMod.I_L_ref),
- I_o_ref=float(CECMod.I_o_ref),
- R_sh_ref=float(CECMod.R_sh_ref),
- R_s=float(CECMod.R_s),
- Adjust=float(CECMod.Adjust)
+ alpha_sc=float(CECMod1.alpha_sc),
+ a_ref=float(CECMod1.a_ref),
+ I_L_ref=float(CECMod1.I_L_ref),
+ I_o_ref=float(CECMod1.I_o_ref),
+ R_sh_ref=float(CECMod1.R_sh_ref),
+ R_s=float(CECMod1.R_s),
+ Adjust=float(CECMod1.Adjust)
)
IVcurve_info = pvlib.pvsystem.singlediode(
| Development tests are failing
The first two are failing in all envs. The second two only fail with upgrade-strategy eager.
FAILED tests/test_bifacial_radiance.py::test_SingleModule_HPC - AssertionError: assert 'groundplane' == 'a0.0.a0.test'
FAILED tests/test_performance.py::test_calculatePerformance - UnboundLocalError: cannot access local variable 'CECMod1' where it is not associated with a value
FAILED tests/test_spectra.py::test_scale_spectra - AttributeError: module 'scipy.integrate' has no attribute 'trapz'
FAILED tests/test_spectra.py::test_nonspectral_albedo - AttributeError: module 'scipy.integrate' has no attribute 'trapz' | NREL/bifacial_radiance | diff --git a/tests/test_bifacial_radiance.py b/tests/test_bifacial_radiance.py
index 3c4e2a3..5524d6d 100644
--- a/tests/test_bifacial_radiance.py
+++ b/tests/test_bifacial_radiance.py
@@ -146,7 +146,7 @@ def test_Radiance_1axis_gendaylit_modelchains():
#V 0.2.5 fixed the gcr passed to set1axis. (since gcr was not being passd to set1axis, gcr was default 0.33 default).
assert(np.mean(demo2.Wm2Front) == pytest.approx(205.0, 0.01) ) # was 214 in v0.2.3 # was 205 in early v0.2.4
assert(np.mean(demo2.Wm2Back) == pytest.approx(43.0, 0.1) )
- assert demo2.trackerdict['2001-01-01_1100']['scene'].text.__len__() == 132
+ assert demo2.trackerdict['2001-01-01_1100']['scene'].text.__len__() == 134
assert demo2.trackerdict['2001-01-01_1100']['scene'].text[23:28] == " 2.0 "
demo2.exportTrackerDict(savefile = 'results\exportedTrackerDict.csv', reindex=True)
@@ -267,7 +267,7 @@ def test_SceneObj_makeSceneNxR_lowtilt():
'sx_xinc': 0.0, 'sx_yinc':0.0, 'sx_zinc':0.0})
# zstart was 0.01 and zinc was 0 in v0.2.2
#assert scene.text == '!xform -rz -90 -t -0.795 0.475 0 -rx 10 -t 0 0 0.2 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -15.9 -4.5 0 -rz 0 objects\\simple_panel.rad'
- assert scene.text[0:116] == '!xform -rx 10 -t 0 0 0.2824828843917919 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -14.4 -4.5 0 -rz 0 -t 0 0 0 objects' #linux has different directory structure and will error here.
+ assert scene.text[0:117] == '!xform -rx 10 -t 0 0 0.2824828843917919 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -14.4 -4.5 0 -rz 0 -t 0 0 0 "objects' #linux has different directory structure and will error here.
def test_SceneObj_makeSceneNxR_hightilt():
# test _makeSceneNxR(tilt, height, pitch, orientation = None, azimuth = 180, nMods = 20, nRows = 7, radname = None)
@@ -311,7 +311,7 @@ def test_SceneObj_makeSceneNxR_hightilt():
'zinc': 0.08609923976848174, 'zstart': 0.28567662150674106,
'sx_xinc': 0.0, 'sx_yinc':0.0, 'sx_zinc':0.0})
#assert scene.text == '!xform -rz -90 -t -0.795 0.475 0 -rx 65 -t 0 0 0.2 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -15.9 -4.5 0 -rz 91 objects\\simple_panel.rad'
- assert scene.text[0:117] == '!xform -rx 65 -t 0 0 0.6304961988424087 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -14.4 -4.5 0 -rz 91 -t 0 0 0 objects'
+ assert scene.text[0:118] == '!xform -rx 65 -t 0 0 0.6304961988424087 -a 20 -t 1.6 0 0 -a 7 -t 0 1.5 0 -i 1 -t -14.4 -4.5 0 -rz 91 -t 0 0 0 "objects'
diff --git a/tests/test_performance.py b/tests/test_performance.py
index cc5bfb8..72b4ece 100644
--- a/tests/test_performance.py
+++ b/tests/test_performance.py
@@ -50,6 +50,13 @@ def test_calculatePerformance():
p_mp_tamb = bifacial_radiance.performance.calculatePerformance(s1, CECMod=CECMod,
temp_air=s3, wind_speed=1, glassglass=True)
assert p_mp_tamb[0] == pytest.approx(190.4431, abs=.0001)
+ # test passing CECMod as a DF
+
+ p_mp_celltemp2 = bifacial_radiance.performance.calculatePerformance(s1, pd.DataFrame([CECMod]),
+ temp_cell=s2)
+ p_mp_celltemp3 = bifacial_radiance.performance.calculatePerformance(s1, pd.DataFrame([CECMod, CECMod]),
+ temp_cell=s2)
+ assert p_mp_celltemp3.all()==p_mp_celltemp2.all()==p_mp_celltemp.all()
def test_MBD():
from bifacial_radiance import performance
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc gfortran"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
anyio==4.9.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
astroid==3.3.9
asttokens==3.0.0
async-lru==2.0.5
attrs==25.3.0
babel==2.17.0
backcall==0.2.0
beautifulsoup4==4.13.3
-e git+https://github.com/NREL/bifacial_radiance.git@2742b883b07c4a70e6bc737289734d7e624b1ec1#egg=bifacial_radiance
bleach==6.2.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
comm==0.2.2
configparser==7.2.0
coverage==7.2.1
cycler==0.11.0
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
docutils==0.17.1
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
fonttools==4.56.0
fqdn==1.5.1
future==1.0.0
h11==0.14.0
h5py==3.13.0
httpcore==1.0.7
httpx==0.28.1
idna==3.4
imagesize==1.4.1
importlib-metadata==6.0.0
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.10.0
ipywidgets==8.1.5
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
json5==0.10.0
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.2.5
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyter_server==2.15.0
jupyter_server_terminals==0.5.3
jupyterlab==4.3.6
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kiwisolver==1.4.4
MarkupSafe==3.0.2
matplotlib==3.5.1
matplotlib-inline==0.1.7
mistune==3.1.3
more-itertools==9.1.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
notebook==7.3.3
notebook_shim==0.2.4
numpy==1.24.2
overrides==7.7.0
packaging==24.2
pandas==1.3.5
pandocfilters==1.5.1
parso==0.8.4
pexpect==4.9.0
pickleshare==0.7.5
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.0.0
prometheus_client==0.21.1
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pvlib==0.9.4
pvmismatch==4.1
py==1.11.0
pycparser==2.22
Pygments==2.19.1
pyparsing==3.0.9
pySMARTS==0.0.1
pytest==7.2.2
pytest-cov==4.0.0
python-dateutil==2.8.2
python-json-logger==3.3.0
pytz==2021.3
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
scipy==1.13.1
Send2Trash==1.8.3
six==1.16.0
sniffio==1.3.1
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==5.3.0
sphinx-autoapi==1.8.4
sphinx-rtd-theme==1.0.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
terminado==0.18.1
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
tqdm==4.67.1
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
Unidecode==1.3.8
uri-template==1.3.0
urllib3==2.3.0
wcwidth==0.2.13
webcolors==24.11.1
webencodings==0.5.1
websocket-client==1.8.0
widgetsnbextension==4.0.13
zipp==3.21.0
| name: bifacial_radiance
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- anyio==4.9.0
- argon2-cffi==23.1.0
- argon2-cffi-bindings==21.2.0
- arrow==1.3.0
- astroid==3.3.9
- asttokens==3.0.0
- async-lru==2.0.5
- attrs==25.3.0
- babel==2.17.0
- backcall==0.2.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- comm==0.2.2
- configparser==7.2.0
- coverage==7.2.1
- cycler==0.11.0
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- docutils==0.17.1
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- fonttools==4.56.0
- fqdn==1.5.1
- future==1.0.0
- h11==0.14.0
- h5py==3.13.0
- httpcore==1.0.7
- httpx==0.28.1
- idna==3.4
- imagesize==1.4.1
- importlib-metadata==6.0.0
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.10.0
- ipywidgets==8.1.5
- isoduration==20.11.0
- jedi==0.19.2
- jinja2==3.1.6
- json5==0.10.0
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter==1.1.1
- jupyter-client==8.6.3
- jupyter-console==6.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyter-lsp==2.2.5
- jupyter-server==2.15.0
- jupyter-server-terminals==0.5.3
- jupyterlab==4.3.6
- jupyterlab-pygments==0.3.0
- jupyterlab-server==2.27.3
- jupyterlab-widgets==3.0.13
- kiwisolver==1.4.4
- markupsafe==3.0.2
- matplotlib==3.5.1
- matplotlib-inline==0.1.7
- mistune==3.1.3
- more-itertools==9.1.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- notebook==7.3.3
- notebook-shim==0.2.4
- numpy==1.24.2
- overrides==7.7.0
- packaging==24.2
- pandas==1.3.5
- pandocfilters==1.5.1
- parso==0.8.4
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.0.0
- prometheus-client==0.21.1
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pvlib==0.9.4
- pvmismatch==4.1
- py==1.11.0
- pycparser==2.22
- pygments==2.19.1
- pyparsing==3.0.9
- pysmarts==0.0.1
- pytest==7.2.2
- pytest-cov==4.0.0
- python-dateutil==2.8.2
- python-json-logger==3.3.0
- pytz==2021.3
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- scipy==1.13.1
- send2trash==1.8.3
- six==1.16.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==5.3.0
- sphinx-autoapi==1.8.4
- sphinx-rtd-theme==1.0.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- terminado==0.18.1
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- tqdm==4.67.1
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- unidecode==1.3.8
- uri-template==1.3.0
- urllib3==2.3.0
- wcwidth==0.2.13
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- widgetsnbextension==4.0.13
- zipp==3.21.0
prefix: /opt/conda/envs/bifacial_radiance
| [
"tests/test_bifacial_radiance.py::test_SceneObj_makeSceneNxR_lowtilt",
"tests/test_bifacial_radiance.py::test_SceneObj_makeSceneNxR_hightilt",
"tests/test_performance.py::test_calculatePerformance"
] | [
"tests/test_bifacial_radiance.py::test_RadianceObj_fixed_tilt_end_to_end",
"tests/test_bifacial_radiance.py::test_Radiance_high_azimuth_modelchains",
"tests/test_bifacial_radiance.py::test_Radiance_1axis_gendaylit_modelchains",
"tests/test_bifacial_radiance.py::test_1axis_gencumSky",
"tests/test_bifacial_radiance.py::test_SingleModule_HPC",
"tests/test_bifacial_radiance.py::test_analyzeRow",
"tests/test_bifacial_radiance.py::test_verticalmoduleSouthFacing",
"tests/test_bifacial_radiance.py::test_verticalmoduleEastFacing",
"tests/test_bifacial_radiance.py::test_tiltandazimuthModuleTest"
] | [
"tests/test_bifacial_radiance.py::test_RadianceObj_set1axis",
"tests/test_bifacial_radiance.py::test_AnalysisObj_linePtsMake3D",
"tests/test_bifacial_radiance.py::test_gendaylit2manual",
"tests/test_bifacial_radiance.py::test_left_label_metdata",
"tests/test_bifacial_radiance.py::test_addMaterialGroundRad",
"tests/test_bifacial_radiance.py::test_readWeatherFile_extra",
"tests/test_bifacial_radiance.py::test_readWeatherFile_subhourly",
"tests/test_bifacial_radiance.py::test_customTrackerAngles",
"tests/test_performance.py::test_MBD"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,366 | 1,465 | [
"bifacial_radiance/main.py",
"bifacial_radiance/performance.py"
] |
|
PicnicSupermarket__dbt-score-74 | 6e642ee123bf967720bfcf94f77b5c4eb38945a5 | 2024-08-22 09:40:19 | 6e642ee123bf967720bfcf94f77b5c4eb38945a5 | diff --git a/src/dbt_score/formatters/human_readable_formatter.py b/src/dbt_score/formatters/human_readable_formatter.py
index 328a01e..ba49a53 100644
--- a/src/dbt_score/formatters/human_readable_formatter.py
+++ b/src/dbt_score/formatters/human_readable_formatter.py
@@ -1,6 +1,5 @@
"""Human readable formatter."""
-
from typing import Any
from dbt_score.evaluation import ModelResultsType
@@ -34,9 +33,7 @@ class HumanReadableFormatter(Formatter):
"""Callback when a model has been evaluated."""
if score.value < self._config.fail_any_model_under:
self._failed_models.append((model, score))
- print(
- f"{score.badge} {self.bold(model.name)} (score: {round(score.value, 1)!s})"
- )
+ print(f"{score.badge} {self.bold(model.name)} (score: {score.rounded_value!s})")
for rule, result in results.items():
if result is None:
print(f"{self.indent}{self.label_ok} {rule.source()}")
@@ -51,7 +48,7 @@ class HumanReadableFormatter(Formatter):
def project_evaluated(self, score: Score) -> None:
"""Callback when a project has been evaluated."""
- print(f"Project score: {self.bold(str(round(score.value, 1)))} {score.badge}")
+ print(f"Project score: {self.bold(str(score.rounded_value))} {score.badge}")
if len(self._failed_models) > 0:
print()
@@ -59,8 +56,8 @@ class HumanReadableFormatter(Formatter):
f"Error: model score too low, fail_any_model_under = "
f"{self._config.fail_any_model_under}"
)
- for model, score in self._failed_models:
- print(f"Model {model.name} scored {round(score.value, 1)}")
+ for model, model_score in self._failed_models:
+ print(f"Model {model.name} scored {model_score.value}")
elif score.value < self._config.fail_project_under:
print()
diff --git a/src/dbt_score/scoring.py b/src/dbt_score/scoring.py
index 60b05c3..5d80306 100644
--- a/src/dbt_score/scoring.py
+++ b/src/dbt_score/scoring.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import math
import typing
from dataclasses import dataclass
@@ -19,6 +20,11 @@ class Score:
value: float
badge: str
+ @property
+ def rounded_value(self) -> float:
+ """Auto-round score down to 1 decimal place."""
+ return math.floor(self.value * 10) / 10
+
class Scorer:
"""Logic for computing scores."""
| Bug: Silver medal with 10.0 project score
It appears one can get a `10.0` _project_ score, but still a silver medal

My suspicion is that the score is actually rounded (e.g. 9.95). | PicnicSupermarket/dbt-score | diff --git a/tests/formatters/test_human_readable_formatter.py b/tests/formatters/test_human_readable_formatter.py
index b4afeb1..6a3438a 100644
--- a/tests/formatters/test_human_readable_formatter.py
+++ b/tests/formatters/test_human_readable_formatter.py
@@ -1,6 +1,5 @@
"""Unit tests for the human readable formatter."""
-
from dbt_score.evaluation import ModelResultsType
from dbt_score.formatters.human_readable_formatter import HumanReadableFormatter
from dbt_score.rule import RuleViolation
@@ -48,6 +47,49 @@ def test_human_readable_formatter_project(capsys, default_config, manifest_loade
assert stdout == "Project score: \x1B[1m10.0\x1B[0m 🥇\n"
+def test_human_readable_formatter_near_perfect_model_score(
+ capsys,
+ default_config,
+ manifest_loader,
+ model1,
+ rule_severity_low,
+ rule_severity_medium,
+ rule_severity_critical,
+):
+ """Ensure the formatter has the correct output after model evaluation."""
+ formatter = HumanReadableFormatter(
+ manifest_loader=manifest_loader, config=default_config
+ )
+ results: ModelResultsType = {
+ rule_severity_low: None,
+ rule_severity_medium: Exception("Oh noes"),
+ rule_severity_critical: RuleViolation("Error"),
+ }
+ formatter.model_evaluated(model1, results, Score(9.99, "🥈"))
+ stdout = capsys.readouterr().out
+ assert (
+ stdout
+ == """🥈 \x1B[1mmodel1\x1B[0m (score: 9.9)
+ \x1B[1;32mOK \x1B[0m tests.conftest.rule_severity_low
+ \x1B[1;31mERR \x1B[0m tests.conftest.rule_severity_medium: Oh noes
+ \x1B[1;33mWARN\x1B[0m (critical) tests.conftest.rule_severity_critical: Error
+
+"""
+ )
+
+
+def test_human_readable_formatter_near_perfect_project_score(
+ capsys, default_config, manifest_loader
+):
+ """Ensure the formatter has the correct output after project evaluation."""
+ formatter = HumanReadableFormatter(
+ manifest_loader=manifest_loader, config=default_config
+ )
+ formatter.project_evaluated(Score(9.99, "🥈"))
+ stdout = capsys.readouterr().out
+ assert stdout == "Project score: \x1B[1m9.9\x1B[0m 🥈\n"
+
+
def test_human_readable_formatter_low_model_score(
capsys,
default_config,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"mypy"
],
"pre_install": null,
"python": "3.10",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | agate==1.9.1
annotated-types==0.7.0
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
coverage==7.8.0
daff==1.3.46
dbt-adapters==1.14.3
dbt-common==1.16.0
dbt-core==1.9.3
dbt-extractor==0.5.1
-e git+https://github.com/PicnicSupermarket/dbt-score.git@6e642ee123bf967720bfcf94f77b5c4eb38945a5#egg=dbt_score
dbt-semantic-interfaces==0.7.4
deepdiff==7.0.1
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.11.0
iniconfig==2.1.0
isodate==0.6.1
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
leather==0.4.0
MarkupSafe==3.0.2
mashumaro==3.14
more-itertools==10.6.0
msgpack==1.1.0
mypy==1.15.0
mypy-extensions==1.0.0
networkx==3.4.2
ordered-set==4.1.0
packaging==24.2
parsedatetime==2.6
pathspec==0.12.1
pluggy==1.5.0
protobuf==5.29.4
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
python-dateutil==2.9.0.post0
python-slugify==8.0.4
pytimeparse==1.1.8
pytz==2025.2
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
six==1.17.0
snowplow-tracker==1.1.0
sqlparse==0.5.3
text-unidecode==1.3
tomli==2.2.1
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: dbt-score
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- agate==1.9.1
- annotated-types==0.7.0
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- coverage==7.8.0
- daff==1.3.46
- dbt-adapters==1.14.3
- dbt-common==1.16.0
- dbt-core==1.9.3
- dbt-extractor==0.5.1
- dbt-score==0.4.1.dev2+g6e642ee
- dbt-semantic-interfaces==0.7.4
- deepdiff==7.0.1
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.11.0
- iniconfig==2.1.0
- isodate==0.6.1
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- leather==0.4.0
- markupsafe==3.0.2
- mashumaro==3.14
- more-itertools==10.6.0
- msgpack==1.1.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- networkx==3.4.2
- ordered-set==4.1.0
- packaging==24.2
- parsedatetime==2.6
- pathspec==0.12.1
- pluggy==1.5.0
- protobuf==5.29.4
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-slugify==8.0.4
- pytimeparse==1.1.8
- pytz==2025.2
- pyyaml==6.0.2
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- six==1.17.0
- snowplow-tracker==1.1.0
- sqlparse==0.5.3
- text-unidecode==1.3
- tomli==2.2.1
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/dbt-score
| [
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_near_perfect_model_score",
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_near_perfect_project_score"
] | [] | [
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_model",
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_project",
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_low_model_score",
"tests/formatters/test_human_readable_formatter.py::test_human_readable_formatter_low_project_score"
] | [] | MIT License | 19,370 | 672 | [
"src/dbt_score/formatters/human_readable_formatter.py",
"src/dbt_score/scoring.py"
] |
|
felipeangelimvieira__prophetverse-106 | 3f6c300c40d08f8a842e32f54e21ee6bbd353c07 | 2024-08-23 12:21:44 | 9763d4606e8f5fdd60e756874e54af1716c624d5 | diff --git a/src/prophetverse/sktime/base.py b/src/prophetverse/sktime/base.py
index e998aff..5dce588 100644
--- a/src/prophetverse/sktime/base.py
+++ b/src/prophetverse/sktime/base.py
@@ -474,6 +474,13 @@ class BaseBayesianForecaster(BaseForecaster):
lambda x: np.abs(x).max()
)
+ if isinstance(self._scale, (float, int)):
+ if self._scale == 0:
+ self._scale = 1
+ elif isinstance(self._scale, (pd.Series, pd.DataFrame)):
+ # Map any values that are 0 to 1
+ self._scale = self._scale.replace(0, 1)
+
def _scale_y(self, y: pd.DataFrame) -> pd.DataFrame:
"""
Scales the input DataFrame y (divide it by the scaling factor).
@@ -762,8 +769,9 @@ class BaseProphetForecaster(_HeterogenousMetaEstimator, BaseBayesianForecaster):
Parameters
----------
- trend : Union[str, BaseEffect], optional, one of "linear" (default) or "logistic"
- Type of trend to use. Can also be a custom effect object.
+ trend : Union[str, BaseEffect], optional
+ One of "linear" (default), "linear1" or "logistic". Type of trend to use.
+ Can also be a custom effect object.
changepoint_interval : int, optional, default=25
Number of potential changepoints to sample in the history.
@@ -1025,6 +1033,15 @@ class BaseProphetForecaster(_HeterogenousMetaEstimator, BaseBayesianForecaster):
offset_prior_scale=self.offset_prior_scale,
)
+ elif self.trend == "linear_raw":
+ return PiecewiseLinearTrend(
+ changepoint_interval=self.changepoint_interval,
+ changepoint_range=self.changepoint_range,
+ changepoint_prior_scale=self.changepoint_prior_scale,
+ offset_prior_scale=self.offset_prior_scale,
+ remove_seasonality_before_suggesting_initial_vals=False,
+ )
+
elif self.trend == "logistic":
return PiecewiseLogisticTrend(
changepoint_interval=self.changepoint_interval,
@@ -1060,9 +1077,12 @@ class BaseProphetForecaster(_HeterogenousMetaEstimator, BaseBayesianForecaster):
raise ValueError("capacity_prior_loc must be greater than 0.")
if self.offset_prior_scale <= 0:
raise ValueError("offset_prior_scale must be greater than 0.")
- if self.trend not in ["linear", "logistic", "flat"] and not isinstance(
- self.trend, BaseEffect
- ):
+ if self.trend not in [
+ "linear",
+ "linear_raw",
+ "logistic",
+ "flat",
+ ] and not isinstance(self.trend, BaseEffect):
raise ValueError('trend must be either "linear" or "logistic".')
def _match_columns(
diff --git a/src/prophetverse/sktime/multivariate.py b/src/prophetverse/sktime/multivariate.py
index 3b0352e..212cadb 100644
--- a/src/prophetverse/sktime/multivariate.py
+++ b/src/prophetverse/sktime/multivariate.py
@@ -189,9 +189,6 @@ class HierarchicalProphet(BaseProphetForecaster):
if self.correlation_matrix_concentration <= 0:
raise ValueError("correlation_matrix_concentration must be greater than 0.")
- if self.trend not in ["linear", "logistic"]:
- raise ValueError('trend must be either "linear" or "logistic".')
-
def _get_fit_data(self, y, X, fh):
"""
Prepare the data for the NumPyro model.
| [BUG] HierarchicalProphet parameter validation is not accepting custom trends
`HierarchicalProphet` raises an error when a custom trend is passed. | felipeangelimvieira/prophetverse | diff --git a/tests/sktime/test_multivariate.py b/tests/sktime/test_multivariate.py
index ed865b5..e51587e 100644
--- a/tests/sktime/test_multivariate.py
+++ b/tests/sktime/test_multivariate.py
@@ -2,6 +2,7 @@ import pytest
from numpyro import distributions as dist
from prophetverse.effects.linear import LinearEffect
+from prophetverse.effects.trend import PiecewiseLinearTrend
from prophetverse.sktime.multivariate import HierarchicalProphet
from prophetverse.sktime.seasonality import seasonal_transformer
@@ -16,9 +17,14 @@ from ._utils import (
HYPERPARAMS = [
dict(
+ trend=PiecewiseLinearTrend(
+ changepoint_interval=20,
+ changepoint_range=0.8,
+ changepoint_prior_scale=0.001,
+ ),
feature_transformer=seasonal_transformer(
yearly_seasonality=True, weekly_seasonality=True
- )
+ ),
),
dict(
feature_transformer=seasonal_transformer(
@@ -42,7 +48,7 @@ HYPERPARAMS = [
],
),
dict(
- trend="linear",
+ trend="linear_raw",
),
dict(trend="logistic"),
dict(inference_method="mcmc"),
@@ -61,7 +67,7 @@ def test_hierarchy_levels(hierarchy_levels):
y = make_y(hierarchy_levels)
X = make_random_X(y)
forecaster = HierarchicalProphet(
- optimizer_steps=20, changepoint_interval=2, mcmc_samples=2, mcmc_warmup=2
+ optimizer_steps=2, changepoint_interval=2, mcmc_samples=2, mcmc_warmup=2
)
execute_fit_predict_test(forecaster, y, X)
@@ -74,7 +80,7 @@ def test_hyperparams(hyperparams):
X = make_random_X(y)
forecaster = HierarchicalProphet(
**hyperparams,
- optimizer_steps=20,
+ optimizer_steps=2,
changepoint_interval=2,
mcmc_samples=2,
mcmc_warmup=2
@@ -111,3 +117,16 @@ def test_extra_predict_methods(make_X):
)
execute_extra_predict_methods_tests(forecaster=forecaster, X=X, y=y)
+
+
+def test_hierarchical_with_series_with_zeros():
+ y = make_y((2, 2, 1))
+ # Set all values to 0
+ y.iloc[:, :] = 0
+
+ forecaster = HierarchicalProphet(
+ optimizer_steps=5, changepoint_interval=2, mcmc_samples=2, mcmc_warmup=2
+ )
+
+ forecaster.fit(y)
+ forecaster.predict(fh=[1, 2, 3])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | absl-py==2.2.1
alabaster==0.7.16
astroid==3.3.9
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
backrefs==5.8
beautifulsoup4==4.13.3
black==24.10.0
bleach==6.2.0
certifi==2025.1.31
cfgv==3.4.0
charset-normalizer==3.4.1
chex==0.1.89
click==8.1.8
colorama==0.4.6
comm==0.2.2
commitlint==1.3.0
contourpy==1.3.0
coverage==7.8.0
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
dill==0.3.9
distlib==0.3.9
docutils==0.21.2
etils==1.5.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
executing==2.2.0
fastjsonschema==2.21.1
filelock==3.18.0
fonttools==4.56.0
ghp-import==2.1.0
graphviz==0.20.3
griffe==1.7.1
identify==2.6.9
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
importlib_resources==6.5.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipykernel==6.29.5
ipython==8.18.1
isort==5.13.2
jax==0.4.30
jaxlib==0.4.30
jedi==0.19.2
Jinja2==3.1.6
joblib==1.4.2
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterlab_pygments==0.3.0
jupytext==1.16.7
kiwisolver==1.4.7
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
matplotlib==3.9.4
matplotlib-inline==0.1.7
mccabe==0.7.0
mdit-py-plugins==0.4.2
mdurl==0.1.2
mergedeep==1.3.4
mistune==3.1.3
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-get-deps==0.2.0
mkdocs-jupyter==0.24.8
mkdocs-material==9.6.10
mkdocs-material-extensions==1.3.1
mkdocstrings==0.25.2
mkdocstrings-python==1.10.9
ml_dtypes==0.5.1
multipledispatch==1.0.0
mypy==1.15.0
mypy-extensions==1.0.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
nodeenv==1.9.1
numpy==2.0.2
numpyro==0.18.0
opt_einsum==3.4.0
optax==0.2.4
packaging @ file:///croot/packaging_1734472117206/work
paginate==0.5.7
pandas==2.2.3
pandocfilters==1.5.1
parso==0.8.4
pathspec==0.12.1
pexpect==4.9.0
pillow==11.1.0
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre-commit==3.8.0
prompt_toolkit==3.0.50
-e git+https://github.com/felipeangelimvieira/prophetverse.git@3f6c300c40d08f8a842e32f54e21ee6bbd353c07#egg=prophetverse
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pydocstyle==6.3.0
Pygments==2.19.1
pylint==3.3.6
pymdown-extensions==10.14.3
pyparsing==3.2.3
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==5.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
pyyaml_env_tag==0.1
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
scikit-base==0.8.3
scikit-learn==1.6.1
scipy==1.13.1
six==1.17.0
sktime==0.36.0
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
threadpoolctl==3.6.0
tinycss2==1.4.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tomlkit==0.13.2
toolz==1.0.0
tornado==6.4.2
tqdm==4.67.1
traitlets==5.14.3
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
virtualenv==20.29.3
watchdog==6.0.0
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.21.0
| name: prophetverse
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- absl-py==2.2.1
- alabaster==0.7.16
- astroid==3.3.9
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- backrefs==5.8
- beautifulsoup4==4.13.3
- black==24.10.0
- bleach==6.2.0
- certifi==2025.1.31
- cfgv==3.4.0
- charset-normalizer==3.4.1
- chex==0.1.89
- click==8.1.8
- colorama==0.4.6
- comm==0.2.2
- commitlint==1.3.0
- contourpy==1.3.0
- coverage==7.8.0
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- dill==0.3.9
- distlib==0.3.9
- docutils==0.21.2
- etils==1.5.2
- executing==2.2.0
- fastjsonschema==2.21.1
- filelock==3.18.0
- fonttools==4.56.0
- ghp-import==2.1.0
- graphviz==0.20.3
- griffe==1.7.1
- identify==2.6.9
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- importlib-resources==6.5.2
- ipykernel==6.29.5
- ipython==8.18.1
- isort==5.13.2
- jax==0.4.30
- jaxlib==0.4.30
- jedi==0.19.2
- jinja2==3.1.6
- joblib==1.4.2
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyterlab-pygments==0.3.0
- jupytext==1.16.7
- kiwisolver==1.4.7
- markdown==3.7
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- matplotlib==3.9.4
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- mdit-py-plugins==0.4.2
- mdurl==0.1.2
- mergedeep==1.3.4
- mistune==3.1.3
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-get-deps==0.2.0
- mkdocs-jupyter==0.24.8
- mkdocs-material==9.6.10
- mkdocs-material-extensions==1.3.1
- mkdocstrings==0.25.2
- mkdocstrings-python==1.10.9
- ml-dtypes==0.5.1
- multipledispatch==1.0.0
- mypy==1.15.0
- mypy-extensions==1.0.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nest-asyncio==1.6.0
- nodeenv==1.9.1
- numpy==2.0.2
- numpyro==0.18.0
- opt-einsum==3.4.0
- optax==0.2.4
- paginate==0.5.7
- pandas==2.2.3
- pandocfilters==1.5.1
- parso==0.8.4
- pathspec==0.12.1
- pexpect==4.9.0
- pillow==11.1.0
- platformdirs==4.3.7
- pre-commit==3.8.0
- prompt-toolkit==3.0.50
- prophetverse==0.4.0
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pydocstyle==6.3.0
- pygments==2.19.1
- pylint==3.3.6
- pymdown-extensions==10.14.3
- pyparsing==3.2.3
- pytest-cov==5.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- scikit-base==0.8.3
- scikit-learn==1.6.1
- scipy==1.13.1
- six==1.17.0
- sktime==0.36.0
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- threadpoolctl==3.6.0
- tinycss2==1.4.0
- tomlkit==0.13.2
- toolz==1.0.0
- tornado==6.4.2
- tqdm==4.67.1
- traitlets==5.14.3
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- virtualenv==20.29.3
- watchdog==6.0.0
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.21.0
prefix: /opt/conda/envs/prophetverse
| [
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams0]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams0-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams3-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_hierarchical_with_series_with_zeros"
] | [] | [
"tests/sktime/test_multivariate.py::test_hierarchy_levels[0]",
"tests/sktime/test_multivariate.py::test_hierarchy_levels[hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_hierarchy_levels[hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_hierarchy_levels[hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_hierarchy_levels[hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams1]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams2]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams4]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams5]",
"tests/sktime/test_multivariate.py::test_hyperparams[hyperparams6]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams1-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams2-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams4-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams5-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_random_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_random_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_random_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_random_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_random_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_None_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_None_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_None_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_None_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_None_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_empty_X-0]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_empty_X-hierarchy_levels1]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_empty_X-hierarchy_levels2]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_empty_X-hierarchy_levels3]",
"tests/sktime/test_multivariate.py::test_prophet2_fit_with_different_nlevels[hyperparams6-make_empty_X-hierarchy_levels4]",
"tests/sktime/test_multivariate.py::test_extra_predict_methods[make_random_X]",
"tests/sktime/test_multivariate.py::test_extra_predict_methods[make_None_X]",
"tests/sktime/test_multivariate.py::test_extra_predict_methods[make_empty_X]"
] | [] | Apache License 2.0 | 19,378 | 912 | [
"src/prophetverse/sktime/base.py",
"src/prophetverse/sktime/multivariate.py"
] |
|
CrossGL__crosstl-98 | d4a35e6554bc249fff7764276173ba42c6b5d422 | 2024-08-25 10:36:57 | 36bed5871a8d102f73cfebf82c8d8495aaa89e87 | ArchitGupta07: Updated PR description
samthakur587: hii @ArchitGupta07 can you please resolve the merge conflict
ArchitGupta07: Resolved the merge conflict
samthakur587: hii @ArchitGupta07 can you please resolve the merge conflicts
ArchitGupta07: Resolved the merge conflicts
coderabbitai[bot]: <!-- This is an auto-generated comment: summarize by coderabbit.ai -->
<!-- This is an auto-generated comment: summarize review in progress by coderabbit.ai -->
> [!NOTE]
> Currently processing new changes in this PR. This may take a few minutes, please wait...
>
> <details>
> <summary>Commits</summary>
>
> Files that changed from the base of the PR and between d4a35e6554bc249fff7764276173ba42c6b5d422 and a635301ecd0825e092f99e4be9def47b584386a4.
>
> </details>
>
>
> <details>
> <summary>Files selected for processing (2)</summary>
>
> * crosstl/src/translator/parser.py (5 hunks)
> * tests/test_translator/test_parser.py (1 hunks)
>
> </details>
>
> ```ascii
> _________________________________________________
> < Code Wars Episode II: Attack of the Git Clones. >
> -------------------------------------------------
> \
> \ (\__/)
> (•ㅅ•)
> / づ
> ```
 | diff --git a/crosstl/src/translator/parser.py b/crosstl/src/translator/parser.py
index 82899e6..a46a9d2 100644
--- a/crosstl/src/translator/parser.py
+++ b/crosstl/src/translator/parser.py
@@ -638,6 +638,10 @@ class Parser:
"GREATER_THAN",
"LESS_EQUAL",
"GREATER_EQUAL",
+ "ASSIGN_AND",
+ "ASSIGN_OR",
+ "ASSIGN_XOR",
+ "ASSIGN_MOD",
"BITWISE_SHIFT_RIGHT",
"BITWISE_SHIFT_LEFT",
]:
@@ -694,6 +698,10 @@ class Parser:
"GREATER_THAN",
"LESS_EQUAL",
"GREATER_EQUAL",
+ "ASSIGN_AND",
+ "ASSIGN_OR",
+ "ASSIGN_XOR",
+ "ASSIGN_MOD",
"BITWISE_SHIFT_RIGHT",
"BITWISE_SHIFT_LEFT",
]:
@@ -727,6 +735,10 @@ class Parser:
"BITWISE_SHIFT_RIGHT",
"BITWISE_SHIFT_LEFT",
"EQUAL",
+ "ASSIGN_AND",
+ "ASSIGN_OR",
+ "ASSIGN_XOR",
+ "ASSIGN_MOD",
):
op = self.current_token[0]
self.eat(op)
@@ -774,6 +786,10 @@ class Parser:
"GREATER_THAN",
"LESS_EQUAL",
"GREATER_EQUAL",
+ "ASSIGN_AND",
+ "ASSIGN_OR",
+ "ASSIGN_XOR",
+ "ASSIGN_MOD",
"BITWISE_SHIFT_RIGHT",
"BITWISE_SHIFT_LEFT",
]:
@@ -934,6 +950,10 @@ class Parser:
"ASSIGN_SUB",
"ASSIGN_MUL",
"ASSIGN_DIV",
+ "ASSIGN_AND",
+ "ASSIGN_OR",
+ "ASSIGN_XOR",
+ "ASSIGN_MOD",
"BITWISE_SHIFT_RIGHT",
"BITWISE_SHIFT_LEFT",
]:
| Add Parsing for `Assignment AND` Token
Update the parser to handle the ASSIGN_AND token, allowing it to correctly parse expressions involving the &= operator. | CrossGL/crosstl | diff --git a/tests/test_translator/test_parser.py b/tests/test_translator/test_parser.py
index 672f075..95c1eae 100644
--- a/tests/test_translator/test_parser.py
+++ b/tests/test_translator/test_parser.py
@@ -335,6 +335,50 @@ def test_var_assignment():
pytest.fail("Variable assignment parsing not implemented.")
+def test_assign_ops():
+
+ code = """
+ shader LightControl {
+ vertex {
+ input vec3 position;
+ output int lightStatus;
+
+ void main() {
+ int xStatus = int(position.x * 10.0);
+ int yStatus = int(position.y * 10.0);
+ int zStatus = int(position.z * 10.0);
+
+ xStatus |= yStatus;
+ yStatus &= zStatus;
+ zStatus %= xStatus;
+ lightStatus = xStatus;
+ lightStatus ^= zStatus;
+
+ gl_Position = vec4(position, 1.0);
+ }
+ }
+
+ fragment {
+ input int lightStatus;
+ output vec4 fragColor;
+
+ void main() {
+ if (lightStatus > 0) {
+ fragColor = vec4(1.0, 1.0, 0.0, 1.0);
+ } else {
+ fragColor = vec4(0.0, 0.0, 0.0, 1.0);
+ }
+ }
+ }
+ }
+ """
+ try:
+ tokens = tokenize_code(code)
+ parse_code(tokens)
+ except SyntaxError:
+ pytest.fail("Assignment Operator parsing not implemented.")
+
+
def test_bitwise_operators():
code = """
shader LightControl {
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/CrossGL/crosstl.git@d4a35e6554bc249fff7764276173ba42c6b5d422#egg=crosstl
exceptiongroup==1.2.2
gast==0.6.0
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: crosstl
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- gast==0.6.0
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/crosstl
| [
"tests/test_translator/test_parser.py::test_assign_ops"
] | [] | [
"tests/test_translator/test_parser.py::test_input_output",
"tests/test_translator/test_parser.py::test_if_statement",
"tests/test_translator/test_parser.py::test_for_statement",
"tests/test_translator/test_parser.py::test_else_statement",
"tests/test_translator/test_parser.py::test_else_if_statement",
"tests/test_translator/test_parser.py::test_function_call",
"tests/test_translator/test_parser.py::test_logical_operators",
"tests/test_translator/test_parser.py::test_var_assignment",
"tests/test_translator/test_parser.py::test_bitwise_operators"
] | [] | Apache License 2.0 | 19,389 | 480 | [
"crosstl/src/translator/parser.py"
] |
edgewall__genshi-87 | ca08e57c90b352a39d9ebf0de5d6a6107c48288f | 2024-08-25 10:58:24 | 43496ec3cd21113f5332bb2664d9f9d37c40270b | diff --git a/genshi/input.py b/genshi/input.py
index fa18c38..c21990a 100644
--- a/genshi/input.py
+++ b/genshi/input.py
@@ -346,9 +346,13 @@ class HTMLParser(html.HTMLParser, object):
for tag in open_tags:
yield END, QName(tag), pos
break
- except html.HTMLParseError as e:
- msg = '%s: line %d, column %d' % (e.msg, e.lineno, e.offset)
- raise ParseError(msg, self.filename, e.lineno, e.offset)
+ except Exception as e:
+ # Python's simple HTMLParser does not raise detailed
+ # errors except in strict mode which was deprecated
+ # in Python 3.3 and removed in Python 3.5 and which in
+ # any case is not used is this code.
+ msg = str(e)
+ raise ParseError(msg, self.filename)
return Stream(_generate()).filter(_coalesce)
def __iter__(self):
| Exception handler tries to use nonexistent error class (html.HTMLParseError)
At line 349 of `input.py` the exception handler tries to access html.HTMLParseError, an error class that used to exist in CPython's html library but was [removed](https://github.com/python/cpython/blob/d57f8a9f76e75384ec997686c2a826b1dc3c69c4/Doc/whatsnew/3.3.rst#html) in Python 3.3.
The genshi code in question is this:
```py
def _generate():
if self.encoding:
reader = codecs.getreader(self.encoding)
source = reader(self.source)
else:
source = self.source
try:
bufsize = 4 * 1024 # 4K
done = False
while 1:
while not done and len(self._queue) == 0:
data = source.read(bufsize)
if not data: # end of data
self.close()
done = True
else:
if not isinstance(data, six.text_type):
raise UnicodeError("source returned bytes, but no encoding specified")
self.feed(data)
for kind, data, pos in self._queue:
yield kind, data, pos
self._queue = []
if done:
open_tags = self._open_tags
open_tags.reverse()
for tag in open_tags:
yield END, QName(tag), pos
break
except html.HTMLParseError as e:
msg = '%s: line %d, column %d' % (e.msg, e.lineno, e.offset)
raise ParseError(msg, self.filename, e.lineno, e.offset)
return Stream(_generate()).filter(_coalesce)
``` | edgewall/genshi | diff --git a/genshi/tests/test_input.py b/genshi/tests/test_input.py
index 44b7442..e68515d 100644
--- a/genshi/tests/test_input.py
+++ b/genshi/tests/test_input.py
@@ -15,7 +15,7 @@ import unittest
from genshi.core import Attrs, QName, Stream
from genshi.input import XMLParser, HTMLParser, ParseError, ET
-from genshi.compat import StringIO, BytesIO
+from genshi.compat import IS_PYTHON2, StringIO, BytesIO
from genshi.tests.utils import doctest_suite
from xml.etree import ElementTree
@@ -294,6 +294,20 @@ bar</elem>'''
self.assertEqual((Stream.END, QName("span")), events[4][:2])
self.assertEqual((Stream.END, QName("div")), events[5][:2])
+ def test_parsing_error(self):
+ text = u'<div></div>'.encode('utf-8')
+ events = HTMLParser(BytesIO(text))
+ if IS_PYTHON2:
+ self.assertRaises(ParseError, list, events)
+ else:
+ self.assertRaisesRegex(
+ ParseError,
+ r"source returned bytes, but no encoding specified",
+ list,
+ events,
+ )
+
+
def suite():
suite = unittest.TestSuite()
suite.addTest(doctest_suite(XMLParser.__module__))
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
-e git+https://github.com/edgewall/genshi.git@ca08e57c90b352a39d9ebf0de5d6a6107c48288f#egg=Genshi
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: genshi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- genshi==0.8
- six==1.17.0
prefix: /opt/conda/envs/genshi
| [
"genshi/tests/test_input.py::HTMLParserTestCase::test_parsing_error"
] | [] | [
"genshi/tests/test_input.py::XMLParserTestCase::test_element_attribute_order",
"genshi/tests/test_input.py::XMLParserTestCase::test_html_entity_in_attribute",
"genshi/tests/test_input.py::XMLParserTestCase::test_html_entity_with_dtd",
"genshi/tests/test_input.py::XMLParserTestCase::test_html_entity_without_dtd",
"genshi/tests/test_input.py::XMLParserTestCase::test_latin1_encoded",
"genshi/tests/test_input.py::XMLParserTestCase::test_latin1_encoded_xmldecl",
"genshi/tests/test_input.py::XMLParserTestCase::test_text_node_pos_multi_line",
"genshi/tests/test_input.py::XMLParserTestCase::test_text_node_pos_single_line",
"genshi/tests/test_input.py::XMLParserTestCase::test_undefined_entity_with_dtd",
"genshi/tests/test_input.py::XMLParserTestCase::test_undefined_entity_without_dtd",
"genshi/tests/test_input.py::XMLParserTestCase::test_unicode_input",
"genshi/tests/test_input.py::HTMLParserTestCase::test_convert_ElementTree_to_markup_stream",
"genshi/tests/test_input.py::HTMLParserTestCase::test_hex_charref",
"genshi/tests/test_input.py::HTMLParserTestCase::test_html_entity_in_attribute",
"genshi/tests/test_input.py::HTMLParserTestCase::test_html_entity_in_text",
"genshi/tests/test_input.py::HTMLParserTestCase::test_input_encoding_attribute",
"genshi/tests/test_input.py::HTMLParserTestCase::test_input_encoding_text",
"genshi/tests/test_input.py::HTMLParserTestCase::test_multibyte_character_on_chunk_boundary",
"genshi/tests/test_input.py::HTMLParserTestCase::test_out_of_order_tags1",
"genshi/tests/test_input.py::HTMLParserTestCase::test_out_of_order_tags2",
"genshi/tests/test_input.py::HTMLParserTestCase::test_out_of_order_tags3",
"genshi/tests/test_input.py::HTMLParserTestCase::test_processing_instruction",
"genshi/tests/test_input.py::HTMLParserTestCase::test_processing_instruction_no_data_1",
"genshi/tests/test_input.py::HTMLParserTestCase::test_processing_instruction_no_data_2",
"genshi/tests/test_input.py::HTMLParserTestCase::test_processing_instruction_trailing_qmark",
"genshi/tests/test_input.py::HTMLParserTestCase::test_text_node_pos_multi_line",
"genshi/tests/test_input.py::HTMLParserTestCase::test_text_node_pos_single_line",
"genshi/tests/test_input.py::HTMLParserTestCase::test_unicode_input",
"genshi/tests/test_input.py::HTMLParserTestCase::test_xmldecl",
"genshi/tests/test_input.py::HTMLParserTestCase::test_xmldecl_encoding",
"genshi/tests/test_input.py::HTMLParserTestCase::test_xmldecl_standalone"
] | [] | BSD License | 19,390 | 253 | [
"genshi/input.py"
] |
|
ipython__ipython-14503 | 78fea5f8d7bf6ca55796e82ed341e7fd291878f0 | 2024-08-26 12:06:47 | 78fea5f8d7bf6ca55796e82ed341e7fd291878f0 | diff --git a/IPython/core/ultratb.py b/IPython/core/ultratb.py
index cc139b1e2..66c9ce910 100644
--- a/IPython/core/ultratb.py
+++ b/IPython/core/ultratb.py
@@ -552,28 +552,31 @@ def structured_traceback(
lines = ''.join(self._format_exception_only(etype, evalue))
out_list.append(lines)
- exception = self.get_parts_of_chained_exception(evalue)
+ # Find chained exceptions if we have a traceback (not for exception-only mode)
+ if etb is not None:
+ exception = self.get_parts_of_chained_exception(evalue)
- if exception and (id(exception[1]) not in chained_exc_ids):
- chained_exception_message = (
- self.prepare_chained_exception_message(evalue.__cause__)[0]
- if evalue is not None
- else ""
- )
- etype, evalue, etb = exception
- # Trace exception to avoid infinite 'cause' loop
- chained_exc_ids.add(id(exception[1]))
- chained_exceptions_tb_offset = 0
- out_list = (
- self.structured_traceback(
- etype,
- evalue,
- (etb, chained_exc_ids), # type: ignore
- chained_exceptions_tb_offset,
- context,
+ if exception and (id(exception[1]) not in chained_exc_ids):
+ chained_exception_message = (
+ self.prepare_chained_exception_message(evalue.__cause__)[0]
+ if evalue is not None
+ else ""
+ )
+ etype, evalue, etb = exception
+ # Trace exception to avoid infinite 'cause' loop
+ chained_exc_ids.add(id(exception[1]))
+ chained_exceptions_tb_offset = 0
+ out_list = (
+ self.structured_traceback(
+ etype,
+ evalue,
+ (etb, chained_exc_ids), # type: ignore
+ chained_exceptions_tb_offset,
+ context,
+ )
+ + chained_exception_message
+ + out_list
)
- + chained_exception_message
- + out_list)
return out_list
| Raising SystemExit leads to internal inspect module error
I recently noticed the following error now occurs in IPython and consequently Jupyter notebooks. Raising `SystemExit` in a `try/except` clause gives (possibly related to #11960):
```python
try:
5 / 0
except Exception as e:
raise SystemExit
```
```
ERROR:root:Internal Python error in the inspect module.
Below is the traceback from this internal error.
Traceback (most recent call last):
File "<ipython-input-4-30deb0bd6c2a>", line 2, in <module>
5/0
ZeroDivisionError: division by zero
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/paul/opt/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3319, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-4-30deb0bd6c2a>", line 4, in <module>
raise SystemExit
SystemExit
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/paul/opt/miniconda3/lib/python3.8/site-packages/IPython/core/ultratb.py", line 1151, in get_records
return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
File "/Users/paul/opt/miniconda3/lib/python3.8/site-packages/IPython/core/ultratb.py", line 319, in wrapped
return f(*args, **kwargs)
File "/Users/paul/opt/miniconda3/lib/python3.8/site-packages/IPython/core/ultratb.py", line 353, in _fixed_getinnerframes
records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
File "/Users/paul/opt/miniconda3/lib/python3.8/inspect.py", line 1503, in getinnerframes
frameinfo = (tb.tb_frame,) + getframeinfo(tb, context)
AttributeError: 'tuple' object has no attribute 'tb_frame'
An exception has occurred, use %tb to see the full traceback.
```
This used to work fine until I updated to the latest version (on OSX): `7.11.1`. This occurs on both Py37 and Py38. This error does not occur in `7.10.2`. | ipython/ipython | diff --git a/IPython/core/tests/test_ultratb.py b/IPython/core/tests/test_ultratb.py
index e167d9950..8ed73873a 100644
--- a/IPython/core/tests/test_ultratb.py
+++ b/IPython/core/tests/test_ultratb.py
@@ -298,6 +298,13 @@ class Python3ChainedExceptionsTest(unittest.TestCase):
raise ValueError("Yikes") from None
"""
+ SYS_EXIT_WITH_CONTEXT_CODE = """
+try:
+ 1/0
+except Exception as e:
+ raise SystemExit(1)
+ """
+
def test_direct_cause_error(self):
with tt.AssertPrints(["KeyError", "NameError", "direct cause"]):
ip.run_cell(self.DIRECT_CAUSE_ERROR_CODE)
@@ -306,6 +313,11 @@ def test_exception_during_handling_error(self):
with tt.AssertPrints(["KeyError", "NameError", "During handling"]):
ip.run_cell(self.EXCEPTION_DURING_HANDLING_CODE)
+ def test_sysexit_while_handling_error(self):
+ with tt.AssertPrints(["SystemExit", "to see the full traceback"]):
+ with tt.AssertNotPrints(["another exception"], suppress=False):
+ ip.run_cell(self.SYS_EXIT_WITH_CONTEXT_CODE)
+
def test_suppress_exception_chaining(self):
with tt.AssertNotPrints("ZeroDivisionError"), \
tt.AssertPrints("ValueError", suppress=False):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 8.26 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.10",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==1.0.0
asttokens==3.0.0
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
comm==0.2.2
contourpy==1.3.1
cycler==0.12.1
debugpy==1.8.13
decorator==5.2.1
docrepr==0.2.0
docutils==0.21.2
exceptiongroup==1.2.2
executing==2.2.0
fonttools==4.56.0
idna==3.10
imagesize==1.4.1
iniconfig==2.1.0
intersphinx_registry==0.2501.23
ipykernel==6.29.5
-e git+https://github.com/ipython/ipython.git@78fea5f8d7bf6ca55796e82ed341e7fd291878f0#egg=ipython
jedi==0.19.2
Jinja2==3.1.6
jupyter_client==8.6.3
jupyter_core==5.7.2
kiwisolver==1.4.8
MarkupSafe==3.0.2
matplotlib==3.10.1
matplotlib-inline==0.1.7
nest-asyncio==1.6.0
numpy==2.2.4
packaging==24.2
parso==0.8.4
pexpect==4.9.0
pickleshare==0.7.5
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
Pygments==2.19.1
pyparsing==3.2.3
pytest==8.3.5
pytest-asyncio==0.21.2
python-dateutil==2.9.0.post0
pyzmq==26.3.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==8.1.3
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
testpath==0.6.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
| name: ipython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==1.0.0
- asttokens==3.0.0
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- comm==0.2.2
- contourpy==1.3.1
- cycler==0.12.1
- debugpy==1.8.13
- decorator==5.2.1
- docrepr==0.2.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- executing==2.2.0
- fonttools==4.56.0
- idna==3.10
- imagesize==1.4.1
- iniconfig==2.1.0
- intersphinx-registry==0.2501.23
- ipykernel==6.29.5
- ipython==8.27.0.dev0
- jedi==0.19.2
- jinja2==3.1.6
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- kiwisolver==1.4.8
- markupsafe==3.0.2
- matplotlib==3.10.1
- matplotlib-inline==0.1.7
- nest-asyncio==1.6.0
- numpy==2.2.4
- packaging==24.2
- parso==0.8.4
- pexpect==4.9.0
- pickleshare==0.7.5
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pygments==2.19.1
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-asyncio==0.21.2
- python-dateutil==2.9.0.post0
- pyzmq==26.3.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==8.1.3
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- testpath==0.6.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
prefix: /opt/conda/envs/ipython
| [
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_sysexit_while_handling_error"
] | [] | [
"IPython/core/tests/test_ultratb.py::ChangedPyFileTest::test_changing_py_file",
"IPython/core/tests/test_ultratb.py::NonAsciiTest::test_iso8859_5",
"IPython/core/tests/test_ultratb.py::NonAsciiTest::test_nonascii_msg",
"IPython/core/tests/test_ultratb.py::NonAsciiTest::test_nonascii_path",
"IPython/core/tests/test_ultratb.py::NestedGenExprTestCase::test_nested_genexpr",
"IPython/core/tests/test_ultratb.py::IndentationErrorTest::test_indentationerror_shows_line",
"IPython/core/tests/test_ultratb.py::SyntaxErrorTest::test_changing_py_file",
"IPython/core/tests/test_ultratb.py::SyntaxErrorTest::test_non_syntaxerror",
"IPython/core/tests/test_ultratb.py::SyntaxErrorTest::test_syntaxerror_no_stacktrace_at_compile_time",
"IPython/core/tests/test_ultratb.py::SyntaxErrorTest::test_syntaxerror_stacktrace_when_running_compiled_code",
"IPython/core/tests/test_ultratb.py::MemoryErrorTest::test_memoryerror",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_direct_cause_error",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_exception_during_handling_error",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_plain_direct_cause_error",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_plain_exception_during_handling_error",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_plain_suppress_exception_chaining",
"IPython/core/tests/test_ultratb.py::Python3ChainedExceptionsTest::test_suppress_exception_chaining",
"IPython/core/tests/test_ultratb.py::RecursionTest::test_no_recursion",
"IPython/core/tests/test_ultratb.py::RecursionTest::test_recursion_one_frame",
"IPython/core/tests/test_ultratb.py::RecursionTest::test_recursion_three_frames",
"IPython/core/tests/test_ultratb.py::PEP678NotesReportingTest::test_plain_reports_notes",
"IPython/core/tests/test_ultratb.py::PEP678NotesReportingTest::test_verbose_reports_notes",
"IPython/core/tests/test_ultratb.py::test_handlers"
] | [] | BSD 3-Clause "New" or "Revised" License | 19,397 | 519 | [
"IPython/core/ultratb.py"
] |
|
sfneal__pdfconduit-127 | cef92d8105fca0bb2a91d356a2680c510f1c3b31 | 2024-08-26 18:02:28 | cef92d8105fca0bb2a91d356a2680c510f1c3b31 | diff --git a/pdfconduit/_version.py b/pdfconduit/_version.py
index 7039708..72aa758 100644
--- a/pdfconduit/_version.py
+++ b/pdfconduit/_version.py
@@ -1,1 +1,1 @@
-__version__ = "4.1.0"
+__version__ = "4.1.1"
diff --git a/pdfconduit/internals/base.py b/pdfconduit/internals/base.py
index 3bb457b..22f5729 100644
--- a/pdfconduit/internals/base.py
+++ b/pdfconduit/internals/base.py
@@ -21,6 +21,7 @@ class BaseConduit(ABC):
_closed: bool = False
_pdf_file = None
+ _stream: BytesIO = None
_reader: PdfReader
_writer: PdfWriter
@@ -55,7 +56,8 @@ class BaseConduit(ABC):
return self
def _read_stream(self, stream: BytesIO) -> Self:
- self._reader = PdfReader(stream)
+ self._stream = stream
+ self._reader = PdfReader(self._stream)
self._writer: PdfWriter = PdfWriter(clone_from=self._reader)
return self
@@ -80,6 +82,8 @@ class BaseConduit(ABC):
self._reader.close()
if self._pdf_file is not None:
self._pdf_file.close()
+ if self._stream is not None:
+ self._stream.close()
# Confirm output path is set
if self.output is None:
diff --git a/pdfconduit/pdfconduit.py b/pdfconduit/pdfconduit.py
index 8253723..9606a1b 100644
--- a/pdfconduit/pdfconduit.py
+++ b/pdfconduit/pdfconduit.py
@@ -1,3 +1,5 @@
+from tempfile import TemporaryFile, NamedTemporaryFile
+
from pypdf import PdfWriter
from pypdf import PdfWriter
@@ -5,7 +7,7 @@ from pypdf import PdfWriter
from pdfconduit.convert import Flatten
from pdfconduit.internals import BaseConduit
from pdfconduit.settings import Compression, ImageQualityRange, Encryption
-from pdfconduit.transform import Merge
+from pdfconduit.transform import Merge2
from pdfconduit.transform import Rotate, Upscale
from pdfconduit.utils import Info
from pdfconduit.utils.typing import Optional, Tuple, Self, Annotated
@@ -33,9 +35,8 @@ class Pdfconduit(BaseConduit):
def merge_fast(self, pdfs: list) -> Self:
self._set_default_output("merged")
- self._path = (
- Merge([self._path] + pdfs, output_dir=self._output_dir).use_pdfrw().merge()
- )
+ pdf_objects = [self._stream if self._stream is not None else self._path] + pdfs
+ self._path = Merge2(pdf_objects, output=self.output).use_pdfrw().merge()
return self._open_and_read()
def rotate(self, degrees: int) -> Self:
@@ -48,7 +49,16 @@ class Pdfconduit(BaseConduit):
if degrees % 90 == 0:
return self.rotate(degrees)
- self._path = Rotate(self._path, degrees).use_pdfrw().rotate()
+ self._set_default_output("rotated")
+ self._path = (
+ Rotate(
+ self._stream if self._stream is not None else self._path,
+ degrees,
+ output=self.output,
+ )
+ .use_pdfrw()
+ .rotate()
+ )
return self._open_and_read()
def slice(self, start: int, end: int) -> Self:
@@ -89,11 +99,22 @@ class Pdfconduit(BaseConduit):
def flatten(self) -> Self:
# todo: re-write Flatten & other convert classes
# todo: fix issue with flattened pdf output path
+ if not self._path and self._stream:
+ temp = NamedTemporaryFile(suffix=".pdf")
+ temp.write(self._stream.getvalue())
+ path = temp.name
+ else:
+ temp = None
+ path = self._path
+
if not self._closed:
self.write()
- self._path = Flatten(
- self._path, suffix="flattened", tempdir=self._output_dir
- ).save()
+
+ self._path = Flatten(path, suffix="flattened", tempdir=self._output_dir).save()
+
+ if temp is not None:
+ temp.close()
+
return self._open_and_read()
def minify(self) -> Self:
diff --git a/pdfconduit/transform/__init__.py b/pdfconduit/transform/__init__.py
index 68cea6a..5e4614f 100644
--- a/pdfconduit/transform/__init__.py
+++ b/pdfconduit/transform/__init__.py
@@ -1,5 +1,6 @@
from pdfconduit.transform.merge import Merge
+from pdfconduit.transform.merge2 import Merge2
from pdfconduit.transform.rotate import Rotate
from pdfconduit.transform.upscale import Upscale
-__all__ = [Merge, Rotate, Upscale]
+__all__ = [Merge, Merge2, Rotate, Upscale]
diff --git a/pdfconduit/transform/merge2.py b/pdfconduit/transform/merge2.py
new file mode 100644
index 0000000..129f495
--- /dev/null
+++ b/pdfconduit/transform/merge2.py
@@ -0,0 +1,51 @@
+# Merge PDF documents
+from io import BytesIO
+from typing import Union, Iterable
+
+from pdfrw import (
+ PdfReader as PdfrwReader,
+ PdfWriter as PdfrwWriter,
+ IndirectPdfDict as PdfrwIndirectPdfDict,
+)
+from pypdf import PdfWriter as PyPdfWriter
+
+from pdfconduit.utils.driver import PdfDriver
+
+
+class Merge2(PdfDriver):
+ def __init__(self, pdfs: Iterable[Union[str, BytesIO]], output: str):
+ self._pdfs = pdfs
+ self._output = output
+
+ def merge(self) -> str:
+ """Merge list of PDF files to a single PDF file."""
+ return self.execute()
+
+ def pdfrw(self):
+ writer = PdfrwWriter()
+
+ for pdf_object in self._pdfs:
+ if isinstance(pdf_object, BytesIO):
+ reader = PdfrwReader(fdata=pdf_object.getvalue())
+ else:
+ reader = PdfrwReader(fname=pdf_object)
+ writer.addpages(reader.pages)
+
+ writer.trailer.Info = PdfrwIndirectPdfDict(
+ Author="Stephen Neal",
+ Creator="pdfconduit",
+ Producer="pdfconduit",
+ )
+ writer.write(self._output)
+ return self._output
+
+ def pypdf(self):
+ merger = PyPdfWriter()
+
+ for pdf in self._pdfs:
+ merger.append(pdf)
+
+ merger.write(self._output)
+ merger.close()
+
+ return self._output
diff --git a/pdfconduit/transform/rotate.py b/pdfconduit/transform/rotate.py
index 784de03..e88ccd1 100644
--- a/pdfconduit/transform/rotate.py
+++ b/pdfconduit/transform/rotate.py
@@ -1,7 +1,8 @@
# Rotate a pdf file
import os
+from io import BytesIO
from tempfile import NamedTemporaryFile
-from typing import Optional
+from typing import Optional, Union
from pdfrw import (
PdfReader as PdfrwReader,
@@ -17,25 +18,30 @@ class Rotate(PdfDriver):
def __init__(
self,
- file_name: str,
+ pdf: Union[str, BytesIO],
rotation: int,
suffix: str = "rotated",
tempdir: Optional[str] = None,
+ output: Optional[str] = None,
):
- self.file_name = file_name
+ self.pdf_object = pdf
self.rotation = rotation
self.suffix = suffix
- self.tempdir = tempdir
-
- if tempdir:
- with NamedTemporaryFile(suffix=".pdf", dir=tempdir, delete=False) as temp:
- self.outfn = temp.name
- elif suffix:
- self.outfn = os.path.join(
- os.path.dirname(file_name), add_suffix(file_name, suffix)
- )
+
+ if output:
+ self.outfn = output
else:
- self.outfn = NamedTemporaryFile(suffix=".pdf").name
+ self.tempdir = tempdir
+
+ if tempdir:
+ with NamedTemporaryFile(
+ suffix=".pdf", dir=tempdir, delete=False
+ ) as temp:
+ self.outfn = temp.name
+ elif suffix:
+ self.outfn = os.path.join(os.path.dirname(pdf), add_suffix(pdf, suffix))
+ else:
+ self.outfn = NamedTemporaryFile(suffix=".pdf").name
def __str__(self) -> str:
return self.file
@@ -48,7 +54,11 @@ class Rotate(PdfDriver):
return str(self.outfn)
def pdfrw(self) -> str:
- trailer = PdfrwReader(self.file_name)
+ if isinstance(self.pdf_object, BytesIO):
+ trailer = PdfrwReader(fdata=self.pdf_object.getvalue())
+ else:
+ trailer = PdfrwReader(fname=self.pdf_object)
+
pages = trailer.pages
ranges = [[1, len(pages)]]
@@ -66,7 +76,7 @@ class Rotate(PdfDriver):
return self.outfn
def pypdf(self) -> str:
- reader = PypdfReader(self.file_name)
+ reader = PypdfReader(self.pdf_object)
writer = PypdfWriter()
for page_num in range(1, reader.get_num_pages()):
| Add support for pdfrw actions from PDF streams
- fix issues with the flatten, rotate_exact, etc. methods that use the pdfrw back-end
- [x] figure out how to use streams with pdfrw
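For reference, pdfrw's `PdfReader` does not accept a file object directly, but it does accept raw bytes, which is the approach the patch above takes through the `fdata` keyword. A minimal sketch (the input path is a placeholder):

```python
from io import BytesIO

from pdfrw import PdfReader

# Load a PDF into memory; "input.pdf" is a hypothetical path.
with open("input.pdf", "rb") as fh:
    stream = BytesIO(fh.read())

# pdfrw reads from bytes via `fdata` instead of a filename via `fname`.
reader = PdfReader(fdata=stream.getvalue())
print(len(reader.pages))
```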
### tests
- [x] can flatten pdf from stream
- [x] can rotate_exact pdf from stream
- [x] [add more as found] | sfneal/pdfconduit | diff --git a/tests/__init__.py b/tests/__init__.py
index 5b8aff4..c144c1f 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -4,8 +4,10 @@ import os
import shutil
import unittest
from decimal import Decimal
+from io import BytesIO
from tempfile import TemporaryDirectory
from time import time
+from typing import Optional
from pdfconduit import Pdfconduit, Info
from pdfconduit.utils.typing import Iterable
@@ -145,6 +147,10 @@ class PdfconduitTestCase(unittest.TestCase):
def assertFileSizeDecreased(self, original: str, modified: str):
self.assertLess(os.path.getsize(modified), os.path.getsize(original))
+ def _get_pdf_byte_stream(self, path: Optional[str] = None) -> BytesIO:
+ with open(path if path else self.pdf_path, "rb") as fh:
+ return BytesIO(fh.read())
+
class Timer:
def __init__(self, decimal_places=2):
diff --git a/tests/test_flatten.py b/tests/test_flatten.py
index 4d60890..f5fb1df 100644
--- a/tests/test_flatten.py
+++ b/tests/test_flatten.py
@@ -33,3 +33,14 @@ class TestFlatten(PdfconduitTestCase):
self.assertPdfExists(self.conduit.output)
self.assertPdfPagesEqual(pdf_path, self.conduit.output)
# todo: improve assertions
+
+ @parameterized.expand(flatten_params, name_func=flatten_name_func)
+ def test_flatten_from_stram(self, pdf_path: str):
+ self.conduit = Pdfconduit(
+ self._get_pdf_byte_stream(pdf_path)
+ ).set_output_directory(self.temp.name)
+ self.conduit.flatten().write()
+
+ self.assertPdfExists(self.conduit.output)
+ self.assertPdfPagesEqual(pdf_path, self.conduit.output)
+ # todo: improve assertions
diff --git a/tests/test_merge.py b/tests/test_merge.py
index a8ada3b..fd3b96b 100644
--- a/tests/test_merge.py
+++ b/tests/test_merge.py
@@ -68,3 +68,19 @@ class TestMerge(PdfconduitTestCase):
self.assertPdfExists(self.conduit.output)
self.assertCorrectNumPages(main_pdf, pdfs_to_merge, self.conduit.info.pages)
+
+ @parameterized.expand(merge_params, name_func=merge_name_func)
+ def test_can_merge_pdfs_fast_from_stream(
+ self, main_pdf: str, pdfs_to_merge: List[str]
+ ):
+ self.pdf_path = main_pdf
+ self.conduit = Pdfconduit(
+ self._get_pdf_byte_stream(self.pdf_path)
+ ).set_output_directory(self.temp.name)
+
+ self.conduit.merge_fast(pdfs_to_merge)
+
+ self.conduit.write()
+
+ self.assertPdfExists(self.conduit.output)
+ self.assertCorrectNumPages(main_pdf, pdfs_to_merge, self.conduit.info.pages)
diff --git a/tests/test_pdfconduit.py b/tests/test_pdfconduit.py
index d3335b9..601cbd8 100644
--- a/tests/test_pdfconduit.py
+++ b/tests/test_pdfconduit.py
@@ -1,7 +1,6 @@
import os
import warnings
-from io import BufferedReader, BytesIO
-from typing import Optional
+from io import BufferedReader
from pypdf import PdfReader, PdfWriter
@@ -114,7 +113,3 @@ class TestUsage(PdfconduitTestCase):
self.conduit.cleanup()
self.assertPdfDoesntExists(self.conduit.output)
-
- def _get_pdf_byte_stream(self, path: Optional[str] = None) -> BytesIO:
- with open(path if path else self.pdf_path, "rb") as fh:
- return BytesIO(fh.read())
diff --git a/tests/test_rotate.py b/tests/test_rotate.py
index 8c6a294..95f6b7a 100644
--- a/tests/test_rotate.py
+++ b/tests/test_rotate.py
@@ -3,7 +3,7 @@ from typing import List
from parameterized import parameterized
-from pdfconduit import Info
+from pdfconduit import Info, Pdfconduit
from tests import PdfconduitTestCase
@@ -56,3 +56,15 @@ class TestRotate(PdfconduitTestCase):
self.assertTrue(
"Rotation angle must be a multiple of 90" in str(context.exception)
)
+
+ @parameterized.expand(rotate_exact_params, name_func=rotate_name_func)
+ def test_can_rotate_exact_from_stream(self, rotation: int):
+ self.conduit = Pdfconduit(
+ self._get_pdf_byte_stream(self.pdf_path)
+ ).set_output_directory(self.temp.name)
+ self.conduit.rotate_exact(rotation).set_output_suffix(
+ "rotated_{}".format(rotation)
+ ).write()
+
+ self.assertPdfExists(self.conduit.output)
+ self.assertPdfRotation(self.conduit, rotation)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 5
} | 4.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cffi==1.17.1
chardet==5.2.0
colorama==0.4.6
cryptography==43.0.0
exceptiongroup==1.2.2
iniconfig==2.1.0
looptools==1.2.4
lxml==5.3.0
packaging==24.2
parameterized==0.9.0
-e git+https://github.com/sfneal/pdfconduit.git@cef92d8105fca0bb2a91d356a2680c510f1c3b31#egg=pdfconduit
pdfrw==0.4
pillow==10.4.0
PillowImage==1.2.0
pluggy==1.5.0
PyBundle==1.0.6
pycparser==2.22
PyMuPDF==1.24.9
PyMuPDFb==1.24.9
pypdf==4.3.1
pytest==8.3.5
reportlab==4.2.2
tomli==2.2.1
tqdm==4.66.5
typing_extensions==4.13.0
Unidecode==1.3.8
| name: pdfconduit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cffi==1.17.1
- chardet==5.2.0
- colorama==0.4.6
- cryptography==43.0.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- looptools==1.2.4
- lxml==5.3.0
- packaging==24.2
- parameterized==0.9.0
- pdfrw==0.4
- pillow==10.4.0
- pillowimage==1.2.0
- pluggy==1.5.0
- pybundle==1.0.6
- pycparser==2.22
- pymupdf==1.24.9
- pymupdfb==1.24.9
- pypdf==4.3.1
- pytest==8.3.5
- reportlab==4.2.2
- tomli==2.2.1
- tqdm==4.66.5
- typing-extensions==4.13.0
- unidecode==1.3.8
prefix: /opt/conda/envs/pdfconduit
| [
"tests/test_flatten.py::TestFlatten::test_flatten_from_stram.article,pdf",
"tests/test_flatten.py::TestFlatten::test_flatten_from_stram.charts,pdf",
"tests/test_flatten.py::TestFlatten::test_flatten_from_stram.document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast_from_stream.0_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast_from_stream.1_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast_from_stream.2_workbook,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast_from_stream.3_article,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast_from_stream.4_document,pdf",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact.124",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact.239",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact.289",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact_from_stream.112",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact_from_stream.246",
"tests/test_rotate.py::TestRotate::test_can_rotate_exact_from_stream.287",
"tests/test_rotate.py::TestRotate::test_cannot_rotate.125",
"tests/test_rotate.py::TestRotate::test_cannot_rotate.192",
"tests/test_rotate.py::TestRotate::test_cannot_rotate.301"
] | [] | [
"tests/test_flatten.py::TestFlatten::test_flatten.article,pdf",
"tests/test_flatten.py::TestFlatten::test_flatten.charts,pdf",
"tests/test_flatten.py::TestFlatten::test_flatten.document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs.0_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs.1_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs.2_workbook,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs.3_article,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs.4_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast.0_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast.1_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast.2_workbook,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast.3_article,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_fast.4_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_using_context.0_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_using_context.1_document,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_using_context.2_workbook,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_using_context.3_article,pdf",
"tests/test_merge.py::TestMerge::test_can_merge_pdfs_using_context.4_document,pdf",
"tests/test_pdfconduit.py::TestUsage::test_can_read_encrypted_pdf",
"tests/test_pdfconduit.py::TestUsage::test_can_read_from_stream",
"tests/test_pdfconduit.py::TestUsage::test_can_read_from_stream_and_write_to_file",
"tests/test_pdfconduit.py::TestUsage::test_can_read_unencrypted_pdf",
"tests/test_pdfconduit.py::TestUsage::test_can_set_custom_metadata",
"tests/test_pdfconduit.py::TestUsage::test_can_set_default_metadata",
"tests/test_pdfconduit.py::TestUsage::test_can_set_output",
"tests/test_pdfconduit.py::TestUsage::test_can_set_output_suffix",
"tests/test_pdfconduit.py::TestUsage::test_can_set_temp_output",
"tests/test_pdfconduit.py::TestUsage::test_can_use_context_manager",
"tests/test_pdfconduit.py::TestUsage::test_can_write_stream_to_file_without_output",
"tests/test_rotate.py::TestRotate::test_can_rotate.180",
"tests/test_rotate.py::TestRotate::test_can_rotate.270",
"tests/test_rotate.py::TestRotate::test_can_rotate.90"
] | [] | Apache License 2.0 | 19,406 | 2,358 | [
"pdfconduit/_version.py",
"pdfconduit/internals/base.py",
"pdfconduit/pdfconduit.py",
"pdfconduit/transform/__init__.py",
"pdfconduit/transform/rotate.py"
] |
|
dag-hammarskjold-library__dlx-368 | 0e791ba9d4e2763bc95a812581f573cc205cd314 | 2024-08-26 18:33:33 | 7bc9c436adc657b48945eafa70a190381864e26c | diff --git a/dlx/marc/__init__.py b/dlx/marc/__init__.py
index 8935438..59e3e8b 100644
--- a/dlx/marc/__init__.py
+++ b/dlx/marc/__init__.py
@@ -1375,10 +1375,6 @@ class Marc(object):
#### de-serializations
- @classmethod
- def resolve_ambiguous(cls, subfields):
- pass
-
def from_mij(self, string):
pass
@@ -1512,6 +1508,7 @@ class Auth(Marc):
_xcache = {}
_pcache = {}
_langcache = {}
+ _acache = {}
@classmethod
def build_cache(cls):
@@ -1595,7 +1592,7 @@ class Auth(Marc):
return
values = ''.join([x.value for x in subfields])
- cached = Auth._xcache.get('multi', {}).get(values, {}).get(auth_tag, {})
+ cached = Auth._xcache.get('__multi__', {}).get(values, {}).get(auth_tag, {})
if cached:
return cached
@@ -1603,11 +1600,40 @@ class Auth(Marc):
query = Query(Condition(auth_tag, dict(zip([x.code for x in subfields], [x.value for x in subfields])), record_type='auth'))
auths = AuthSet.from_query(query.compile(), projection={'_id': 1})
xrefs = [r.id for r in list(auths)]
-
- Auth._xcache.setdefault('multi', {}).setdefault(values, {})[auth_tag] = values
+ Auth._xcache.setdefault('__multi__', {}).setdefault(values, {})[auth_tag] = xrefs
return xrefs
+ @classmethod
+ def resolve_ambiguous(cls, *, tag: str, subfields: list, record_type: str) -> int:
+ '''Determines if there is an exact authority match for specific subfields'''
+
+ subfields_str = str([(x.code, x.value) for x in subfields])
+
+ if xref := Auth._acache.get(subfields_str):
+ return xref
+
+ if matches := cls.xlookup_multi(tag, subfields, record_type=record_type):
+ if len(matches) == 1:
+ Auth._acache.setdefault(subfields_str, matches[0])
+
+ return matches[0]
+ elif len(matches) > 1:
+ exact_matches = []
+
+ for xref in matches:
+ auth_subfields = cls.from_id(xref).heading_field.subfields
+ auth_subfields = [(x.code, x.value) for x in auth_subfields]
+
+ if [(x.code, x.value) for x in subfields] == auth_subfields:
+ exact_matches.append(xref)
+
+ Auth._acache.setdefault(subfields_str, exact_matches[0])
+
+ return exact_matches[0]
+
+ return None
+
@classmethod
def partial_lookup(cls, tag, code, string, *, record_type, limit=25):
"""Returns a list of tuples containing the authority-controlled values
| Resolving ambiguous authority string values
For use in importing records where the xref for authority-controlled fields is not provided. The source auth record needs to be determined, if possible, given only one or more string values. | dag-hammarskjold-library/dlx | diff --git a/tests/test_marc.py b/tests/test_marc.py
index 7bb7bd0..293a555 100644
--- a/tests/test_marc.py
+++ b/tests/test_marc.py
@@ -920,3 +920,21 @@ def test_list_attached(db, bibs, auths):
assert len(auth.list_attached(usage_type='bib')) == 2
assert auth.list_attached()[0].id == 1
assert auth.list_attached()[1].id == 2
+
+def test_resolve_ambiguous(db):
+ from dlx.marc import Bib, Auth, AmbiguousAuthValue, Literal
+
+ auth = Auth().set('100', 'a', 'ambiguous').commit()
+ Auth().set('100', 'a', 'ambiguous').set('100', 'b', 'xyz').commit()
+
+ assert len(Auth.xlookup('700', 'a', 'ambiguous', record_type='bib')) == 2
+
+ xref = Auth.resolve_ambiguous(
+ tag='700',
+ subfields=[Literal(code='a', value='ambiguous')],
+ record_type='bib'
+ )
+
+ assert xref == auth.id
+
+
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==23.2.0
boto3==1.34.100
botocore==1.34.100
certifi==2024.7.4
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
cryptography==42.0.7
-e git+https://github.com/dag-hammarskjold-library/dlx.git@0e791ba9d4e2763bc95a812581f573cc205cd314#egg=dlx
dnspython==2.6.1
exceptiongroup==1.2.1
idna==3.7
iniconfig==2.0.0
Jinja2==3.1.4
jmespath==1.0.1
joblib==1.4.2
jsonschema==4.0.0
lxml==5.2.1
MarkupSafe==2.1.5
mongomock==4.1.2
moto==5.0.8
nltk==3.8.1
packaging==24.0
pluggy==1.5.0
pycparser==2.22
pymongo==4.6.3
pyrsistent==0.20.0
pytest==8.2.0
python-dateutil==2.9.0.post0
pytz==2024.1
PyYAML==6.0.1
regex==2024.4.28
requests==2.32.3
responses==0.25.0
s3transfer==0.10.1
sentinels==1.0.0
six==1.16.0
tomli==2.0.1
tqdm==4.66.4
urllib3==1.26.19
Werkzeug==3.0.3
xlrd==1.2.0
xmldiff==2.4
xmltodict==0.13.0
| name: dlx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==23.2.0
- boto3==1.34.100
- botocore==1.34.100
- certifi==2024.7.4
- cffi==1.16.0
- charset-normalizer==3.3.2
- click==8.1.7
- cryptography==42.0.7
- dnspython==2.6.1
- exceptiongroup==1.2.1
- idna==3.7
- iniconfig==2.0.0
- jinja2==3.1.4
- jmespath==1.0.1
- joblib==1.4.2
- jsonschema==4.0.0
- lxml==5.2.1
- markupsafe==2.1.5
- mongomock==4.1.2
- moto==5.0.8
- nltk==3.8.1
- packaging==24.0
- pluggy==1.5.0
- pycparser==2.22
- pymongo==4.6.3
- pyrsistent==0.20.0
- pytest==8.2.0
- python-dateutil==2.9.0.post0
- pytz==2024.1
- pyyaml==6.0.1
- regex==2024.4.28
- requests==2.32.3
- responses==0.25.0
- s3transfer==0.10.1
- sentinels==1.0.0
- six==1.16.0
- tomli==2.0.1
- tqdm==4.66.4
- urllib3==1.26.19
- werkzeug==3.0.3
- xlrd==1.2.0
- xmldiff==2.4
- xmltodict==0.13.0
prefix: /opt/conda/envs/dlx
| [
"tests/test_marc.py::test_resolve_ambiguous"
] | [] | [
"tests/test_marc.py::test_init_marc",
"tests/test_marc.py::test_init_bib",
"tests/test_marc.py::test_init_auth",
"tests/test_marc.py::test_init_auth_check",
"tests/test_marc.py::test_commit",
"tests/test_marc.py::test_delete",
"tests/test_marc.py::test_from_id",
"tests/test_marc.py::test_querydocument",
"tests/test_marc.py::test_from_query",
"tests/test_marc.py::test_querystring",
"tests/test_marc.py::test_from_aggregation",
"tests/test_marc.py::test_atlasquery",
"tests/test_marc.py::test_get_field",
"tests/test_marc.py::test_field_get_value",
"tests/test_marc.py::test_set_field",
"tests/test_marc.py::test_get_value",
"tests/test_marc.py::test_get_xref",
"tests/test_marc.py::test_set",
"tests/test_marc.py::test_zmerge",
"tests/test_marc.py::test_xmerge",
"tests/test_marc.py::test_set_008",
"tests/test_marc.py::test_delete_field",
"tests/test_marc.py::test_auth_lookup",
"tests/test_marc.py::test_xlookup",
"tests/test_marc.py::test_auth_control",
"tests/test_marc.py::test_language",
"tests/test_marc.py::test_to_xml",
"tests/test_marc.py::test_xml_encoding",
"tests/test_marc.py::test_to_mrc",
"tests/test_marc.py::test_to_mrk",
"tests/test_marc.py::test_from_mrk",
"tests/test_marc.py::test_from_json",
"tests/test_marc.py::test_to_jmarcnx",
"tests/test_marc.py::test_field_from_json",
"tests/test_marc.py::test_partial_lookup",
"tests/test_marc.py::test_diff",
"tests/test_marc.py::test_blank_fields",
"tests/test_marc.py::test_auth_in_use",
"tests/test_marc.py::test_catch_delete_auth",
"tests/test_marc.py::test_from_xml",
"tests/test_marc.py::test_auth_use_count",
"tests/test_marc.py::test_auth_merge",
"tests/test_marc.py::test_logical_fields",
"tests/test_marc.py::test_bib_files",
"tests/test_marc.py::test_list_attached"
] | [] | null | 19,407 | 732 | [
"dlx/marc/__init__.py"
] |
|
tobymao__sqlglot-3975 | f7e4e4adc64aaef73d23c2550a4bfa9958d4851b | 2024-08-26 22:07:32 | 905b7226ae4a6dc505fe303bb4df3818cb586826 | diff --git a/sqlglot/dialects/snowflake.py b/sqlglot/dialects/snowflake.py
index 9c981aa8..cdb98407 100644
--- a/sqlglot/dialects/snowflake.py
+++ b/sqlglot/dialects/snowflake.py
@@ -93,7 +93,9 @@ def _build_date_time_add(expr_type: t.Type[E]) -> t.Callable[[t.List], E]:
# https://docs.snowflake.com/en/sql-reference/functions/div0
def _build_if_from_div0(args: t.List) -> exp.If:
- cond = exp.EQ(this=seq_get(args, 1), expression=exp.Literal.number(0))
+ cond = exp.EQ(this=seq_get(args, 1), expression=exp.Literal.number(0)).and_(
+ exp.Is(this=seq_get(args, 0), expression=exp.null()).not_()
+ )
true = exp.Literal.number(0)
false = exp.Div(this=seq_get(args, 0), expression=seq_get(args, 1))
return exp.If(this=cond, true=true, false=false)
| Snowflake's DIV0 function is incorrectly translated to a `SELECT CASE` (even when targeting Snowflake as an output dialect)
```py
QUERY = "SELECT DIV0(a, b) FROM t"
# read: None
# write: None
print( sqlglot.parse_one(QUERY).sql() )
# 'SELECT DIV0(a, b) FROM t'
# read: Snowflake
# write: None
print( sqlglot.parse_one(QUERY, read='snowflake').sql() )
# 'SELECT CASE WHEN b = 0 THEN 0 ELSE a / b END FROM t'
# read: None
# write: Snowflake
print( sqlglot.parse_one(QUERY).sql(dialect='snowflake') )
# 'SELECT DIV0(a, b) FROM t'
# read: Snowflake
# write: Snowflake
print( sqlglot.parse_one(QUERY, read='snowflake').sql(dialect='snowflake') )
# 'SELECT IFF(b = 0, 0, a / b) FROM t'
```
When reading the `DIV0(a, b)` function into sqlglot in the `read='snowflake'` dialect, the function is always converted to `IFF(b = 0, 0, a / b)`. However, these functions are different when `a IS NULL` and `b = 0`:
- `DIV0(a, b)` for `(a, b) = (NULL, 0)` gives `NULL`
- `IFF(b = 0, 0, a / b)` for `(a, b) = (NULL, 0)` gives `0`
A correct translation would be more like `IFF(b = 0 AND a IS NOT NULL, 0, a / b)`.
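For reference, that null-safe condition can be assembled with sqlglot's expression builders, mirroring the fix in the patch above (the column names are placeholders):

```python
from sqlglot import exp

a, b = exp.column("a"), exp.column("b")

# b = 0 AND NOT a IS NULL: only rewrite to 0 when the dividend is non-NULL.
cond = exp.EQ(this=b, expression=exp.Literal.number(0)).and_(
    exp.Is(this=a, expression=exp.null()).not_()
)
safe = exp.If(this=cond, true=exp.Literal.number(0), false=exp.Div(this=a, expression=b))

# Should render as: IFF(b = 0 AND NOT a IS NULL, 0, a / b)
print(safe.sql(dialect="snowflake"))
```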
**Official Documentation**
> Please include links to official SQL documentation related to your issue.
Unfortunately, [Snowflake's docs for DIV0](https://docs.snowflake.com/en/sql-reference/functions/div0) don't explicitly indicate the behavior when `a IS NULL`, but I have confirmed the difference in behavior with an actual Snowflake query:
```
SET (a, b) = (NULL, 0);
SELECT DIV0($a, $b), IFF($b = 0, 0, $a/$b);
+--------------+-----------------------+
| DIV0($A, $B) | IFF($B = 0, 0, $A/$B) |
|--------------+-----------------------|
| NULL | 0.000000 |
+--------------+-----------------------+
```
Snowflake functions typically default to propagating `NULL` inputs through to outputs unless otherwise specified (see e.g. the related function [DIV0NULL](https://docs.snowflake.com/en/sql-reference/functions/div0null) which avoids propagating `NULL`s in the `b` argument).
---
When the Read and Write dialects are the same (as in this case, where our input is Snowflake SQL and our output is also Snowflake SQL), it would be much nicer to preserve the SQL closer to how it was originally written (i.e. retain `DIV0`). I am not exactly sure how this could be done; perhaps a new sqlglot expression node type like `MultipleImplementations`, holding an array of children tagged by dialect, could preserve the "original" function call expression alongside the dialect-generic implementation.
Alternatively, a flag that disables (some?) function conversions would be useful for the case where the read/write dialects are known to be the same. | tobymao/sqlglot | diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index d01101e5..7837cc9e 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -598,12 +598,12 @@ WHERE
self.validate_all(
"DIV0(foo, bar)",
write={
- "snowflake": "IFF(bar = 0, 0, foo / bar)",
- "sqlite": "IIF(bar = 0, 0, CAST(foo AS REAL) / bar)",
- "presto": "IF(bar = 0, 0, CAST(foo AS DOUBLE) / bar)",
- "spark": "IF(bar = 0, 0, foo / bar)",
- "hive": "IF(bar = 0, 0, foo / bar)",
- "duckdb": "CASE WHEN bar = 0 THEN 0 ELSE foo / bar END",
+ "snowflake": "IFF(bar = 0 AND NOT foo IS NULL, 0, foo / bar)",
+ "sqlite": "IIF(bar = 0 AND NOT foo IS NULL, 0, CAST(foo AS REAL) / bar)",
+ "presto": "IF(bar = 0 AND NOT foo IS NULL, 0, CAST(foo AS DOUBLE) / bar)",
+ "spark": "IF(bar = 0 AND NOT foo IS NULL, 0, foo / bar)",
+ "hive": "IF(bar = 0 AND NOT foo IS NULL, 0, foo / bar)",
+ "duckdb": "CASE WHEN bar = 0 AND NOT foo IS NULL THEN 0 ELSE foo / bar END",
},
)
self.validate_all(
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 25.17 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@f7e4e4adc64aaef73d23c2550a4bfa9958d4851b#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_snowflake.py::TestSnowflake::test_snowflake"
] | [] | [
"tests/dialects/test_snowflake.py::TestSnowflake::test_alter_set_unset",
"tests/dialects/test_snowflake.py::TestSnowflake::test_copy",
"tests/dialects/test_snowflake.py::TestSnowflake::test_ddl",
"tests/dialects/test_snowflake.py::TestSnowflake::test_describe_table",
"tests/dialects/test_snowflake.py::TestSnowflake::test_flatten",
"tests/dialects/test_snowflake.py::TestSnowflake::test_from_changes",
"tests/dialects/test_snowflake.py::TestSnowflake::test_historical_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_match_recognize",
"tests/dialects/test_snowflake.py::TestSnowflake::test_minus",
"tests/dialects/test_snowflake.py::TestSnowflake::test_null_treatment",
"tests/dialects/test_snowflake.py::TestSnowflake::test_parse_like_any",
"tests/dialects/test_snowflake.py::TestSnowflake::test_querying_semi_structured_data",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_replace",
"tests/dialects/test_snowflake.py::TestSnowflake::test_regexp_substr",
"tests/dialects/test_snowflake.py::TestSnowflake::test_sample",
"tests/dialects/test_snowflake.py::TestSnowflake::test_semi_structured_types",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_columns",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_imported_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_objects",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_primary_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_schemas",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_sequences",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_tables",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_unique_keys",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_users",
"tests/dialects/test_snowflake.py::TestSnowflake::test_show_views",
"tests/dialects/test_snowflake.py::TestSnowflake::test_staged_files",
"tests/dialects/test_snowflake.py::TestSnowflake::test_storage_integration",
"tests/dialects/test_snowflake.py::TestSnowflake::test_stored_procedures",
"tests/dialects/test_snowflake.py::TestSnowflake::test_swap",
"tests/dialects/test_snowflake.py::TestSnowflake::test_table_literal",
"tests/dialects/test_snowflake.py::TestSnowflake::test_timestamps",
"tests/dialects/test_snowflake.py::TestSnowflake::test_try_cast",
"tests/dialects/test_snowflake.py::TestSnowflake::test_user_defined_functions",
"tests/dialects/test_snowflake.py::TestSnowflake::test_values"
] | [] | MIT License | 19,410 | 266 | [
"sqlglot/dialects/snowflake.py"
] |
|
airtai__fastagency-83 | b55c3e2910453314579bd30c27784276b188d6f5 | 2024-08-27 09:23:13 | b55c3e2910453314579bd30c27784276b188d6f5 | diff --git a/fastagency/studio/models/llms/openai.py b/fastagency/studio/models/llms/openai.py
index 83462078..adfb1912 100644
--- a/fastagency/studio/models/llms/openai.py
+++ b/fastagency/studio/models/llms/openai.py
@@ -55,7 +55,10 @@ class OpenAIAPIKey(Model):
@field_validator("api_key")
@classmethod
def validate_api_key(cls: Type["OpenAIAPIKey"], value: Any) -> Any:
- if not re.match(r"^sk-[a-zA-Z0-9]{20}T3BlbkFJ[a-zA-Z0-9]{20}$", value):
+ if not re.match(
+ r"^(sk-(proj-|None-|svcacct-)[A-Za-z0-9_-]+|sk-[a-zA-Z0-9]{20}T3BlbkFJ[a-zA-Z0-9]{20})$",
+ value,
+ ):
raise ValueError("Invalid OpenAI API Key")
return value
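A quick sanity check of the widened pattern against the key formats exercised in the tests below; all of the example keys are fake, allowlisted strings taken from the test patch:

```python
import re

OPENAI_KEY_RE = (
    r"^(sk-(proj-|None-|svcacct-)[A-Za-z0-9_-]+"
    r"|sk-[a-zA-Z0-9]{20}T3BlbkFJ[a-zA-Z0-9]{20})$"
)

examples = [
    "sk-sUeBP9asw6GiYHXqtg70T3BlbkFJJuLwJFco90bOpU0Ntest",  # legacy key format
    "sk-proj-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime",  # project-based
    "sk-None-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime",  # user-level
    "sk-svcacct-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime",  # service account
]

assert all(re.match(OPENAI_KEY_RE, key) for key in examples)
assert re.match(OPENAI_KEY_RE, "not-a-key") is None
```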
| Add new validation for OpenAI API key | airtai/fastagency | diff --git a/tests/studio/models/llms/test_openai.py b/tests/studio/models/llms/test_openai.py
index 007d75fb..c018bb6d 100644
--- a/tests/studio/models/llms/test_openai.py
+++ b/tests/studio/models/llms/test_openai.py
@@ -19,15 +19,25 @@ def test_import(monkeypatch: pytest.MonkeyPatch) -> None:
class TestOpenAIAPIKey:
- def test_constructor_success(self) -> None:
+ @pytest.mark.parametrize(
+ "openai_api_key",
+ [
+ "sk-sUeBP9asw6GiYHXqtg70T3BlbkFJJuLwJFco90bOpU0Ntest", # pragma: allowlist secret
+ # OpenAI currently supports three prefixes for API keys:
+ # project-based API key format
+ "sk-proj-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime", # pragma: allowlist secret
+ # user-level API key format
+ "sk-None-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime", # pragma: allowlist secret
+ # service account APi key format
+ "sk-svcacct-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime", # pragma: allowlist secret
+ ],
+ )
+ def test_constructor_success(self, openai_api_key: str) -> None:
api_key = OpenAIAPIKey(
- api_key="sk-sUeBP9asw6GiYHXqtg70T3BlbkFJJuLwJFco90bOpU0Ntest", # pragma: allowlist secret
+ api_key=openai_api_key,
name="Hello World!",
) # pragma: allowlist secret
- assert (
- api_key.api_key
- == "sk-sUeBP9asw6GiYHXqtg70T3BlbkFJJuLwJFco90bOpU0Ntest" # pragma: allowlist secret
- ) # pragma: allowlist secret
+ assert api_key.api_key == openai_api_key # pragma: allowlist secret
def test_constructor_failure(self) -> None:
with pytest.raises(ValueError, match="Invalid OpenAI API Key"):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==3.9.5
aiosignal==1.3.2
annotated-types==0.7.0
anthropic==0.49.0
anyio==4.9.0
argcomplete==3.6.1
async-timeout==4.0.3
asyncer==0.0.7
attrs==25.3.0
babel==2.17.0
bandit==1.7.9
beautifulsoup4==4.13.3
black==25.1.0
boltons==21.0.0
bracex==2.5.post1
cairocffi==1.7.1
CairoSVG==2.7.1
certifi==2025.1.31
cffi==1.17.1
cfgv==3.4.0
charset-normalizer==3.4.1
click==8.1.8
click-option-group==0.5.7
colorama==0.4.6
coverage==7.6.1
csscompressor==0.9.5
cssselect2==0.8.0
cyclic==1.0.0
datamodel-code-generator==0.25.6
decorator==5.2.1
defusedxml==0.7.1
Deprecated==1.2.18
detect-secrets==1.5.0
dirty-equals==0.7.1.post0
diskcache==5.6.3
distlib==0.3.9
distro==1.9.0
dnspython==2.7.0
docker==7.1.0
email_validator==2.2.0
eval_type_backport==0.2.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
face==24.0.0
fast-depends==2.4.12
-e git+https://github.com/airtai/fastagency.git@b55c3e2910453314579bd30c27784276b188d6f5#egg=fastagency
fastapi==0.111.1
fastapi-cli==0.0.7
fastapi-code-generator==0.5.1
faststream==0.5.37
filelock==3.18.0
FLAML==2.3.4
frozenlist==1.5.0
genson==1.3.0
ghp-import==2.1.0
gitdb==4.0.12
GitPython==3.1.44
glom==22.1.0
googleapis-common-protos==1.69.2
griffe==1.7.1
h11==0.14.0
htmlmin2==0.1.13
httpcore==1.0.7
httptools==0.6.4
httpx==0.27.0
identify==2.6.9
idna==3.10
importlib_metadata==7.1.0
importlib_resources==6.5.2
inflect==5.6.2
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
isort==5.13.2
Jinja2==3.1.6
jiter==0.9.0
jsmin==3.0.1
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
Markdown==3.7
markdown-it-py==3.0.0
markdownify==0.13.1
MarkupSafe==3.0.2
mdurl==0.1.2
mdx-include==1.4.2
mergedeep==1.3.4
mike==2.1.2
mkdocs==1.6.1
mkdocs-autorefs==1.4.1
mkdocs-get-deps==0.2.0
mkdocs-git-revision-date-localized-plugin==1.2.6
mkdocs-glightbox==0.4.0
mkdocs-literate-nav==0.6.1
mkdocs-macros-plugin==1.0.5
mkdocs-material==9.5.33
mkdocs-material-extensions==1.3.1
mkdocs-minify-plugin==0.8.0
mkdocs-static-i18n==1.2.3
mkdocstrings==0.25.2
mkdocstrings-python==1.10.9
multidict==6.2.0
mypy==1.11.1
mypy-extensions==1.0.0
nats-py==2.10.0
nodeenv==1.9.1
numpy==1.26.4
openai==1.69.0
opentelemetry-api==1.25.0
opentelemetry-exporter-otlp-proto-common==1.25.0
opentelemetry-exporter-otlp-proto-http==1.25.0
opentelemetry-instrumentation==0.46b0
opentelemetry-instrumentation-requests==0.46b0
opentelemetry-proto==1.25.0
opentelemetry-sdk==1.25.0
opentelemetry-semantic-conventions==0.46b0
opentelemetry-util-http==0.46b0
packaging @ file:///croot/packaging_1734472117206/work
paginate==0.5.7
pathspec==0.12.1
pbr==6.1.1
peewee==3.17.9
pillow==10.4.0
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre-commit==3.7.1
prisma==0.14.0
propcache==0.3.1
protobuf==4.25.6
pyarrow==19.0.1
pyautogen==0.2.35
pycparser==2.22
pydantic==2.11.1
pydantic-settings==2.4.0
pydantic_core==2.33.0
Pygments==2.19.1
pymdown-extensions==10.14.3
pyparsing==3.2.3
PySnooper==1.1.1
pytest==8.3.2
pytest-asyncio==0.24.0
pytest-mypy-plugins==3.1.2
pytest-rerunfailures==14.0
python-dateutil==2.9.0.post0
python-dotenv==1.1.0
python-multipart==0.0.20
python-weather==2.0.3
pytz==2025.2
pyupgrade==3.19.1
pyupgrade-directories==0.3.0
PyYAML==6.0.2
pyyaml_env_tag==0.1
rcslice==1.1.0
referencing==0.36.2
regex==2024.11.6
requests==2.32.3
rich==13.9.4
rich-toolkit==0.14.1
rpds-py==0.24.0
ruamel.yaml==0.17.40
ruamel.yaml.clib==0.2.12
ruff==0.6.2
semgrep==1.85.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
sniffio==1.3.1
soupsieve==2.6
starlette==0.37.2
stevedore==5.4.1
StrEnum==0.4.15
stringcase==1.2.0
tabulate==0.9.0
termcolor==3.0.0
tiktoken==0.9.0
tinycss2==1.4.0
together==1.3.14
tokenize_rt==6.1.0
toml==0.10.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tomlkit==0.13.2
tqdm==4.67.1
typed-ast==1.5.5
typer==0.12.5
types-docutils==0.21.0.20241128
types-Pygments==2.19.0.20250305
types-PyYAML==6.0.12.20250326
types-setuptools==78.1.0.20250329
types-ujson==5.10.0.20250326
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==2.3.0
uvicorn==0.30.6
uvloop==0.21.0
verspec==0.1.0
virtualenv==20.29.3
watchdog==6.0.0
watchfiles==0.23.0
wcmatch==8.5.2
webencodings==0.5.1
websockets==15.0.1
wrapt==1.17.2
yarl==1.18.3
zipp==3.21.0
| name: fastagency
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==3.9.5
- aiosignal==1.3.2
- annotated-types==0.7.0
- anthropic==0.49.0
- anyio==4.9.0
- argcomplete==3.6.1
- async-timeout==4.0.3
- asyncer==0.0.7
- attrs==25.3.0
- babel==2.17.0
- bandit==1.7.9
- beautifulsoup4==4.13.3
- black==25.1.0
- boltons==21.0.0
- bracex==2.5.post1
- cairocffi==1.7.1
- cairosvg==2.7.1
- certifi==2025.1.31
- cffi==1.17.1
- cfgv==3.4.0
- charset-normalizer==3.4.1
- click==8.1.8
- click-option-group==0.5.7
- colorama==0.4.6
- coverage==7.6.1
- csscompressor==0.9.5
- cssselect2==0.8.0
- cyclic==1.0.0
- datamodel-code-generator==0.25.6
- decorator==5.2.1
- defusedxml==0.7.1
- deprecated==1.2.18
- detect-secrets==1.5.0
- dirty-equals==0.7.1.post0
- diskcache==5.6.3
- distlib==0.3.9
- distro==1.9.0
- dnspython==2.7.0
- docker==7.1.0
- email-validator==2.2.0
- eval-type-backport==0.2.2
- face==24.0.0
- fast-depends==2.4.12
- fastagency==0.0.0.dev0
- fastapi==0.111.1
- fastapi-cli==0.0.7
- fastapi-code-generator==0.5.1
- faststream==0.5.37
- filelock==3.18.0
- flaml==2.3.4
- frozenlist==1.5.0
- genson==1.3.0
- ghp-import==2.1.0
- gitdb==4.0.12
- gitpython==3.1.44
- glom==22.1.0
- googleapis-common-protos==1.69.2
- griffe==1.7.1
- h11==0.14.0
- htmlmin2==0.1.13
- httpcore==1.0.7
- httptools==0.6.4
- httpx==0.27.0
- identify==2.6.9
- idna==3.10
- importlib-metadata==7.1.0
- importlib-resources==6.5.2
- inflect==5.6.2
- isort==5.13.2
- jinja2==3.1.6
- jiter==0.9.0
- jsmin==3.0.1
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- markdown==3.7
- markdown-it-py==3.0.0
- markdownify==0.13.1
- markupsafe==3.0.2
- mdurl==0.1.2
- mdx-include==1.4.2
- mergedeep==1.3.4
- mike==2.1.2
- mkdocs==1.6.1
- mkdocs-autorefs==1.4.1
- mkdocs-get-deps==0.2.0
- mkdocs-git-revision-date-localized-plugin==1.2.6
- mkdocs-glightbox==0.4.0
- mkdocs-literate-nav==0.6.1
- mkdocs-macros-plugin==1.0.5
- mkdocs-material==9.5.33
- mkdocs-material-extensions==1.3.1
- mkdocs-minify-plugin==0.8.0
- mkdocs-static-i18n==1.2.3
- mkdocstrings==0.25.2
- mkdocstrings-python==1.10.9
- multidict==6.2.0
- mypy==1.11.1
- mypy-extensions==1.0.0
- nats-py==2.10.0
- nodeenv==1.9.1
- numpy==1.26.4
- openai==1.69.0
- opentelemetry-api==1.25.0
- opentelemetry-exporter-otlp-proto-common==1.25.0
- opentelemetry-exporter-otlp-proto-http==1.25.0
- opentelemetry-instrumentation==0.46b0
- opentelemetry-instrumentation-requests==0.46b0
- opentelemetry-proto==1.25.0
- opentelemetry-sdk==1.25.0
- opentelemetry-semantic-conventions==0.46b0
- opentelemetry-util-http==0.46b0
- paginate==0.5.7
- pathspec==0.12.1
- pbr==6.1.1
- peewee==3.17.9
- pillow==10.4.0
- platformdirs==4.3.7
- pre-commit==3.7.1
- prisma==0.14.0
- propcache==0.3.1
- protobuf==4.25.6
- pyarrow==19.0.1
- pyautogen==0.2.35
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pydantic-settings==2.4.0
- pygments==2.19.1
- pymdown-extensions==10.14.3
- pyparsing==3.2.3
- pysnooper==1.1.1
- pytest==8.3.2
- pytest-asyncio==0.24.0
- pytest-mypy-plugins==3.1.2
- pytest-rerunfailures==14.0
- python-dateutil==2.9.0.post0
- python-dotenv==1.1.0
- python-multipart==0.0.20
- python-weather==2.0.3
- pytz==2025.2
- pyupgrade==3.19.1
- pyupgrade-directories==0.3.0
- pyyaml==6.0.2
- pyyaml-env-tag==0.1
- rcslice==1.1.0
- referencing==0.36.2
- regex==2024.11.6
- requests==2.32.3
- rich==13.9.4
- rich-toolkit==0.14.1
- rpds-py==0.24.0
- ruamel-yaml==0.17.40
- ruamel-yaml-clib==0.2.12
- ruff==0.6.2
- semgrep==1.85.0
- shellingham==1.5.4
- six==1.17.0
- smmap==5.0.2
- sniffio==1.3.1
- soupsieve==2.6
- starlette==0.37.2
- stevedore==5.4.1
- strenum==0.4.15
- stringcase==1.2.0
- tabulate==0.9.0
- termcolor==3.0.0
- tiktoken==0.9.0
- tinycss2==1.4.0
- together==1.3.14
- tokenize-rt==6.1.0
- toml==0.10.2
- tomlkit==0.13.2
- tqdm==4.67.1
- typed-ast==1.5.5
- typer==0.12.5
- types-docutils==0.21.0.20241128
- types-pygments==2.19.0.20250305
- types-pyyaml==6.0.12.20250326
- types-setuptools==78.1.0.20250329
- types-ujson==5.10.0.20250326
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==2.3.0
- uvicorn==0.30.6
- uvloop==0.21.0
- verspec==0.1.0
- virtualenv==20.29.3
- watchdog==6.0.0
- watchfiles==0.23.0
- wcmatch==8.5.2
- webencodings==0.5.1
- websockets==15.0.1
- wrapt==1.17.2
- yarl==1.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/fastagency
| [
"tests/studio/models/llms/test_openai.py::TestOpenAIAPIKey::test_constructor_success[sk-proj-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime]",
"tests/studio/models/llms/test_openai.py::TestOpenAIAPIKey::test_constructor_success[sk-None-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime]",
"tests/studio/models/llms/test_openai.py::TestOpenAIAPIKey::test_constructor_success[sk-svcacct-SomeLengthStringWhichCanHave-and_inItAndTheLengthCanBeChangedAtAnyTime]"
] | [
"tests/studio/models/llms/test_openai.py::TestOpenAI::test_openai_model_list",
"tests/studio/models/llms/test_openai.py::TestOpenAI::test_openai_schema"
] | [
"tests/studio/models/llms/test_openai.py::test_import",
"tests/studio/models/llms/test_openai.py::TestOpenAIAPIKey::test_constructor_success[sk-sUeBP9asw6GiYHXqtg70T3BlbkFJJuLwJFco90bOpU0Ntest]",
"tests/studio/models/llms/test_openai.py::TestOpenAIAPIKey::test_constructor_failure",
"tests/studio/models/llms/test_openai.py::TestOpenAI::test_openai_constructor[openai_oai_gpt35_ref]",
"tests/studio/models/llms/test_openai.py::TestOpenAI::test_openai_model_create_autogen[openai_oai_gpt35_ref]"
] | [] | Apache License 2.0 | 19,414 | 259 | [
"fastagency/studio/models/llms/openai.py"
] |
|
angr__claripy-450 | 6bfdb825ce184d45e21693afb219a82e98394b00 | 2024-08-27 23:46:51 | a8a629ddcfdcd074d71aa74350a654111dd77abc | diff --git a/claripy/ast/bool.py b/claripy/ast/bool.py
index 0e9ab8a0..a3ba13e6 100644
--- a/claripy/ast/bool.py
+++ b/claripy/ast/bool.py
@@ -205,7 +205,7 @@ def ite_dict(i, d, default):
# Find the median:
keys = list(d.keys())
keys.sort()
- split_val = keys[len(keys) // 2]
+ split_val = keys[(len(keys) - 1) // 2]
# split the dictionary
dictLow = {c: v for c, v in d.items() if c <= split_val}
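The one-character change matters because the low branch keeps every key `<= split_val`, so the old median index left the split lopsided. A quick worked example with four keys (the regression test below checks the same property via the depth of the two ITE branches):

```python
keys = [1, 2, 3, 4]

old_split = keys[len(keys) // 2]        # keys[2] == 3 -> low side {1, 2, 3}, high side {4}
new_split = keys[(len(keys) - 1) // 2]  # keys[1] == 2 -> low side {1, 2}, high side {3, 4}
```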
| note to self: ite_dict is misbalanced (need: split_val = keys[(len(keys)-1) // 2])
### Description
a
### Steps to reproduce the bug
a
### Environment
a
### Additional context
a | angr/claripy | diff --git a/tests/test_bool.py b/tests/test_bool.py
new file mode 100644
index 00000000..0588d88b
--- /dev/null
+++ b/tests/test_bool.py
@@ -0,0 +1,36 @@
+# pylint: disable=missing-class-docstring,no-self-use
+from __future__ import annotations
+
+from unittest import TestCase
+
+from claripy import BVS, ite_dict
+
+
+class TestBool(TestCase):
+ def test_ite_dict_is_balanced(self):
+ case_even = ite_dict(
+ BVS("A", 8),
+ {
+ 1: 11,
+ 2: 22,
+ 3: 33,
+ 4: 44,
+ },
+ BVS("B", 8),
+ )
+
+ assert case_even.args[1].depth == case_even.args[2].depth
+
+ case_odd = ite_dict(
+ BVS("A", 8),
+ {
+ 1: 11,
+ 2: 22,
+ 3: 33,
+ 4: 44,
+ 5: 55,
+ },
+ BVS("B", 8),
+ )
+
+ assert case_odd.args[1].depth == case_odd.args[2].depth + 1
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 9.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[docs,testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==1.0.0
babel==2.17.0
beautifulsoup4==4.13.3
cachetools==5.5.2
certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/angr/claripy.git@6bfdb825ce184d45e21693afb219a82e98394b00#egg=claripy
docutils==0.21.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
execnet==2.1.1
furo==2024.8.6
idna==3.10
imagesize==1.4.1
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
markdown-it-py==3.0.0
MarkupSafe==3.0.2
mdit-py-plugins==0.4.2
mdurl==0.1.2
myst-parser==4.0.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
Pygments==2.19.1
pytest @ file:///croot/pytest_1738938843180/work
pytest-xdist==3.6.1
PyYAML==6.0.2
requests==2.32.3
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==8.1.3
sphinx-autodoc-typehints==3.0.1
sphinx-basic-ng==1.0.0b2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions==4.13.0
urllib3==2.3.0
z3-solver==4.13.0.0
| name: claripy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py310h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py310h06a4308_0
- pip=25.0=py310h06a4308_0
- pluggy=1.5.0=py310h06a4308_0
- pytest=8.3.4=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py310h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==1.0.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- cachetools==5.5.2
- certifi==2025.1.31
- charset-normalizer==3.4.1
- claripy==9.2.118.dev0
- docutils==0.21.2
- execnet==2.1.1
- furo==2024.8.6
- idna==3.10
- imagesize==1.4.1
- jinja2==3.1.6
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- mdit-py-plugins==0.4.2
- mdurl==0.1.2
- myst-parser==4.0.1
- pygments==2.19.1
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- requests==2.32.3
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==8.1.3
- sphinx-autodoc-typehints==3.0.1
- sphinx-basic-ng==1.0.0b2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- z3-solver==4.13.0.0
prefix: /opt/conda/envs/claripy
| [
"tests/test_bool.py::TestBool::test_ite_dict_is_balanced"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 19,424 | 170 | [
"claripy/ast/bool.py"
] |
|
pymc-devs__pymc-7482 | c92a9a9463f501d5e7977fd80719df6139425322 | 2024-08-28 10:26:10 | b9fbfeda3dd8fdb081d538684bd2dcc81b14fb61 | ricardoV94: I changed the label to maintenance to de-emphasize in the release notes. Feel free to revert if you disagree
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pymc-devs/pymc/pull/7482?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 92.16%. Comparing base [(`c92a9a9`)](https://app.codecov.io/gh/pymc-devs/pymc/commit/c92a9a9463f501d5e7977fd80719df6139425322?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) to head [(`5db847d`)](https://app.codecov.io/gh/pymc-devs/pymc/commit/5db847d043ab61468617e782b0b78b0e7c71c618?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs).
<details><summary>Additional details and impacted files</summary>
[](https://app.codecov.io/gh/pymc-devs/pymc/pull/7482?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs)
```diff
@@ Coverage Diff @@
## main #7482 +/- ##
=======================================
Coverage 92.16% 92.16%
=======================================
Files 103 103
Lines 17214 17214
=======================================
Hits 15866 15866
Misses 1348 1348
```
| [Files](https://app.codecov.io/gh/pymc-devs/pymc/pull/7482?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) | Coverage Δ | |
|---|---|---|
| [pymc/model/core.py](https://app.codecov.io/gh/pymc-devs/pymc/pull/7482?src=pr&el=tree&filepath=pymc%2Fmodel%2Fcore.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs#diff-cHltYy9tb2RlbC9jb3JlLnB5) | `91.77% <100.00%> (ø)` | |
</details> | diff --git a/pymc/model/core.py b/pymc/model/core.py
index 3a2741766..71dcfee48 100644
--- a/pymc/model/core.py
+++ b/pymc/model/core.py
@@ -1652,6 +1652,8 @@ class Model(WithMemoization, metaclass=ContextMeta):
point_fn : bool
Whether to wrap the compiled function in a PointFunc, which takes a Point
dictionary with model variable names and values as input.
+ Other keyword arguments :
+ Any other keyword argument is sent to :py:func:`pymc.pytensorf.compile_pymc`.
Returns
-------
@@ -1747,7 +1749,7 @@ class Model(WithMemoization, metaclass=ContextMeta):
)
return {name: tuple(shape) for name, shape in zip(names, f())}
- def check_start_vals(self, start):
+ def check_start_vals(self, start, **kwargs):
r"""Check that the starting values for MCMC do not cause the relevant log probability
to evaluate to something invalid (e.g. Inf or NaN)
@@ -1758,6 +1760,8 @@ class Model(WithMemoization, metaclass=ContextMeta):
Defaults to ``trace.point(-1))`` if there is a trace provided and
``model.initial_point`` if not (defaults to empty dict). Initialization
methods for NUTS (see ``init`` keyword) can overwrite the default.
+ Other keyword arguments :
+ Any other keyword argument is sent to :py:meth:`~pymc.model.core.Model.point_logps`.
Raises
------
@@ -1787,7 +1791,7 @@ class Model(WithMemoization, metaclass=ContextMeta):
f"Valid keys are: {valid_keys}, but {extra_keys} was supplied"
)
- initial_eval = self.point_logps(point=elem)
+ initial_eval = self.point_logps(point=elem, **kwargs)
if not all(np.isfinite(v) for v in initial_eval.values()):
raise SamplingError(
@@ -1797,7 +1801,7 @@ class Model(WithMemoization, metaclass=ContextMeta):
"You can call `model.debug()` for more details."
)
- def point_logps(self, point=None, round_vals=2):
+ def point_logps(self, point=None, round_vals=2, **kwargs):
"""Computes the log probability of `point` for all random variables in the model.
Parameters
@@ -1807,6 +1811,8 @@ class Model(WithMemoization, metaclass=ContextMeta):
is used.
round_vals : int, default 2
Number of decimals to round log-probabilities.
+ Other keyword arguments :
+ Any other keyword argument are sent provided to :py:meth:`~pymc.model.core.Model.compile_fn`
Returns
-------
@@ -1822,7 +1828,7 @@ class Model(WithMemoization, metaclass=ContextMeta):
factor.name: np.round(np.asarray(factor_logp), round_vals)
for factor, factor_logp in zip(
factors,
- self.compile_fn(factor_logps_fn)(point),
+ self.compile_fn(factor_logps_fn, **kwargs)(point),
)
}
| ENH: Add a way to specify `mode` for `check_start_vals`
### Before
_No response_
### After
```python
with pm.Model() as m:
...
m.check_start_vals(..., mode="JAX")
```
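For concreteness, a minimal usage sketch of the requested behaviour (assuming a change like the patch shown earlier in this record is applied; the variable name and the `"FAST_COMPILE"` mode are only illustrative, chosen so the snippet does not need a JAX install):

```python
import numpy as np
import pymc as pm

with pm.Model() as model:
    a = pm.Uniform("a", lower=0.0, upper=1.0)

# Start values are keyed by the transformed value-variable names, e.g. "a_interval__".
start = {"a_interval__": np.array(0.0)}

# With the **kwargs forwarding, any extra keyword travels
# check_start_vals -> point_logps -> compile_fn, so the compilation
# mode ("JAX", "NUMBA", "FAST_COMPILE", ...) can be chosen per call.
model.check_start_vals(start, mode="FAST_COMPILE")
```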
### Context for the issue:
Sometimes we deploy `pymc` in environments where we actually only care about GPU sampling via JAX, so there shouldn't be a need for a C compiler. This is all fine and good because we can `sample_prior_predictive`, `sample` and `sample_posterior_predictive` using JAX or any other mode that doesn't need C. The problem comes when `sample` calls `model.check_start_vals`, which will use the default mode, and in the absence of a compiler the Python code might take very long to run. It would be great to have a way to set the compilation mode for `check_start_vals` as well. | pymc-devs/pymc | diff --git a/tests/model/test_core.py b/tests/model/test_core.py
index 669704bb5..2504f5c79 100644
--- a/tests/model/test_core.py
+++ b/tests/model/test_core.py
@@ -756,6 +756,20 @@ class TestCheckStartVals:
with pytest.raises(KeyError):
model.check_start_vals(start)
+ @pytest.mark.parametrize("mode", [None, "JAX", "NUMBA"])
+ def test_mode(self, mode):
+ with pm.Model() as model:
+ a = pm.Uniform("a", lower=0.0, upper=1.0)
+ b = pm.Uniform("b", lower=2.0, upper=3.0)
+ start = {
+ "a_interval__": model.rvs_to_transforms[a].forward(0.3, *a.owner.inputs).eval(),
+ "b_interval__": model.rvs_to_transforms[b].forward(2.1, *b.owner.inputs).eval(),
+ }
+ with patch("pymc.model.core.compile_pymc") as patched_compile_pymc:
+ model.check_start_vals(start, mode=mode)
+ patched_compile_pymc.assert_called_once()
+ assert patched_compile_pymc.call_args.kwargs["mode"] == mode
+
def test_set_initval():
# Make sure the dependencies between variables are maintained when
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 5.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arviz==0.21.0
cachetools==5.5.2
cloudpickle==3.1.1
cons==0.4.6
contourpy==1.3.1
coverage==7.8.0
cycler==0.12.1
etuples==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
fonttools==4.56.0
h5netcdf==1.6.1
h5py==3.13.0
iniconfig==2.1.0
kiwisolver==1.4.8
logical-unification==0.4.6
markdown-it-py==3.0.0
matplotlib==3.10.1
mdurl==0.1.2
miniKanren==1.0.3
multipledispatch==1.0.0
numpy==1.26.4
packaging==24.2
pandas==2.2.3
pillow==11.1.0
pluggy==1.5.0
Pygments==2.19.1
-e git+https://github.com/pymc-devs/pymc.git@c92a9a9463f501d5e7977fd80719df6139425322#egg=pymc
pyparsing==3.2.3
pytensor==2.25.5
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
rich==14.0.0
scipy==1.15.2
six==1.17.0
threadpoolctl==3.6.0
tomli==2.2.1
toolz==1.0.0
typing_extensions==4.13.0
tzdata==2025.2
xarray==2025.3.1
xarray-einstats==0.8.0
| name: pymc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arviz==0.21.0
- cachetools==5.5.2
- cloudpickle==3.1.1
- cons==0.4.6
- contourpy==1.3.1
- coverage==7.8.0
- cycler==0.12.1
- etuples==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- fonttools==4.56.0
- h5netcdf==1.6.1
- h5py==3.13.0
- iniconfig==2.1.0
- kiwisolver==1.4.8
- logical-unification==0.4.6
- markdown-it-py==3.0.0
- matplotlib==3.10.1
- mdurl==0.1.2
- minikanren==1.0.3
- multipledispatch==1.0.0
- numpy==1.26.4
- packaging==24.2
- pandas==2.2.3
- pillow==11.1.0
- pluggy==1.5.0
- pygments==2.19.1
- pymc==5.16.2+31.gc92a9a946
- pyparsing==3.2.3
- pytensor==2.25.5
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- rich==14.0.0
- scipy==1.15.2
- six==1.17.0
- threadpoolctl==3.6.0
- tomli==2.2.1
- toolz==1.0.0
- typing-extensions==4.13.0
- tzdata==2025.2
- xarray==2025.3.1
- xarray-einstats==0.8.0
prefix: /opt/conda/envs/pymc
| [
"tests/model/test_core.py::TestCheckStartVals::test_mode[None]",
"tests/model/test_core.py::TestCheckStartVals::test_mode[JAX]",
"tests/model/test_core.py::TestCheckStartVals::test_mode[NUMBA]"
] | [] | [
"tests/model/test_core.py::TestBaseModel::test_setattr_properly_works",
"tests/model/test_core.py::TestBaseModel::test_context_passes_vars_to_parent_model",
"tests/model/test_core.py::TestBaseModel::test_docstring_example",
"tests/model/test_core.py::TestNested::test_nest_context_works",
"tests/model/test_core.py::TestNested::test_named_context",
"tests/model/test_core.py::TestNested::test_docstring_example1",
"tests/model/test_core.py::TestNested::test_docstring_example2",
"tests/model/test_core.py::TestNested::test_duplicates_detection",
"tests/model/test_core.py::TestNested::test_model_root",
"tests/model/test_core.py::TestNested::test_prefix_add_uses_separator",
"tests/model/test_core.py::TestNested::test_nested_named_model_repeated",
"tests/model/test_core.py::TestNested::test_nested_named_model",
"tests/model/test_core.py::TestNested::test_nested_model_to_netcdf",
"tests/model/test_core.py::TestNested::test_bad_name",
"tests/model/test_core.py::TestObserved::test_observed_rv_fail",
"tests/model/test_core.py::TestObserved::test_observed_type",
"tests/model/test_core.py::TestObserved::test_observed_compute_test_value",
"tests/model/test_core.py::test_duplicate_vars",
"tests/model/test_core.py::test_empty_observed",
"tests/model/test_core.py::TestValueGradFunction::test_extra_not_set",
"tests/model/test_core.py::TestValueGradFunction::test_grad",
"tests/model/test_core.py::TestValueGradFunction::test_invalid_type",
"tests/model/test_core.py::TestValueGradFunction::test_missing_data",
"tests/model/test_core.py::TestValueGradFunction::test_no_extra",
"tests/model/test_core.py::TestValueGradFunction::test_pytensor_switch_broadcast_edge_cases_1",
"tests/model/test_core.py::TestValueGradFunction::test_pytensor_switch_broadcast_edge_cases_2",
"tests/model/test_core.py::test_multiple_observed_rv",
"tests/model/test_core.py::test_tempered_logp_dlogp",
"tests/model/test_core.py::TestPickling::test_model_pickle",
"tests/model/test_core.py::TestPickling::test_model_pickle_deterministic",
"tests/model/test_core.py::TestPickling::test_model_roundtrip",
"tests/model/test_core.py::test_model_value_vars",
"tests/model/test_core.py::test_model_var_maps",
"tests/model/test_core.py::TestTransformArgs::test_transform_warning",
"tests/model/test_core.py::TestTransformArgs::test_transform_order",
"tests/model/test_core.py::TestTransformArgs::test_default_transform_is_applied",
"tests/model/test_core.py::test_make_obs_var",
"tests/model/test_core.py::test_initial_point",
"tests/model/test_core.py::test_point_logps",
"tests/model/test_core.py::test_point_logps_potential",
"tests/model/test_core.py::TestShapeEvaluation::test_eval_rv_shapes",
"tests/model/test_core.py::TestCheckStartVals::test_valid_start_point",
"tests/model/test_core.py::TestCheckStartVals::test_invalid_start_point",
"tests/model/test_core.py::TestCheckStartVals::test_invalid_variable_name",
"tests/model/test_core.py::test_set_initval",
"tests/model/test_core.py::test_datalogp_multiple_shapes",
"tests/model/test_core.py::test_nested_model_coords",
"tests/model/test_core.py::TestSetUpdateCoords::test_shapeerror_from_set_data_dimensionality",
"tests/model/test_core.py::TestSetUpdateCoords::test_resize_from_set_data_dim_with_coords",
"tests/model/test_core.py::TestSetUpdateCoords::test_resize_from_set_data_dim_without_coords",
"tests/model/test_core.py::TestSetUpdateCoords::test_resize_from_set_dim",
"tests/model/test_core.py::TestSetUpdateCoords::test_resize_from_set_data_and_set_dim",
"tests/model/test_core.py::TestSetUpdateCoords::test_add_named_variable_checks_dim_name",
"tests/model/test_core.py::TestSetUpdateCoords::test_add_named_variable_checks_number_of_dims",
"tests/model/test_core.py::TestSetUpdateCoords::test_rv_dims_type_check",
"tests/model/test_core.py::TestSetUpdateCoords::test_none_coords_autonumbering",
"tests/model/test_core.py::TestSetUpdateCoords::test_set_data_indirect_resize_without_coords",
"tests/model/test_core.py::TestSetUpdateCoords::test_set_data_indirect_resize_with_coords",
"tests/model/test_core.py::TestSetUpdateCoords::test_set_data_warns_on_resize_of_dims_defined_by_other_data",
"tests/model/test_core.py::TestSetUpdateCoords::test_set_data_constant_shape_error",
"tests/model/test_core.py::test_model_logp[True]",
"tests/model/test_core.py::test_model_logp[False]",
"tests/model/test_core.py::test_model_dlogp[True]",
"tests/model/test_core.py::test_model_dlogp[False]",
"tests/model/test_core.py::test_model_d2logp[True]",
"tests/model/test_core.py::test_model_d2logp[False]",
"tests/model/test_core.py::test_deterministic",
"tests/model/test_core.py::test_determinsitic_with_dims",
"tests/model/test_core.py::test_potential_with_dims",
"tests/model/test_core.py::test_empty_model_representation",
"tests/model/test_core.py::test_compile_fn",
"tests/model/test_core.py::test_model_parent_set_programmatically",
"tests/model/test_core.py::TestModelContext::test_thread_safety",
"tests/model/test_core.py::test_mixed_contexts",
"tests/model/test_core.py::TestProfile::test_profile_model",
"tests/model/test_core.py::TestProfile::test_profile_variable",
"tests/model/test_core.py::TestProfile::test_profile_count",
"tests/model/test_core.py::TestImputationMissingData::test_missing_basic[masked]",
"tests/model/test_core.py::TestImputationMissingData::test_missing_basic[pandas]",
"tests/model/test_core.py::TestImputationMissingData::test_missing_with_predictors",
"tests/model/test_core.py::TestImputationMissingData::test_missing_dual_observations",
"tests/model/test_core.py::TestImputationMissingData::test_interval_missing_observations",
"tests/model/test_core.py::TestImputationMissingData::test_missing_logp1",
"tests/model/test_core.py::TestImputationMissingData::test_missing_logp2",
"tests/model/test_core.py::TestImputationMissingData::test_missing_multivariate_separable",
"tests/model/test_core.py::TestImputationMissingData::test_missing_multivariate_unseparable",
"tests/model/test_core.py::TestImputationMissingData::test_missing_vector_parameter",
"tests/model/test_core.py::TestImputationMissingData::test_missing_symmetric",
"tests/model/test_core.py::TestImputationMissingData::test_dims",
"tests/model/test_core.py::TestImputationMissingData::test_symbolic_random_variable",
"tests/model/test_core.py::TestImputationMissingData::test_truncated_normal",
"tests/model/test_core.py::TestImputationMissingData::test_coordinates",
"tests/model/test_core.py::TestShared::test_deterministic",
"tests/model/test_core.py::test_tag_future_warning_model",
"tests/model/test_core.py::TestModelDebug::test_no_problems[logp]",
"tests/model/test_core.py::TestModelDebug::test_no_problems[dlogp]",
"tests/model/test_core.py::TestModelDebug::test_no_problems[random]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter[logp]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter[dlogp]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter[random]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[logp-True]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[logp-False]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[dlogp-True]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[dlogp-False]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[random-True]",
"tests/model/test_core.py::TestModelDebug::test_invalid_parameter_cant_be_evaluated[random-False]",
"tests/model/test_core.py::TestModelDebug::test_invalid_value",
"tests/model/test_core.py::TestModelDebug::test_invalid_observed_value",
"tests/model/test_core.py::test_model_logp_fast_compile",
"tests/model/test_core.py::TestModelGraphs::test_graphviz_call_function[ext-all]",
"tests/model/test_core.py::TestModelGraphs::test_graphviz_call_function[ext-subset]",
"tests/model/test_core.py::TestModelGraphs::test_graphviz_call_function[no_ext-all]",
"tests/model/test_core.py::TestModelGraphs::test_graphviz_call_function[no_ext-subset]"
] | [] | Apache License 2.0 | 19,428 | 773 | [
"pymc/model/core.py"
] |
ga4gh__vrs-python-443 | 10157c3fbd242c0217317cb749587a52f1588332 | 2024-08-28 11:25:36 | 71eba03240b69df07b68ce388af95ac4a04e273c | korikuzma: @ahwagner can you re-review? | diff --git a/src/ga4gh/vrs/models.py b/src/ga4gh/vrs/models.py
index bfe6279..70a85c8 100644
--- a/src/ga4gh/vrs/models.py
+++ b/src/ga4gh/vrs/models.py
@@ -27,7 +27,7 @@ from ga4gh.core import (
from ga4gh.core.pydantic import get_pydantic_root
from canonicaljson import encode_canonical_json
-from pydantic import BaseModel, Field, RootModel, StringConstraints, ConfigDict
+from pydantic import BaseModel, Field, RootModel, StringConstraints, ConfigDict, ValidationInfo, field_validator
from ga4gh.core.pydantic import (
getattr_in
@@ -331,6 +331,25 @@ class Range(RootModel):
min_length=2,
)
+ @field_validator("root", mode="after")
+ def validate_range(cls, v: List[Optional[int]]) -> List[Optional[int]]:
+ """Validate range values
+
+ :param v: Root value
+ :raises ValueError: If ``root`` does not include at least one integer or if
+ the first element in ``root`` is greater than the second element in ``root``
+ :return: Inclusive range
+ """
+ if v.count(None) == 2:
+ err_msg = "Must provide at least one integer."
+ raise ValueError(err_msg)
+
+ if v[0] is not None and v[1] is not None:
+ if v[0] > v[1]:
+ err_msg = "The first integer must be less than or equal to the second integer."
+ raise ValueError(err_msg)
+
+ return v
class Residue(RootModel):
"""A character representing a specific residue (i.e., molecular species) or
@@ -454,15 +473,35 @@ class SequenceLocation(_Ga4ghIdentifiableObject):
)
start: Optional[Union[Range, int]] = Field(
None,
- description='The start coordinate or range of the SequenceLocation. The minimum value of this coordinate or range is 0. MUST represent a coordinate or range less than the value of `end`.',
+ description='The start coordinate or range of the SequenceLocation. The minimum value of this coordinate or range is 0.',
)
end: Optional[Union[Range, int]] = Field(
None,
- description='The end coordinate or range of the SequenceLocation. The minimum value of this coordinate or range is 0. MUST represent a coordinate or range greater than the value of `start`.',
+ description='The end coordinate or range of the SequenceLocation. The minimum value of this coordinate or range is 0.',
)
sequence: Optional[SequenceString] = Field(None, description="The literal sequence encoded by the `sequenceReference` at these coordinates.")
+ @field_validator("start", "end", mode="after")
+ def validate_start_end(cls, v: Optional[Union[Range, int]], info: ValidationInfo) -> Optional[Union[Range, int]]:
+ """Validate ``start`` and ``end`` fields
+
+ :param v: ``start`` or ``end`` value
+ :param info: Validation info
+ :raises ValueError: If ``start`` or ``end`` has a value less than 0
+ :return: Sequence Location
+ """
+ if v is not None:
+ if isinstance(v, int):
+ int_values = [v]
+ else:
+ int_values = [val for val in v.root if val is not None]
+
+ if any(int_val < 0 for int_val in int_values):
+ err_msg = f"The minimum value of `{info.field_name}` is 0."
+ raise ValueError(err_msg)
+ return v
+
def ga4gh_serialize_as_version(self, as_version: PrevVrsVersion):
"""This method will return a serialized string following the conventions for
SequenceLocation serialization as defined in the VRS version specified by
| SequenceLocation allows for negative start/end coordinates
https://github.com/ga4gh/vrs/blob/2.x/schema/vrs/json/SequenceLocation#L77
https://github.com/ga4gh/vrs/blob/2.x/schema/vrs/json/SequenceLocation#L88
We do not do any validation at the moment. We should add validation checks to follow what is stated in the descriptions. | ga4gh/vrs-python | diff --git a/tests/test_vrs.py b/tests/test_vrs.py
index fc3cc90..0ee47f1 100644
--- a/tests/test_vrs.py
+++ b/tests/test_vrs.py
@@ -96,6 +96,22 @@ cpb_431012_dict = {
cpb_431012 = models.CisPhasedBlock(**cpb_431012_dict)
[email protected](
+ "vrs_model, expected_err_msg",
+ [
+ (lambda: models.Range(root=[None, None]), "Must provide at least one integer."),
+ (lambda: models.Range(root=[2, 1]), "The first integer must be less than or equal to the second integer."),
+ (lambda: models.SequenceLocation(sequenceReference=allele_280320.location.sequenceReference, start=-1), "The minimum value of `start` is 0."),
+ (lambda: models.SequenceLocation(sequenceReference=allele_280320.location.sequenceReference, end=[-1, 0]), "The minimum value of `end` is 0."),
+ ]
+)
+def test_model_validation_errors(vrs_model, expected_err_msg):
+ """Test that invalid VRS models raise errors"""
+ with pytest.raises(ValueError) as e:
+ vrs_model()
+ assert str(e.value.errors()[0]["ctx"]["error"]) == expected_err_msg
+
+
def test_vr():
assert a.model_dump(exclude_none=True) == allele_dict
assert is_pydantic_instance(a)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev,extras]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libpq-dev python3-dev"
],
"python": "3.10",
"reqs_path": [
"pyproject.toml"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==1.0.0
annotated-types==0.7.0
astroid==3.3.9
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
biocommons.seqrepo==0.6.11
bioutils==0.6.1
bleach==6.2.0
canonicaljson==2.0.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
coloredlogs==15.0.1
configparser==7.2.0
coverage==7.8.0
decorator==5.2.1
dill==0.3.9
docutils==0.21.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
executing==2.2.0
ga4gh.vrs==2.0.0a11.dev10+g10157c3
hgvs==1.5.4
humanfriendly==10.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
ipython==8.34.0
isort==6.0.1
jedi==0.19.2
Jinja2==3.1.6
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mccabe==0.7.0
multidict==6.2.0
packaging @ file:///croot/packaging_1734472117206/work
Parsley==1.3
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
prompt_toolkit==3.0.50
propcache==0.3.1
psycopg2==2.9.10
psycopg2-binary==2.9.10
ptyprocess==0.7.0
pure_eval==0.2.3
pydantic==2.11.1
pydantic_core==2.33.0
Pygments==2.19.1
pylint==3.3.6
pysam==0.23.0
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
pytest-vcr==1.0.2
PyYAML==6.0.2
readme-renderer==36.0
requests==2.32.3
restview==3.0.2
six==1.17.0
snowballstemmer==2.2.0
Sphinx==8.1.3
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
sqlparse==0.5.3
stack-data==0.6.3
tabulate==0.9.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tomlkit==0.13.2
tqdm==4.67.1
traitlets==5.14.3
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==2.3.0
vcrpy==7.0.0
wcwidth==0.2.13
webencodings==0.5.1
wrapt==1.17.2
yapf==0.43.0
yarl==1.18.3
yoyo-migrations==9.0.0
zipp==3.21.0
| name: vrs-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py310h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py310h06a4308_0
- pip=25.0=py310h06a4308_0
- pluggy=1.5.0=py310h06a4308_0
- pytest=8.3.4=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py310h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==1.0.0
- annotated-types==0.7.0
- astroid==3.3.9
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- biocommons-seqrepo==0.6.11
- bioutils==0.6.1
- bleach==6.2.0
- canonicaljson==2.0.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- coloredlogs==15.0.1
- configparser==7.2.0
- coverage==7.8.0
- decorator==5.2.1
- dill==0.3.9
- docutils==0.21.2
- executing==2.2.0
- ga4gh-vrs==2.0.0a11.dev10+g10157c3
- hgvs==1.5.4
- humanfriendly==10.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- ipython==8.34.0
- isort==6.0.1
- jedi==0.19.2
- jinja2==3.1.6
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mccabe==0.7.0
- multidict==6.2.0
- parsley==1.3
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- prompt-toolkit==3.0.50
- propcache==0.3.1
- psycopg2==2.9.10
- psycopg2-binary==2.9.10
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pydantic==2.11.1
- pydantic-core==2.33.0
- pygments==2.19.1
- pylint==3.3.6
- pysam==0.23.0
- pytest-cov==6.0.0
- pytest-vcr==1.0.2
- pyyaml==6.0.2
- readme-renderer==36.0
- requests==2.32.3
- restview==3.0.2
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==8.1.3
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlparse==0.5.3
- stack-data==0.6.3
- tabulate==0.9.0
- tomlkit==0.13.2
- tqdm==4.67.1
- traitlets==5.14.3
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==2.3.0
- vcrpy==7.0.0
- wcwidth==0.2.13
- webencodings==0.5.1
- wrapt==1.17.2
- yapf==0.43.0
- yarl==1.18.3
- yoyo-migrations==9.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/vrs-python
| [
"tests/test_vrs.py::test_model_validation_errors[<lambda>-Must",
"tests/test_vrs.py::test_model_validation_errors[<lambda>-The"
] | [] | [
"tests/test_vrs.py::test_vr",
"tests/test_vrs.py::test_cpb",
"tests/test_vrs.py::test_ga4gh_iri",
"tests/test_vrs.py::test_enref",
"tests/test_vrs.py::test_enref2",
"tests/test_vrs.py::test_class_refatt_map",
"tests/test_vrs.py::test_compute_identifiers_when"
] | [] | Apache License 2.0 | 19,429 | 911 | [
"src/ga4gh/vrs/models.py"
] |
GenericMappingTools__pygmt-3418 | 2157333f4ed9341c592bb6d22d9e710f3216db0c | 2024-08-28 11:35:35 | 2157333f4ed9341c592bb6d22d9e710f3216db0c | seisman: I'm self-proving this PR so that we can continue other PRs. | diff --git a/pygmt/figure.py b/pygmt/figure.py
index 0509e765..05cba50e 100644
--- a/pygmt/figure.py
+++ b/pygmt/figure.py
@@ -6,6 +6,7 @@ import base64
import os
from pathlib import Path, PurePath
from tempfile import TemporaryDirectory
+from typing import Literal
try:
import IPython
@@ -26,27 +27,49 @@ from pygmt.helpers import (
use_alias,
)
+
+def _get_default_display_method() -> Literal["external", "notebook", "none"]:
+ """
+ Get the default method to display preview images.
+
+ The function checks the current environment and determines the most suitable method
+ to display preview images when calling :meth:`pygmt.Figure.show`. Valid display
+ methods are:
+
+ - ``"external"``: External PDF preview using the default PDF viewer
+ - ``"notebook"``: Inline PNG preview in the current notebook
+ - ``"none"``: Disable image preview
+
+ The default display method is ``"notebook"`` in the Jupyter notebook environment,
+ and ``"external"`` in other cases.
+
+ Setting environment variable **PYGMT_USE_EXTERNAL_DISPLAY** to ``"false"`` can
+ disable image preview in external viewers. It's useful when running the tests and
+ building the documentation to avoid popping up windows.
+
+ Returns
+ -------
+ method
+ The default display method.
+ """
+ # Check if an IPython kernel is running.
+ if _HAS_IPYTHON and (ipy := IPython.get_ipython()) and "IPKernelApp" in ipy.config:
+ return "notebook"
+ # Check if the environment variable PYGMT_USE_EXTERNAL_DISPLAY is set to "false".
+ if os.environ.get("PYGMT_USE_EXTERNAL_DISPLAY", "true").lower() == "false":
+ return "none"
+ # Fallback to using the external viewer.
+ return "external"
+
+
# A registry of all figures that have had "show" called in this session.
# This is needed for the sphinx-gallery scraper in pygmt/sphinx_gallery.py
SHOWED_FIGURES = []
-
-# Configurations for figure display
+# Configurations for figure display.
SHOW_CONFIG = {
- "method": "external", # Open in an external viewer by default
+ "method": _get_default_display_method(), # The image preview display method.
}
-# Show figures in Jupyter notebooks if available
-if _HAS_IPYTHON:
- get_ipython = IPython.get_ipython()
- if get_ipython and "IPKernelApp" in get_ipython.config: # Jupyter Notebook enabled
- SHOW_CONFIG["method"] = "notebook"
-
-# Set environment variable PYGMT_USE_EXTERNAL_DISPLAY to 'false' to disable
-# external display. Use it when running the tests and building the docs to
-# avoid popping up windows.
-if os.environ.get("PYGMT_USE_EXTERNAL_DISPLAY", "true").lower() == "false":
- SHOW_CONFIG["method"] = "none"
-
class Figure:
"""
| The dynamically generated images are not shown in the documentation
In https://github.com/GenericMappingTools/pygmt/pull/3379, we enabled the `myst-nb` extension to generate images dynamically from code in Markdown files.
In the RTD preview (https://pygmt-dev--3379.org.readthedocs.build/en/3379/install.html#testing-your-install), the image was shown correctly, but for the documentation built by GitHub Actions, the image is not shown (see https://www.pygmt.org/dev/install.html#testing-your-install).
The difference is that, when building documentation, RTD calls `sphinx-build` directly, while in the "Docs" workflow, we run `make -C doc clean all`, which sets `PYGMT_USE_EXTERNAL_DISPLAY="false"` before calling `sphinx-build`:
https://github.com/GenericMappingTools/pygmt/blob/0faf52c2410601f551e7e8661cafdd28889dd0c0/doc/Makefile#L36
The complicated thing is:
Gallery examples are executed by Sphinx-Gallery, which executes the Python code using the `compile`/`exec` functions. Thus, the IPython kernel is not available and images are opened using external viewers by default. We have to disable this by setting `PYGMT_USE_EXTERNAL_DISPLAY="false"`. The PyGMTScraper class keeps track of all `Figure` instances, then calls `Figure.show()` and saves the images using `Figure.savefig`.
Instead, code cells in MyST Markdown files are executed by the myst-nb extension. As I understand it, it has an IPython Kernel, so the default display method is `"notebook"` and can't be `"none"`.
So, the solution should be straightforward: `PYGMT_USE_EXTERNAL_DISPLAY="false"` should only take effect when the default method is `"external"`.
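A rough sketch of that selection rule (a hypothetical standalone helper; the actual change lives in `pygmt/figure.py` as shown in the patch earlier in this record):

```python
import os


def default_display_method() -> str:
    """Sketch: pick how Figure.show() should preview images by default."""
    try:
        import IPython

        ipy = IPython.get_ipython()
        if ipy is not None and "IPKernelApp" in ipy.config:
            # Running inside a Jupyter kernel (notebooks, myst-nb): inline PNG preview.
            return "notebook"
    except ImportError:
        pass
    # Only the external-viewer fallback should honour the opt-out variable.
    if os.environ.get("PYGMT_USE_EXTERNAL_DISPLAY", "true").lower() == "false":
        return "none"
    return "external"
```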
| GenericMappingTools/pygmt | diff --git a/pygmt/tests/test_figure.py b/pygmt/tests/test_figure.py
index 042c9876..aadaaacf 100644
--- a/pygmt/tests/test_figure.py
+++ b/pygmt/tests/test_figure.py
@@ -4,7 +4,6 @@ Test the behavior of the Figure class.
Doesn't include the plotting commands which have their own test files.
"""
-import importlib
from pathlib import Path
import numpy as np
@@ -12,9 +11,15 @@ import numpy.testing as npt
import pytest
from pygmt import Figure, set_display
from pygmt.exceptions import GMTError, GMTInvalidInput
+from pygmt.figure import _get_default_display_method
from pygmt.helpers import GMTTempFile
-HAS_IPYTHON = bool(importlib.util.find_spec("IPython"))
+try:
+ import IPython
+
+ _HAS_IPYTHON = True
+except ImportError:
+ _HAS_IPYTHON = False
def test_figure_region():
@@ -307,7 +312,7 @@ def test_figure_savefig_worldfile():
fig.savefig(fname=imgfile.name, worldfile=True)
[email protected](not HAS_IPYTHON, reason="run when IPython is installed")
[email protected](not _HAS_IPYTHON, reason="run when IPython is installed")
def test_figure_show():
"""
Test that show creates the correct file name and deletes the temp dir.
@@ -347,7 +352,7 @@ def test_figure_show_invalid_method():
fig.show(method="test")
[email protected](HAS_IPYTHON, reason="run without IPython installed")
[email protected](_HAS_IPYTHON, reason="run without IPython installed")
def test_figure_show_notebook_error_without_ipython():
"""
Test to check if an error is raised when display method is 'notebook', but IPython
@@ -390,3 +395,48 @@ def test_figure_unsupported_xshift_yshift():
fig.plot(x=1, y=1, style="c3c", yshift="3c")
with pytest.raises(GMTInvalidInput):
fig.plot(x=1, y=1, style="c3c", Y="3c")
+
+
+class TestGetDefaultDisplayMethod:
+ """
+ Test the _get_default_display_method function.
+ """
+
+ def test_default_display_method(self, monkeypatch):
+ """
+ Default display method is "external" if PYGMT_USE_EXTERNAL_DISPLAY is undefined.
+ """
+ monkeypatch.delenv("PYGMT_USE_EXTERNAL_DISPLAY", raising=False)
+ assert _get_default_display_method() == "external"
+
+ def test_disable_external_display(self, monkeypatch):
+ """
+ Setting PYGMT_USE_EXTERNAL_DISPLAY to "false" should disable external display.
+ """
+ monkeypatch.setenv("PYGMT_USE_EXTERNAL_DISPLAY", "false")
+ assert _get_default_display_method() == "none"
+
+ @pytest.mark.skipif(not _HAS_IPYTHON, reason="Run when IPython is installed")
+ def test_notebook_display(self, monkeypatch):
+ """
+ Default display method is "notebook" when an IPython kernel is running.
+ """
+
+ class MockIPython:
+ """
+ A simple mock class to simulate an IPython instance.
+ """
+
+ def __init__(self):
+ self.config = {"IPKernelApp": True}
+
+ # Mock IPython.get_ipython() to return a MockIPython instance.
+ mock_ipython = MockIPython()
+ monkeypatch.setattr(IPython, "get_ipython", lambda: mock_ipython)
+
+ # Default display method should be "notebook" when an IPython kernel is running.
+ assert _get_default_display_method() == "notebook"
+
+ # PYGMT_USE_EXTERNAL_DISPLAY should not affect notebook display.
+ monkeypatch.setenv("PYGMT_USE_EXTERNAL_DISPLAY", "false")
+ assert _get_default_display_method() == "notebook"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.12 | {
"env_vars": null,
"env_yml_path": [
"environment.yml"
],
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": true,
"packages": "environment.yml",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.12",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | affine @ file:///home/conda/feedstock_root/build_artifacts/affine_1733762038348/work
aiohappyeyeballs @ file:///home/conda/feedstock_root/build_artifacts/aiohappyeyeballs_1741775197943/work
aiohttp @ file:///home/conda/feedstock_root/build_artifacts/aiohttp_1742268596946/work
aiohttp-retry @ file:///home/conda/feedstock_root/build_artifacts/aiohttp-retry_1743371080905/work
aiosignal @ file:///home/conda/feedstock_root/build_artifacts/aiosignal_1734342155601/work
alabaster @ file:///home/conda/feedstock_root/build_artifacts/alabaster_1733750398730/work
amqp @ file:///home/conda/feedstock_root/build_artifacts/amqp_1733906301603/work
annotated-types @ file:///home/conda/feedstock_root/build_artifacts/annotated-types_1733247046149/work
antlr4-python3-runtime @ file:///home/conda/feedstock_root/build_artifacts/antlr-python-runtime-meta_1638309185939/work
anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
appdirs @ file:///home/conda/feedstock_root/build_artifacts/appdirs_1733753955715/work
argon2-cffi @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi_1733311059102/work
argon2-cffi-bindings @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi-bindings_1725356585055/work
arrow @ file:///home/conda/feedstock_root/build_artifacts/arrow_1733584251875/work
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
async-lru @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_async-lru_1742153708/work
asyncssh @ file:///home/conda/feedstock_root/build_artifacts/asyncssh_1739897873808/work
atpublic @ file:///home/conda/feedstock_root/build_artifacts/atpublic_1737771474411/work
attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1738740337718/work
billiard @ file:///home/conda/feedstock_root/build_artifacts/billiard_1726941162024/work
bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
bokeh @ file:///home/conda/feedstock_root/build_artifacts/bokeh_1741848529421/work
boto3 @ file:///home/conda/feedstock_root/build_artifacts/boto3_1743235439640/work
botocore @ file:///home/conda/feedstock_root/build_artifacts/botocore_1743212920666/work
branca @ file:///home/conda/feedstock_root/build_artifacts/branca_1734433375112/work
Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
build @ file:///home/conda/feedstock_root/build_artifacts/python-build_1733230610871/work
cached-property @ file:///home/conda/feedstock_root/build_artifacts/cached_property_1615209429212/work
celery @ file:///home/conda/feedstock_root/build_artifacts/celery_1734708894737/work
certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1739515848642/work/certifi
cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725560558132/work
cfgv @ file:///home/conda/feedstock_root/build_artifacts/cfgv_1734267080977/work
cftime @ file:///home/conda/feedstock_root/build_artifacts/cftime_1725400475196/work
charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1735929714516/work
click @ file:///home/conda/feedstock_root/build_artifacts/click_1734858813237/work
click-didyoumean @ file:///home/conda/feedstock_root/build_artifacts/click-didyoumean_1734293070305/work
click-plugins @ file:///home/conda/feedstock_root/build_artifacts/click-plugins_1733731077999/work
click-repl @ file:///home/conda/feedstock_root/build_artifacts/click-repl_1694959444233/work
cligj @ file:///home/conda/feedstock_root/build_artifacts/cligj_1733749956636/work
codespell @ file:///home/conda/feedstock_root/build_artifacts/codespell_1738095243753/work
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
configobj @ file:///home/conda/feedstock_root/build_artifacts/configobj_1734075408845/work
contextily @ file:///home/conda/feedstock_root/build_artifacts/contextily_1734596167382/work
contourpy @ file:///home/conda/feedstock_root/build_artifacts/contourpy_1731428301407/work
coverage @ file:///home/conda/feedstock_root/build_artifacts/coverage_1743381243841/work
cryptography @ file:///home/conda/feedstock_root/build_artifacts/cryptography-split_1740893570960/work
cycler @ file:///home/conda/feedstock_root/build_artifacts/cycler_1733332471406/work
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1741148399929/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
dictdiffer @ file:///home/conda/feedstock_root/build_artifacts/dictdiffer_1734344338200/work
diskcache @ file:///home/conda/feedstock_root/build_artifacts/diskcache_1734196270869/work
distlib @ file:///home/conda/feedstock_root/build_artifacts/distlib_1733238395481/work
distro @ file:///home/conda/feedstock_root/build_artifacts/distro_1734729835256/work
docutils @ file:///home/conda/feedstock_root/build_artifacts/docutils_1733217766141/work
dpath @ file:///home/conda/feedstock_root/build_artifacts/dpath_1718243458415/work
dulwich @ file:///home/conda/feedstock_root/build_artifacts/dulwich_1740965086112/work
dvc @ file:///home/conda/feedstock_root/build_artifacts/dvc_1739627573960/work
dvc-data @ file:///home/conda/feedstock_root/build_artifacts/dvc-data_1738341724641/work
dvc-http @ file:///home/conda/feedstock_root/build_artifacts/dvc-http_1734723780438/work
dvc-objects @ file:///home/conda/feedstock_root/build_artifacts/dvc-objects_1734661413897/work
dvc-render @ file:///home/conda/feedstock_root/build_artifacts/dvc-render_1734673264834/work
dvc-studio-client @ file:///home/conda/feedstock_root/build_artifacts/dvc-studio-client_1734664628961/work
dvc-task @ file:///home/conda/feedstock_root/build_artifacts/dvc-task_1734664522689/work
entrypoints @ file:///home/conda/feedstock_root/build_artifacts/entrypoints_1733327148154/work
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1733208806608/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1733569351617/work
fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
filelock @ file:///home/conda/feedstock_root/build_artifacts/filelock_1741969488311/work
flatten-dict @ file:///home/conda/feedstock_root/build_artifacts/flatten-dict_1629457542349/work
flufl.lock @ file:///home/conda/feedstock_root/build_artifacts/flufl.lock_1722809909247/work
folium @ file:///home/conda/feedstock_root/build_artifacts/folium_1740766619747/work
fonttools @ file:///home/conda/feedstock_root/build_artifacts/fonttools_1738940303262/work
fqdn @ file:///home/conda/feedstock_root/build_artifacts/fqdn_1733327382592/work/dist
frozenlist @ file:///home/conda/feedstock_root/build_artifacts/frozenlist_1737645236190/work
fsspec @ file:///home/conda/feedstock_root/build_artifacts/fsspec_1743361113926/work
funcy @ file:///home/conda/feedstock_root/build_artifacts/funcy_1734381131891/work
future @ file:///home/conda/feedstock_root/build_artifacts/future_1738926421307/work
geographiclib @ file:///home/conda/feedstock_root/build_artifacts/geographiclib_1734342349249/work
geopandas @ file:///home/conda/feedstock_root/build_artifacts/geopandas_1734346029138/work
geopy @ file:///home/conda/feedstock_root/build_artifacts/geopy_1734341931581/work
gitdb @ file:///home/conda/feedstock_root/build_artifacts/gitdb_1735887193964/work
GitPython @ file:///home/conda/feedstock_root/build_artifacts/gitpython_1735929639977/work
grandalf @ file:///home/conda/feedstock_root/build_artifacts/grandalf_1734664611384/work
greenlet @ file:///home/conda/feedstock_root/build_artifacts/greenlet_1734532793030/work
gssapi @ file:///home/conda/feedstock_root/build_artifacts/python-gssapi_1733827677625/work
gto @ file:///home/conda/feedstock_root/build_artifacts/gto_1734661367984/work
h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1733327467879/work
h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1731707562/work
httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
hydra-core @ file:///home/conda/feedstock_root/build_artifacts/hydra-core_1736934708722/work
hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
identify @ file:///home/conda/feedstock_root/build_artifacts/identify_1741502659866/work
idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
imagesize @ file:///home/conda/feedstock_root/build_artifacts/imagesize_1656939531508/work
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
iniconfig @ file:///home/conda/feedstock_root/build_artifacts/iniconfig_1733223141826/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_ipython_1741457126/work
ipython_pygments_lexers @ file:///home/conda/feedstock_root/build_artifacts/ipython_pygments_lexers_1737123620466/work
ipywidgets @ file:///home/conda/feedstock_root/build_artifacts/ipywidgets_1733493556527/work
isoduration @ file:///home/conda/feedstock_root/build_artifacts/isoduration_1733493628631/work/dist
iterative-telemetry @ file:///home/conda/feedstock_root/build_artifacts/iterative-telemetry_1739252628161/work
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
jmespath @ file:///home/conda/feedstock_root/build_artifacts/jmespath_1733229141657/work
joblib @ file:///home/conda/feedstock_root/build_artifacts/joblib_1733736026804/work
json5 @ file:///home/conda/feedstock_root/build_artifacts/json5_1733272076743/work
jsonpointer @ file:///home/conda/feedstock_root/build_artifacts/jsonpointer_1725302935093/work
jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
jsonschema-specifications @ file:///tmp/tmpk0f344m9/src
jupyter @ file:///home/conda/feedstock_root/build_artifacts/jupyter_1733818543322/work
jupyter-cache @ file:///home/conda/feedstock_root/build_artifacts/jupyter-cache_1731777098974/work
jupyter-console @ file:///home/conda/feedstock_root/build_artifacts/jupyter_console_1733817997778/work
jupyter-events @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyter_events_1738765986/work
jupyter-lsp @ file:///home/conda/feedstock_root/build_artifacts/jupyter-lsp-meta_1733492907176/work/jupyter-lsp
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
jupyter_server @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_1734702637701/work
jupyter_server_terminals @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_terminals_1733427956852/work
jupyterlab @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_1741964057182/work
jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
jupyterlab_server @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_server_1733599573484/work
jupyterlab_widgets @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_widgets_1733428046021/work
kiwisolver @ file:///home/conda/feedstock_root/build_artifacts/kiwisolver_1736908181161/work
kombu @ file:///home/conda/feedstock_root/build_artifacts/kombu_1726068845201/work
linkify-it-py @ file:///home/conda/feedstock_root/build_artifacts/linkify-it-py_1733781180837/work
mapclassify @ file:///home/conda/feedstock_root/build_artifacts/mapclassify_1733731066416/work
Markdown @ file:///home/conda/feedstock_root/build_artifacts/markdown_1710435156458/work
markdown-it-py @ file:///home/conda/feedstock_root/build_artifacts/markdown-it-py_1733250460757/work
MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
matplotlib==3.10.1
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
mdit-py-plugins @ file:///home/conda/feedstock_root/build_artifacts/mdit-py-plugins_1733854715505/work
mdurl @ file:///home/conda/feedstock_root/build_artifacts/mdurl_1733255585584/work
mercantile @ file:///home/conda/feedstock_root/build_artifacts/mercantile_1734075348980/work
mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
multidict @ file:///home/conda/feedstock_root/build_artifacts/multidict_1742308123521/work
munkres==1.1.4
mypy @ file:///home/conda/feedstock_root/build_artifacts/mypy-split_1738766716979/work
mypy_extensions @ file:///home/conda/feedstock_root/build_artifacts/mypy_extensions_1733230897951/work
myst-nb @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_myst-nb_1739024493/work
myst-parser @ file:///home/conda/feedstock_root/build_artifacts/myst-parser_1739381835679/work
narwhals @ file:///home/conda/feedstock_root/build_artifacts/narwhals_1742841036354/work
nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
netCDF4 @ file:///home/conda/feedstock_root/build_artifacts/netcdf4_1733253361203/work
networkx @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_networkx_1731521053/work
nodeenv @ file:///home/conda/feedstock_root/build_artifacts/nodeenv_1734112117269/work
notebook @ file:///home/conda/feedstock_root/build_artifacts/notebook_1741968175534/work
notebook_shim @ file:///home/conda/feedstock_root/build_artifacts/notebook-shim_1733408315203/work
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1742254798654/work/dist/numpy-2.2.4-cp312-cp312-linux_x86_64.whl#sha256=f71c8b591ed59dbe494706c70ae6c6fda01e9748f47e72a9eeea6dfc970db7c8
omegaconf @ file:///home/conda/feedstock_root/build_artifacts/omegaconf_1670575376789/work
orjson @ file:///home/conda/feedstock_root/build_artifacts/orjson_1742909850268/work
overrides @ file:///home/conda/feedstock_root/build_artifacts/overrides_1734587627321/work
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1733203243479/work
pandas @ file:///home/conda/feedstock_root/build_artifacts/pandas_1726878417179/work
pandas-stubs @ file:///home/conda/feedstock_root/build_artifacts/pandas-stubs_1741560580557/work
pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
panel @ file:///home/conda/feedstock_root/build_artifacts/panel_1743356992402/work
param @ file:///home/conda/feedstock_root/build_artifacts/param_1734441041763/work
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
pathlib2 @ file:///home/conda/feedstock_root/build_artifacts/pathlib2_1725350157957/work
pathspec @ file:///home/conda/feedstock_root/build_artifacts/pathspec_1733233363808/work
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
pillow @ file:///home/conda/feedstock_root/build_artifacts/pillow_1735929693903/work
pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1742485085/work
pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
pre_commit @ file:///home/conda/feedstock_root/build_artifacts/pre-commit_1742475552107/work
prometheus_client @ file:///home/conda/feedstock_root/build_artifacts/prometheus_client_1733327310477/work
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1737453357274/work
propcache @ file:///home/conda/feedstock_root/build_artifacts/propcache_1737635528546/work
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663123172/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
pydantic @ file:///home/conda/feedstock_root/build_artifacts/pydantic_1743418918215/work
pydantic_core @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pydantic-core_1743201080/work
pydot @ file:///home/conda/feedstock_root/build_artifacts/pydot_1737244074531/work
pygit2 @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pygit2_1737056797/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
-e git+https://github.com/GenericMappingTools/pygmt.git@2157333f4ed9341c592bb6d22d9e710f3216db0c#egg=pygmt
pygtrie @ file:///home/conda/feedstock_root/build_artifacts/pygtrie_1734664549100/work
pyogrio @ file:///home/conda/feedstock_root/build_artifacts/pyogrio_1732013352702/work
pyOpenSSL @ file:///home/conda/feedstock_root/build_artifacts/pyopenssl_1737243356468/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1743089729650/work
pyproj @ file:///home/conda/feedstock_root/build_artifacts/pyproj_1742323249752/work
pyproject_hooks @ file:///home/conda/feedstock_root/build_artifacts/pyproject_hooks_1733710025763/work
PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
pytest @ file:///home/conda/feedstock_root/build_artifacts/pytest_1740946542080/work
pytest-cov @ file:///home/conda/feedstock_root/build_artifacts/pytest-cov_1733223023082/work
pytest-doctestplus @ file:///home/conda/feedstock_root/build_artifacts/pytest-doctestplus_1737819197221/work
pytest-mpl @ file:///home/conda/feedstock_root/build_artifacts/pytest-mpl_1734116536345/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
python-json-logger @ file:///home/conda/feedstock_root/build_artifacts/python-json-logger_1677079630776/work
pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1706886791323/work
pyviz_comms @ file:///home/conda/feedstock_root/build_artifacts/pyviz_comms_1736890319493/work
PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1741805125248/work
rasterio @ file:///home/conda/feedstock_root/build_artifacts/rasterio_1742428582513/work
referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
rfc3339_validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3339-validator_1733599910982/work
rfc3986-validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3986-validator_1598024191506/work
rich @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rich_1743371105/work/dist
rioxarray @ file:///home/conda/feedstock_root/build_artifacts/rioxarray_1737141095682/work
roman-numerals-py @ file:///home/conda/feedstock_root/build_artifacts/roman-numerals-py_1740240236123/work
rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037659/work
ruamel.yaml @ file:///home/conda/feedstock_root/build_artifacts/ruamel.yaml_1736248036158/work
ruamel.yaml.clib @ file:///home/conda/feedstock_root/build_artifacts/ruamel.yaml.clib_1728724466132/work
ruff @ file:///home/conda/feedstock_root/build_artifacts/ruff_1742583610685/work
s3transfer @ file:///home/conda/feedstock_root/build_artifacts/s3transfer_1741171990164/work
scikit-learn @ file:///home/conda/feedstock_root/build_artifacts/scikit-learn_1736496747744/work/dist/scikit_learn-1.6.1-cp312-cp312-linux_x86_64.whl#sha256=1e9a128c5112a3d885927ddacb2e316c3c63ff28febd4c3bc3a5e6f64f2149af
scipy @ file:///home/conda/feedstock_root/build_artifacts/scipy-split_1739790643552/work/dist/scipy-1.15.2-cp312-cp312-linux_x86_64.whl#sha256=9c4c390e67a1320f6f3b418568855f0a6759c34ce8e5b9eb6cc90d02fd7ba711
scmrepo @ file:///home/conda/feedstock_root/build_artifacts/scmrepo_1738359816593/work
semver @ file:///home/conda/feedstock_root/build_artifacts/semver_1737841553927/work
Send2Trash @ file:///home/conda/feedstock_root/build_artifacts/send2trash_1733322040660/work
setuptools==75.8.0
shapely @ file:///home/conda/feedstock_root/build_artifacts/shapely_1741166953416/work
shellingham @ file:///home/conda/feedstock_root/build_artifacts/shellingham_1733300899265/work
shortuuid @ file:///home/conda/feedstock_root/build_artifacts/shortuuid_1734272317000/work
shtab @ file:///home/conda/feedstock_root/build_artifacts/shtab_1734664526617/work
six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
smmap @ file:///home/conda/feedstock_root/build_artifacts/smmap_1739781697784/work
sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
snowballstemmer @ file:///home/conda/feedstock_root/build_artifacts/snowballstemmer_1637143057757/work
snuggs @ file:///home/conda/feedstock_root/build_artifacts/snuggs_1733818638588/work
soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1693929250441/work
Sphinx @ file:///home/conda/feedstock_root/build_artifacts/sphinx_1740956487887/work
sphinx-autodoc-typehints @ file:///home/conda/feedstock_root/build_artifacts/sphinx-autodoc-typehints_1740131554391/work
sphinx-copybutton @ file:///home/conda/feedstock_root/build_artifacts/sphinx-copybutton_1734572975006/work
sphinx-gallery @ file:///home/conda/feedstock_root/build_artifacts/sphinx-gallery_1739451496361/work
sphinx_design @ file:///home/conda/feedstock_root/build_artifacts/sphinx-design_1734614570224/work
sphinx_rtd_theme @ file:///home/conda/feedstock_root/build_artifacts/sphinx_rtd_theme_1730015256242/work
sphinxcontrib-applehelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-applehelp_1733754101641/work
sphinxcontrib-devhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-devhelp_1733754113405/work
sphinxcontrib-htmlhelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-htmlhelp_1733754280555/work
sphinxcontrib-jquery @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-jquery_1734344508263/work
sphinxcontrib-jsmath @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-jsmath_1733753744933/work
sphinxcontrib-qthelp @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-qthelp_1733753408017/work
sphinxcontrib-serializinghtml @ file:///home/conda/feedstock_root/build_artifacts/sphinxcontrib-serializinghtml_1733750479108/work
SQLAlchemy @ file:///home/conda/feedstock_root/build_artifacts/sqlalchemy_1743109724354/work
sqltrie @ file:///home/conda/feedstock_root/build_artifacts/sqltrie_1739984874333/work
stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
tabulate @ file:///home/conda/feedstock_root/build_artifacts/tabulate_1733589744265/work
terminado @ file:///home/conda/feedstock_root/build_artifacts/terminado_1710262609923/work
threadpoolctl @ file:///home/conda/feedstock_root/build_artifacts/threadpoolctl_1741878222898/work
tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
toml @ file:///home/conda/feedstock_root/build_artifacts/toml_1734091811753/work
tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
tomlkit @ file:///home/conda/feedstock_root/build_artifacts/tomlkit_1733230743009/work
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615905931/work
tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
typer==0.15.2
typer-slim==0.15.2
types-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/types-python-dateutil_1733612335562/work
types-pytz @ file:///home/conda/feedstock_root/build_artifacts/types-pytz_1742275774713/work
typing-inspection @ file:///home/conda/feedstock_root/build_artifacts/typing-inspection_1741438046699/work
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1743201626/work
typing_utils @ file:///home/conda/feedstock_root/build_artifacts/typing_utils_1733331286120/work
tzdata @ file:///home/conda/feedstock_root/build_artifacts/python-tzdata_1742745135198/work
uc-micro-py @ file:///home/conda/feedstock_root/build_artifacts/uc-micro-py_1733784165198/work
ukkonen @ file:///home/conda/feedstock_root/build_artifacts/ukkonen_1725784043978/work
unicodedata2 @ file:///home/conda/feedstock_root/build_artifacts/unicodedata2_1736692508163/work
uri-template @ file:///home/conda/feedstock_root/build_artifacts/uri-template_1733323593477/work/dist
urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1734859416348/work
vine @ file:///home/conda/feedstock_root/build_artifacts/vine_1733906372582/work
virtualenv @ file:///home/conda/feedstock_root/build_artifacts/virtualenv_1741337798015/work
voluptuous @ file:///home/conda/feedstock_root/build_artifacts/voluptuous_1734219447985/work
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
webcolors @ file:///home/conda/feedstock_root/build_artifacts/webcolors_1733359735138/work
webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
wheel==0.45.1
widgetsnbextension @ file:///home/conda/feedstock_root/build_artifacts/widgetsnbextension_1733128559935/work
xarray @ file:///home/conda/feedstock_root/build_artifacts/xarray_1742448343846/work
xyzservices @ file:///home/conda/feedstock_root/build_artifacts/xyzservices_1737234886776/work
yarl @ file:///home/conda/feedstock_root/build_artifacts/yarl_1737575777699/work
zc.lockfile @ file:///home/conda/feedstock_root/build_artifacts/zc.lockfile_1732886357525/work
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
zstandard==0.23.0
| name: pygmt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- adwaita-icon-theme=48.0=unix_0
- affine=2.4.0=pyhd8ed1ab_1
- aiohappyeyeballs=2.6.1=pyhd8ed1ab_0
- aiohttp=3.11.14=py312h178313f_0
- aiohttp-retry=2.9.1=pyhd8ed1ab_0
- aiosignal=1.3.2=pyhd8ed1ab_0
- alabaster=1.0.0=pyhd8ed1ab_1
- amqp=5.2.0=pyhd8ed1ab_2
- annotated-types=0.7.0=pyhd8ed1ab_1
- antlr-python-runtime=4.9.3=pyhd8ed1ab_1
- anyio=4.9.0=pyh29332c3_0
- aom=3.9.1=hac33072_0
- appdirs=1.4.4=pyhd8ed1ab_1
- argon2-cffi=23.1.0=pyhd8ed1ab_1
- argon2-cffi-bindings=21.2.0=py312h66e93f0_5
- arrow=1.3.0=pyhd8ed1ab_1
- asttokens=3.0.0=pyhd8ed1ab_1
- async-lru=2.0.5=pyh29332c3_0
- asyncssh=2.20.0=pyhd8ed1ab_0
- at-spi2-atk=2.38.0=h0630a04_3
- at-spi2-core=2.40.3=h0630a04_0
- atk-1.0=2.38.0=h04ea711_2
- atpublic=5.1=pyhd8ed1ab_0
- attrs=25.3.0=pyh71513ae_0
- babel=2.17.0=pyhd8ed1ab_0
- backports.zoneinfo=0.2.1=py312h7900ff3_9
- beautifulsoup4=4.13.3=pyha770c72_0
- billiard=4.2.1=py312h66e93f0_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=he440d0b_1
- bokeh=3.7.0=pyhd8ed1ab_0
- boto3=1.37.23=pyhd8ed1ab_0
- botocore=1.37.23=pyge310_1234567_0
- branca=0.8.1=pyhd8ed1ab_0
- brotli=1.1.0=hb9d3cd8_2
- brotli-bin=1.1.0=hb9d3cd8_2
- brotli-python=1.1.0=py312h2ec8cdc_2
- bzip2=1.0.8=h5eee18b_6
- c-ares=1.34.4=hb9d3cd8_0
- ca-certificates=2025.2.25=h06a4308_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cairo=1.18.4=h3394656_0
- celery=5.4.0=pyhd8ed1ab_2
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.17.1=py312h06ac9bb_0
- cfgv=3.3.1=pyhd8ed1ab_1
- cftime=1.6.4=py312hc0a28a1_1
- charset-normalizer=3.4.1=pyhd8ed1ab_0
- click=8.1.8=pyh707e725_0
- click-didyoumean=0.3.1=pyhd8ed1ab_1
- click-plugins=1.1.1=pyhd8ed1ab_1
- click-repl=0.3.0=pyhd8ed1ab_0
- cligj=0.7.2=pyhd8ed1ab_2
- codespell=2.4.1=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_1
- comm=0.2.2=pyhd8ed1ab_1
- configobj=5.0.9=pyhd8ed1ab_1
- contextily=1.6.2=pyhd8ed1ab_1
- contourpy=1.3.1=py312h68727a3_0
- coverage=7.8.0=py312h178313f_0
- cryptography=44.0.2=py312hda17c39_0
- curl=8.12.1=h332b0f4_0
- cycler=0.12.1=pyhd8ed1ab_1
- dav1d=1.2.1=hd590300_0
- dbus=1.13.6=h5008d03_3
- dcw-gmt=2.2.0=ha770c72_0
- debugpy=1.8.13=py312h2ec8cdc_0
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- dictdiffer=0.9.0=pyhd8ed1ab_1
- diskcache=5.6.3=pyhd8ed1ab_1
- distlib=0.3.9=pyhd8ed1ab_1
- distro=1.9.0=pyhd8ed1ab_1
- docutils=0.21.2=pyhd8ed1ab_1
- dpath=2.2.0=pyha770c72_0
- dulwich=0.22.8=py312h12e396e_0
- dvc=3.59.1=pyhd8ed1ab_0
- dvc-data=3.16.9=pyhd8ed1ab_0
- dvc-http=2.32.0=pyhd8ed1ab_1
- dvc-objects=5.1.0=pyhd8ed1ab_2
- dvc-render=1.0.2=pyhd8ed1ab_1
- dvc-studio-client=0.21.0=pyhd8ed1ab_1
- dvc-task=0.40.2=pyhd8ed1ab_1
- entrypoints=0.4=pyhd8ed1ab_1
- epoxy=1.5.10=h166bdaf_1
- exceptiongroup=1.2.2=pyhd8ed1ab_1
- executing=2.1.0=pyhd8ed1ab_1
- expat=2.6.4=h6a678d5_0
- fftw=3.3.10=nompi_hf1063bd_110
- filelock=3.18.0=pyhd8ed1ab_0
- flatten-dict=0.4.2=pyhd8ed1ab_1
- flufl.lock=8.1.0=pyhd8ed1ab_0
- folium=0.19.5=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=h77eed37_3
- fontconfig=2.15.0=h7e30c49_1
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.56.0=py312h178313f_0
- fqdn=1.5.1=pyhd8ed1ab_1
- freetype=2.13.3=h48d6fc4_0
- freexl=2.0.0=h9dce30a_2
- fribidi=1.0.10=h36c2ea0_0
- frozenlist=1.5.0=py312h178313f_1
- fsspec=2025.3.1=pyhd8ed1ab_0
- funcy=2.0=pyhd8ed1ab_1
- future=1.0.0=pyhd8ed1ab_2
- gdk-pixbuf=2.42.12=hb9ae30d_0
- geographiclib=2.0=pyhd8ed1ab_1
- geopandas=1.0.1=pyhd8ed1ab_3
- geopandas-base=1.0.1=pyha770c72_3
- geopy=2.4.1=pyhd8ed1ab_2
- geos=3.13.1=h97f6797_0
- geotiff=1.7.4=h239500f_2
- ghostscript=10.03.1=h59595ed_0
- giflib=5.2.2=hd590300_0
- gitdb=4.0.12=pyhd8ed1ab_0
- gitpython=3.1.44=pyhff2d567_0
- glib-tools=2.84.0=h4833e2c_0
- gmt=6.5.0=h8300b2c_8
- grandalf=0.7=pyhd8ed1ab_1
- graphite2=1.3.13=h59595ed_1003
- graphviz=12.2.1=h5ae0cbf_1
- greenlet=3.1.1=py312h2ec8cdc_1
- gshhg-gmt=2.3.7=ha770c72_1003
- gtest=1.16.0=h84d6215_0
- gtk3=3.24.43=h0c6a113_5
- gto=1.7.2=pyhd8ed1ab_1
- gts=0.7.6=h977cf35_4
- h11=0.14.0=pyhd8ed1ab_1
- h2=4.2.0=pyhd8ed1ab_0
- harfbuzz=11.0.0=h76408a6_0
- hdf4=4.2.15=h2a13503_7
- hdf5=1.14.3=nompi_h2d575fe_109
- hicolor-icon-theme=0.17=ha770c72_2
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.7=pyh29332c3_1
- httpx=0.28.1=pyhd8ed1ab_0
- hydra-core=1.3.2=pyhd8ed1ab_1
- hyperframe=6.1.0=pyhd8ed1ab_0
- icu=75.1=he02047a_0
- identify=2.6.9=pyhd8ed1ab_0
- idna=3.10=pyhd8ed1ab_1
- imagesize=1.4.1=pyhd8ed1ab_0
- importlib-metadata=8.6.1=pyha770c72_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- iniconfig=2.0.0=pyhd8ed1ab_1
- ipykernel=6.29.5=pyh3099207_0
- ipython=9.0.2=pyhfb0248b_0
- ipython_pygments_lexers=1.1.1=pyhd8ed1ab_0
- ipywidgets=8.1.5=pyhd8ed1ab_1
- isoduration=20.11.0=pyhd8ed1ab_1
- iterative-telemetry=0.0.10=pyhd8ed1ab_0
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- jmespath=1.0.1=pyhd8ed1ab_1
- joblib=1.4.2=pyhd8ed1ab_1
- json-c=0.18=h6688a6e_0
- json5=0.10.0=pyhd8ed1ab_1
- jsonpointer=3.0.0=py312h7900ff3_1
- jsonschema=4.23.0=pyhd8ed1ab_1
- jsonschema-specifications=2024.10.1=pyhd8ed1ab_1
- jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_1
- jupyter=1.1.1=pyhd8ed1ab_1
- jupyter-cache=1.0.1=pyhff2d567_0
- jupyter-lsp=2.2.5=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_console=6.6.3=pyhd8ed1ab_1
- jupyter_core=5.7.2=pyh31011fe_1
- jupyter_events=0.12.0=pyh29332c3_0
- jupyter_server=2.15.0=pyhd8ed1ab_0
- jupyter_server_terminals=0.5.3=pyhd8ed1ab_1
- jupyterlab=4.3.6=pyhd8ed1ab_0
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- jupyterlab_server=2.27.3=pyhd8ed1ab_1
- jupyterlab_widgets=3.0.13=pyhd8ed1ab_1
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.8=py312h84d6215_0
- kombu=5.4.1=py312h7900ff3_0
- krb5=1.21.3=h659f571_0
- lcms2=2.17=h717163a_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h27087fc_0
- libaec=1.1.3=h59595ed_0
- libarchive=3.7.7=h4585015_3
- libavif16=1.2.1=hbb36593_2
- libblas=3.9.0=31_h59b9bed_openblas
- libbrotlicommon=1.1.0=hb9d3cd8_2
- libbrotlidec=1.1.0=hb9d3cd8_2
- libbrotlienc=1.1.0=hb9d3cd8_2
- libcblas=3.9.0=31_he106b2a_openblas
- libcups=2.3.3=h4637d8d_4
- libcurl=8.12.1=h332b0f4_0
- libde265=1.0.15=h00ab1b0_0
- libdeflate=1.23=h4ddbbb0_0
- libedit=3.1.20250104=pl5321h7949ede_0
- libev=4.33=hd590300_2
- libexpat=2.6.4=h5888daf_0
- libffi=3.4.4=h6a678d5_1
- libgcc=14.2.0=h767d61c_2
- libgcc-ng=14.2.0=h69a702a_2
- libgd=2.3.3=h6f5c62b_11
- libgdal-core=3.10.2=hae73b24_5
- libgdal-jp2openjpeg=3.10.2=ha1d2769_5
- libgfortran=14.2.0=h69a702a_2
- libgfortran-ng=14.2.0=h69a702a_2
- libgfortran5=14.2.0=hf1ad2bd_2
- libgit2=1.9.0=hd24f944_0
- libglib=2.84.0=h2ff4ddf_0
- libgomp=14.2.0=h767d61c_2
- libheif=1.19.7=gpl_hc18d805_100
- libiconv=1.18=h4ce23a2_1
- libjpeg-turbo=3.0.0=hd590300_1
- libkml=1.3.0=hf539b9f_1021
- liblapack=3.9.0=31_h7ac8fdf_openblas
- liblzma=5.6.4=hb9d3cd8_0
- libnetcdf=4.9.2=nompi_h00e09a9_116
- libnghttp2=1.64.0=h161d5f1_0
- libnsl=2.0.1=hd590300_0
- libopenblas=0.3.29=pthreads_h94d23a6_0
- libpng=1.6.47=h943b412_0
- librsvg=2.58.4=he92a37e_3
- librttopo=1.1.0=hd718a1a_18
- libsodium=1.0.20=h4ab18f5_0
- libspatialite=5.1.0=he17ca71_14
- libsqlite=3.49.1=hee588c1_2
- libssh2=1.11.1=hf672d98_0
- libstdcxx=14.2.0=h8f9b012_2
- libstdcxx-ng=14.2.0=h4852527_2
- libtiff=4.7.0=hd9ff511_3
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.5.0=h851e524_0
- libxcb=1.17.0=h8a09558_0
- libxcrypt=4.4.36=hd590300_1
- libxkbcommon=1.8.1=hc4a0caf_0
- libxml2=2.13.7=h8d12d68_0
- libzip=1.11.2=h6991a6a_0
- libzlib=1.3.1=hb9d3cd8_2
- linkify-it-py=2.0.3=pyhd8ed1ab_1
- lz4-c=1.10.0=h5888daf_1
- lzo=2.10=hd590300_1001
- make=4.4.1=hb9d3cd8_2
- mapclassify=2.8.1=pyhd8ed1ab_1
- markdown=3.6=pyhd8ed1ab_0
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py312h178313f_1
- matplotlib-base=3.10.1=py312hd3ec401_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mdit-py-plugins=0.4.2=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- mercantile=1.2.1=pyhd8ed1ab_1
- minizip=4.0.7=h05a5f5f_3
- mistune=3.1.3=pyh29332c3_0
- multidict=6.2.0=py312h178313f_0
- munkres=1.1.4=pyh9f0ad1d_0
- mypy=1.15.0=py312h66e93f0_0
- mypy_extensions=1.0.0=pyha770c72_1
- myst-nb=1.2.0=pyh29332c3_0
- myst-parser=4.0.1=pyhd8ed1ab_0
- narwhals=1.32.0=pyhd8ed1ab_0
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbformat=5.10.4=pyhd8ed1ab_1
- ncurses=6.5=h2d0b736_3
- nest-asyncio=1.6.0=pyhd8ed1ab_1
- netcdf4=1.7.2=nompi_py312h21d6d8e_101
- networkx=3.4.2=pyh267e887_2
- nodeenv=1.9.1=pyhd8ed1ab_1
- notebook=7.3.3=pyhd8ed1ab_0
- notebook-shim=0.2.4=pyhd8ed1ab_1
- numpy=2.2.4=py312h72c5963_0
- omegaconf=2.3.0=pyhd8ed1ab_0
- openjpeg=2.5.3=h5fbd93e_0
- openssl=3.4.1=h7b32b05_0
- orjson=3.10.16=py312h12e396e_0
- overrides=7.7.0=pyhd8ed1ab_1
- packaging=24.2=pyhd8ed1ab_2
- pandas=2.2.3=py312hf9745cd_1
- pandas-stubs=2.2.3.250308=pyhd8ed1ab_0
- pandocfilters=1.5.0=pyhd8ed1ab_0
- panel=1.6.2=pyhd8ed1ab_0
- pango=1.56.3=h9ac818e_1
- param=2.2.0=pyhd8ed1ab_0
- parso=0.8.4=pyhd8ed1ab_1
- pathlib2=2.3.7.post1=py312h7900ff3_4
- pathspec=0.12.1=pyhd8ed1ab_1
- pcre=8.45=h9c3ff4c_0
- pcre2=10.44=hba22ea6_2
- pexpect=4.9.0=pyhd8ed1ab_1
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.1.0=py312h80c1187_0
- pip=25.0.1=pyh8b19718_0
- pixman=0.44.2=h29eaf8c_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2
- platformdirs=4.3.7=pyh29332c3_0
- pluggy=1.5.0=pyhd8ed1ab_1
- pre-commit=4.2.0=pyha770c72_0
- proj=9.6.0=h0054346_0
- prometheus_client=0.21.1=pyhd8ed1ab_0
- prompt-toolkit=3.0.50=pyha770c72_0
- prompt_toolkit=3.0.50=hd8ed1ab_0
- propcache=0.2.1=py312h178313f_1
- psutil=7.0.0=py312h66e93f0_0
- pthread-stubs=0.4=hb9d3cd8_1002
- ptyprocess=0.7.0=pyhd8ed1ab_1
- pure_eval=0.2.3=pyhd8ed1ab_1
- pycparser=2.22=pyh29332c3_1
- pydantic=2.11.1=pyh3cfb1c2_0
- pydantic-core=2.33.0=py312h3b7be25_0
- pydot=3.0.4=py312h7900ff3_0
- pygit2=1.17.0=py312hba6b6d9_0
- pygments=2.19.1=pyhd8ed1ab_0
- pygtrie=2.5.0=pyhd8ed1ab_1
- pyogrio=0.10.0=py312h02b19dd_1
- pyopenssl=25.0.0=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhd8ed1ab_1
- pyproj=3.7.1=py312h03c6e1f_1
- pyproject_hooks=1.2.0=pyhd8ed1ab_1
- pysocks=1.7.1=pyha55dd90_7
- pytest=8.3.5=pyhd8ed1ab_0
- pytest-cov=6.0.0=pyhd8ed1ab_1
- pytest-doctestplus=1.4.0=pyhd8ed1ab_0
- pytest-mpl=0.17.0=pyhd8ed1ab_1
- python=3.12.9=h9e4cc4f_1_cpython
- python-build=1.2.2.post1=pyhff2d567_1
- python-dateutil=2.9.0.post0=pyhff2d567_1
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-gssapi=1.9.0=py312h3770eae_1
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python-tzdata=2025.2=pyhd8ed1ab_0
- python_abi=3.12=5_cp312
- pytz=2024.1=pyhd8ed1ab_0
- pyviz_comms=3.0.4=pyhd8ed1ab_1
- pywin32-on-windows=0.1.0=pyh1179c8e_3
- pyyaml=6.0.2=py312h178313f_2
- pyzmq=26.3.0=py312hbf22597_0
- qhull=2020.2=h434a139_5
- rasterio=1.4.3=py312h021bea1_1
- rav1e=0.6.6=he8a937b_2
- readline=8.2=h5eee18b_0
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.3=pyhd8ed1ab_1
- rfc3339-validator=0.1.4=pyhd8ed1ab_1
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rich=14.0.0=pyh29332c3_0
- rioxarray=0.18.2=pyhd8ed1ab_0
- roman-numerals-py=3.1.0=pyhd8ed1ab_0
- rpds-py=0.24.0=py312h3b7be25_0
- ruamel.yaml=0.18.10=py312h66e93f0_0
- ruamel.yaml.clib=0.2.8=py312h66e93f0_1
- ruff=0.11.2=py312hf79aa60_0
- s3transfer=0.11.4=pyhd8ed1ab_0
- scikit-learn=1.6.1=py312h7a48858_0
- scipy=1.15.2=py312ha707e6e_0
- scmrepo=3.3.10=pyhd8ed1ab_0
- semver=3.0.4=pyhd8ed1ab_0
- send2trash=1.8.3=pyh0d859eb_1
- setuptools=75.8.0=py312h06a4308_0
- shapely=2.0.7=py312h21f5128_1
- shellingham=1.5.4=pyhd8ed1ab_1
- shortuuid=1.0.13=pyhd8ed1ab_1
- shtab=1.7.1=pyhd8ed1ab_1
- six=1.17.0=pyhd8ed1ab_0
- smmap=5.0.2=pyhd8ed1ab_0
- snappy=1.2.1=h8bd8927_1
- sniffio=1.3.1=pyhd8ed1ab_1
- snowballstemmer=2.2.0=pyhd8ed1ab_0
- snuggs=1.4.7=pyhd8ed1ab_2
- soupsieve=2.5=pyhd8ed1ab_1
- sphinx=8.2.3=pyhd8ed1ab_0
- sphinx-autodoc-typehints=3.1.0=pyhd8ed1ab_0
- sphinx-copybutton=0.5.2=pyhd8ed1ab_1
- sphinx-design=0.6.1=pyhd8ed1ab_2
- sphinx-gallery=0.19.0=pyhd8ed1ab_0
- sphinx_rtd_theme=3.0.1=pyha770c72_0
- sphinxcontrib-applehelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-devhelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-htmlhelp=2.1.0=pyhd8ed1ab_1
- sphinxcontrib-jquery=4.1=pyhd8ed1ab_1
- sphinxcontrib-jsmath=1.0.1=pyhd8ed1ab_1
- sphinxcontrib-qthelp=2.0.0=pyhd8ed1ab_1
- sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_1
- sqlalchemy=2.0.40=py312h66e93f0_0
- sqlite=3.49.1=h9eae976_2
- sqltrie=0.11.2=pyhd8ed1ab_0
- stack_data=0.6.3=pyhd8ed1ab_1
- svt-av1=3.0.2=h5888daf_0
- tabulate=0.9.0=pyhd8ed1ab_2
- terminado=0.18.1=pyh0d859eb_0
- threadpoolctl=3.6.0=pyhecae5ae_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- toml=0.10.2=pyhd8ed1ab_1
- tomli=2.2.1=pyhd8ed1ab_1
- tomlkit=0.13.2=pyha770c72_1
- tornado=6.4.2=py312h66e93f0_0
- tqdm=4.67.1=pyhd8ed1ab_1
- traitlets=5.14.3=pyhd8ed1ab_1
- typer=0.15.2=pyhff008b6_0
- typer-slim=0.15.2=pyh29332c3_0
- typer-slim-standard=0.15.2=h801b22e_0
- types-python-dateutil=2.9.0.20241206=pyhd8ed1ab_0
- types-pytz=2025.1.0.20250318=pyhd8ed1ab_0
- typing-extensions=4.13.0=h9fa5a19_1
- typing-inspection=0.4.0=pyhd8ed1ab_0
- typing_extensions=4.13.0=pyh29332c3_1
- typing_utils=0.1.0=pyhd8ed1ab_1
- tzdata=2025a=h04d1e81_0
- uc-micro-py=1.0.3=pyhd8ed1ab_1
- ukkonen=1.0.1=py312h68727a3_5
- unicodedata2=16.0.0=py312h66e93f0_0
- uri-template=1.3.0=pyhd8ed1ab_1
- uriparser=0.9.8=hac33072_0
- urllib3=2.3.0=pyhd8ed1ab_0
- vine=5.1.0=pyhd8ed1ab_1
- virtualenv=20.29.3=pyhd8ed1ab_0
- voluptuous=0.15.2=pyhd8ed1ab_2
- wayland=1.23.1=h3e06ad9_0
- wcwidth=0.2.13=pyhd8ed1ab_1
- webcolors=24.11.1=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=py312h06a4308_0
- widgetsnbextension=4.0.13=pyhd8ed1ab_1
- x265=3.5=h924138e_3
- xarray=2025.3.0=pyhd8ed1ab_0
- xerces-c=3.2.5=h988505b_2
- xkeyboard-config=2.43=hb9d3cd8_0
- xorg-libice=1.1.2=hb9d3cd8_0
- xorg-libsm=1.2.6=he73a12e_0
- xorg-libx11=1.8.12=h4f16b4b_0
- xorg-libxau=1.0.12=hb9d3cd8_0
- xorg-libxcomposite=0.4.6=hb9d3cd8_2
- xorg-libxcursor=1.2.3=hb9d3cd8_0
- xorg-libxdamage=1.1.6=hb9d3cd8_0
- xorg-libxdmcp=1.1.5=hb9d3cd8_0
- xorg-libxext=1.3.6=hb9d3cd8_0
- xorg-libxfixes=6.0.1=hb9d3cd8_0
- xorg-libxi=1.8.2=hb9d3cd8_0
- xorg-libxinerama=1.1.5=h5888daf_1
- xorg-libxrandr=1.5.4=hb9d3cd8_0
- xorg-libxrender=0.9.12=hb9d3cd8_0
- xorg-libxtst=1.2.5=hb9d3cd8_3
- xyzservices=2025.1.0=pyhd8ed1ab_0
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7f98852_2
- yarl=1.18.3=py312h178313f_1
- zc.lockfile=3.0.post1=pyhd8ed1ab_1
- zeromq=4.3.5=h3b0a872_7
- zipp=3.21.0=pyhd8ed1ab_1
- zlib=1.3.1=hb9d3cd8_2
- zstandard=0.23.0=py312h66e93f0_1
- zstd=1.5.7=hb8e6e7a_2
- pip:
- pygmt==0.12.1.dev142+g2157333f
prefix: /opt/conda/envs/pygmt
| [
"pygmt/tests/test_figure.py::test_figure_region",
"pygmt/tests/test_figure.py::test_figure_region_multiple",
"pygmt/tests/test_figure.py::test_figure_region_country_codes",
"pygmt/tests/test_figure.py::test_figure_repr",
"pygmt/tests/test_figure.py::test_figure_savefig_exists",
"pygmt/tests/test_figure.py::test_figure_savefig_geotiff",
"pygmt/tests/test_figure.py::test_figure_savefig_directory_nonexists",
"pygmt/tests/test_figure.py::test_figure_savefig_unknown_extension",
"pygmt/tests/test_figure.py::test_figure_savefig_ps_extension",
"pygmt/tests/test_figure.py::test_figure_savefig_transparent",
"pygmt/tests/test_figure.py::test_figure_savefig_filename_with_spaces",
"pygmt/tests/test_figure.py::test_figure_savefig",
"pygmt/tests/test_figure.py::test_figure_savefig_worldfile",
"pygmt/tests/test_figure.py::test_figure_show",
"pygmt/tests/test_figure.py::test_figure_show_invalid_method",
"pygmt/tests/test_figure.py::test_figure_display_external",
"pygmt/tests/test_figure.py::test_figure_set_display_invalid",
"pygmt/tests/test_figure.py::test_figure_unsupported_xshift_yshift",
"pygmt/tests/test_figure.py::TestGetDefaultDisplayMethod::test_default_display_method",
"pygmt/tests/test_figure.py::TestGetDefaultDisplayMethod::test_disable_external_display",
"pygmt/tests/test_figure.py::TestGetDefaultDisplayMethod::test_notebook_display"
] | [
"pygmt/tests/test_figure.py::test_figure_shift_origin"
] | [] | [] | BSD 3-Clause "New" or "Revised" License | 19,431 | 721 | [
"pygmt/figure.py"
] |
pymc-devs__pymc-7483 | 064822a21a1071c202689d1eceda70213658c73d | 2024-08-28 13:41:20 | b9fbfeda3dd8fdb081d538684bd2dcc81b14fb61 | codecov[bot]: ## [Codecov](https://app.codecov.io/gh/pymc-devs/pymc/pull/7483?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) Report
All modified and coverable lines are covered by tests :white_check_mark:
> Project coverage is 92.17%. Comparing base [(`064822a`)](https://app.codecov.io/gh/pymc-devs/pymc/commit/064822a21a1071c202689d1eceda70213658c73d?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) to head [(`e30493f`)](https://app.codecov.io/gh/pymc-devs/pymc/commit/e30493fe3eb3ce6943a7bf2118d61bf7e7294a59?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs).
<details><summary>Additional details and impacted files</summary>
[](https://app.codecov.io/gh/pymc-devs/pymc/pull/7483?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs)
```diff
@@ Coverage Diff @@
## main #7483 +/- ##
=======================================
Coverage 92.16% 92.17%
=======================================
Files 103 103
Lines 17214 17216 +2
=======================================
+ Hits 15866 15868 +2
Misses 1348 1348
```
| [Files](https://app.codecov.io/gh/pymc-devs/pymc/pull/7483?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs) | Coverage Δ | |
|---|---|---|
| [pymc/stats/log\_density.py](https://app.codecov.io/gh/pymc-devs/pymc/pull/7483?src=pr&el=tree&filepath=pymc%2Fstats%2Flog_density.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=pymc-devs#diff-cHltYy9zdGF0cy9sb2dfZGVuc2l0eS5weQ==) | `97.36% <100.00%> (+0.14%)` | :arrow_up: |
</details> | diff --git a/pymc/stats/log_density.py b/pymc/stats/log_density.py
index a26f8aa60..3216e26f3 100644
--- a/pymc/stats/log_density.py
+++ b/pymc/stats/log_density.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Sequence
-from typing import Literal
+from typing import Any, Literal
from arviz import InferenceData
from xarray import Dataset
@@ -36,6 +36,7 @@ def compute_log_likelihood(
model: Model | None = None,
sample_dims: Sequence[str] = ("chain", "draw"),
progressbar=True,
+ compile_kwargs: dict[str, Any] | None = None,
):
"""Compute elemwise log_likelihood of model given InferenceData with posterior group
@@ -51,6 +52,8 @@ def compute_log_likelihood(
model : Model, optional
sample_dims : sequence of str, default ("chain", "draw")
progressbar : bool, default True
+ compile_kwargs : dict[str, Any] | None
+ Extra compilation arguments to supply to :py:func:`~pymc.stats.compute_log_density`
Returns
-------
@@ -65,6 +68,7 @@ def compute_log_likelihood(
kind="likelihood",
sample_dims=sample_dims,
progressbar=progressbar,
+ compile_kwargs=compile_kwargs,
)
@@ -75,6 +79,7 @@ def compute_log_prior(
model: Model | None = None,
sample_dims: Sequence[str] = ("chain", "draw"),
progressbar=True,
+ compile_kwargs=None,
):
"""Compute elemwise log_prior of model given InferenceData with posterior group
@@ -90,6 +95,8 @@ def compute_log_prior(
model : Model, optional
sample_dims : sequence of str, default ("chain", "draw")
progressbar : bool, default True
+ compile_kwargs : dict[str, Any] | None
+ Extra compilation arguments to supply to :py:func:`~pymc.stats.compute_log_density`
Returns
-------
@@ -104,6 +111,7 @@ def compute_log_prior(
kind="prior",
sample_dims=sample_dims,
progressbar=progressbar,
+ compile_kwargs=compile_kwargs,
)
@@ -116,14 +124,42 @@ def compute_log_density(
kind: Literal["likelihood", "prior"] = "likelihood",
sample_dims: Sequence[str] = ("chain", "draw"),
progressbar=True,
+ compile_kwargs=None,
) -> InferenceData | Dataset:
"""
Compute elemwise log_likelihood or log_prior of model given InferenceData with posterior group
+
+ Parameters
+ ----------
+ idata : InferenceData
+ InferenceData with posterior group
+ var_names : sequence of str, optional
+ List of Observed variable names for which to compute log_prior.
+ Defaults to all all free variables.
+ extend_inferencedata : bool, default True
+ Whether to extend the original InferenceData or return a new one
+ model : Model, optional
+ kind: Literal["likelihood", "prior"]
+ Whether to compute the log density of the observed random variables (likelihood)
+ or to compute the log density of the latent random variables (prior). This
+ parameter determines the group that gets added to the returned `~arviz.InferenceData` object.
+ sample_dims : sequence of str, default ("chain", "draw")
+ progressbar : bool, default True
+ compile_kwargs : dict[str, Any] | None
+ Extra compilation arguments to supply to :py:func:`pymc.model.core.Model.compile_fn`
+
+ Returns
+ -------
+ idata : InferenceData
+ InferenceData with the ``log_likelihood`` group when ``kind == "likelihood"``
+ or the ``log_prior`` group when ``kind == "prior"``.
"""
posterior = idata["posterior"]
model = modelcontext(model)
+ if compile_kwargs is None:
+ compile_kwargs = {}
if kind not in ("likelihood", "prior"):
raise ValueError("kind must be either 'likelihood' or 'prior'")
@@ -150,6 +186,7 @@ def compute_log_density(
inputs=umodel.value_vars,
outs=umodel.logp(vars=vars, sum=False),
on_unused_input="ignore",
+ **compile_kwargs,
)
coords, dims = coords_and_dims_for_inferencedata(umodel)
| Add `compile_kwargs` to `pm.compute_log_likelihood`
### Description
They can get forwarded along to `umodel.compile_fn` in `compute_log_density`. `compute_log_likelihood` is a bit of an odd man out, because most functions that compile a function internally (`sample_xx_predictive`, `compute_deterministics`, etc.) admit a `compile_kwargs` argument. So `compute_log_likelihood` should too; see the usage sketch after this record. | pymc-devs/pymc | diff --git a/tests/stats/test_log_density.py b/tests/stats/test_log_density.py
index 0a8a79e07..c7b120af2 100644
--- a/tests/stats/test_log_density.py
+++ b/tests/stats/test_log_density.py
@@ -11,6 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from unittest.mock import patch
+
import numpy as np
import pytest
import scipy.stats as st
@@ -174,3 +176,17 @@ class TestComputeLogLikelihood:
res.log_prior["x"].values,
st.norm(0, 1).logpdf(idata.posterior["x"].values),
)
+
+ def test_compilation_kwargs(self):
+ with Model() as m:
+ x = Normal("x")
+ Deterministic("d", 2 * x)
+ Normal("y", x, observed=[0, 1, 2])
+
+ idata = InferenceData(posterior=dict_to_dataset({"x": np.arange(100).reshape(4, 25)}))
+ with patch("pymc.model.core.compile_pymc") as patched_compile_pymc:
+ compute_log_prior(idata, compile_kwargs={"mode": "JAX"})
+ compute_log_likelihood(idata, compile_kwargs={"mode": "NUMBA"})
+ assert len(patched_compile_pymc.call_args_list) == 2
+ assert patched_compile_pymc.call_args_list[0].kwargs["mode"] == "JAX"
+ assert patched_compile_pymc.call_args_list[1].kwargs["mode"] == "NUMBA"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 5.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.10",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arviz==0.21.0
cachetools==5.5.2
cloudpickle==3.1.1
cons==0.4.6
contourpy==1.3.1
coverage==7.8.0
cycler==0.12.1
etuples==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
fonttools==4.56.0
h5netcdf==1.6.1
h5py==3.13.0
iniconfig==2.1.0
kiwisolver==1.4.8
logical-unification==0.4.6
markdown-it-py==3.0.0
matplotlib==3.10.1
mdurl==0.1.2
miniKanren==1.0.3
multipledispatch==1.0.0
numpy==1.26.4
packaging==24.2
pandas==2.2.3
pillow==11.1.0
pluggy==1.5.0
Pygments==2.19.1
-e git+https://github.com/pymc-devs/pymc.git@064822a21a1071c202689d1eceda70213658c73d#egg=pymc
pyparsing==3.2.3
pytensor==2.25.5
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
rich==14.0.0
scipy==1.15.2
six==1.17.0
threadpoolctl==3.6.0
tomli==2.2.1
toolz==1.0.0
typing_extensions==4.13.0
tzdata==2025.2
xarray==2025.3.1
xarray-einstats==0.8.0
| name: pymc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py310h06a4308_0
- python=3.10.16=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py310h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py310h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arviz==0.21.0
- cachetools==5.5.2
- cloudpickle==3.1.1
- cons==0.4.6
- contourpy==1.3.1
- coverage==7.8.0
- cycler==0.12.1
- etuples==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- fonttools==4.56.0
- h5netcdf==1.6.1
- h5py==3.13.0
- iniconfig==2.1.0
- kiwisolver==1.4.8
- logical-unification==0.4.6
- markdown-it-py==3.0.0
- matplotlib==3.10.1
- mdurl==0.1.2
- minikanren==1.0.3
- multipledispatch==1.0.0
- numpy==1.26.4
- packaging==24.2
- pandas==2.2.3
- pillow==11.1.0
- pluggy==1.5.0
- pygments==2.19.1
- pymc==5.16.2+33.g064822a21
- pyparsing==3.2.3
- pytensor==2.25.5
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- rich==14.0.0
- scipy==1.15.2
- six==1.17.0
- threadpoolctl==3.6.0
- tomli==2.2.1
- toolz==1.0.0
- typing-extensions==4.13.0
- tzdata==2025.2
- xarray==2025.3.1
- xarray-einstats==0.8.0
prefix: /opt/conda/envs/pymc
| [
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_compilation_kwargs"
] | [] | [
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_basic[False]",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_basic[True]",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_multivariate",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_var_names",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_invalid_var_names",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_dims_without_coords",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_basic_log_prior[False]",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_basic_log_prior[True]",
"tests/stats/test_log_density.py::TestComputeLogLikelihood::test_deterministic_log_prior"
] | [] | Apache License 2.0 | 19,432 | 1,055 | [
"pymc/stats/log_density.py"
] |
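The pymc-devs__pymc-7483 record above adds a `compile_kwargs` pass-through to `pm.compute_log_likelihood`, `pm.compute_log_prior`, and `compute_log_density`. Below is a minimal, hedged usage sketch based on that record's patch and test_patch; the model, the observed data, and the `pm.sample()` call are illustrative assumptions and not part of the original record.

```python
import pymc as pm

with pm.Model() as model:
    x = pm.Normal("x")
    pm.Normal("y", x, observed=[0.0, 1.0, 2.0])  # illustrative observed data, not from the record
    idata = pm.sample()  # assumed to produce the posterior group the functions evaluate

    # compile_kwargs is forwarded to the model's compile_fn; the record's tests pass
    # exactly these keyword arguments (the modes require the matching backends installed).
    pm.compute_log_likelihood(idata, compile_kwargs={"mode": "NUMBA"})
    pm.compute_log_prior(idata, compile_kwargs={"mode": "JAX"})
```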
robotframework__robotframework-5188 | d9a285015687199742f67845af56bd1364e7f9ef | 2024-08-28 14:27:36 | d9a285015687199742f67845af56bd1364e7f9ef | diff --git a/src/robot/result/executionresult.py b/src/robot/result/executionresult.py
index b2e77eb82..168efa9b2 100644
--- a/src/robot/result/executionresult.py
+++ b/src/robot/result/executionresult.py
@@ -64,6 +64,7 @@ class Result:
#: :class:`~.executionerrors.ExecutionErrors` object.
self.errors = errors or ExecutionErrors()
self.generated_by_robot = True
+ self.generation_time = None
self._status_rc = True
self._stat_config = {}
self.rpa = rpa
diff --git a/src/robot/result/xmlelementhandlers.py b/src/robot/result/xmlelementhandlers.py
index 3c657c06d..31e57aea5 100644
--- a/src/robot/result/xmlelementhandlers.py
+++ b/src/robot/result/xmlelementhandlers.py
@@ -60,6 +60,9 @@ class ElementHandler:
def _legacy_timestamp(self, elem, attr_name):
ts = elem.get(attr_name)
+ return self._parse_legacy_timestamp(ts)
+
+ def _parse_legacy_timestamp(self, ts):
if ts == 'N/A' or not ts:
return None
ts = ts.ljust(24, '0')
@@ -85,10 +88,19 @@ class RobotHandler(ElementHandler):
def start(self, elem, result):
generator = elem.get('generator', 'unknown').split()[0].upper()
result.generated_by_robot = generator == 'ROBOT'
+ result.generation_time = self._parse_generation_time(elem.get('generated'))
if result.rpa is None:
result.rpa = elem.get('rpa', 'false') == 'true'
return result
+ def _parse_generation_time(self, generated):
+ if not generated:
+ return None
+ try:
+ return datetime.fromisoformat(generated)
+ except ValueError:
+ return self._parse_legacy_timestamp(generated)
+
@ElementHandler.register
class SuiteHandler(ElementHandler):
| Enhance result package to expose generated attribute from output.xml
The `output.xml` has `<robot generated="year-month-dayTHH:MM:SS.milliseconds" …>` in the XML. It would be handy to expose the `generated` attribute from the RF result package. We need the `generated` attribute when reading test results from multiple runs in CI and plotting the results in a time-series manner; see the usage sketch after this record. | robotframework/robotframework | diff --git a/utest/result/test_resultbuilder.py b/utest/result/test_resultbuilder.py
index e55a7edfe..f30870a64 100644
--- a/utest/result/test_resultbuilder.py
+++ b/utest/result/test_resultbuilder.py
@@ -24,6 +24,13 @@ class TestBuildingSuiteExecutionResult(unittest.TestCase):
self.suite = self.result.suite
self.test = self.suite.tests[0]
+ def test_result_has_generation_time(self):
+ assert_equal(self.result.generation_time, datetime(2023, 9, 8, 12, 1, 47, 906104))
+ result = ExecutionResult("<robot><suite/></robot>")
+ assert_equal(result.generation_time, None)
+ result = ExecutionResult("<robot generated='20111024 13:41:20.873'><suite/></robot>")
+ assert_equal(result.generation_time, datetime(2011, 10, 24, 13, 41, 20, 873000))
+
def test_suite_is_built(self):
assert_equal(self.suite.source, Path('normal.html'))
assert_equal(self.suite.name, 'Normal')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 2
} | 7.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"utest/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
docutils==0.21.2
exceptiongroup==1.2.2
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
referencing==0.36.2
-e git+https://github.com/robotframework/robotframework.git@d9a285015687199742f67845af56bd1364e7f9ef#egg=robotframework
rpds-py==0.24.0
tomli==2.2.1
typing_extensions==4.13.0
| name: robotframework
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- referencing==0.36.2
- rpds-py==0.24.0
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/robotframework
| [
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_result_has_generation_time"
] | [] | [
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_errors_are_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_for_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_if_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_keyword_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_message_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_omit_keywords",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_omit_keywords_during_xml_parsing",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_rpa",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_suite_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_suite_setup_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_testcase_is_built",
"utest/result/test_resultbuilder.py::TestBuildingSuiteExecutionResult::test_user_keyword_is_built",
"utest/result/test_resultbuilder.py::TestCombiningSuites::test_name",
"utest/result/test_resultbuilder.py::TestMergingSuites::test_message",
"utest/result/test_resultbuilder.py::TestMergingSuites::test_name",
"utest/result/test_resultbuilder.py::TestElements::test_nested_suites",
"utest/result/test_resultbuilder.py::TestElements::test_suite_message",
"utest/result/test_resultbuilder.py::TestElements::test_test_message",
"utest/result/test_resultbuilder.py::TestElements::test_unknown_elements_cause_an_error",
"utest/result/test_resultbuilder.py::TestSuiteTeardownFailed::test_already_processed",
"utest/result/test_resultbuilder.py::TestSuiteTeardownFailed::test_excluding_keywords",
"utest/result/test_resultbuilder.py::TestSuiteTeardownFailed::test_excluding_keywords_and_already_processed",
"utest/result/test_resultbuilder.py::TestSuiteTeardownFailed::test_failed_test",
"utest/result/test_resultbuilder.py::TestSuiteTeardownFailed::test_passed_test",
"utest/result/test_resultbuilder.py::TestBuildingFromXmlStringAndHandlingMissingInformation::test_suite_from_byte_string",
"utest/result/test_resultbuilder.py::TestBuildingFromXmlStringAndHandlingMissingInformation::test_suite_from_string",
"utest/result/test_resultbuilder.py::TestBuildingFromXmlStringAndHandlingMissingInformation::test_test_from_byte_string",
"utest/result/test_resultbuilder.py::TestBuildingFromXmlStringAndHandlingMissingInformation::test_test_from_string",
"utest/result/test_resultbuilder.py::TestUsingPathlibPath::test_save",
"utest/result/test_resultbuilder.py::TestUsingPathlibPath::test_suite_is_built",
"utest/result/test_resultbuilder.py::TestUsingPathlibPath::test_test_is_built",
"utest/result/test_resultbuilder.py::TestJsonResult::test_json_bytes",
"utest/result/test_resultbuilder.py::TestJsonResult::test_json_file",
"utest/result/test_resultbuilder.py::TestJsonResult::test_json_path",
"utest/result/test_resultbuilder.py::TestJsonResult::test_json_string"
] | [] | Apache License 2.0 | 19,433 | 483 | [
"src/robot/result/executionresult.py",
"src/robot/result/xmlelementhandlers.py"
] |
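The robotframework__robotframework-5188 record above exposes the `generated` attribute of output.xml as `Result.generation_time`. A small, hedged sketch of the intended use follows, based on that record's patch and tests; the `output.xml` path is an assumed placeholder.

```python
from robot.api import ExecutionResult

result = ExecutionResult("output.xml")  # assumed path to a previously generated output.xml
# generation_time is parsed from the <robot generated="..."> attribute; it is a
# datetime, or None when the attribute is missing, per the record's test_patch.
print(result.generation_time)
```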
|
tobymao__sqlglot-3994 | 905b7226ae4a6dc505fe303bb4df3818cb586826 | 2024-08-28 15:31:39 | 905b7226ae4a6dc505fe303bb4df3818cb586826 | diff --git a/sqlglot/dialects/clickhouse.py b/sqlglot/dialects/clickhouse.py
index 22752810..a0a70126 100644
--- a/sqlglot/dialects/clickhouse.py
+++ b/sqlglot/dialects/clickhouse.py
@@ -892,6 +892,7 @@ class ClickHouse(Dialect):
# There's no list in docs, but it can be found in Clickhouse code
# see `ClickHouse/src/Parsers/ParserCreate*.cpp`
ON_CLUSTER_TARGETS = {
+ "SCHEMA", # Transpiled CREATE SCHEMA may have OnCluster property set
"DATABASE",
"TABLE",
"VIEW",
diff --git a/sqlglot/generator.py b/sqlglot/generator.py
index 04818f52..e5dd4b8c 100644
--- a/sqlglot/generator.py
+++ b/sqlglot/generator.py
@@ -2447,6 +2447,13 @@ class Generator(metaclass=_Generator):
def subquery_sql(self, expression: exp.Subquery, sep: str = " AS ") -> str:
alias = self.sql(expression, "alias")
alias = f"{sep}{alias}" if alias else ""
+ sample = self.sql(expression, "sample")
+ if self.dialect.ALIAS_POST_TABLESAMPLE and sample:
+ alias = f"{sample}{alias}"
+
+ # Set to None so it's not generated again by self.query_modifiers()
+ expression.set("sample", None)
+
pivots = self.expressions(expression, key="pivots", sep="", flat=True)
sql = self.query_modifiers(expression, self.wrap(expression), alias, pivots)
return self.prepend_ctes(expression, sql)
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index f50e5ce2..3584ab94 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -2986,6 +2986,7 @@ class Parser(metaclass=_Parser):
this=this,
pivots=self._parse_pivots(),
alias=self._parse_table_alias() if parse_alias else None,
+ sample=self._parse_table_sample(),
)
def _implicit_unnests_to_explicit(self, this: E) -> E:
| tablesample with alias is incorrect when going from duckdb to pyspark
```
In [17]: import sqlglot.expressions as sge, sqlglot as sg
In [18]: sql = "select t1.* from (select * from t) t1 tablesample (30 rows)"
In [19]: sg.parse_one(sql, read="duckdb").sql("spark")
Out[19]: 'SELECT t1.* FROM (SELECT * FROM t) AS t1 TABLESAMPLE (30 ROWS)'
```
The `t1` alias must be placed after the `TABLESAMPLE` or else PySpark throws a parse error. This appears to be a PySpark-only thing. | tobymao/sqlglot | diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index 51d840f7..b4ba09e0 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -622,6 +622,14 @@ class TestClickhouse(Validator):
)
self.assertEqual(create_with_cluster.sql("clickhouse"), "CREATE DATABASE foo ON CLUSTER c")
+ # Transpiled CREATE SCHEMA may have OnCluster property set
+ create_with_cluster = exp.Create(
+ this=db_table_expr,
+ kind="SCHEMA",
+ properties=exp.Properties(expressions=[exp.OnCluster(this=exp.to_identifier("c"))]),
+ )
+ self.assertEqual(create_with_cluster.sql("clickhouse"), "CREATE DATABASE foo ON CLUSTER c")
+
ctas_with_comment = exp.Create(
this=exp.table_("foo"),
kind="TABLE",
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 85f50ae8..5d2d044f 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -1008,6 +1008,13 @@ class TestDuckDB(Validator):
"duckdb": "SELECT * FROM example TABLESAMPLE RESERVOIR (3 ROWS) REPEATABLE (82)",
},
)
+ self.validate_all(
+ "SELECT * FROM (SELECT * FROM t) AS t1 TABLESAMPLE (1 ROWS), (SELECT * FROM t) AS t2 TABLESAMPLE (2 ROWS)",
+ write={
+ "duckdb": "SELECT * FROM (SELECT * FROM t) AS t1 TABLESAMPLE RESERVOIR (1 ROWS), (SELECT * FROM t) AS t2 TABLESAMPLE RESERVOIR (2 ROWS)",
+ "spark": "SELECT * FROM (SELECT * FROM t) TABLESAMPLE (1 ROWS) AS t1, (SELECT * FROM t) TABLESAMPLE (2 ROWS) AS t2",
+ },
+ )
def test_array(self):
self.validate_identity("ARRAY(SELECT id FROM t)")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 3
} | 25.17 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@905b7226ae4a6dc505fe303bb4df3818cb586826#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ddl",
"tests/dialects/test_duckdb.py::TestDuckDB::test_sample"
] | [] | [
"tests/dialects/test_clickhouse.py::TestClickhouse::test_agg_functions",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_clickhouse",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_clickhouse_values",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_convert",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_cte",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_datetime_funcs",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_drop_on_cluster",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_parameterization",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_signed_and_unsigned_types",
"tests/dialects/test_clickhouse.py::TestClickhouse::test_ternary",
"tests/dialects/test_duckdb.py::TestDuckDB::test_array",
"tests/dialects/test_duckdb.py::TestDuckDB::test_array_index",
"tests/dialects/test_duckdb.py::TestDuckDB::test_cast",
"tests/dialects/test_duckdb.py::TestDuckDB::test_duckdb",
"tests/dialects/test_duckdb.py::TestDuckDB::test_encode_decode",
"tests/dialects/test_duckdb.py::TestDuckDB::test_ignore_nulls",
"tests/dialects/test_duckdb.py::TestDuckDB::test_isinf",
"tests/dialects/test_duckdb.py::TestDuckDB::test_isnan",
"tests/dialects/test_duckdb.py::TestDuckDB::test_parameter_token",
"tests/dialects/test_duckdb.py::TestDuckDB::test_rename_table",
"tests/dialects/test_duckdb.py::TestDuckDB::test_time",
"tests/dialects/test_duckdb.py::TestDuckDB::test_timestamps_with_units"
] | [] | MIT License | 19,435 | 558 | [
"sqlglot/dialects/clickhouse.py",
"sqlglot/generator.py",
"sqlglot/parser.py"
] |
|
tobymao__sqlglot-4007 | 4b7ca2be353e7432b84384ff9cfd43f3c43438e0 | 2024-08-29 13:39:06 | a1b980327ff94519a4cba1e0e48066c0ea51d359 | diff --git a/sqlglot/generator.py b/sqlglot/generator.py
index e5dd4b8c..81a3a95e 100644
--- a/sqlglot/generator.py
+++ b/sqlglot/generator.py
@@ -1297,7 +1297,9 @@ class Generator(metaclass=_Generator):
return self.set_operations(expression)
def except_op(self, expression: exp.Except) -> str:
- return f"EXCEPT{'' if expression.args.get('distinct') else ' ALL'}"
+ kind = " DISTINCT" if self.EXPLICIT_SET_OP else ""
+ kind = kind if expression.args.get("distinct") else " ALL"
+ return f"EXCEPT{kind}"
def fetch_sql(self, expression: exp.Fetch) -> str:
direction = expression.args.get("direction")
@@ -1679,7 +1681,9 @@ class Generator(metaclass=_Generator):
return self.set_operations(expression)
def intersect_op(self, expression: exp.Intersect) -> str:
- return f"INTERSECT{'' if expression.args.get('distinct') else ' ALL'}"
+ kind = " DISTINCT" if self.EXPLICIT_SET_OP else ""
+ kind = kind if expression.args.get("distinct") else " ALL"
+ return f"INTERSECT{kind}"
def introducer_sql(self, expression: exp.Introducer) -> str:
return f"{self.sql(expression, 'this')} {self.sql(expression, 'expression')}"
| missing `DISTINCT` in `EXCEPT` statement when producing ClickHouse SQL
```
In [7]: import sqlglot.expressions as sge, sqlglot as sg
In [8]: sg.__version__
Out[8]: '25.17.0'
In [9]: sg.parse_one('select a from t except distinct select a from t', read='duckdb').sql('clickhouse')
Out[9]: 'SELECT a FROM t EXCEPT SELECT a FROM t'
```
ClickHouse docs: https://clickhouse.com/docs/en/sql-reference/statements/select/except#except-distinct | tobymao/sqlglot | diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 8f716367..01cdd52b 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1488,12 +1488,14 @@ class TestDialect(Validator):
"SELECT * FROM a INTERSECT SELECT * FROM b",
read={
"bigquery": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a INTERSECT SELECT * FROM b",
"presto": "SELECT * FROM a INTERSECT SELECT * FROM b",
"spark": "SELECT * FROM a INTERSECT SELECT * FROM b",
},
write={
"bigquery": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a INTERSECT SELECT * FROM b",
"presto": "SELECT * FROM a INTERSECT SELECT * FROM b",
"spark": "SELECT * FROM a INTERSECT SELECT * FROM b",
@@ -1503,12 +1505,14 @@ class TestDialect(Validator):
"SELECT * FROM a EXCEPT SELECT * FROM b",
read={
"bigquery": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a EXCEPT SELECT * FROM b",
"presto": "SELECT * FROM a EXCEPT SELECT * FROM b",
"spark": "SELECT * FROM a EXCEPT SELECT * FROM b",
},
write={
"bigquery": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a EXCEPT SELECT * FROM b",
"presto": "SELECT * FROM a EXCEPT SELECT * FROM b",
"spark": "SELECT * FROM a EXCEPT SELECT * FROM b",
@@ -1527,6 +1531,7 @@ class TestDialect(Validator):
"SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
write={
"bigquery": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a INTERSECT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a INTERSECT SELECT * FROM b",
"presto": "SELECT * FROM a INTERSECT SELECT * FROM b",
"spark": "SELECT * FROM a INTERSECT SELECT * FROM b",
@@ -1536,6 +1541,7 @@ class TestDialect(Validator):
"SELECT * FROM a INTERSECT ALL SELECT * FROM b",
write={
"bigquery": "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
"duckdb": "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
"presto": "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
"spark": "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
@@ -1545,6 +1551,7 @@ class TestDialect(Validator):
"SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
write={
"bigquery": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a EXCEPT DISTINCT SELECT * FROM b",
"duckdb": "SELECT * FROM a EXCEPT SELECT * FROM b",
"presto": "SELECT * FROM a EXCEPT SELECT * FROM b",
"spark": "SELECT * FROM a EXCEPT SELECT * FROM b",
@@ -1554,6 +1561,7 @@ class TestDialect(Validator):
"SELECT * FROM a EXCEPT ALL SELECT * FROM b",
read={
"bigquery": "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
+ "clickhouse": "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
"duckdb": "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
"presto": "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
"spark": "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 25.18 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
identify==2.6.9
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pre_commit==4.2.0
Pygments==2.19.1
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@4b7ca2be353e7432b84384ff9cfd43f3c43438e0#egg=sqlglot
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- distlib==0.3.9
- duckdb==1.2.1
- filelock==3.18.0
- identify==2.6.9
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pre-commit==4.2.0
- pygments==2.19.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_dialect.py::TestDialect::test_set_operators"
] | [] | [
"tests/dialects/test_dialect.py::TestDialect::test_alias",
"tests/dialects/test_dialect.py::TestDialect::test_array",
"tests/dialects/test_dialect.py::TestDialect::test_array_any",
"tests/dialects/test_dialect.py::TestDialect::test_cast",
"tests/dialects/test_dialect.py::TestDialect::test_cast_to_user_defined_type",
"tests/dialects/test_dialect.py::TestDialect::test_compare_dialects",
"tests/dialects/test_dialect.py::TestDialect::test_count_if",
"tests/dialects/test_dialect.py::TestDialect::test_create_sequence",
"tests/dialects/test_dialect.py::TestDialect::test_cross_join",
"tests/dialects/test_dialect.py::TestDialect::test_ddl",
"tests/dialects/test_dialect.py::TestDialect::test_decode",
"tests/dialects/test_dialect.py::TestDialect::test_enum",
"tests/dialects/test_dialect.py::TestDialect::test_generate_date_array",
"tests/dialects/test_dialect.py::TestDialect::test_get_or_raise",
"tests/dialects/test_dialect.py::TestDialect::test_hash_comments",
"tests/dialects/test_dialect.py::TestDialect::test_heredoc_strings",
"tests/dialects/test_dialect.py::TestDialect::test_if_null",
"tests/dialects/test_dialect.py::TestDialect::test_json",
"tests/dialects/test_dialect.py::TestDialect::test_lateral_subquery",
"tests/dialects/test_dialect.py::TestDialect::test_limit",
"tests/dialects/test_dialect.py::TestDialect::test_logarithm",
"tests/dialects/test_dialect.py::TestDialect::test_merge",
"tests/dialects/test_dialect.py::TestDialect::test_nested_ctes",
"tests/dialects/test_dialect.py::TestDialect::test_nullsafe_eq",
"tests/dialects/test_dialect.py::TestDialect::test_nullsafe_neq",
"tests/dialects/test_dialect.py::TestDialect::test_nvl2",
"tests/dialects/test_dialect.py::TestDialect::test_operators",
"tests/dialects/test_dialect.py::TestDialect::test_order_by",
"tests/dialects/test_dialect.py::TestDialect::test_qualify",
"tests/dialects/test_dialect.py::TestDialect::test_random",
"tests/dialects/test_dialect.py::TestDialect::test_reserved_keywords",
"tests/dialects/test_dialect.py::TestDialect::test_safediv",
"tests/dialects/test_dialect.py::TestDialect::test_string_functions",
"tests/dialects/test_dialect.py::TestDialect::test_substring",
"tests/dialects/test_dialect.py::TestDialect::test_time",
"tests/dialects/test_dialect.py::TestDialect::test_transactions",
"tests/dialects/test_dialect.py::TestDialect::test_truncate",
"tests/dialects/test_dialect.py::TestDialect::test_typeddiv",
"tests/dialects/test_dialect.py::TestDialect::test_unsupported_null_ordering"
] | [] | MIT License | 19,450 | 343 | [
"sqlglot/generator.py"
] |
|
UKPLab__sentence-transformers-2918 | ef360c82f447fa31b6ff1f58c526151f772ff984 | 2024-08-30 08:42:37 | ef360c82f447fa31b6ff1f58c526151f772ff984 | diff --git a/sentence_transformers/SentenceTransformer.py b/sentence_transformers/SentenceTransformer.py
index 2bb55df..cb1a952 100644
--- a/sentence_transformers/SentenceTransformer.py
+++ b/sentence_transformers/SentenceTransformer.py
@@ -167,6 +167,7 @@ class SentenceTransformer(nn.Sequential, FitMixin):
self.prompts = prompts or {}
self.default_prompt_name = default_prompt_name
self.similarity_fn_name = similarity_fn_name
+ self.trust_remote_code = trust_remote_code
self.truncate_dim = truncate_dim
self.model_card_data = model_card_data or SentenceTransformerModelCardData()
self._model_card_vars = {}
diff --git a/sentence_transformers/losses/SoftmaxLoss.py b/sentence_transformers/losses/SoftmaxLoss.py
index 887356e..48a30c4 100644
--- a/sentence_transformers/losses/SoftmaxLoss.py
+++ b/sentence_transformers/losses/SoftmaxLoss.py
@@ -4,6 +4,8 @@ import logging
from typing import Callable, Iterable
import torch
+import transformers
+from packaging import version
from torch import Tensor, nn
from sentence_transformers.SentenceTransformer import SentenceTransformer
@@ -103,6 +105,13 @@ class SoftmaxLoss(nn.Module):
)
self.loss_fct = loss_fct
+ if version.parse(transformers.__version__) < version.parse("4.43.0"):
+ logger.warning(
+ "SoftmaxLoss requires transformers >= 4.43.0 to work correctly. "
+ "Otherwise, the classifier layer that maps embeddings to the labels cannot be updated. "
+ "Consider updating transformers with `pip install transformers>=4.43.0`."
+ )
+
def forward(
self, sentence_features: Iterable[dict[str, Tensor]], labels: Tensor
) -> Tensor | tuple[Tensor, Tensor]:
diff --git a/sentence_transformers/sampler.py b/sentence_transformers/sampler.py
index ad1a1d1..350f800 100644
--- a/sentence_transformers/sampler.py
+++ b/sentence_transformers/sampler.py
@@ -246,7 +246,10 @@ class ProportionalBatchSampler(SetEpochMixin, BatchSampler):
batch_samplers = [iter(sampler) for sampler in self.batch_samplers]
for dataset_idx in dataset_idx_sampler:
sample_offset = sample_offsets[dataset_idx]
- yield [idx + sample_offset for idx in next(batch_samplers[dataset_idx])]
+ try:
+ yield [idx + sample_offset for idx in next(batch_samplers[dataset_idx])]
+ except StopIteration:
+ continue
def __len__(self) -> int:
return sum([len(sampler) for sampler in self.batch_samplers])
diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index 7677589..8cb3da3 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import logging
import os
import warnings
+from collections import OrderedDict
from contextlib import nullcontext
from typing import TYPE_CHECKING, Any, Callable
@@ -14,7 +15,6 @@ from transformers.data.data_collator import DataCollator
from transformers.integrations import WandbCallback
from transformers.trainer import TRAINING_ARGS_NAME
from transformers.trainer_utils import EvalLoopOutput
-from transformers.training_args import ParallelMode
from sentence_transformers.data_collator import SentenceTransformerDataCollator
from sentence_transformers.evaluation import SentenceEvaluator, SequentialEvaluator
@@ -317,12 +317,13 @@ class SentenceTransformerTrainer(Trainer):
if isinstance(loss_fn, dict) and dataset_name:
loss_fn = loss_fn[dataset_name]
- # Hackishly insert the distributed model into the loss function, if the loss stores the model
- # Only called once per process
+ # Insert the wrapped (e.g. distributed or compiled) model into the loss function,
+ # if the loss stores the model. Only called once per process
if (
- self.args.parallel_mode != ParallelMode.NOT_PARALLEL
- and hasattr(model, "module")
- and hasattr(loss_fn, "model")
+ model == self.model_wrapped
+ and model != self.model # Only if the model is wrapped
+ and hasattr(loss_fn, "model") # Only if the loss stores the model
+ and loss_fn.model != model # Only if the wrapped model is not already stored
):
loss_fn = self.override_model_in_loss(loss_fn, model)
loss = loss_fn(features, labels)
@@ -734,7 +735,7 @@ class SentenceTransformerTrainer(Trainer):
def _load_from_checkpoint(self, checkpoint_path: str) -> None:
from sentence_transformers import SentenceTransformer
- loaded_model = SentenceTransformer(checkpoint_path)
+ loaded_model = SentenceTransformer(checkpoint_path, trust_remote_code=self.model.trust_remote_code)
self.model.load_state_dict(loaded_model.state_dict())
def create_model_card(
@@ -761,3 +762,41 @@ class SentenceTransformerTrainer(Trainer):
self.model.model_card_data.add_tags(tags)
self.model._create_model_card(self.args.output_dir, model_name=model_name)
+
+ def get_optimizer_cls_and_kwargs(
+ self, args: SentenceTransformerTrainingArguments, model: SentenceTransformer | None = None
+ ) -> tuple[Any, Any]:
+ """
+ We have to override the optimizer_grouped_parameters because the Trainer superclass bases it on the `model`
+ itself, but the SentenceTransformer losses can have weights that should be updated as well, e.g.
+ SoftmaxLoss (see #2872).
+
+ This method requires `transformers` >= 4.43.0.
+ """
+
+ if isinstance(self.loss, dict):
+ loss_model = nn.Sequential(OrderedDict(self.loss))
+ else:
+ loss_model = self.loss
+ optimizer_cls, optimizer_kwargs = super().get_optimizer_cls_and_kwargs(args, loss_model)
+
+ # If the kwargs were not overridden by the super() call, then we should override them here so that the potential
+ # weights in the loss(es) can also be updated.
+ if not {"params", "model", "optimizer_dict"} & set(optimizer_kwargs.keys()):
+ decay_parameters = self.get_decay_parameter_names(loss_model)
+ optimizer_kwargs["optimizer_dict"] = [
+ {
+ "params": [
+ p for n, p in loss_model.named_parameters() if (n in decay_parameters and p.requires_grad)
+ ],
+ "weight_decay": self.args.weight_decay,
+ },
+ {
+ "params": [
+ p for n, p in loss_model.named_parameters() if (n not in decay_parameters and p.requires_grad)
+ ],
+ "weight_decay": 0.0,
+ },
+ ]
+
+ return optimizer_cls, optimizer_kwargs
| Cannot restart training of model that necessitates `trust_remote_code=True` flag
## What is the issue
If your training is interrupted and you want to restart it with the `resume_from_checkpoint` flag, the `Trainer` tries to reload the model via the `_load_from_checkpoint` method. However, this method does not accept any keyword arguments. In the case where you are finetuning a model that requires the `trust_remote_code=True` flag, we are currently unable to reload the model to restart training.
https://github.com/UKPLab/sentence-transformers/blob/master/sentence_transformers/trainer.py#L734-L738
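A minimal way to see the gap described above (an illustrative sketch, not from the original report; it only inspects the method signature and assumes sentence-transformers is installed):

```python
import inspect

from sentence_transformers import SentenceTransformerTrainer

# Resuming with resume_from_checkpoint goes through this method; its signature only
# accepts the checkpoint path, so there is no way to forward trust_remote_code to the
# SentenceTransformer(...) call it makes internally.
print(inspect.signature(SentenceTransformerTrainer._load_from_checkpoint))
# e.g. (self, checkpoint_path: str) -> None
```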
| UKPLab/sentence-transformers | diff --git a/tests/samplers/test_no_duplicates_batch_sampler.py b/tests/samplers/test_no_duplicates_batch_sampler.py
index ad93ff0..4fb62d5 100644
--- a/tests/samplers/test_no_duplicates_batch_sampler.py
+++ b/tests/samplers/test_no_duplicates_batch_sampler.py
@@ -3,13 +3,15 @@ from __future__ import annotations
import random
import pytest
+import torch
from datasets import Dataset
+from torch.utils.data import ConcatDataset
-from sentence_transformers.sampler import NoDuplicatesBatchSampler
+from sentence_transformers.sampler import NoDuplicatesBatchSampler, ProportionalBatchSampler
@pytest.fixture
-def dummy_dataset():
+def dummy_dataset() -> Dataset:
"""
Dummy dataset for testing purposes. The dataset looks as follows:
{
@@ -25,7 +27,22 @@ def dummy_dataset():
return Dataset.from_dict(data)
-def test_group_by_label_batch_sampler_label_a(dummy_dataset):
[email protected]
+def dummy_duplicates_dataset() -> Dataset:
+ """
+ Dummy dataset for testing purposes. The dataset looks as follows:
+ {
+ "anchor": ["anchor_1", "anchor_1", "anchor_1", ... "anchor_2", "anchor_2"],
+ "positive": ["positive_1", "positive_1", "positive_1", ... "positive_2", "positive_2"],
+ }
+ """
+ values = [{"anchor": "anchor_1", "positive": "positive_1"}] * 10 + [
+ {"anchor": "anchor_2", "positive": "positive_2"}
+ ] * 8
+ return Dataset.from_list(values)
+
+
+def test_group_by_label_batch_sampler_label_a(dummy_dataset: Dataset) -> None:
batch_size = 10
sampler = NoDuplicatesBatchSampler(
@@ -41,3 +58,36 @@ def test_group_by_label_batch_sampler_label_a(dummy_dataset):
for batch in batches:
batch_values = [dummy_dataset[i]["data"] for i in batch]
assert len(batch_values) == len(set(batch_values)), f"Batch {batch} contains duplicate values: {batch_values}"
+
+
[email protected]("drop_last", [True, False])
+def test_proportional_no_duplicates(dummy_duplicates_dataset: Dataset, drop_last: bool) -> None:
+ batch_size = 2
+ sampler_1 = NoDuplicatesBatchSampler(
+ dataset=dummy_duplicates_dataset, batch_size=batch_size, drop_last=drop_last, valid_label_columns=["anchor"]
+ )
+ sampler_2 = NoDuplicatesBatchSampler(
+ dataset=dummy_duplicates_dataset, batch_size=batch_size, drop_last=drop_last, valid_label_columns=["positive"]
+ )
+
+ concat_dataset = ConcatDataset([dummy_duplicates_dataset, dummy_duplicates_dataset])
+
+ batch_sampler = ProportionalBatchSampler(
+ concat_dataset, [sampler_1, sampler_2], generator=torch.Generator(), seed=12
+ )
+ batches = list(iter(batch_sampler))
+
+ if drop_last:
+ # If we drop the last batch (i.e. incomplete batches), we should have 16 batches out of the 18 possible,
+ # because of the duplicates being skipped by the NoDuplicatesBatchSampler.
+ # Notably, we should not crash like reported in #2816.
+ assert len(batches) == 16
+ # All batches are the same size: 2
+ assert all(len(batch) == batch_size for batch in batches)
+ assert len(sum(batches, [])) == 32
+ else:
+ # If we don't drop incomplete batches, we should be able to do 18 batches, and get more data.
+ # Note: we don't get all data, because the NoDuplicatesBatchSampler will estimate the number of batches
+ # and it would require more (non-complete) batches to get all data.
+ assert len(batches) == 18
+ assert len(sum(batches, [])) == 34
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | 3.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | accelerate==1.5.2
aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
async-timeout==5.0.1
attrs==25.3.0
certifi==2025.1.31
cfgv==3.4.0
charset-normalizer==3.4.1
coverage==7.8.0
datasets==3.5.0
dill==0.3.8
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
frozenlist==1.5.0
fsspec==2024.12.0
huggingface-hub==0.30.0
identify==2.6.9
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
joblib==1.4.2
MarkupSafe==3.0.2
mpmath==1.3.0
multidict==6.2.0
multiprocess==0.70.16
networkx==3.2.1
nodeenv==1.9.1
numpy==1.26.4
nvidia-cublas-cu12==12.4.5.8
nvidia-cuda-cupti-cu12==12.4.127
nvidia-cuda-nvrtc-cu12==12.4.127
nvidia-cuda-runtime-cu12==12.4.127
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.2.1.3
nvidia-curand-cu12==10.3.5.147
nvidia-cusolver-cu12==11.6.1.9
nvidia-cusparse-cu12==12.3.1.170
nvidia-cusparselt-cu12==0.6.2
nvidia-nccl-cu12==2.21.5
nvidia-nvjitlink-cu12==12.4.127
nvidia-nvtx-cu12==12.4.127
packaging==24.2
pandas==2.2.3
pillow==11.1.0
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
propcache==0.3.1
psutil==7.0.0
pyarrow==19.0.1
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
safetensors==0.5.3
scikit-learn==1.6.1
scipy==1.13.1
-e git+https://github.com/UKPLab/sentence-transformers.git@ef360c82f447fa31b6ff1f58c526151f772ff984#egg=sentence_transformers
six==1.17.0
sympy==1.13.1
threadpoolctl==3.6.0
tokenizers==0.21.1
tomli==2.2.1
torch==2.6.0
tqdm==4.67.1
transformers==4.50.3
triton==3.2.0
typing_extensions==4.13.0
tzdata==2025.2
urllib3==2.3.0
virtualenv==20.29.3
xxhash==3.5.0
yarl==1.18.3
| name: sentence-transformers
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- accelerate==1.5.2
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- async-timeout==5.0.1
- attrs==25.3.0
- certifi==2025.1.31
- cfgv==3.4.0
- charset-normalizer==3.4.1
- coverage==7.8.0
- datasets==3.5.0
- dill==0.3.8
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- frozenlist==1.5.0
- fsspec==2024.12.0
- huggingface-hub==0.30.0
- identify==2.6.9
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- joblib==1.4.2
- markupsafe==3.0.2
- mpmath==1.3.0
- multidict==6.2.0
- multiprocess==0.70.16
- networkx==3.2.1
- nodeenv==1.9.1
- numpy==1.26.4
- nvidia-cublas-cu12==12.4.5.8
- nvidia-cuda-cupti-cu12==12.4.127
- nvidia-cuda-nvrtc-cu12==12.4.127
- nvidia-cuda-runtime-cu12==12.4.127
- nvidia-cudnn-cu12==9.1.0.70
- nvidia-cufft-cu12==11.2.1.3
- nvidia-curand-cu12==10.3.5.147
- nvidia-cusolver-cu12==11.6.1.9
- nvidia-cusparse-cu12==12.3.1.170
- nvidia-cusparselt-cu12==0.6.2
- nvidia-nccl-cu12==2.21.5
- nvidia-nvjitlink-cu12==12.4.127
- nvidia-nvtx-cu12==12.4.127
- packaging==24.2
- pandas==2.2.3
- pillow==11.1.0
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- propcache==0.3.1
- psutil==7.0.0
- pyarrow==19.0.1
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- regex==2024.11.6
- requests==2.32.3
- safetensors==0.5.3
- scikit-learn==1.6.1
- scipy==1.13.1
- sentence-transformers==3.1.0.dev0
- six==1.17.0
- sympy==1.13.1
- threadpoolctl==3.6.0
- tokenizers==0.21.1
- tomli==2.2.1
- torch==2.6.0
- tqdm==4.67.1
- transformers==4.50.3
- triton==3.2.0
- typing-extensions==4.13.0
- tzdata==2025.2
- urllib3==2.3.0
- virtualenv==20.29.3
- xxhash==3.5.0
- yarl==1.18.3
prefix: /opt/conda/envs/sentence-transformers
| [
"tests/samplers/test_no_duplicates_batch_sampler.py::test_proportional_no_duplicates[True]"
] | [] | [
"tests/samplers/test_no_duplicates_batch_sampler.py::test_group_by_label_batch_sampler_label_a",
"tests/samplers/test_no_duplicates_batch_sampler.py::test_proportional_no_duplicates[False]"
] | [] | Apache License 2.0 | 19,457 | 1,631 | [
"sentence_transformers/SentenceTransformer.py",
"sentence_transformers/losses/SoftmaxLoss.py",
"sentence_transformers/sampler.py",
"sentence_transformers/trainer.py"
] |
|
tobymao__sqlglot-4014 | 6494776a45ae4975cee21f70b5f383d29530d155 | 2024-08-30 09:18:27 | a1b980327ff94519a4cba1e0e48066c0ea51d359 | diff --git a/sqlglot/dialects/mysql.py b/sqlglot/dialects/mysql.py
index d7a2d926..5dde49ae 100644
--- a/sqlglot/dialects/mysql.py
+++ b/sqlglot/dialects/mysql.py
@@ -173,7 +173,7 @@ class MySQL(Dialect):
"%k": "%-H",
"%l": "%-I",
"%T": "%H:%M:%S",
- "%W": "%a",
+ "%W": "%A",
}
class Tokenizer(tokens.Tokenizer):
diff --git a/sqlglot/dialects/presto.py b/sqlglot/dialects/presto.py
index ed565503..9d4e8962 100644
--- a/sqlglot/dialects/presto.py
+++ b/sqlglot/dialects/presto.py
@@ -240,10 +240,7 @@ class Presto(Dialect):
TABLESAMPLE_SIZE_IS_PERCENT = True
LOG_BASE_FIRST: t.Optional[bool] = None
- TIME_MAPPING = {
- **MySQL.TIME_MAPPING,
- "%W": "%A",
- }
+ TIME_MAPPING = MySQL.TIME_MAPPING
# https://github.com/trinodb/trino/issues/17
# https://github.com/trinodb/trino/issues/12289
| Mysql dialect's TIME_MAPPING incorrectly maps %W
**Before you file an issue**
`%a` in the query is incorrectly converted to `%W`.
E.g. `select date_format(timestamp('2024-08-22 14:53:12'), '%a')`
is converted to:
`select date_format(timestamp('2024-08-22 14:53:12'), '%W')`
mysql> select date_format(timestamp('2024-08-22 14:53:12'), '%W');
+-----------------------------------------------------+
| date_format(timestamp('2024-08-22 14:53:12'), '%W') |
+-----------------------------------------------------+
| Thursday |
+-----------------------------------------------------+
1 row in set (0.02 sec)
mysql> select date_format(timestamp('2024-08-22 14:53:12'), '%a');
+-----------------------------------------------------+
| date_format(timestamp('2024-08-22 14:53:12'), '%a') |
+-----------------------------------------------------+
| Thu |
+-----------------------------------------------------+
1 row in set (0.00 sec)
**Official Documentation**
https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_date-format
https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior
According to the Python `strftime` format documentation:
- `%A`: weekday as locale's full name (Sunday, Monday, ...)
- `%a`: weekday as locale's abbreviated name (Sun .. Sat)
And in MySQL:
- `%W`: weekday name (Sunday..Saturday)
- `%a`: abbreviated weekday name (Sun .. Sat)
So it is expected that the `date_format` output in Presto or MySQL should map `%W` to `%A`, as follows:
`mysql.py`
```
time_mapping = {
"%M": "%B",
"%c": "%-m",
"%e": "%-d",
"%h": "%I",
"%i": "%M",
"%s": "%S",
"%S": "%S",
"%u": "%W",
"%k": "%-H",
"%l": "%-I",
"%T": "%H:%M:%S",
- "%W": "%a",
+ "%W": "%A",
}
```
I observed that several other mappings worked well. With this change applied, I noticed that MySQL and Snowflake were affected in the tests. It may also affect more dialects, such as Drill, Hive, Oracle, and Teradata, which likewise use `%a` to represent the weekday's full-name pattern.
Why is this the case?
In addition, there is a PR (#3855) for the Presto dialect that does this conversion. Is this a workaround?
@VaggelisD PTAL
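For reference, a minimal sketch of how the mapping can be checked end-to-end with `sqlglot.transpile` (not part of the original report; the expected output is taken from the updated test expectations in this change, so it assumes a sqlglot version that includes the fix):

```python
import sqlglot

# Transpile a MySQL DATE_FORMAT call that uses %a (abbreviated weekday name) to Snowflake.
sql = "SELECT DATE_FORMAT('2024-08-22 14:53:12', '%a')"
print(sqlglot.transpile(sql, read="mysql", write="snowflake")[0])
# Expected (per the updated tests):
# SELECT TO_CHAR(CAST('2024-08-22 14:53:12' AS TIMESTAMP), 'DY')
```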
| tobymao/sqlglot | diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 45b79bf6..3ef0b0e2 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -539,9 +539,16 @@ class TestMySQL(Validator):
},
)
self.validate_all(
- "SELECT DATE_FORMAT('2009-10-04 22:23:00', '%W %M %Y')",
+ "SELECT DATE_FORMAT('2024-08-22 14:53:12', '%a')",
write={
- "mysql": "SELECT DATE_FORMAT('2009-10-04 22:23:00', '%W %M %Y')",
+ "mysql": "SELECT DATE_FORMAT('2024-08-22 14:53:12', '%a')",
+ "snowflake": "SELECT TO_CHAR(CAST('2024-08-22 14:53:12' AS TIMESTAMP), 'DY')",
+ },
+ )
+ self.validate_all(
+ "SELECT DATE_FORMAT('2009-10-04 22:23:00', '%a %M %Y')",
+ write={
+ "mysql": "SELECT DATE_FORMAT('2009-10-04 22:23:00', '%a %M %Y')",
"snowflake": "SELECT TO_CHAR(CAST('2009-10-04 22:23:00' AS TIMESTAMP), 'DY mmmm yyyy')",
},
)
@@ -555,7 +562,7 @@ class TestMySQL(Validator):
self.validate_all(
"SELECT DATE_FORMAT('1900-10-04 22:23:00', '%d %y %a %d %m %b')",
write={
- "mysql": "SELECT DATE_FORMAT('1900-10-04 22:23:00', '%d %y %W %d %m %b')",
+ "mysql": "SELECT DATE_FORMAT('1900-10-04 22:23:00', '%d %y %a %d %m %b')",
"snowflake": "SELECT TO_CHAR(CAST('1900-10-04 22:23:00' AS TIMESTAMP), 'DD yy DY DD mm mon')",
},
)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 25.18 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
duckdb==1.2.1
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
identify==2.6.9
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
maturin==1.8.3
mypy==1.15.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pandas-stubs==2.2.2.240807
pdoc==15.0.1
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
ruff==0.4.3
six==1.17.0
-e git+https://github.com/tobymao/sqlglot.git@6494776a45ae4975cee21f70b5f383d29530d155#egg=sqlglot
tomli==2.2.1
types-python-dateutil==2.9.0.20241206
types-pytz==2025.2.0.20250326
typing_extensions==4.13.0
tzdata==2025.2
virtualenv==20.29.3
| name: sqlglot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- duckdb==1.2.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- identify==2.6.9
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- maturin==1.8.3
- mypy==1.15.0
- mypy-extensions==1.0.0
- nodeenv==1.9.1
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pandas-stubs==2.2.2.240807
- pdoc==15.0.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- ruff==0.4.3
- six==1.17.0
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- types-pytz==2025.2.0.20250326
- typing-extensions==4.13.0
- tzdata==2025.2
- virtualenv==20.29.3
prefix: /opt/conda/envs/sqlglot
| [
"tests/dialects/test_mysql.py::TestMySQL::test_date_format"
] | [] | [
"tests/dialects/test_mysql.py::TestMySQL::test_at_time_zone",
"tests/dialects/test_mysql.py::TestMySQL::test_bits_literal",
"tests/dialects/test_mysql.py::TestMySQL::test_canonical_functions",
"tests/dialects/test_mysql.py::TestMySQL::test_convert",
"tests/dialects/test_mysql.py::TestMySQL::test_ddl",
"tests/dialects/test_mysql.py::TestMySQL::test_escape",
"tests/dialects/test_mysql.py::TestMySQL::test_hexadecimal_literal",
"tests/dialects/test_mysql.py::TestMySQL::test_identity",
"tests/dialects/test_mysql.py::TestMySQL::test_introducers",
"tests/dialects/test_mysql.py::TestMySQL::test_is_null",
"tests/dialects/test_mysql.py::TestMySQL::test_json_object",
"tests/dialects/test_mysql.py::TestMySQL::test_json_value",
"tests/dialects/test_mysql.py::TestMySQL::test_match_against",
"tests/dialects/test_mysql.py::TestMySQL::test_monthname",
"tests/dialects/test_mysql.py::TestMySQL::test_mysql",
"tests/dialects/test_mysql.py::TestMySQL::test_mysql_time",
"tests/dialects/test_mysql.py::TestMySQL::test_safe_div",
"tests/dialects/test_mysql.py::TestMySQL::test_set_variable",
"tests/dialects/test_mysql.py::TestMySQL::test_show_columns",
"tests/dialects/test_mysql.py::TestMySQL::test_show_db_like_or_where_sql",
"tests/dialects/test_mysql.py::TestMySQL::test_show_engine",
"tests/dialects/test_mysql.py::TestMySQL::test_show_errors",
"tests/dialects/test_mysql.py::TestMySQL::test_show_events",
"tests/dialects/test_mysql.py::TestMySQL::test_show_grants",
"tests/dialects/test_mysql.py::TestMySQL::test_show_index",
"tests/dialects/test_mysql.py::TestMySQL::test_show_like_or_where",
"tests/dialects/test_mysql.py::TestMySQL::test_show_name",
"tests/dialects/test_mysql.py::TestMySQL::test_show_processlist",
"tests/dialects/test_mysql.py::TestMySQL::test_show_profile",
"tests/dialects/test_mysql.py::TestMySQL::test_show_replica_status",
"tests/dialects/test_mysql.py::TestMySQL::test_show_simple",
"tests/dialects/test_mysql.py::TestMySQL::test_show_tables",
"tests/dialects/test_mysql.py::TestMySQL::test_string_literals",
"tests/dialects/test_mysql.py::TestMySQL::test_timestamp_trunc",
"tests/dialects/test_mysql.py::TestMySQL::test_types"
] | [] | MIT License | 19,458 | 324 | [
"sqlglot/dialects/mysql.py",
"sqlglot/dialects/presto.py"
] |