instance_id
stringlengths 12
57
| base_commit
stringlengths 40
40
| created_at
stringdate 2015-01-06 14:05:07
2025-04-29 17:56:51
| environment_setup_commit
stringlengths 40
40
| hints_text
stringlengths 0
158k
| patch
stringlengths 261
20.8k
| problem_statement
stringlengths 11
52.5k
| repo
stringlengths 7
53
| test_patch
stringlengths 280
206k
| meta
dict | version
stringclasses 463
values | install_config
dict | requirements
stringlengths 93
34k
⌀ | environment
stringlengths 772
20k
⌀ | FAIL_TO_PASS
sequencelengths 1
856
| FAIL_TO_FAIL
sequencelengths 0
536
| PASS_TO_PASS
sequencelengths 0
7.87k
| PASS_TO_FAIL
sequencelengths 0
92
| license_name
stringclasses 35
values | __index_level_0__
int64 11
21.4k
| num_tokens_patch
int64 103
4.99k
| before_filepaths
sequencelengths 0
14
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dask__dask-1150 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | 2016-05-10 15:29:30 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | diff --git a/dask/array/random.py b/dask/array/random.py
index 71050b304..9a1b0b364 100644
--- a/dask/array/random.py
+++ b/dask/array/random.py
@@ -44,8 +44,13 @@ class RandomState(object):
self._numpy_state.seed(seed)
def _wrap(self, func, *args, **kwargs):
+ """ Wrap numpy random function to produce dask.array random function
+
+ extra_chunks should be a chunks tuple to append to the end of chunks
+ """
size = kwargs.pop('size')
chunks = kwargs.pop('chunks')
+ extra_chunks = kwargs.pop('extra_chunks', ())
if not isinstance(size, (tuple, list)):
size = (size,)
@@ -62,12 +67,13 @@ class RandomState(object):
seeds = different_seeds(len(sizes), self._numpy_state)
token = tokenize(seeds, size, chunks, args, kwargs)
name = 'da.random.{0}-{1}'.format(func.__name__, token)
- keys = product([name], *[range(len(bd)) for bd in chunks])
+ keys = product([name], *([range(len(bd)) for bd in chunks]
+ + [[0]] * len(extra_chunks)))
vals = ((_apply_random, func.__name__, seed, size, args, kwargs)
for seed, size in zip(seeds, sizes))
dsk = dict(zip(keys, vals))
- return Array(dsk, name, chunks, dtype=dtype)
+ return Array(dsk, name, chunks + extra_chunks, dtype=dtype)
@doc_wraps(np.random.RandomState.beta)
def beta(self, a, b, size=None, chunks=None):
@@ -144,7 +150,11 @@ class RandomState(object):
return self._wrap(np.random.RandomState.logseries, p,
size=size, chunks=chunks)
- # multinomial
+ @doc_wraps(np.random.RandomState.multinomial)
+ def multinomial(self, n, pvals, size=None, chunks=None):
+ return self._wrap(np.random.RandomState.multinomial, n, pvals,
+ size=size, chunks=chunks,
+ extra_chunks=((len(pvals),),))
@doc_wraps(np.random.RandomState.negative_binomial)
def negative_binomial(self, n, p, size=None, chunks=None):
@@ -295,6 +305,7 @@ laplace = _state.laplace
logistic = _state.logistic
lognormal = _state.lognormal
logseries = _state.logseries
+multinomial = _state.multinomial
negative_binomial = _state.negative_binomial
noncentral_chisquare = _state.noncentral_chisquare
noncentral_f = _state.noncentral_f
| Multinomial random generator
`dask/array/random.py` is missing a multinomial random generator (there is aplaceholder `# multinomial`).
Will dask have a multinomial random generator at some point? Does it require a significantly different approach than the other generators? | dask/dask | diff --git a/dask/array/tests/test_random.py b/dask/array/tests/test_random.py
index 855b200fd..1112a13ae 100644
--- a/dask/array/tests/test_random.py
+++ b/dask/array/tests/test_random.py
@@ -134,6 +134,7 @@ def test_random_all():
da.random.logistic(size=5, chunks=3).compute()
da.random.lognormal(size=5, chunks=3).compute()
da.random.logseries(0.5, size=5, chunks=3).compute()
+ da.random.multinomial(20, [1/6.]*6, size=5, chunks=3).compute()
da.random.negative_binomial(5, 0.5, size=5, chunks=3).compute()
da.random.noncentral_chisquare(2, 2, size=5, chunks=3).compute()
@@ -159,3 +160,11 @@ def test_random_all():
da.random.standard_gamma(2, size=5, chunks=3).compute()
da.random.standard_normal(size=5, chunks=3).compute()
da.random.standard_t(2, size=5, chunks=3).compute()
+
+
+def test_multinomial():
+ for size, chunks in [(5, 3), ((5, 4), (2, 3))]:
+ x = da.random.multinomial(20, [1/6.]*6, size=size, chunks=chunks)
+ y = np.random.multinomial(20, [1/6.]*6, size=size)
+
+ assert x.shape == y.shape == x.compute().shape
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.23.10
botocore==1.26.10
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@71e3e413d6e00942de3ff32a3ba378408f2648e9#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.9.5
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs==0.4.2
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.23.10
- botocore==1.26.10
- distributed==1.9.5
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3fs==0.4.2
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_random.py::test_random_all",
"dask/array/tests/test_random.py::test_multinomial"
] | [] | [
"dask/array/tests/test_random.py::test_RandomState",
"dask/array/tests/test_random.py::test_concurrency",
"dask/array/tests/test_random.py::test_doc_randomstate",
"dask/array/tests/test_random.py::test_serializability",
"dask/array/tests/test_random.py::test_determinisim_through_dask_values",
"dask/array/tests/test_random.py::test_randomstate_consistent_names",
"dask/array/tests/test_random.py::test_random",
"dask/array/tests/test_random.py::test_parametrized_random_function",
"dask/array/tests/test_random.py::test_kwargs",
"dask/array/tests/test_random.py::test_unique_names",
"dask/array/tests/test_random.py::test_docs",
"dask/array/tests/test_random.py::test_can_make_really_big_random_array",
"dask/array/tests/test_random.py::test_random_seed"
] | [] | BSD 3-Clause "New" or "Revised" License | 530 | 647 | [
"dask/array/random.py"
] |
|
Axelrod-Python__Axelrod-587 | 03dd1a9600965800125eeb8942b6b0a3dfacf29c | 2016-05-11 17:36:01 | 03dd1a9600965800125eeb8942b6b0a3dfacf29c | diff --git a/axelrod/strategies/cycler.py b/axelrod/strategies/cycler.py
index 599e97a5..e3dd9c39 100644
--- a/axelrod/strategies/cycler.py
+++ b/axelrod/strategies/cycler.py
@@ -1,5 +1,6 @@
from axelrod import Actions, Player, init_args
+import copy
class AntiCycler(Player):
"""
@@ -74,18 +75,27 @@ class Cycler(Player):
class CyclerCCD(Cycler):
+ classifier = copy.copy(Cycler.classifier)
+ classifier['memory_depth'] = 2
+
@init_args
def __init__(self, cycle="CCD"):
Cycler.__init__(self, cycle=cycle)
class CyclerCCCD(Cycler):
+ classifier = copy.copy(Cycler.classifier)
+ classifier['memory_depth'] = 3
+
@init_args
def __init__(self, cycle="CCCD"):
Cycler.__init__(self, cycle=cycle)
class CyclerCCCCCD(Cycler):
+ classifier = copy.copy(Cycler.classifier)
+ classifier['memory_depth'] = 5
+
@init_args
def __init__(self, cycle="CCCCCD"):
Cycler.__init__(self, cycle=cycle)
diff --git a/axelrod/strategies/gobymajority.py b/axelrod/strategies/gobymajority.py
index fba5f73d..efc0d525 100644
--- a/axelrod/strategies/gobymajority.py
+++ b/axelrod/strategies/gobymajority.py
@@ -1,5 +1,7 @@
from axelrod import Actions, Player, init_args
+import copy
+
C, D = Actions.C, Actions.D
@@ -77,6 +79,8 @@ class GoByMajority40(GoByMajority):
"""
GoByMajority player with a memory of 40.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 40
@init_args
def __init__(self, memory_depth=40, soft=True):
@@ -88,6 +92,8 @@ class GoByMajority20(GoByMajority):
"""
GoByMajority player with a memory of 20.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 20
@init_args
def __init__(self, memory_depth=20, soft=True):
@@ -99,6 +105,8 @@ class GoByMajority10(GoByMajority):
"""
GoByMajority player with a memory of 10.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 10
@init_args
def __init__(self, memory_depth=10, soft=True):
@@ -110,6 +118,8 @@ class GoByMajority5(GoByMajority):
"""
GoByMajority player with a memory of 5.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 5
@init_args
def __init__(self, memory_depth=5, soft=True):
@@ -136,6 +146,8 @@ class HardGoByMajority40(HardGoByMajority):
"""
HardGoByMajority player with a memory of 40.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 40
@init_args
def __init__(self, memory_depth=40, soft=False):
@@ -147,6 +159,8 @@ class HardGoByMajority20(HardGoByMajority):
"""
HardGoByMajority player with a memory of 20.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 20
@init_args
def __init__(self, memory_depth=20, soft=False):
@@ -158,6 +172,8 @@ class HardGoByMajority10(HardGoByMajority):
"""
HardGoByMajority player with a memory of 10.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 10
@init_args
def __init__(self, memory_depth=10, soft=False):
@@ -169,6 +185,8 @@ class HardGoByMajority5(HardGoByMajority):
"""
HardGoByMajority player with a memory of 5.
"""
+ classifier = copy.copy(GoByMajority.classifier)
+ classifier['memory_depth'] = 5
@init_args
def __init__(self, memory_depth=5, soft=False):
diff --git a/axelrod/strategies/meta.py b/axelrod/strategies/meta.py
index 2f16d4b8..c2d5b60f 100644
--- a/axelrod/strategies/meta.py
+++ b/axelrod/strategies/meta.py
@@ -289,6 +289,14 @@ class MetaMixer(MetaPlayer):
"""
name = "Meta Mixer"
+ classifier = {
+ 'memory_depth': float('inf'), # Long memory
+ 'stochastic': True,
+ 'makes_use_of': set(),
+ 'inspects_source': False,
+ 'manipulates_source': False,
+ 'manipulates_state': False
+ }
def __init__(self, team=None, distribution=None):
| Test classification of strategy class as well as strategy player
@mojones noticed a bug in the classification of Win Stay Lose Shift: see #506.
I fixed it in #511, but really the test I added to #511 should be a test in the player class. I tried that but didn't get a failing test. Needs investigating :) | Axelrod-Python/Axelrod | diff --git a/axelrod/tests/unit/test_gambler.py b/axelrod/tests/unit/test_gambler.py
index 1448103f..c59bb8d3 100755
--- a/axelrod/tests/unit/test_gambler.py
+++ b/axelrod/tests/unit/test_gambler.py
@@ -8,6 +8,8 @@ import random
from .test_player import TestPlayer, TestHeadsUp
from axelrod import random_choice, Actions
+import copy
+
C, D = axelrod.Actions.C, axelrod.Actions.D
@@ -25,6 +27,9 @@ class TestGambler(TestPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['memory_depth'] = float('inf')
+
def test_init(self):
# Test empty table
player = self.player(dict())
diff --git a/axelrod/tests/unit/test_gobymajority.py b/axelrod/tests/unit/test_gobymajority.py
index 52883322..40d3b9e2 100644
--- a/axelrod/tests/unit/test_gobymajority.py
+++ b/axelrod/tests/unit/test_gobymajority.py
@@ -126,6 +126,15 @@ def factory_TestGoByRecentMajority(L, soft=True):
name = "Hard Go By Majority: %i" % L
player = getattr(axelrod, 'HardGoByMajority%i' % L)
+ expected_classifier = {
+ 'stochastic': False,
+ 'memory_depth': L,
+ 'makes_use_of': set(),
+ 'inspects_source': False,
+ 'manipulates_source': False,
+ 'manipulates_state': False
+ }
+
def test_initial_strategy(self):
"""Starts by defecting."""
self.first_play_test(D)
diff --git a/axelrod/tests/unit/test_lookerup.py b/axelrod/tests/unit/test_lookerup.py
index 49de2ce9..ce447ae1 100755
--- a/axelrod/tests/unit/test_lookerup.py
+++ b/axelrod/tests/unit/test_lookerup.py
@@ -4,6 +4,8 @@ import axelrod
from .test_player import TestPlayer, TestHeadsUp
from axelrod.strategies.lookerup import create_lookup_table_keys
+import copy
+
C, D = axelrod.Actions.C, axelrod.Actions.D
@@ -13,7 +15,7 @@ class TestLookerUp(TestPlayer):
player = axelrod.LookerUp
expected_classifier = {
- 'memory_depth': 1, # Default TFT table
+ 'memory_depth': 1, # Default TfT
'stochastic': False,
'makes_use_of': set(),
'inspects_source': False,
@@ -21,6 +23,9 @@ class TestLookerUp(TestPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['memory_depth'] = float('inf')
+
def test_init(self):
# Test empty table
player = self.player(dict())
@@ -113,6 +118,7 @@ class TestLookerUp(TestPlayer):
self.responses_test([C, C, D], [D, D, C], [D])
+
class TestEvolvedLookerUp(TestPlayer):
name = "EvolvedLookerUp"
diff --git a/axelrod/tests/unit/test_meta.py b/axelrod/tests/unit/test_meta.py
index c8355d79..25810483 100644
--- a/axelrod/tests/unit/test_meta.py
+++ b/axelrod/tests/unit/test_meta.py
@@ -3,7 +3,7 @@
import random
import axelrod
-import unittest
+import copy
from .test_player import TestPlayer
@@ -26,7 +26,7 @@ class TestMetaPlayer(TestPlayer):
'manipulates_state': False
}
- def classifier_test(self):
+ def classifier_test(self, expected_class_classifier=None):
player = self.player()
classifier = dict()
for key in ['stochastic',
@@ -47,6 +47,12 @@ class TestMetaPlayer(TestPlayer):
msg="%s - Behaviour: %s != Expected Behaviour: %s" %
(key, player.classifier[key], classifier[key]))
+ # Test that player has same classifier as it's class unless otherwise
+ # specified
+ if expected_class_classifier is None:
+ expected_class_classifier = player.classifier
+ self.assertEqual(expected_class_classifier, self.player.classifier)
+
def test_reset(self):
p1 = self.player()
p2 = axelrod.Cooperator()
@@ -70,6 +76,10 @@ class TestMetaMajority(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_strategy(self):
P1 = axelrod.MetaMajority()
@@ -96,6 +106,10 @@ class TestMetaMinority(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_team(self):
team = [axelrod.Cooperator]
player = self.player(team=team)
@@ -127,6 +141,10 @@ class TestMetaWinner(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_strategy(self):
P1 = axelrod.MetaWinner(team = [axelrod.Cooperator, axelrod.Defector])
@@ -206,6 +224,10 @@ class TestMetaMajorityMemoryOne(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_strategy(self):
self.first_play_test(C)
@@ -222,6 +244,10 @@ class TestMetaWinnerMemoryOne(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_strategy(self):
self.first_play_test(C)
@@ -237,6 +263,11 @@ class TestMetaMajorityFiniteMemory(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
+
def test_strategy(self):
self.first_play_test(C)
@@ -252,6 +283,11 @@ class TestMetaWinnerFiniteMemory(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
+
def test_strategy(self):
self.first_play_test(C)
@@ -267,6 +303,11 @@ class TestMetaMajorityLongMemory(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
+
def test_strategy(self):
self.first_play_test(C)
@@ -282,6 +323,10 @@ class TestMetaWinnerLongMemory(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['stochastic'] = False
+ expected_class_classifier['makes_use_of'] = set([])
+
def test_strategy(self):
self.first_play_test(C)
@@ -298,6 +343,9 @@ class TestMetaMixer(TestMetaPlayer):
'manipulates_state': False
}
+ expected_class_classifier = copy.copy(expected_classifier)
+ expected_class_classifier['makes_use_of'] = set()
+
def test_strategy(self):
team = [axelrod.TitForTat, axelrod.Cooperator, axelrod.Grudger]
diff --git a/axelrod/tests/unit/test_player.py b/axelrod/tests/unit/test_player.py
index 11a89e9a..601fd396 100644
--- a/axelrod/tests/unit/test_player.py
+++ b/axelrod/tests/unit/test_player.py
@@ -116,6 +116,7 @@ class TestOpponent(Player):
class TestPlayer(unittest.TestCase):
"A Test class from which other player test classes are inherited"
player = TestOpponent
+ expected_class_classifier = None
def test_initialisation(self):
"""Test that the player initiates correctly."""
@@ -126,7 +127,7 @@ class TestPlayer(unittest.TestCase):
{'length': -1, 'game': DefaultGame, 'noise': 0})
self.assertEqual(player.cooperations, 0)
self.assertEqual(player.defections, 0)
- self.classifier_test()
+ self.classifier_test(self.expected_class_classifier)
def test_repr(self):
"""Test that the representation is correct."""
@@ -237,12 +238,19 @@ class TestPlayer(unittest.TestCase):
random_seed=random_seed, attrs=attrs)
- def classifier_test(self):
+ def classifier_test(self, expected_class_classifier=None):
"""Test that the keys in the expected_classifier dictionary give the
expected values in the player classifier dictionary. Also checks that
two particular keys (memory_depth and stochastic) are in the
dictionary."""
player = self.player()
+
+ # Test that player has same classifier as it's class unless otherwise
+ # specified
+ if expected_class_classifier is None:
+ expected_class_classifier = player.classifier
+ self.assertEqual(expected_class_classifier, self.player.classifier)
+
self.assertTrue('memory_depth' in player.classifier,
msg="memory_depth not in classifier")
self.assertTrue('stochastic' in player.classifier,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 3
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
-e git+https://github.com/Axelrod-Python/Axelrod.git@03dd1a9600965800125eeb8942b6b0a3dfacf29c#egg=Axelrod
coverage==7.8.0
cycler==0.12.1
exceptiongroup==1.2.2
execnet==2.1.1
hypothesis==6.130.6
iniconfig==2.1.0
kiwisolver==1.4.7
matplotlib==3.3.4
numpy==2.0.2
packaging==24.2
pillow==11.1.0
pluggy==1.5.0
pyparsing==2.1.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
six==1.17.0
sortedcontainers==2.4.0
testfixtures==4.9.1
tomli==2.2.1
tqdm==3.4.0
typing_extensions==4.13.0
| name: Axelrod
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- coverage==7.8.0
- cycler==0.12.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- hypothesis==6.130.6
- iniconfig==2.1.0
- kiwisolver==1.4.7
- matplotlib==3.3.4
- numpy==2.0.2
- packaging==24.2
- pillow==11.1.0
- pluggy==1.5.0
- pyparsing==2.1.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- six==1.17.0
- sortedcontainers==2.4.0
- testfixtures==4.9.1
- tomli==2.2.1
- tqdm==3.4.0
- typing-extensions==4.13.0
prefix: /opt/conda/envs/Axelrod
| [
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_initialisation"
] | [] | [
"axelrod/tests/unit/test_gambler.py::TestPlayer::test_clone",
"axelrod/tests/unit/test_gambler.py::TestPlayer::test_initialisation",
"axelrod/tests/unit/test_gambler.py::TestPlayer::test_match_attributes",
"axelrod/tests/unit/test_gambler.py::TestPlayer::test_repr",
"axelrod/tests/unit/test_gambler.py::TestPlayer::test_reset",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_clone",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_defector_table",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_init",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_initialisation",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_match_attributes",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_repr",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_reset",
"axelrod/tests/unit/test_gambler.py::TestGambler::test_strategy",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_clone",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_init",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_initialisation",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_match_attributes",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_repr",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_reset",
"axelrod/tests/unit/test_gambler.py::TestPSOGambler::test_strategy",
"axelrod/tests/unit/test_gambler.py::PSOGamblervsDefector::test_vs",
"axelrod/tests/unit/test_gambler.py::PSOGamblervsCooperator::test_vs",
"axelrod/tests/unit/test_gambler.py::PSOGamblervsTFT::test_vs",
"axelrod/tests/unit/test_gambler.py::PSOGamblervsAlternator::test_vs",
"axelrod/tests/unit/test_gobymajority.py::TestPlayer::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestPlayer::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestPlayer::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestPlayer::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestPlayer::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_default_soft",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_name",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_soft",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_default_soft",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_initialisation",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_name",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_soft",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority5::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority10::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority20::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestGoByMajority40::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority5::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority10::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority20::test_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_clone",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_initial_strategy",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_match_attributes",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_repr",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_reset",
"axelrod/tests/unit/test_gobymajority.py::TestHardGoByMajority40::test_strategy",
"axelrod/tests/unit/test_lookerup.py::TestPlayer::test_clone",
"axelrod/tests/unit/test_lookerup.py::TestPlayer::test_initialisation",
"axelrod/tests/unit/test_lookerup.py::TestPlayer::test_match_attributes",
"axelrod/tests/unit/test_lookerup.py::TestPlayer::test_repr",
"axelrod/tests/unit/test_lookerup.py::TestPlayer::test_reset",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_clone",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_defector_table",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_init",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_initialisation",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_match_attributes",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_repr",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_reset",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_starting_move",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_strategy",
"axelrod/tests/unit/test_lookerup.py::TestLookerUp::test_zero_tables",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_clone",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_init",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_initialisation",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_match_attributes",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_repr",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_reset",
"axelrod/tests/unit/test_lookerup.py::TestEvolvedLookerUp::test_strategy",
"axelrod/tests/unit/test_lookerup.py::EvolvedLookerUpvsDefector::test_vs",
"axelrod/tests/unit/test_lookerup.py::EvolvedLookerUpvsCooperator::test_vs",
"axelrod/tests/unit/test_lookerup.py::EvolvedLookerUpvsTFT::test_vs",
"axelrod/tests/unit/test_lookerup.py::EvolvedLookerUpvsAlternator::test_vs",
"axelrod/tests/unit/test_meta.py::TestPlayer::test_clone",
"axelrod/tests/unit/test_meta.py::TestPlayer::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestPlayer::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestPlayer::test_repr",
"axelrod/tests/unit/test_meta.py::TestPlayer::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaPlayer::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaPlayer::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaPlayer::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaPlayer::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaPlayer::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMajority::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMinority::test_team",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaWinner::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaHunter::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityMemoryOne::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerMemoryOne::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityFiniteMemory::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerFiniteMemory::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMajorityLongMemory::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_initialisation",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaWinnerLongMemory::test_strategy",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_clone",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_match_attributes",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_raise_error_in_distribution",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_repr",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_reset",
"axelrod/tests/unit/test_meta.py::TestMetaMixer::test_strategy",
"axelrod/tests/unit/test_player.py::TestPlayerClass::test_add_noise",
"axelrod/tests/unit/test_player.py::TestPlayerClass::test_noisy_play",
"axelrod/tests/unit/test_player.py::TestPlayerClass::test_play",
"axelrod/tests/unit/test_player.py::TestPlayerClass::test_strategy",
"axelrod/tests/unit/test_player.py::TestPlayer::test_clone",
"axelrod/tests/unit/test_player.py::TestPlayer::test_initialisation",
"axelrod/tests/unit/test_player.py::TestPlayer::test_match_attributes",
"axelrod/tests/unit/test_player.py::TestPlayer::test_repr",
"axelrod/tests/unit/test_player.py::TestPlayer::test_reset"
] | [] | MIT License | 534 | 1,367 | [
"axelrod/strategies/cycler.py",
"axelrod/strategies/gobymajority.py",
"axelrod/strategies/meta.py"
] |
|
adamtheturtle__todo-28 | 7666d2181cdea24c963f2d99f918fd368fefafef | 2016-05-15 16:08:13 | 7666d2181cdea24c963f2d99f918fd368fefafef | diff --git a/authentication/authentication.py b/authentication/authentication.py
index 7b64ff6..f9b7cee 100644
--- a/authentication/authentication.py
+++ b/authentication/authentication.py
@@ -288,6 +288,26 @@ def create_todo():
return jsonify(create.json()), create.status_code
+
[email protected]('/todos/<id>', methods=['GET'])
+@consumes('application/json')
+def read_todo(id):
+ """
+ Get information about particular todo item.
+
+ :reqheader Content-Type: application/json
+ :resheader Content-Type: application/json
+ :resjson string id: The id of the todo item.
+ :resjson boolean completed: Whether the item is completed.
+ :resjson number completion_time: The completion UNIX timestamp, or
+ ``null`` if there is none.
+ :status 200: The requested item's information is returned.
+ :status 404: There is no item with the given ``id``.
+ """
+ url = urljoin(STORAGE_URL, 'todos/{id}').format(id=id)
+ response = requests.get(url, headers={'Content-Type': 'application/json'})
+ return jsonify(response.json()), response.status_code
+
if __name__ == '__main__': # pragma: no cover
# Specifying 0.0.0.0 as the host tells the operating system to listen on
# all public IPs. This makes the server visible externally.
diff --git a/storage/storage.py b/storage/storage.py
index ae2db38..29189c9 100644
--- a/storage/storage.py
+++ b/storage/storage.py
@@ -218,6 +218,35 @@ def todos_post():
), codes.CREATED
[email protected]('/todos/<id>', methods=['GET'])
+@consumes('application/json')
+def specific_todo_get(id):
+ """
+ Get information about particular todo item.
+
+ :reqheader Content-Type: application/json
+ :resheader Content-Type: application/json
+ :resjson string id: The id of the todo item.
+ :resjson boolean completed: Whether the item is completed.
+ :resjson number completion_time: The completion UNIX timestamp, or
+ ``null`` if there is none.
+ :status 200: The requested item's information is returned.
+ :status 404: There is no item with the given ``id``.
+ """
+ todo = Todo.query.filter_by(id=id).first()
+
+ if todo is None:
+ return jsonify(
+ title='The requested todo does not exist.',
+ detail='No todo exists with the id "{id}"'.format(id=id),
+ ), codes.NOT_FOUND
+
+ return jsonify(
+ content=todo.content,
+ completed=todo.completed,
+ completion_timestamp=todo.completion_timestamp,
+ ), codes.OK
+
if __name__ == '__main__': # pragma: no cover
# Specifying 0.0.0.0 as the host tells the operating system to listen on
# all public IPs. This makes the server visible externally.
| Add ability to read a TODO item | adamtheturtle/todo | diff --git a/authentication/tests/test_authentication.py b/authentication/tests/test_authentication.py
index c0da49d..25e8481 100644
--- a/authentication/tests/test_authentication.py
+++ b/authentication/tests/test_authentication.py
@@ -579,3 +579,123 @@ class CreateTodoTests(AuthenticationTests):
"""
response = self.app.post('/todos', content_type='text/html')
self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
+
+
+class ReadTodoTests(AuthenticationTests):
+ """
+ Tests for getting a todo item at ``GET /todos/{id}.``.
+ """
+
+ @responses.activate
+ def test_success(self):
+ """
+ A ``GET`` request for an existing todo an OK status code and the todo's
+ details.
+ """
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(NOT_COMPLETED_TODO_DATA),
+ )
+
+ create_data = json.loads(create.data.decode('utf8'))
+ item_id = create_data['id']
+
+ read = self.app.get(
+ '/todos/{id}'.format(id=item_id),
+ content_type='application/json',
+ data=json.dumps({}),
+ )
+
+ self.assertEqual(read.status_code, codes.OK)
+ expected = NOT_COMPLETED_TODO_DATA.copy()
+ expected['completion_timestamp'] = None
+ self.assertEqual(json.loads(read.data.decode('utf8')), expected)
+
+ @responses.activate
+ @freeze_time(datetime.datetime.fromtimestamp(5, tz=pytz.utc))
+ def test_completed(self):
+ """
+ A ``GET`` request for an existing todo an OK status code and the todo's
+ details, included the completion timestamp.
+ """
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(COMPLETED_TODO_DATA),
+ )
+
+ create_data = json.loads(create.data.decode('utf8'))
+ item_id = create_data['id']
+
+ read = self.app.get(
+ '/todos/{id}'.format(id=item_id),
+ content_type='application/json',
+ data=json.dumps({}),
+ )
+
+ self.assertEqual(read.status_code, codes.OK)
+ expected = COMPLETED_TODO_DATA.copy()
+ expected['completion_timestamp'] = 5
+ self.assertEqual(json.loads(read.data.decode('utf8')), expected)
+
+ @responses.activate
+ def test_multiple_todos(self):
+ """
+ A ``GET`` request gets the correct todo when there are multiple.
+ """
+ self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(COMPLETED_TODO_DATA),
+ )
+
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(NOT_COMPLETED_TODO_DATA),
+ )
+
+ self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(COMPLETED_TODO_DATA),
+ )
+
+ create_data = json.loads(create.data.decode('utf8'))
+ item_id = create_data['id']
+
+ read = self.app.get(
+ '/todos/{id}'.format(id=item_id),
+ content_type='application/json',
+ data=json.dumps({}),
+ )
+
+ self.assertEqual(read.status_code, codes.OK)
+ expected = NOT_COMPLETED_TODO_DATA.copy()
+ expected['completion_timestamp'] = None
+ self.assertEqual(json.loads(read.data.decode('utf8')), expected)
+
+ @responses.activate
+ def test_non_existant(self):
+ """
+ A ``GET`` request for a todo which does not exist returns a NOT_FOUND
+ status code and error details.
+ """
+ response = self.app.get('/todos/1', content_type='application/json')
+
+ self.assertEqual(response.headers['Content-Type'], 'application/json')
+ self.assertEqual(response.status_code, codes.NOT_FOUND)
+ expected = {
+ 'title': 'The requested todo does not exist.',
+ 'detail': 'No todo exists with the id "1"',
+ }
+ self.assertEqual(json.loads(response.data.decode('utf8')), expected)
+
+ def test_incorrect_content_type(self):
+ """
+ If a Content-Type header other than 'application/json' is given, an
+ UNSUPPORTED_MEDIA_TYPE status code is given.
+ """
+ response = self.app.get('/todos/1', content_type='text/html')
+ self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
diff --git a/storage/tests/test_storage.py b/storage/tests/test_storage.py
index 23feaba..e77676d 100644
--- a/storage/tests/test_storage.py
+++ b/storage/tests/test_storage.py
@@ -114,8 +114,8 @@ class GetUserTests(InMemoryStorageTests):
def test_success(self):
"""
- A ``GET`` request for an existing user an OK status code and the user's
- details.
+ A ``GET`` request for an existing user returns an OK status code and
+ the user's details.
"""
self.storage_app.post(
'/users',
@@ -304,3 +304,85 @@ class CreateTodoTests(InMemoryStorageTests):
"""
response = self.storage_app.post('/todos', content_type='text/html')
self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
+
+
+class GetTodoTests(InMemoryStorageTests):
+ """
+ Tests for getting a todo item at ``GET /todos/{id}.``.
+ """
+
+ def test_success(self):
+ """
+ A ``GET`` request for an existing todo an OK status code and the todo's
+ details.
+ """
+ create = self.storage_app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(TODO_DATA),
+ )
+
+ create_data = json.loads(create.data.decode('utf8'))
+ item_id = create_data['id']
+
+ read = self.storage_app.get(
+ '/todos/{id}'.format(id=item_id),
+ content_type='application/json',
+ data=json.dumps({}),
+ )
+
+ self.assertEqual(read.status_code, codes.OK)
+ self.assertEqual(json.loads(read.data.decode('utf8')), TODO_DATA)
+
+ def test_timestamp_null(self):
+ """
+ If the timestamp is not given, the response includes a null timestamp.
+ """
+ data = TODO_DATA.copy()
+ del data['completion_timestamp']
+
+ create = self.storage_app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(data),
+ )
+
+ create_data = json.loads(create.data.decode('utf8'))
+ item_id = create_data['id']
+
+ read = self.storage_app.get(
+ '/todos/{id}'.format(id=item_id),
+ content_type='application/json',
+ data=json.dumps({}),
+ )
+
+ self.assertEqual(read.status_code, codes.OK)
+ expected = TODO_DATA.copy()
+ expected['completion_timestamp'] = None
+ self.assertEqual(json.loads(read.data.decode('utf8')), expected)
+
+ def test_non_existant(self):
+ """
+ A ``GET`` request for a todo which does not exist returns a NOT_FOUND
+ status code and error details.
+ """
+ response = self.storage_app.get(
+ '/todos/1',
+ content_type='application/json',
+ )
+
+ self.assertEqual(response.headers['Content-Type'], 'application/json')
+ self.assertEqual(response.status_code, codes.NOT_FOUND)
+ expected = {
+ 'title': 'The requested todo does not exist.',
+ 'detail': 'No todo exists with the id "1"',
+ }
+ self.assertEqual(json.loads(response.data.decode('utf8')), expected)
+
+ def test_incorrect_content_type(self):
+ """
+ If a Content-Type header other than 'application/json' is given, an
+ UNSUPPORTED_MEDIA_TYPE status code is given.
+ """
+ response = self.storage_app.get('/todos/1', content_type='text/html')
+ self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
bcrypt==4.0.1
certifi==2021.5.30
coverage==6.2
coveralls==3.3.1
dataclasses==0.8
doc8==0.11.2
docopt==0.6.2
docutils==0.17.1
execnet==1.9.0
flake8==5.0.4
Flask==0.10.1
Flask-Bcrypt==0.7.1
Flask-JsonSchema==0.1.1
Flask-Login==0.3.2
Flask-Negotiate==0.1.0
Flask-SQLAlchemy==2.1
freezegun==1.2.2
greenlet==2.0.2
imagesize==1.4.1
importlib-metadata==4.2.0
iniconfig==1.1.1
itsdangerous==2.0.1
Jinja2==3.0.3
jsonschema==3.2.0
MarkupSafe==2.0.1
mccabe==0.7.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2016.4
-e git+https://github.com/adamtheturtle/todo.git@7666d2181cdea24c963f2d99f918fd368fefafef#egg=Qlutter_TODOer
requests==2.10.0
responses==0.17.0
restructuredtext-lint==1.4.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinx-rtd-theme==1.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-httpdomain==1.8.1
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
stevedore==3.5.2
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
Werkzeug==2.0.3
zipp==3.6.0
| name: todo
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- bcrypt==4.0.1
- coverage==6.2
- coveralls==3.3.1
- dataclasses==0.8
- doc8==0.11.2
- docopt==0.6.2
- docutils==0.17.1
- execnet==1.9.0
- flake8==5.0.4
- flask==0.10.1
- flask-bcrypt==0.7.1
- flask-jsonschema==0.1.1
- flask-login==0.3.2
- flask-negotiate==0.1.0
- flask-sqlalchemy==2.1
- freezegun==1.2.2
- greenlet==2.0.2
- imagesize==1.4.1
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- itsdangerous==2.0.1
- jinja2==3.0.3
- jsonschema==3.2.0
- markupsafe==2.0.1
- mccabe==0.7.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2016.4
- requests==2.10.0
- responses==0.17.0
- restructuredtext-lint==1.4.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinx-rtd-theme==1.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-httpdomain==1.8.1
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- stevedore==3.5.2
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- werkzeug==2.0.3
- zipp==3.6.0
prefix: /opt/conda/envs/todo
| [
"authentication/tests/test_authentication.py::ReadTodoTests::test_incorrect_content_type",
"storage/tests/test_storage.py::GetTodoTests::test_incorrect_content_type"
] | [
"authentication/tests/test_authentication.py::SignupTests::test_existing_user",
"authentication/tests/test_authentication.py::SignupTests::test_missing_email",
"authentication/tests/test_authentication.py::SignupTests::test_missing_password",
"authentication/tests/test_authentication.py::SignupTests::test_passwords_hashed",
"authentication/tests/test_authentication.py::SignupTests::test_signup",
"authentication/tests/test_authentication.py::LoginTests::test_login",
"authentication/tests/test_authentication.py::LoginTests::test_missing_email",
"authentication/tests/test_authentication.py::LoginTests::test_missing_password",
"authentication/tests/test_authentication.py::LoginTests::test_non_existant_user",
"authentication/tests/test_authentication.py::LoginTests::test_remember_me_cookie_set",
"authentication/tests/test_authentication.py::LoginTests::test_wrong_password",
"authentication/tests/test_authentication.py::LogoutTests::test_logout",
"authentication/tests/test_authentication.py::LoadUserTests::test_user_does_not_exist",
"authentication/tests/test_authentication.py::LoadUserTests::test_user_exists",
"authentication/tests/test_authentication.py::LoadUserFromTokenTests::test_fake_token",
"authentication/tests/test_authentication.py::LoadUserFromTokenTests::test_load_user_from_token",
"authentication/tests/test_authentication.py::CreateTodoTests::test_current_completion_time",
"authentication/tests/test_authentication.py::CreateTodoTests::test_missing_completed_flag",
"authentication/tests/test_authentication.py::CreateTodoTests::test_missing_text",
"authentication/tests/test_authentication.py::CreateTodoTests::test_success_response",
"authentication/tests/test_authentication.py::ReadTodoTests::test_completed",
"authentication/tests/test_authentication.py::ReadTodoTests::test_multiple_todos",
"authentication/tests/test_authentication.py::ReadTodoTests::test_non_existant",
"authentication/tests/test_authentication.py::ReadTodoTests::test_success",
"storage/tests/test_storage.py::CreateUserTests::test_existing_user",
"storage/tests/test_storage.py::CreateUserTests::test_missing_email",
"storage/tests/test_storage.py::CreateUserTests::test_missing_password_hash",
"storage/tests/test_storage.py::CreateUserTests::test_success_response",
"storage/tests/test_storage.py::GetUserTests::test_non_existant_user",
"storage/tests/test_storage.py::GetUserTests::test_success",
"storage/tests/test_storage.py::GetUsersTests::test_with_users",
"storage/tests/test_storage.py::CreateTodoTests::test_missing_completed_flag",
"storage/tests/test_storage.py::CreateTodoTests::test_missing_completion_time",
"storage/tests/test_storage.py::CreateTodoTests::test_missing_text",
"storage/tests/test_storage.py::CreateTodoTests::test_success_response",
"storage/tests/test_storage.py::GetTodoTests::test_non_existant",
"storage/tests/test_storage.py::GetTodoTests::test_success",
"storage/tests/test_storage.py::GetTodoTests::test_timestamp_null"
] | [
"authentication/tests/test_authentication.py::SignupTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LoginTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LogoutTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LogoutTests::test_logout_twice",
"authentication/tests/test_authentication.py::LogoutTests::test_not_logged_in",
"authentication/tests/test_authentication.py::UserTests::test_different_password_different_token",
"authentication/tests/test_authentication.py::UserTests::test_get_auth_token",
"authentication/tests/test_authentication.py::UserTests::test_get_id",
"authentication/tests/test_authentication.py::CreateTodoTests::test_incorrect_content_type",
"storage/tests/test_storage.py::CreateUserTests::test_incorrect_content_type",
"storage/tests/test_storage.py::GetUserTests::test_incorrect_content_type",
"storage/tests/test_storage.py::GetUsersTests::test_incorrect_content_type",
"storage/tests/test_storage.py::GetUsersTests::test_no_users",
"storage/tests/test_storage.py::CreateTodoTests::test_incorrect_content_type"
] | [] | null | 536 | 715 | [
"authentication/authentication.py",
"storage/storage.py"
] |
|
adamtheturtle__todo-47 | f81fa85e3c06d931963f76f2d0772ce0b9db67b9 | 2016-05-16 22:04:31 | f81fa85e3c06d931963f76f2d0772ce0b9db67b9 | diff --git a/authentication/authentication.py b/authentication/authentication.py
index 445bcc7..6937a58 100644
--- a/authentication/authentication.py
+++ b/authentication/authentication.py
@@ -250,9 +250,10 @@ def signup():
@app.route('/todos', methods=['POST'])
@consumes('application/json')
@jsonschema.validate('todos', 'create')
+@login_required
def create_todo():
"""
- Create a new todo item.
+ Create a new todo item. Requires log in.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
@@ -287,9 +288,10 @@ def create_todo():
@app.route('/todos/<id>', methods=['GET'])
@consumes('application/json')
+@login_required
def read_todo(id):
"""
- Get information about a particular todo item.
+ Get information about a particular todo item. Requires log in.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
@@ -307,9 +309,10 @@ def read_todo(id):
@app.route('/todos/<id>', methods=['DELETE'])
@consumes('application/json')
+@login_required
def delete_todo(id):
"""
- Delete a particular todo item.
+ Delete a particular todo item. Requires log in.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
@@ -325,9 +328,10 @@ def delete_todo(id):
@app.route('/todos', methods=['GET'])
@consumes('application/json')
+@login_required
def list_todos():
"""
- List todo items, with optional filters.
+ List todo items, with optional filters. Requires log in.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
@@ -349,10 +353,11 @@ def list_todos():
@app.route('/todos/<id>', methods=['PATCH'])
@consumes('application/json')
+@login_required
def update_todo(id):
"""
Update a todo item. If an item is changed from not-completed to completed,
- the ``completion_timestamp`` is set as now.
+ the ``completion_timestamp`` is set as now. Requires log in.
:reqheader Content-Type: application/json
| Protect the TODO CRUD APIs | adamtheturtle/todo | diff --git a/authentication/tests/test_authentication.py b/authentication/tests/test_authentication.py
index 6f7108a..29fb21c 100644
--- a/authentication/tests/test_authentication.py
+++ b/authentication/tests/test_authentication.py
@@ -30,6 +30,7 @@ from storage.tests.testtools import InMemoryStorageTests
USER_DATA = {'email': '[email protected]', 'password': 'secret'}
COMPLETED_TODO_DATA = {'content': 'Buy milk', 'completed': True}
NOT_COMPLETED_TODO_DATA = {'content': 'Get haircut', 'completed': False}
+TIMESTAMP = 1463437744.335567
class AuthenticationTests(InMemoryStorageTests):
@@ -93,6 +94,19 @@ class AuthenticationTests(InMemoryStorageTests):
{key: value for (key, value) in response.headers},
response.data)
+ def log_in_as_new_user(self):
+ """
+ Create a user and log in as that user.
+ """
+ self.app.post(
+ '/signup',
+ content_type='application/json',
+ data=json.dumps(USER_DATA))
+ self.app.post(
+ '/login',
+ content_type='application/json',
+ data=json.dumps(USER_DATA))
+
class SignupTests(AuthenticationTests):
"""
@@ -503,6 +517,7 @@ class CreateTodoTests(AuthenticationTests):
returns a JSON response with the given data and a ``null``
``completion_timestamp``.
"""
+ self.log_in_as_new_user()
response = self.app.post(
'/todos',
content_type='application/json',
@@ -516,12 +531,13 @@ class CreateTodoTests(AuthenticationTests):
self.assertEqual(response.json, expected)
@responses.activate
- @freeze_time(datetime.datetime.fromtimestamp(5.01, tz=pytz.utc))
+ @freeze_time(datetime.datetime.fromtimestamp(TIMESTAMP, tz=pytz.utc))
def test_current_completion_time(self):
"""
If the completed flag is set to ``true`` then the completed time is
the number of seconds since the epoch.
"""
+ self.log_in_as_new_user()
response = self.app.post(
'/todos',
content_type='application/json',
@@ -534,7 +550,7 @@ class CreateTodoTests(AuthenticationTests):
# some accuracy).
self.assertAlmostEqual(
response.json['completion_timestamp'],
- 5.01,
+ TIMESTAMP,
places=3,
)
@@ -580,14 +596,29 @@ class CreateTodoTests(AuthenticationTests):
}
self.assertEqual(response.json, expected)
+ @responses.activate
def test_incorrect_content_type(self):
"""
If a Content-Type header other than 'application/json' is given, an
UNSUPPORTED_MEDIA_TYPE status code is given.
"""
+ self.log_in_as_new_user()
response = self.app.post('/todos', content_type='text/html')
self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
+ @responses.activate
+ def test_not_logged_in(self):
+ """
+ When no user is logged in, an UNAUTHORIZED status code is returned.
+ """
+ response = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(NOT_COMPLETED_TODO_DATA),
+ )
+
+ self.assertEqual(response.status_code, codes.UNAUTHORIZED)
+
class ReadTodoTests(AuthenticationTests):
"""
@@ -600,6 +631,7 @@ class ReadTodoTests(AuthenticationTests):
A ``GET`` request for an existing todo an OK status code and the todo's
details.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -618,12 +650,13 @@ class ReadTodoTests(AuthenticationTests):
self.assertEqual(read.json, expected)
@responses.activate
- @freeze_time(datetime.datetime.fromtimestamp(5, tz=pytz.utc))
+ @freeze_time(datetime.datetime.fromtimestamp(TIMESTAMP, tz=pytz.utc))
def test_completed(self):
"""
A ``GET`` request for an existing todo an OK status code and the todo's
details, included the completion timestamp.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -637,8 +670,12 @@ class ReadTodoTests(AuthenticationTests):
self.assertEqual(read.status_code, codes.OK)
expected = COMPLETED_TODO_DATA.copy()
- expected['completion_timestamp'] = 5
expected['id'] = create.json['id']
+ self.assertAlmostEqual(
+ read.json.pop('completion_timestamp'),
+ TIMESTAMP,
+ places=3
+ )
self.assertEqual(read.json, expected)
@responses.activate
@@ -646,6 +683,7 @@ class ReadTodoTests(AuthenticationTests):
"""
A ``GET`` request gets the correct todo when there are multiple.
"""
+ self.log_in_as_new_user()
self.app.post(
'/todos',
content_type='application/json',
@@ -681,6 +719,7 @@ class ReadTodoTests(AuthenticationTests):
A ``GET`` request for a todo which does not exist returns a NOT_FOUND
status code and error details.
"""
+ self.log_in_as_new_user()
response = self.app.get('/todos/1', content_type='application/json')
self.assertEqual(response.headers['Content-Type'], 'application/json')
@@ -699,6 +738,27 @@ class ReadTodoTests(AuthenticationTests):
response = self.app.get('/todos/1', content_type='text/html')
self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
+ @responses.activate
+ def test_not_logged_in(self):
+ """
+ When no user is logged in, an UNAUTHORIZED status code is returned.
+ """
+ self.log_in_as_new_user()
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(NOT_COMPLETED_TODO_DATA),
+ )
+
+ self.app.post('/logout', content_type='application/json')
+
+ read = self.app.get(
+ '/todos/{id}'.format(id=create.json['id']),
+ content_type='application/json',
+ )
+
+ self.assertEqual(read.status_code, codes.UNAUTHORIZED)
+
class DeleteTodoTests(AuthenticationTests):
"""
@@ -710,6 +770,7 @@ class DeleteTodoTests(AuthenticationTests):
"""
It is possible to delete a todo item.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -735,6 +796,7 @@ class DeleteTodoTests(AuthenticationTests):
"""
Deleting an item twice gives returns a 404 code and error message.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -758,14 +820,38 @@ class DeleteTodoTests(AuthenticationTests):
}
self.assertEqual(delete.json, expected)
+ @responses.activate
def test_incorrect_content_type(self):
"""
If a Content-Type header other than 'application/json' is given, an
UNSUPPORTED_MEDIA_TYPE status code is given.
"""
+ self.log_in_as_new_user()
response = self.app.delete('/todos/1', content_type='text/html')
self.assertEqual(response.status_code, codes.UNSUPPORTED_MEDIA_TYPE)
+ @responses.activate
+ def test_not_logged_in(self):
+ """
+ When no user is logged in, an UNAUTHORIZED status code is returned.
+ """
+ self.log_in_as_new_user()
+
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(COMPLETED_TODO_DATA),
+ )
+
+ self.app.post('/logout', content_type='application/json')
+
+ delete = self.app.delete(
+ '/todos/{id}'.format(id=create.json['id']),
+ content_type='application/json',
+ )
+
+ self.assertEqual(delete.status_code, codes.UNAUTHORIZED)
+
class ListTodosTests(AuthenticationTests):
"""
@@ -777,6 +863,7 @@ class ListTodosTests(AuthenticationTests):
"""
When there are no todos, an empty array is returned.
"""
+ self.log_in_as_new_user()
list_todos = self.app.get(
'/todos',
content_type='application/json',
@@ -785,11 +872,24 @@ class ListTodosTests(AuthenticationTests):
self.assertEqual(list_todos.status_code, codes.OK)
self.assertEqual(list_todos.json['todos'], [])
+ @responses.activate
+ def test_not_logged_in(self):
+ """
+ When no user is logged in, an UNAUTHORIZED status code is returned.
+ """
+ list_todos = self.app.get(
+ '/todos',
+ content_type='application/json',
+ )
+
+ self.assertEqual(list_todos.status_code, codes.UNAUTHORIZED)
+
@responses.activate
def test_list(self):
"""
All todos are listed.
"""
+ self.log_in_as_new_user()
other_todo = NOT_COMPLETED_TODO_DATA.copy()
other_todo['content'] = 'Get a haircut'
@@ -815,11 +915,12 @@ class ListTodosTests(AuthenticationTests):
self.assertEqual(list_todos.json['todos'], expected)
@responses.activate
- @freeze_time(datetime.datetime.fromtimestamp(5, tz=pytz.utc))
+ @freeze_time(datetime.datetime.fromtimestamp(TIMESTAMP, tz=pytz.utc))
def test_filter_completed(self):
"""
It is possible to filter by only completed items.
"""
+ self.log_in_as_new_user()
self.app.post(
'/todos',
content_type='application/json',
@@ -842,15 +943,21 @@ class ListTodosTests(AuthenticationTests):
self.assertEqual(list_todos.status_code, codes.OK)
expected = COMPLETED_TODO_DATA.copy()
- expected['completion_timestamp'] = 5.0
expected['id'] = 2
- self.assertEqual(list_todos_data['todos'], [expected])
+ [todo] = list_todos_data['todos']
+ self.assertAlmostEqual(
+ todo.pop('completion_timestamp'),
+ TIMESTAMP,
+ places=3,
+ )
+ self.assertEqual(todo, expected)
@responses.activate
def test_filter_not_completed(self):
"""
It is possible to filter by only items which are not completed.
"""
+ self.log_in_as_new_user()
self.app.post(
'/todos',
content_type='application/json',
@@ -877,6 +984,7 @@ class ListTodosTests(AuthenticationTests):
expected['id'] = 1
self.assertEqual(list_todos_data['todos'], [expected])
+ @responses.activate
def test_incorrect_content_type(self):
"""
If a Content-Type header other than 'application/json' is given, an
@@ -896,6 +1004,7 @@ class UpdateTodoTests(AuthenticationTests):
"""
It is possible to change the content of a todo item.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -924,11 +1033,34 @@ class UpdateTodoTests(AuthenticationTests):
self.assertEqual(read.json, expected)
@responses.activate
- @freeze_time(datetime.datetime.fromtimestamp(5.0, tz=pytz.utc))
+ def test_not_logged_in(self):
+ """
+ When no user is logged in, an UNAUTHORIZED status code is returned.
+ """
+ self.log_in_as_new_user()
+ create = self.app.post(
+ '/todos',
+ content_type='application/json',
+ data=json.dumps(NOT_COMPLETED_TODO_DATA),
+ )
+
+ self.app.post('/logout', content_type='application/json')
+
+ patch = self.app.patch(
+ '/todos/{id}'.format(id=create.json['id']),
+ content_type='application/json',
+ data=json.dumps({'content': 'Book vacation'}),
+ )
+
+ self.assertEqual(patch.status_code, codes.UNAUTHORIZED)
+
+ @responses.activate
+ @freeze_time(datetime.datetime.fromtimestamp(TIMESTAMP, tz=pytz.utc))
def test_flag_completed(self):
"""
It is possible to flag a todo item as completed.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -943,10 +1075,14 @@ class UpdateTodoTests(AuthenticationTests):
expected = create.json
expected['completed'] = True
- # Timestamp set to now, the time it is first marked completed.
- expected['completion_timestamp'] = 5.0
+ expected['completion_timestamp'] = TIMESTAMP
self.assertEqual(patch.status_code, codes.OK)
+ self.assertAlmostEqual(
+ patch.json.pop('completion_timestamp'),
+ expected.pop('completion_timestamp'),
+ places=3,
+ )
self.assertEqual(patch.json, expected)
read = self.app.get(
@@ -954,6 +1090,11 @@ class UpdateTodoTests(AuthenticationTests):
content_type='application/json',
)
+ self.assertAlmostEqual(
+ read.json.pop('completion_timestamp'),
+ TIMESTAMP,
+ places=3,
+ )
self.assertEqual(read.json, expected)
@responses.activate
@@ -961,6 +1102,7 @@ class UpdateTodoTests(AuthenticationTests):
"""
It is possible to flag a todo item as not completed.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -994,6 +1136,7 @@ class UpdateTodoTests(AuthenticationTests):
It is possible to change the content of a todo item, as well as marking
the item as completed.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -1029,7 +1172,8 @@ class UpdateTodoTests(AuthenticationTests):
Flagging an already completed item as completed does not change the
completion timestamp.
"""
- create_time = datetime.datetime.fromtimestamp(5.0, tz=pytz.utc)
+ self.log_in_as_new_user()
+ create_time = datetime.datetime.fromtimestamp(TIMESTAMP, tz=pytz.utc)
with freeze_time(create_time):
create = self.app.post(
'/todos',
@@ -1037,7 +1181,8 @@ class UpdateTodoTests(AuthenticationTests):
data=json.dumps(COMPLETED_TODO_DATA),
)
- patch_time = datetime.datetime.fromtimestamp(6.0, tz=pytz.utc)
+ patch_time = datetime.datetime.fromtimestamp(
+ TIMESTAMP + 1, tz=pytz.utc)
with freeze_time(patch_time):
patch = self.app.patch(
'/todos/{id}'.format(id=create.json['id']),
@@ -1045,25 +1190,34 @@ class UpdateTodoTests(AuthenticationTests):
data=json.dumps({'completed': True}),
)
- expected = create.json
- # Timestamp set to the time it is first marked completed.
- expected['completion_timestamp'] = 5.0
-
+ self.assertAlmostEqual(
+ patch.json.pop('completion_timestamp'),
+ # Timestamp set to the time it is first marked completed.
+ create.json.pop('completion_timestamp'),
+ places=3,
+ )
self.assertEqual(patch.status_code, codes.OK)
- self.assertEqual(patch.json, expected)
+ self.assertEqual(patch.json, create.json)
read = self.app.get(
'/todos/{id}'.format(id=create.json['id']),
content_type='application/json',
)
- self.assertEqual(read.json, expected)
+ self.assertAlmostEqual(
+ read.json.pop('completion_timestamp'),
+ # Timestamp set to the time it is first marked completed.
+ TIMESTAMP,
+ places=3,
+ )
+ self.assertEqual(read.json, create.json)
@responses.activate
def test_remain_same(self):
"""
Not requesting any changes keeps the item the same.
"""
+ self.log_in_as_new_user()
create = self.app.post(
'/todos',
content_type='application/json',
@@ -1084,6 +1238,7 @@ class UpdateTodoTests(AuthenticationTests):
If the todo item to be updated does not exist, a ``NOT_FOUND`` error is
returned.
"""
+ self.log_in_as_new_user()
response = self.app.patch('/todos/1', content_type='application/json')
self.assertEqual(response.headers['Content-Type'], 'application/json')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
bcrypt==4.0.1
certifi==2021.5.30
coverage==6.2
coveralls==3.3.1
dataclasses==0.8
doc8==0.11.2
docopt==0.6.2
docutils==0.18.1
execnet==1.9.0
flake8==3.9.2
Flask==0.10.1
Flask-Bcrypt==0.7.1
Flask-JsonSchema==0.1.1
Flask-Login==0.3.2
Flask-Negotiate==0.1.0
Flask-SQLAlchemy==2.1
Flask-Testing==0.8.1
freezegun==1.2.2
greenlet==2.0.2
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
itsdangerous==2.0.1
Jinja2==3.0.3
jsonschema==3.2.0
MarkupSafe==2.0.1
mccabe==0.6.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pycodestyle==2.7.0
pyflakes==2.3.1
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2016.4
-e git+https://github.com/adamtheturtle/todo.git@f81fa85e3c06d931963f76f2d0772ce0b9db67b9#egg=Qlutter_TODOer
requests==2.10.0
responses==0.17.0
restructuredtext-lint==1.4.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-httpdomain==1.8.1
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
stevedore==3.5.2
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
Werkzeug==2.0.3
zipp==3.6.0
| name: todo
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- bcrypt==4.0.1
- coverage==6.2
- coveralls==3.3.1
- dataclasses==0.8
- doc8==0.11.2
- docopt==0.6.2
- docutils==0.18.1
- execnet==1.9.0
- flake8==3.9.2
- flask==0.10.1
- flask-bcrypt==0.7.1
- flask-jsonschema==0.1.1
- flask-login==0.3.2
- flask-negotiate==0.1.0
- flask-sqlalchemy==2.1
- flask-testing==0.8.1
- freezegun==1.2.2
- greenlet==2.0.2
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- itsdangerous==2.0.1
- jinja2==3.0.3
- jsonschema==3.2.0
- markupsafe==2.0.1
- mccabe==0.6.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.7.0
- pyflakes==2.3.1
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2016.4
- requests==2.10.0
- responses==0.17.0
- restructuredtext-lint==1.4.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-httpdomain==1.8.1
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- stevedore==3.5.2
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- werkzeug==2.0.3
- zipp==3.6.0
prefix: /opt/conda/envs/todo
| [
"authentication/tests/test_authentication.py::ListTodosTests::test_not_logged_in"
] | [
"authentication/tests/test_authentication.py::SignupTests::test_existing_user",
"authentication/tests/test_authentication.py::SignupTests::test_missing_email",
"authentication/tests/test_authentication.py::SignupTests::test_missing_password",
"authentication/tests/test_authentication.py::SignupTests::test_passwords_hashed",
"authentication/tests/test_authentication.py::SignupTests::test_signup",
"authentication/tests/test_authentication.py::LoginTests::test_login",
"authentication/tests/test_authentication.py::LoginTests::test_missing_email",
"authentication/tests/test_authentication.py::LoginTests::test_missing_password",
"authentication/tests/test_authentication.py::LoginTests::test_non_existant_user",
"authentication/tests/test_authentication.py::LoginTests::test_remember_me_cookie_set",
"authentication/tests/test_authentication.py::LoginTests::test_wrong_password",
"authentication/tests/test_authentication.py::LogoutTests::test_logout",
"authentication/tests/test_authentication.py::LogoutTests::test_logout_twice",
"authentication/tests/test_authentication.py::LoadUserTests::test_user_does_not_exist",
"authentication/tests/test_authentication.py::LoadUserTests::test_user_exists",
"authentication/tests/test_authentication.py::LoadUserFromTokenTests::test_fake_token",
"authentication/tests/test_authentication.py::LoadUserFromTokenTests::test_load_user_from_token",
"authentication/tests/test_authentication.py::CreateTodoTests::test_current_completion_time",
"authentication/tests/test_authentication.py::CreateTodoTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::CreateTodoTests::test_missing_completed_flag",
"authentication/tests/test_authentication.py::CreateTodoTests::test_missing_text",
"authentication/tests/test_authentication.py::CreateTodoTests::test_not_logged_in",
"authentication/tests/test_authentication.py::CreateTodoTests::test_success_response",
"authentication/tests/test_authentication.py::ReadTodoTests::test_completed",
"authentication/tests/test_authentication.py::ReadTodoTests::test_multiple_todos",
"authentication/tests/test_authentication.py::ReadTodoTests::test_non_existant",
"authentication/tests/test_authentication.py::ReadTodoTests::test_not_logged_in",
"authentication/tests/test_authentication.py::ReadTodoTests::test_success",
"authentication/tests/test_authentication.py::DeleteTodoTests::test_delete_twice",
"authentication/tests/test_authentication.py::DeleteTodoTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::DeleteTodoTests::test_not_logged_in",
"authentication/tests/test_authentication.py::DeleteTodoTests::test_success",
"authentication/tests/test_authentication.py::ListTodosTests::test_filter_completed",
"authentication/tests/test_authentication.py::ListTodosTests::test_filter_not_completed",
"authentication/tests/test_authentication.py::ListTodosTests::test_list",
"authentication/tests/test_authentication.py::ListTodosTests::test_no_todos",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_change_content",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_change_content_and_flag",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_flag_completed",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_flag_completed_already_completed",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_flag_not_completed",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_non_existant",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_not_logged_in",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_remain_same"
] | [
"authentication/tests/test_authentication.py::SignupTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LoginTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LogoutTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::LogoutTests::test_not_logged_in",
"authentication/tests/test_authentication.py::UserTests::test_different_password_different_token",
"authentication/tests/test_authentication.py::UserTests::test_get_auth_token",
"authentication/tests/test_authentication.py::UserTests::test_get_id",
"authentication/tests/test_authentication.py::ReadTodoTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::ListTodosTests::test_incorrect_content_type",
"authentication/tests/test_authentication.py::UpdateTodoTests::test_incorrect_content_type"
] | [] | null | 540 | 538 | [
"authentication/authentication.py"
] |
|
peterbe__hashin-22 | ce536a0cffac911124b9af4d14ec0ab79e32816a | 2016-05-17 12:43:56 | ce536a0cffac911124b9af4d14ec0ab79e32816a | diff --git a/hashin.py b/hashin.py
index 510e9bf..d2d1fe6 100755
--- a/hashin.py
+++ b/hashin.py
@@ -120,7 +120,6 @@ def run(spec, file, algorithm, python_versions=None, verbose=False):
def amend_requirements_content(requirements, package, new_lines):
-
# if the package wasn't already there, add it to the bottom
if '%s==' % package not in requirements:
# easy peasy
@@ -132,7 +131,7 @@ def amend_requirements_content(requirements, package, new_lines):
lines = []
padding = ' ' * 4
for line in requirements.splitlines():
- if '{0}=='.format(package) in line:
+ if line.startswith('{0}=='.format(package)):
lines.append(line)
elif lines and line.startswith(padding):
lines.append(line)
| Wrong package replaced when target name is found in existing package
For example, an attempt to add hashes for the `selenium` package replaces the `pytest-selenium` package. Another example would be `pytest-django` and `django`.
Before:
```ini
pytest-selenium==1.2.1 \
--hash=sha256:e82f0a265b0e238ac42ac275d79313d0a7e0bef1a450633aeb3d6549cc14f517 \
--hash=sha256:bd2121022ff3255ce82faec0ef3602462ec6bce9ca627b53462986cfc9b391e9
selenium==2.52.0 \
--hash=sha256:820550a740ca1f746c399a0101986c0e6f94fbfe3c6f976e3f694db452cbe124
```
Command:
```bash
$ hashin selenium==2.53.1 requirements.txt
```
After:
```ini
selenium==2.53.1 \
--hash=sha256:b1af142650ed7025f906349ae0d7ed1f1a1e635e6ce7ac67e2b2f854f9f8fdc1 \
--hash=sha256:53929418a41295b526fbb68e43bc32fe93c3ef99c030b9e705caf1de486440de
``` | peterbe/hashin | diff --git a/tests/test_cli.py b/tests/test_cli.py
index 839c27a..40c9d50 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -188,6 +188,29 @@ autocompeter==1.2.3 \\
)
self.assertEqual(result, previous + new_lines)
+ def test_amend_requirements_content_new_similar_name(self):
+ """This test came from https://github.com/peterbe/hashin/issues/15"""
+ previous_1 = """
+pytest-selenium==1.2.1 \
+ --hash=sha256:e82f0a265b0e238ac42ac275d79313d0a7e0bef1a450633aeb3d6549cc14f517 \
+ --hash=sha256:bd2121022ff3255ce82faec0ef3602462ec6bce9ca627b53462986cfc9b391e9
+ """.strip() + '\n'
+ previous_2 = """
+selenium==2.52.0 \
+ --hash=sha256:820550a740ca1f746c399a0101986c0e6f94fbfe3c6f976e3f694db452cbe124
+ """.strip() + '\n'
+ new_lines = """
+selenium==2.53.1 \
+ --hash=sha256:b1af142650ed7025f906349ae0d7ed1f1a1e635e6ce7ac67e2b2f854f9f8fdc1 \
+ --hash=sha256:53929418a41295b526fbb68e43bc32fe93c3ef99c030b9e705caf1de486440de
+ """.strip()
+ result = hashin.amend_requirements_content(
+ previous_1 + previous_2, 'selenium', new_lines
+ )
+ self.assertTrue(previous_1 in result)
+ self.assertTrue(previous_2 not in result)
+ self.assertTrue(new_lines in result)
+
@cleanup_tmpdir('hashin*')
@mock.patch('hashin.urlopen')
def test_run(self, murlopen):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"flake8",
"black",
"therapist",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pytest-bdd",
"pytest-benchmark",
"pytest-randomly",
"responses",
"hypothesis",
"freezegun",
"trustme",
"requests-mock",
"requests",
"tomlkit"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"test-requirements.txt",
"lint-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
black==25.1.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
execnet==2.1.1
flake8==7.2.0
freezegun==1.5.1
gherkin-official==29.0.0
-e git+https://github.com/peterbe/hashin.git@ce536a0cffac911124b9af4d14ec0ab79e32816a#egg=hashin
hypothesis==6.130.5
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
mock==5.2.0
mypy-extensions==1.0.0
packaging==24.2
parse==1.20.2
parse_type==0.6.4
pathspec==0.12.1
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pycodestyle==2.13.0
pycparser==2.22
pyflakes==3.3.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-bdd==8.1.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-randomly==3.16.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
responses==0.25.7
six==1.17.0
sortedcontainers==2.4.0
therapist==2.2.0
tomli==2.2.1
tomlkit==0.13.2
trustme==1.2.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: hashin
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- black==25.1.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- flake8==7.2.0
- freezegun==1.5.1
- gherkin-official==29.0.0
- hypothesis==6.130.5
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- mock==5.2.0
- mypy-extensions==1.0.0
- packaging==24.2
- parse==1.20.2
- parse-type==0.6.4
- pathspec==0.12.1
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pycodestyle==2.13.0
- pycparser==2.22
- pyflakes==3.3.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-bdd==8.1.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-randomly==3.16.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- responses==0.25.7
- six==1.17.0
- sortedcontainers==2.4.0
- therapist==2.2.0
- tomli==2.2.1
- tomlkit==0.13.2
- trustme==1.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/hashin
| [
"tests/test_cli.py::Tests::test_amend_requirements_content_new_similar_name"
] | [
"tests/test_cli.py::Tests::test_run"
] | [
"tests/test_cli.py::Tests::test_amend_requirements_content_replacement_2",
"tests/test_cli.py::Tests::test_get_latest_version_simple",
"tests/test_cli.py::Tests::test_amend_requirements_content_replacement_amonst_others_2",
"tests/test_cli.py::Tests::test_expand_python_version",
"tests/test_cli.py::Tests::test_filter_releases",
"tests/test_cli.py::Tests::test_get_hashes_error",
"tests/test_cli.py::Tests::test_amend_requirements_content_replacement_amonst_others",
"tests/test_cli.py::Tests::test_amend_requirements_content_new",
"tests/test_cli.py::Tests::test_release_url_metadata_python",
"tests/test_cli.py::Tests::test_amend_requirements_content_replacement",
"tests/test_cli.py::Tests::test_amend_requirements_content_replacement_single_to_multi"
] | [] | MIT License | 544 | 215 | [
"hashin.py"
] |
|
sigmavirus24__github3.py-613 | f6948ac9097f61dd44d8666ac1de42edbea666d5 | 2016-05-17 16:50:13 | 05ed0c6a02cffc6ddd0e82ce840c464e1c5fd8c4 | diff --git a/github3/pulls.py b/github3/pulls.py
index 10457b7d..497e01f0 100644
--- a/github3/pulls.py
+++ b/github3/pulls.py
@@ -8,7 +8,6 @@ This module contains all the classes relating to pull requests.
"""
from __future__ import unicode_literals
-from re import match
from json import dumps
from . import models
@@ -178,10 +177,8 @@ class PullRequest(models.GitHubCore):
#: GitHub.com url for review comments (not a template)
self.review_comments_url = pull.get('review_comments_url')
- m = match('https?://[\w\d\-\.\:]+/(\S+)/(\S+)/(?:issues|pull)?/\d+',
- self.issue_url)
#: Returns ('owner', 'repository') this issue was filed on.
- self.repository = m.groups()
+ self.repository = self.base.repo
#: The state of the pull
self.state = pull.get('state')
#: The title of the request
| PullRequest.respository return on GitHub Enterprise instance
[`github3.pulls.PullRequest.respository`](https://github.com/sigmavirus24/github3.py/blob/0.9.3/github3/pulls.py#L188-L189) returns `(u'api/v3/repos/user', u'repo')` on GitHub Enterprise instances. I believe the expected return shouls be ``(u'user', u'repo')` .
<bountysource-plugin>
---
Want to back this issue? **[Post a bounty on it!](https://www.bountysource.com/issues/7415115-pullrequest-respository-return-on-github-enterprise-instance?utm_campaign=plugin&utm_content=tracker%2F183477&utm_medium=issues&utm_source=github)** We accept bounties via [Bountysource](https://www.bountysource.com/?utm_campaign=plugin&utm_content=tracker%2F183477&utm_medium=issues&utm_source=github).
</bountysource-plugin> | sigmavirus24/github3.py | diff --git a/tests/cassettes/PullRequest_single.json b/tests/cassettes/PullRequest_single.json
new file mode 100644
index 00000000..47a27b6a
--- /dev/null
+++ b/tests/cassettes/PullRequest_single.json
@@ -0,0 +1,1 @@
+{"http_interactions": [{"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept-Charset": "utf-8", "Content-Type": "application/json", "Accept-Encoding": "gzip, deflate", "Accept": "application/vnd.github.v3.full+json", "User-Agent": "github3.py/1.0.0"}, "method": "GET", "uri": "https://api.github.com/repos/sigmavirus24/github3.py"}, "response": {"body": {"string": "", "base64_string": "H4sIAAAAAAAAA62YTY+jOBCG/0rEddNxgKTzcZmd0+ze5jB72UtkwASrASPbJEqj/u/7GgOBrDZJt1dqRQntevy6XGWq3Hg88fbhxl9ufH/ulbRg3t47cp3VUbioLt7cS+s8P3T/UPxY0BOXtQpWZDJKnEsmvX3j5eLISzDGQ0Ex0wSr5TZczj16oprKQy1zjMu0rtSeEPtQLSy1VkzGotSs1ItYFKQm1vgbUEfZAQzTi/3VNly/JttdugvW7HUX+JttxJif7EIapxsY3ExU8W4SS8ZMityozXSR3+izulqTm8GpyHNxBuV2RY8mIoOlcXNL4eXxixRYNkTojMGxWNKHcRRX+vOiWqsGu6v0gSeGo7BbkiWfFtbZQZYJjo+GSFaJFlhHKpa80lyUnxc4sQZNyCMt+Tv9Gg3WChAj7fNSWitYsxMC9fPm1qwhleQnGl+MaySLGT/B2V9E3tiDqC+Vyem/EBTG9VyzA00Kk6MpzRX7mHvt9BqD2gdzpOSz0T89AxI27Com/HnRmShnOY8klZdZKuSMI6FlSmPE6uyMM2aGcJ394PqPOpp9//nnKYRAjHsblNzN3Nb5k2ScyjGkB3tyF4H0BACS3tjFiWPsG4LPLp9ipDqNhKRaPDo07gucgBoy/mliSTNaOAlvAQBlQrh5sgUAxJWq2VOhfX/hLUeRPn/KuojskfdM1txHWwK0UoVzvmTMyYMDpCH9qYx0KOPMDdszGmK/tbtNj05SjT0wUS4iJw5elKSFNERl1L6H9MFVnaEaxgQqWeos1TAGqJaO+93KNJABiZegxtY76ewZpOk8mtPyWNOjG3WAYNfNq/pI3x8WMfdz50oB0pRvkke1+yF35RiltnZAvru59Iq5QtuC5H6Z88ABo8KmdUFR8Ed1wX1ih5iE/f+ANXF6iza/H5cxj+UaRkOuZ7I99Du6i3e7U7/XSZrrHF2v4BQSPYM0v1VUZ+bkwlQVlcxFdIcgTURRbC0WiyZjtC2rCyYdM9gSgKIyzlA1uuhsegaqnoLqtlpPjcwE1XsuaOLk2wECoN1GF62WMI6xCk2qk8AWMCYWPGdKi9LtjL1SxuxSaJ7y+JmO5X66TUDNN8XLmM1pns8RtZrHHHGMWtvsIgpO5uYhS8AycEdgO5WcIaSdvC6ZZTTEdpqxZGhEkgPVaCCCpR+8LMMXP/zl7/br7X4d/o2V1FUyGbN6WW5egnbMao0/M6aqVTbC2CHbX8tgv16BZIbgBOxCEN9w/4BP3Hn8q78ftRTm1gCGSmVXw9+vZvv/uBzpzOIcsXQT9M/Pebp9LT02hdRMFKxCmdBdswyrDKvLAp5O0H4lIlYL9MDErIy/Y+g2CMJJQRCLusR++Ds8PlON2hWv3vHDvpAYmj4zNVUHm6beXsvadJV4cj0GRg/P/I0PHZ9t2jr66wanJJdSdJdFJZIU/X7Fyo49yMBA263tjc1oBHTjQS+7W0XCUlrn+mCLZ8hOUPXnooLukukz2r4ebGjjiqNf9vbjH8Cshcw6EwAA", "encoding": "utf-8"}, "headers": {"vary": "Accept, 
Accept-Encoding", "x-served-by": "03d91026ad8428f4d9966d7434f9d82e", "x-xss-protection": "1; mode=block", "x-content-type-options": "nosniff", "etag": "\"103c261a609253cc5113039f6ab21f0e\"", "access-control-allow-credentials": "true", "status": "200 OK", "x-ratelimit-remaining": "54", "x-github-media-type": "github.v3; param=full; format=json", "access-control-expose-headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "transfer-encoding": "chunked", "x-github-request-id": "48A0C4D3:7DF9:2EE6AA1:53D5BBD8", "cache-control": "public, max-age=60, s-maxage=60", "last-modified": "Wed, 23 Jul 2014 19:45:45 GMT", "date": "Mon, 28 Jul 2014 02:56:25 GMT", "access-control-allow-origin": "*", "content-security-policy": "default-src 'none'", "content-encoding": "gzip", "strict-transport-security": "max-age=31536000; includeSubdomains", "server": "GitHub.com", "x-ratelimit-limit": "60", "x-frame-options": "deny", "content-type": "application/json; charset=utf-8", "x-ratelimit-reset": "1406517556"}, "status": {"message": "OK", "code": 200}, "url": "https://api.github.com/repos/sigmavirus24/github3.py"}, "recorded_at": "2014-07-28T02:56:25"}, {"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept-Charset": "utf-8", "Content-Type": "application/json", "Accept-Encoding": "gzip, deflate", "Accept": "application/vnd.github.v3.full+json", "User-Agent": "github3.py/1.0.0"}, "method": "GET", "uri": "https://api.github.com/repos/sigmavirus24/github3.py/pulls/235"}, "response": {"body": {"string": "", "base64_string": 
"H4sIAAAAAAAAA+1aW2/ruBH+K4KfWtSxLMkX2Tg42/PUC4p20WZfFgs4lETZRGRJlShnc4T8935DSral+NixafQpgBPYEufjcIYz5FzqQVUkg+VgI2VeLm2b5WK0FnJTBaMw29oFz7PSLsV6y3aiqEp3Yuu33ih/tfMqSUrb9aaD4UBEg6Uznbjj6WQxBNw2WXWRj1DP4TVwkYjj2wFGRA6mcibDjQGMoqfFlWXFezhXSUoBtKJKq23Ai8ESghsOSskkhwLCJCt5hLmkkAk9+BZF1r95wlnJR6wsuSwtJmUhggrjh4OqJIh6kGRrkWJ4sBYJUGWjCm/mePPJcMB2TLKiz7p6WDaKJqQwSyVPpdJ5ZWvinwC1LhoAUu9gNnZmk2juLgIWMz/0He7NvTh0o/GYx8EiJLbO7SaaqbSPOD2/TY4GxlmSZC+g7q+ku127E9h7KjCmv4t0fQMCqGo7kxsOQWIJbyQYUcrrmFEUNcynlCsREQZUXxQ8uoqhhgbsvKTgpFYWqsCqoAwLkUuRpdcx1qEEUlasWSq+s+uRQFkCQDmNq1alKEDJd9iE15FqktrOC7Fj4SuJouAhFzsI9ga4Hi3Q5GtO5vgL2RvELCRfsWhLNhezpORvw0GQRa8Y8bjh1t//869/WhEnTQTYNRazCm3ClkjDpMIbCyaWJ1xyPImzYqvEbOEjyMCVnY8si4z/qWv9Twfzt17goYGdYC9ZWWw9fSO6JyJ83IjSwifccJbzwsIUFjauxfI8EaGeTG5Yam3Zs2aw5Dkr4IPA6X/ho6S1E+wwN5ZbrDRbf/jj0+i34reU/v7B2Y7/aBRWZuUJC/mQvoVwYFaZbXmWcmvDsEgLRqQYE3JowZ1ZW2idpPBuxSAnsURZWNEYxf4IWggLDo6jFZOQuzt2Jg/j6cPYfRzPls4Yn18xpsqj/pj5gzt7dLzldLL01BjtdHsw3qMzWU4cuGcasuXF+t1MJ4asoNetkKtyw8CTz2Ivjlw/DrkXBn7ku5PAC+fuxI3mXjiZu/NoFgaejwkgXLFOOfZYiuMUE4oEWoCw2gca+IJVfPCgthswZSU7wV8U25eN7hp4QnuHf94dfAS+ZdSu9flJln4P5g9HsxJOwz0dy9imRlJvMezpbBw6C2c6ZaHvcah97nhRMJ2F+MwZZ8xx3XDq0m6A2eKkxbHOAk73svYMXDZ+5EHb4gNdBZSQYww6/U7vxCvm/rxSnLgBf14pPq8U//8rBTlE7f/pfoET9NgnHIdPywiXnCTL977g8Fvb/5yHXuQtcE0f+67rBg7nkb+IZ4478ya+EzBvOlv4E4o8+vZ/PA9eUwjgTsa+N74xrNDE78OK0Jn44ILYWrhTPlu4ztwPOHeihcfCeE6sXQ4retyeDy16g68KLzrR680hxgkUkzCjF1IbhBodpPuFG13Y42AF6r065OigXRt2dIivDz065PcJP3ocdcIXiOdyCKL9Ra1s1Js747njDAcp21LkckjYACrGHXPVvOjtGUr6UFqH9EHB5XF24XjopyugJIlKZXQk+OkKPph5+HQFJ3OtJ7bULdkIlQihlKJKT9yYioWN6yyGuvzBifz8KjeI0BMRFKx41dE7UoZFzEJKI6iEBOUZ/iLkX6vA+vbz33YeuZuseN5zcvYQPxsCNkhG0RixQnHpM381wiH62sb/JosXIjXJggwplOxSevLsGhF9HgHVnZ8U60rOtkaMKwAAbbLs2QhIAdApoLLiH0mynV94E4G3R+khvDeH1gjgtc2yGC18D1Ir/09agTmkyLUZwbYYta2/KW2ztREm0RN7SRYY4eDyYCuQ2kY8oTPfcmXKHaESRgcUuQxjVgljDyoLbqYYxSaB7CHvmhaqG4kmLF1XbG3G6x4EWqdb+5p9v1guOW+WBxRAUoFI1Z6MndwBhzjVgQAKV0aqP4I5gKqjyyTveFwYUSKgBK8Rnw1EZ9vfAZb2aR/6HqnRFqO2Dz5ZO/3mjYl0G6/fztF
N66pqpKGoNYZd/wlV3E2TLUapw6iOC24Jwq4pJ/M2Go1qStgSuKoTGHGsEQDFinCD+pWJcOsWQ5eYVJ0wJjYjhHZJxiIjTvcgANRqNOFVIxzvMdVXYAKpAI4R90UVI9gDyjF2mkkRN7U1I/gOUP1TiZIhHzLUhLDlpAgF9jHu2qRFVR0wmksjYBnIERBiU0IwwmwxalvXt/tVOvdh7D04qJ0tllN/OVXltdNVOjVmMsWHxuRVuemW4FDI8x/HLhXypj4NgQds9gu+oY0F/9sWlh9kOaj5AYRl2faH4PefD2TL82QoHKZ9A/34nLv+sXSZFKxuUEPNcU1ANEbdOvtVevnrCPXQCOEXCqXlCOkwm1YmvmMocr5e50IQZlWKqqmzwOMXam+ho/f4YXuR2Ad9NDUrV9pMB0tZVBRV4snBDRw9fBHPYh/xqZirRZ/NqbhZFFnTjqLLnVnO0wZ7zwYG6mhtSTRHI8A3HrRsN6uIeMyqRK705RlstynwN+TOV4lIEaygolbyBLWyerDRNTPDhidAU4b5BOBlXaq+KdXnBBQl1hMwt7YXAbI9Uu+HeqiMAr5XODadZd9EdmaS+8zx/qrRCAu3uvvMAJ22FzHSbRstmIJfX0imrU9tKSu9SQdf8q/3bk/5EmYR/9rtUfmC1ePhjztVNJFqV2nG3rFppctRp3OlmWz0xc6/ooMlhTx0C8tlmpu7Wc4J6GRjCzEHZ6v0Jvnv1Ntyb6V11fVjPSkN3VE17bwdpYxIE1oNJ9/fLPneKk8KG4LWrT3tiaZ+sYB6L5tGnPbBiuyPjt0qfU5xf96TrgL0fR1aMD+LJL1D60RG+7NI8lkkoTZYs3ZN43rp4YqEOum7Cw2uxjRAXQtcxONRJFQzLC6cPhVG0L6pf6ITIkQzJTUIxtS2h+Fv/wNzD6LGUi8AAA==", "encoding": "utf-8"}, "headers": {"vary": "Accept, Accept-Encoding", "x-served-by": "3061975e1f37121b3751604ad153c687", "x-xss-protection": "1; mode=block", "x-content-type-options": "nosniff", "etag": "\"42d9e03172ef97d5dbb406c4702c5c0e\"", "access-control-allow-credentials": "true", "status": "200 OK", "x-ratelimit-remaining": "53", "x-github-media-type": "github.v3; param=full; format=json", "access-control-expose-headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "transfer-encoding": "chunked", "x-github-request-id": "48A0C4D3:7DF9:2EE6AAD:53D5BBD9", "cache-control": "public, max-age=60, s-maxage=60", "last-modified": "Mon, 28 Jul 2014 01:14:14 GMT", "date": "Mon, 28 Jul 2014 02:56:25 GMT", "access-control-allow-origin": "*", "content-security-policy": "default-src 'none'", "content-encoding": "gzip", "strict-transport-security": "max-age=31536000; includeSubdomains", "server": "GitHub.com", "x-ratelimit-limit": "60", "x-frame-options": 
"deny", "content-type": "application/json; charset=utf-8", "x-ratelimit-reset": "1406517556"}, "status": {"message": "OK", "code": 200}, "url": "https://api.github.com/repos/sigmavirus24/github3.py/pulls/235"}, "recorded_at": "2014-07-28T02:56:25"}], "recorded_with": "betamax/0.3.2"}
diff --git a/tests/integration/test_pulls.py b/tests/integration/test_pulls.py
index b32ef019..70c00b16 100644
--- a/tests/integration/test_pulls.py
+++ b/tests/integration/test_pulls.py
@@ -130,6 +130,14 @@ class TestPullRequest(IntegrationHelper):
p = self.get_pull_request(num=241)
assert p.update(p.title) is True
+ def test_repository(self):
+ """Show that the pull request has the owner repository."""
+ self.basic_login()
+ cassette_name = self.cassette_name('single')
+ with self.recorder.use_cassette(cassette_name):
+ p = self.get_pull_request()
+ assert p.repository == ('sigmavirus24', 'github3.py')
+
class TestReviewComment(IntegrationHelper):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=2.3.5",
"betamax>=0.5.0",
"betamax_matchers>=0.2.0",
"mock==1.0.1"
],
"pre_install": null,
"python": "3.4",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
betamax==0.8.1
betamax-matchers==0.4.0
certifi==2021.5.30
charset-normalizer==2.0.12
distlib==0.3.9
filelock==3.4.1
-e git+https://github.com/sigmavirus24/github3.py.git@f6948ac9097f61dd44d8666ac1de42edbea666d5#egg=github3.py
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
mock==1.0.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.27.1
requests-toolbelt==1.0.0
six==1.17.0
swebench-matterhorn @ file:///swebench_matterhorn
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
uritemplate==4.1.1
uritemplate.py==3.0.2
urllib3==1.26.20
virtualenv==20.17.1
zipp==3.6.0
| name: github3.py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- betamax==0.8.1
- betamax-matchers==0.4.0
- charset-normalizer==2.0.12
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mock==1.0.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.27.1
- requests-toolbelt==1.0.0
- six==1.17.0
- swebench-matterhorn==0.0.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- uritemplate==4.1.1
- uritemplate-py==3.0.2
- urllib3==1.26.20
- virtualenv==20.17.1
- wheel==0.21.0
- zipp==3.6.0
prefix: /opt/conda/envs/github3.py
| [
"tests/integration/test_pulls.py::TestPullRequest::test_repository"
] | [] | [
"tests/integration/test_pulls.py::TestPullRequest::test_close",
"tests/integration/test_pulls.py::TestPullRequest::test_commits",
"tests/integration/test_pulls.py::TestPullRequest::test_create_comment",
"tests/integration/test_pulls.py::TestPullRequest::test_create_review_comment",
"tests/integration/test_pulls.py::TestPullRequest::test_diff",
"tests/integration/test_pulls.py::TestPullRequest::test_files",
"tests/integration/test_pulls.py::TestPullRequest::test_is_merged",
"tests/integration/test_pulls.py::TestPullRequest::test_issue",
"tests/integration/test_pulls.py::TestPullRequest::test_issue_comments",
"tests/integration/test_pulls.py::TestPullRequest::test_patch",
"tests/integration/test_pulls.py::TestPullRequest::test_reopen",
"tests/integration/test_pulls.py::TestPullRequest::test_review_comments",
"tests/integration/test_pulls.py::TestPullRequest::test_update",
"tests/integration/test_pulls.py::TestReviewComment::test_reply",
"tests/integration/test_pulls.py::TestPullFile::test_contents"
] | [] | BSD 3-Clause "New" or "Revised" License | 545 | 252 | [
"github3/pulls.py"
] |
|
networkx__networkx-2136 | 5aefafab2f05b97b150c6bf681c21ba6465c8d10 | 2016-05-19 01:37:02 | 3f4fd85765bf2d88188cfd4c84d0707152e6cd1e | diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py
index b6ab5e9eb..af8db1d00 100644
--- a/networkx/readwrite/gml.py
+++ b/networkx/readwrite/gml.py
@@ -435,10 +435,6 @@ def parse_gml_lines(lines, label, destringizer):
if label != 'id':
G = nx.relabel_nodes(G, mapping)
- if 'name' in graph:
- G.graph['name'] = graph['name']
- else:
- del G.graph['name']
return G
diff --git a/networkx/relabel.py b/networkx/relabel.py
index ca069c950..8f885432c 100644
--- a/networkx/relabel.py
+++ b/networkx/relabel.py
@@ -147,7 +147,8 @@ def _relabel_inplace(G, mapping):
def _relabel_copy(G, mapping):
H = G.__class__()
- H.name = "(%s)" % G.name
+ if G.name:
+ H.name = "(%s)" % G.name
if G.is_multigraph():
H.add_edges_from( (mapping.get(n1, n1),mapping.get(n2, n2),k,d.copy())
for (n1,n2,k,d) in G.edges(keys=True, data=True))
| relabel_nodes adds a graph attribute when copy=True
I would have expected the following to work:
```
import networkx as nx
graph_a = nx.DiGraph()
graph_b = nx.relabel_nodes(graph_a, {}, copy=True)
print "graph_a.graph", graph_a.graph
print "graph_b.graph", graph_b.graph
assert graph_a.graph == graph_b.graph
```
However, it does not since [_relabel_copy attempts to copy a non-existent graph attribute, 'name'](https://github.com/networkx/networkx/blob/1675a824d6cdb17c3144ef46ff52a0c2b53a11d1/networkx/relabel.py#L150).
I would have expected relabel_nodes to only change the node labels, while maintaining all graph/node/edge attributes. | networkx/networkx | diff --git a/networkx/tests/test_relabel.py b/networkx/tests/test_relabel.py
index 682de98a0..65c29eeab 100644
--- a/networkx/tests/test_relabel.py
+++ b/networkx/tests/test_relabel.py
@@ -150,6 +150,17 @@ class TestRelabel():
mapping={0:'aardvark'}
G=relabel_nodes(G,mapping,copy=False)
+ def test_relabel_copy_name(self):
+ G=Graph()
+ H = relabel_nodes(G, {}, copy=True)
+ assert_equal(H.graph, G.graph)
+ H = relabel_nodes(G, {}, copy=False)
+ assert_equal(H.graph, G.graph)
+ G.name = "first"
+ H = relabel_nodes(G, {}, copy=True)
+ assert_equal(H.graph, G.graph)
+ H = relabel_nodes(G, {}, copy=False)
+ assert_equal(H.graph, G.graph)
def test_relabel_toposort(self):
K5=nx.complete_graph(4)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | help | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgdal-dev graphviz"
],
"python": "3.6",
"reqs_path": [
"requirements/default.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
decorator==5.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/networkx/networkx.git@5aefafab2f05b97b150c6bf681c21ba6465c8d10#egg=networkx
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: networkx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/networkx
| [
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_copy_name"
] | [
"networkx/tests/test_relabel.py::test"
] | [
"networkx/tests/test_relabel.py::TestRelabel::test_convert_node_labels_to_integers",
"networkx/tests/test_relabel.py::TestRelabel::test_convert_to_integers2",
"networkx/tests/test_relabel.py::TestRelabel::test_convert_to_integers_raise",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_copy",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_function",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_graph",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_digraph",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_multigraph",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_multidigraph",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_isolated_nodes_to_same",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_nodes_missing",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_toposort",
"networkx/tests/test_relabel.py::TestRelabel::test_relabel_selfloop"
] | [] | BSD 3-Clause | 549 | 334 | [
"networkx/readwrite/gml.py",
"networkx/relabel.py"
] |
|
jubatus__jubatus-python-client-69 | 34f9f83ee2d230672518102e541286425c92c287 | 2016-05-19 05:04:03 | ecbdecb8eb9ee40694ee39c4bf1de7e7fd984ae5 | diff --git a/jubatus/common/client.py b/jubatus/common/client.py
index d599319..9cd91a8 100644
--- a/jubatus/common/client.py
+++ b/jubatus/common/client.py
@@ -54,6 +54,10 @@ class ClientBase(object):
(`unpack_encoding=None`)
"""
def __init__(self, host, port, name, timeout=10):
+ check_types(host, string_types)
+ check_types(port, int_types)
+ check_types(name, string_types)
+ check_types(timeout, int_types)
address = msgpackrpc.Address(host, port)
self.client = msgpackrpc.Client(address, timeout=timeout, pack_encoding='utf-8', unpack_encoding=None)
self.jubatus_client = Client(self.client, name)
@@ -65,6 +69,7 @@ class ClientBase(object):
return self.jubatus_client.name
def set_name(self, name):
+ check_types(name, string_types)
self.jubatus_client.name = name
def save(self, id):
| no type validation for constructor arguments
Currently argument types are validated, but constructor arguments are not.
For example, the following code:
```
c = jubatus.Classifier("localhost", 9199, 0) # it should be ("localhost", 9199, "") to work
c.get_status()
```
raises "TypeMismatch (error 2)" on RPC call, which is difficult to understand. | jubatus/jubatus-python-client | diff --git a/test/jubatus_test/common/test_client.py b/test/jubatus_test/common/test_client.py
index 0e929f8..065b538 100644
--- a/test/jubatus_test/common/test_client.py
+++ b/test/jubatus_test/common/test_client.py
@@ -67,5 +67,22 @@ class ClientTest(unittest.TestCase):
self.assertEqual("test", c.call("test", [], AnyType(), []))
self.assertRaises(TypeError, c.call, "test", [1], AnyType(), [])
+class ClientBaseTest(unittest.TestCase):
+ def test_constructor(self):
+ self.assertIsInstance(jubatus.common.ClientBase("127.0.0.1", 9199, "cluster", 10), jubatus.common.ClientBase)
+
+ # invalid host
+ self.assertRaises(TypeError, jubatus.common.ClientBase, 127001, 9199, "cluster", 10)
+
+ # invalid port
+ self.assertRaises(TypeError, jubatus.common.ClientBase, "127.0.0.1", "9199", "cluster", 10)
+
+ # invalid name
+ self.assertRaises(TypeError, jubatus.common.ClientBase, "127.0.0.1", 9199, 10, 10)
+
+ # invalid timeout
+ self.assertRaises(TypeError, jubatus.common.ClientBase, "127.0.0.1", 9199, "cluster", "test")
+ self.assertRaises(TypeError, jubatus.common.ClientBase, "127.0.0.1", 9199, "cluster", 1.5)
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/jubatus/jubatus-python-client.git@34f9f83ee2d230672518102e541286425c92c287#egg=jubatus
msgpack-python==0.5.6
msgpack-rpc-python==0.4.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==4.5.3
| name: jubatus-python-client
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- msgpack-python==0.5.6
- msgpack-rpc-python==0.4.1
- tornado==4.5.3
prefix: /opt/conda/envs/jubatus-python-client
| [
"test/jubatus_test/common/test_client.py::ClientBaseTest::test_constructor"
] | [] | [
"test/jubatus_test/common/test_client.py::ClientTest::test_remote_error",
"test/jubatus_test/common/test_client.py::ClientTest::test_type_mismatch",
"test/jubatus_test/common/test_client.py::ClientTest::test_unknown_method",
"test/jubatus_test/common/test_client.py::ClientTest::test_wrong_number_of_arguments"
] | [] | MIT License | 550 | 248 | [
"jubatus/common/client.py"
] |
|
andycasey__ads-64 | 0afd82e0f48ee4debb9047c086488d860415bce7 | 2016-05-28 20:54:36 | c039d67c2b2e9dad936758bc89df1fdd1cbd0aa1 | diff --git a/ads/search.py b/ads/search.py
index c8a0bb4..8f36421 100644
--- a/ads/search.py
+++ b/ads/search.py
@@ -40,21 +40,20 @@ class Article(object):
return self.__unicode__().encode("utf-8")
def __unicode__(self):
- author = self.first_author or "Unknown author"
- if self.author and len(self.author) > 1:
+ author = self._raw.get("first_author", "Unknown author")
+ if len(self._raw.get("author", [])) > 1:
author += " et al."
return u"<{author} {year}, {bibcode}>".format(
author=author,
- year=self.year,
- bibcode=self.bibcode,
+ year=self._raw.get("year", "Unknown year"),
+ bibcode=self._raw.get("bibcode", "Unknown bibcode")
)
def __eq__(self, other):
- if (not hasattr(self, 'bibcode') or not hasattr(other, 'bibcode') or
- self.bibcode is None or other.bibcode is None):
+ if self._raw.get("bibcode") is None or other._raw.get("bibcode") is None:
raise TypeError("Cannot compare articles without bibcodes")
- return self.bibcode == other.bibcode
+ return self._raw['bibcode'] == other._raw['bibcode']
def __ne__(self, other):
return not self.__eq__(other)
@@ -196,8 +195,8 @@ class Article(object):
return self._get_field('indexstamp')
@cached_property
- def first_author_norm(self):
- return self._get_field('first_author_norm')
+ def first_author(self):
+ return self._get_field('first_author')
@cached_property
def issue(self):
| Exception handling in Unicode representation of Articles
In the article method `__unicode__()`, the article properties `first_author`, `bibcode` and `year` are used. This can yield an exception if the fields are not included in the original search query; generally for `first_author` as no getter exists, or if deferred loading for `bibcode` and `year` fails.
Cf. pull request #55 for a more detailed discussion of the issue. | andycasey/ads | diff --git a/ads/tests/test_search.py b/ads/tests/test_search.py
index dc36eda..75d834f 100644
--- a/ads/tests/test_search.py
+++ b/ads/tests/test_search.py
@@ -49,12 +49,13 @@ class TestArticle(unittest.TestCase):
def test_equals(self):
"""
the __eq__ method should compare bibcodes, and raise if bibcode isn't
- defined
+ defined or is None
"""
self.assertNotEqual(Article(bibcode="Not the same"), self.article)
self.assertEqual(Article(bibcode="2013A&A...552A.143S"), self.article)
- with self.assertRaises(TypeError):
- # Explicitly set bibcode to None to avoid invoking the getter.
+ with self.assertRaisesRegexp(TypeError, "Cannot compare articles without bibcodes"):
+ Article() == self.article
+ with self.assertRaisesRegexp(TypeError, "Cannot compare articles without bibcodes"):
Article(bibcode=None) == self.article
def test_init(self):
@@ -79,6 +80,10 @@ class TestArticle(unittest.TestCase):
self.article.__str__()
)
self.assertEqual(self.article.__unicode__(), self.article.__str__())
+ self.assertEqual(
+ Article().__str__(),
+ "<Unknown author Unknown year, Unknown bibcode>"
+ )
@patch('ads.search.Article._get_field')
def test_cached_properties(self, patched):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/andycasey/ads.git@0afd82e0f48ee4debb9047c086488d860415bce7#egg=ads
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
httpretty==0.8.10
idna==3.10
iniconfig==2.1.0
MarkupSafe==3.0.2
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
| name: ads
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- httpretty==0.8.10
- idna==3.10
- iniconfig==2.1.0
- markupsafe==3.0.2
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
prefix: /opt/conda/envs/ads
| [
"ads/tests/test_search.py::TestArticle::test_equals",
"ads/tests/test_search.py::TestArticle::test_print_methods"
] | [
"ads/tests/test_search.py::TestArticle::test_cached_properties",
"ads/tests/test_search.py::TestArticle::test_get_field",
"ads/tests/test_search.py::TestArticle::test_init",
"ads/tests/test_search.py::TestSearchQuery::test_iter",
"ads/tests/test_search.py::TestSearchQuery::test_rows_rewrite",
"ads/tests/test_search.py::TestSolrResponse::test_articles"
] | [
"ads/tests/test_search.py::TestSearchQuery::test_init",
"ads/tests/test_search.py::TestSolrResponse::test_default_article_fields",
"ads/tests/test_search.py::TestSolrResponse::test_init",
"ads/tests/test_search.py::TestSolrResponse::test_load_http_response",
"ads/tests/test_search.py::Testquery::test_init"
] | [] | MIT License | 561 | 445 | [
"ads/search.py"
] |
|
html5lib__html5lib-python-259 | 2d376737a6246ebb38a79600a7fe75abd923cf3e | 2016-05-28 21:05:44 | 563dc298ea43021f9a9306fc7da3734ea5d9d8ec | codecov-io: ## [Current coverage][cc-pull] is **90.83%**
> Merging [#259][cc-pull] into [master][cc-base-branch] will increase coverage by **<.01%**
```diff
@@ master #259 diff @@
==========================================
Files 51 51
Lines 6836 6840 +4
Methods 0 0
Messages 0 0
Branches 1312 1312
==========================================
+ Hits 6209 6213 +4
Misses 468 468
Partials 159 159
```
> Powered by [Codecov](https://codecov.io?src=pr). Last updated by [2d37673...cbc1b34][cc-compare]
[cc-base-branch]: https://codecov.io/gh/html5lib/html5lib-python/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/html5lib/html5lib-python/compare/2d376737a6246ebb38a79600a7fe75abd923cf3e...cbc1b34806178bd5119464865c263c0e254cfa55
[cc-pull]: https://codecov.io/gh/html5lib/html5lib-python/pull/259?src=pr | diff --git a/html5lib/treebuilders/etree.py b/html5lib/treebuilders/etree.py
index d394148..4d12bd4 100644
--- a/html5lib/treebuilders/etree.py
+++ b/html5lib/treebuilders/etree.py
@@ -100,6 +100,7 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False):
node.parent = self
def removeChild(self, node):
+ self._childNodes.remove(node)
self._element.remove(node._element)
node.parent = None
| etree treewalker infinite loop
This goes into an infinite loop:
```python
import html5lib
frag = html5lib.parseFragment("<b><em><foo><foob><fooc><aside></b></em>")
walker = html5lib.getTreeWalker("etree")
print list(walker(frag))
``` | html5lib/html5lib-python | diff --git a/html5lib/tests/test_parser2.py b/html5lib/tests/test_parser2.py
index 0ec5b04..b7a92fd 100644
--- a/html5lib/tests/test_parser2.py
+++ b/html5lib/tests/test_parser2.py
@@ -7,7 +7,7 @@ import io
from . import support # noqa
from html5lib.constants import namespaces
-from html5lib import parse, HTMLParser
+from html5lib import parse, parseFragment, HTMLParser
# tests that aren't autogenerated from text files
@@ -88,3 +88,8 @@ def test_debug_log():
expected[i] = tuple(log)
assert parser.log == expected
+
+
+def test_no_duplicate_clone():
+ frag = parseFragment("<b><em><foo><foob><fooc><aside></b></em>")
+ assert len(frag) == 2
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.08 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"git submodule update --init --recursive"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | chardet==5.2.0
datrie==0.8.2
exceptiongroup==1.2.2
Genshi==0.7.9
-e git+https://github.com/html5lib/html5lib-python.git@2d376737a6246ebb38a79600a7fe75abd923cf3e#egg=html5lib
iniconfig==2.1.0
lxml==5.3.1
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
six==1.17.0
tomli==2.2.1
webencodings==0.5.1
| name: html5lib-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- chardet==5.2.0
- datrie==0.8.2
- exceptiongroup==1.2.2
- genshi==0.7.9
- iniconfig==2.1.0
- lxml==5.3.1
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
- webencodings==0.5.1
prefix: /opt/conda/envs/html5lib-python
| [
"html5lib/tests/test_parser2.py::test_no_duplicate_clone"
] | [] | [
"html5lib/tests/test_parser2.py::test_assertDoctypeCloneable",
"html5lib/tests/test_parser2.py::test_line_counter",
"html5lib/tests/test_parser2.py::test_namespace_html_elements_0_dom",
"html5lib/tests/test_parser2.py::test_namespace_html_elements_1_dom",
"html5lib/tests/test_parser2.py::test_namespace_html_elements_0_etree",
"html5lib/tests/test_parser2.py::test_namespace_html_elements_1_etree",
"html5lib/tests/test_parser2.py::test_unicode_file",
"html5lib/tests/test_parser2.py::test_duplicate_attribute",
"html5lib/tests/test_parser2.py::test_debug_log"
] | [] | MIT License | 562 | 135 | [
"html5lib/treebuilders/etree.py"
] |
Axelrod-Python__Axelrod-603 | c919c39d58552c2db4a2719c817cfa3a3c301f92 | 2016-05-30 15:36:49 | e5b85453f0288ec9f9ea9eb91ed6042855a7b86c | diff --git a/axelrod/result_set.py b/axelrod/result_set.py
index b2bc1b94..b0a67300 100644
--- a/axelrod/result_set.py
+++ b/axelrod/result_set.py
@@ -1,5 +1,6 @@
from collections import defaultdict
import csv
+import tqdm
from numpy import mean, nanmedian, std
@@ -14,10 +15,25 @@ except ImportError:
from io import StringIO
+def update_progress_bar(method):
+ """A decorator to update a progress bar if it exists"""
+ def wrapper(*args):
+ """Run the method and update the progress bar if it exists"""
+ output = method(*args)
+
+ try:
+ args[0].progress_bar.update(1)
+ except AttributeError:
+ pass
+
+ return output
+ return wrapper
+
+
class ResultSet(object):
"""A class to hold the results of a tournament."""
- def __init__(self, players, interactions, with_morality=True):
+ def __init__(self, players, interactions, progress_bar=True):
"""
Parameters
----------
@@ -26,19 +42,24 @@ class ResultSet(object):
interactions : list
a list of dictionaries mapping tuples of player indices to
interactions (1 for each repetition)
- with_morality : bool
- a flag to determine whether morality metrics should be
- calculated.
+ progress_bar : bool
+ Whether or not to create a progress bar which will be updated
"""
self.players = players
self.nplayers = len(players)
self.interactions = interactions
self.nrepetitions = max([len(rep) for rep in list(interactions.values())])
+ if progress_bar:
+ self.progress_bar = tqdm.tqdm(total=19, desc="Analysing results")
+ else:
+ self.progress_bar = False
+
# Calculate all attributes:
- self.build_all(with_morality)
+ self.build_all()
+
- def build_all(self, with_morality):
+ def build_all(self):
"""Build all the results. In a seperate method to make inheritance more
straightforward"""
self.wins = self.build_wins()
@@ -54,15 +75,19 @@ class ResultSet(object):
self.score_diffs = self.build_score_diffs()
self.payoff_diffs_means = self.build_payoff_diffs_means()
- if with_morality:
- self.cooperation = self.build_cooperation()
- self.normalised_cooperation = self.build_normalised_cooperation()
- self.vengeful_cooperation = self.build_vengeful_cooperation()
- self.cooperating_rating = self.build_cooperating_rating()
- self.good_partner_matrix = self.build_good_partner_matrix()
- self.good_partner_rating = self.build_good_partner_rating()
- self.eigenmoses_rating = self.build_eigenmoses_rating()
- self.eigenjesus_rating = self.build_eigenjesus_rating()
+ self.cooperation = self.build_cooperation()
+ self.normalised_cooperation = self.build_normalised_cooperation()
+ self.vengeful_cooperation = self.build_vengeful_cooperation()
+ self.cooperating_rating = self.build_cooperating_rating()
+ self.good_partner_matrix = self.build_good_partner_matrix()
+ self.good_partner_rating = self.build_good_partner_rating()
+ self.eigenmoses_rating = self.build_eigenmoses_rating()
+ self.eigenjesus_rating = self.build_eigenjesus_rating()
+
+ try:
+ self.progress_bar.close()
+ except AttributeError:
+ pass
@property
def _null_results_matrix(self):
@@ -79,6 +104,7 @@ class ResultSet(object):
replist = list(range(self.nrepetitions))
return [[[0 for j in plist] for i in plist] for r in replist]
+ @update_progress_bar
def build_match_lengths(self):
"""
Returns:
@@ -110,6 +136,7 @@ class ResultSet(object):
return match_lengths
+ @update_progress_bar
def build_scores(self):
"""
Returns:
@@ -143,6 +170,7 @@ class ResultSet(object):
return scores
+ @update_progress_bar
def build_ranked_names(self):
"""
Returns:
@@ -150,8 +178,10 @@ class ResultSet(object):
Returns the ranked names. A list of names as calculated by
self.ranking.
"""
+
return [str(self.players[i]) for i in self.ranking]
+ @update_progress_bar
def build_wins(self):
"""
Returns:
@@ -187,6 +217,7 @@ class ResultSet(object):
return wins
+ @update_progress_bar
def build_normalised_scores(self):
"""
Returns:
@@ -229,6 +260,7 @@ class ResultSet(object):
return normalised_scores
+ @update_progress_bar
def build_ranking(self):
"""
Returns:
@@ -244,6 +276,7 @@ class ResultSet(object):
return sorted(range(self.nplayers),
key=lambda i: -nanmedian(self.normalised_scores[i]))
+ @update_progress_bar
def build_payoffs(self):
"""
Returns:
@@ -281,8 +314,10 @@ class ResultSet(object):
utilities.append(iu.compute_final_score_per_turn(interaction)[1])
payoffs[player][opponent] = utilities
+
return payoffs
+ @update_progress_bar
def build_payoff_matrix(self):
"""
Returns:
@@ -317,6 +352,7 @@ class ResultSet(object):
return payoff_matrix
+ @update_progress_bar
def build_payoff_stddevs(self):
"""
Returns:
@@ -353,6 +389,7 @@ class ResultSet(object):
return payoff_stddevs
+ @update_progress_bar
def build_score_diffs(self):
"""
Returns:
@@ -391,8 +428,10 @@ class ResultSet(object):
scores = iu.compute_final_score_per_turn(interaction)
diff = (scores[1] - scores[0])
score_diffs[player][opponent][repetition] = diff
+
return score_diffs
+ @update_progress_bar
def build_payoff_diffs_means(self):
"""
Returns:
@@ -429,8 +468,10 @@ class ResultSet(object):
payoff_diffs_means[player][opponent] = mean(diffs)
else:
payoff_diffs_means[player][opponent] = 0
+
return payoff_diffs_means
+ @update_progress_bar
def build_cooperation(self):
"""
Returns:
@@ -465,8 +506,10 @@ class ResultSet(object):
coop_count += iu.compute_cooperations(interaction)[1]
cooperations[player][opponent] += coop_count
+
return cooperations
+ @update_progress_bar
def build_normalised_cooperation(self):
"""
Returns:
@@ -507,8 +550,10 @@ class ResultSet(object):
# Mean over all reps:
normalised_cooperations[player][opponent] = mean(coop_counts)
+
return normalised_cooperations
+ @update_progress_bar
def build_vengeful_cooperation(self):
"""
Returns:
@@ -522,6 +567,7 @@ class ResultSet(object):
return [[2 * (element - 0.5) for element in row]
for row in self.normalised_cooperation]
+ @update_progress_bar
def build_cooperating_rating(self):
"""
Returns:
@@ -552,6 +598,7 @@ class ResultSet(object):
return [sum(cs) / max(1, float(sum(ls))) for cs, ls
in zip(self.cooperation, lengths)]
+ @update_progress_bar
def build_good_partner_matrix(self):
"""
Returns:
@@ -586,6 +633,7 @@ class ResultSet(object):
return good_partner_matrix
+ @update_progress_bar
def build_good_partner_rating(self):
"""
Returns:
@@ -607,6 +655,7 @@ class ResultSet(object):
return good_partner_rating
+ @update_progress_bar
def build_eigenjesus_rating(self):
"""
Returns:
@@ -617,8 +666,10 @@ class ResultSet(object):
"""
eigenvector, eigenvalue = eigen.principal_eigenvector(
self.normalised_cooperation)
+
return eigenvector.tolist()
+ @update_progress_bar
def build_eigenmoses_rating(self):
"""
Returns:
@@ -629,6 +680,7 @@ class ResultSet(object):
"""
eigenvector, eigenvalue = eigen.principal_eigenvector(
self.vengeful_cooperation)
+
return eigenvector.tolist()
def csv(self):
@@ -655,22 +707,26 @@ class ResultSetFromFile(ResultSet):
by the tournament class.
"""
- def __init__(self, filename, with_morality=True):
+ def __init__(self, filename, progress_bar=True):
"""
Parameters
----------
filename : string
name of a file of the correct file.
- with_morality : bool
- a flag to determine whether morality metrics should be
- calculated.
+ progress_bar : bool
+ Whether or not to create a progress bar which will be updated
"""
self.players, self.interactions = self._read_csv(filename)
self.nplayers = len(self.players)
self.nrepetitions = len(list(self.interactions.values())[0])
+ if progress_bar:
+ self.progress_bar = tqdm.tqdm(total=19, desc="Analysing results")
+ else:
+ self.progress_bar = False
+
# Calculate all attributes:
- self.build_all(with_morality)
+ self.build_all()
def _read_csv(self, filename):
"""
diff --git a/axelrod/tournament.py b/axelrod/tournament.py
index 6b638aa1..32684643 100644
--- a/axelrod/tournament.py
+++ b/axelrod/tournament.py
@@ -85,7 +85,8 @@ class Tournament(object):
axelrod.ResultSet
"""
if progress_bar:
- self.progress_bar = tqdm.tqdm(total=len(self.match_generator))
+ self.progress_bar = tqdm.tqdm(total=len(self.match_generator),
+ desc="Playing matches")
self.setup_output_file(filename)
if not build_results and not filename:
@@ -96,13 +97,16 @@ class Tournament(object):
else:
self._run_parallel(processes=processes, progress_bar=progress_bar)
+ if progress_bar:
+ self.progress_bar.close()
+
# Make sure that python has finished writing to disk
self.outputfile.flush()
if build_results:
- return self._build_result_set()
+ return self._build_result_set(progress_bar=progress_bar)
- def _build_result_set(self):
+ def _build_result_set(self, progress_bar=True):
"""
Build the result set (used by the play method)
@@ -112,7 +116,7 @@ class Tournament(object):
"""
result_set = ResultSetFromFile(
filename=self.filename,
- with_morality=self._with_morality)
+ progress_bar=progress_bar)
self.outputfile.close()
return result_set
| Results set processing shouldn't be in the progress bar | Axelrod-Python/Axelrod | diff --git a/axelrod/tests/unit/test_resultset.py b/axelrod/tests/unit/test_resultset.py
index 2df8666a..c5a084bb 100644
--- a/axelrod/tests/unit/test_resultset.py
+++ b/axelrod/tests/unit/test_resultset.py
@@ -161,7 +161,9 @@ class TestResultSet(unittest.TestCase):
'Defector,Tit For Tat,Alternator\n2.6,1.7,1.5\n2.6,1.7,1.5\n2.6,1.7,1.5\n')
def test_init(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
+ self.assertFalse(rs.progress_bar)
self.assertEqual(rs.players, self.players)
self.assertEqual(rs.nplayers, len(self.players))
self.assertEqual(rs.interactions, self.interactions)
@@ -176,13 +178,25 @@ class TestResultSet(unittest.TestCase):
self.assertIsInstance(interaction, list)
self.assertEqual(len(interaction), self.turns)
- def test_null_results_matrix(self):
+ def test_with_progress_bar(self):
rs = axelrod.ResultSet(self.players, self.interactions)
+ self.assertTrue(rs.progress_bar)
+ self.assertEqual(rs.progress_bar.total, 19)
+
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=True)
+ self.assertTrue(rs.progress_bar)
+ self.assertEqual(rs.progress_bar.total, 19)
+
+ def test_null_results_matrix(self):
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertEqual(
rs._null_results_matrix, self.expected_null_results_matrix)
def test_match_lengths(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.match_lengths, list)
self.assertEqual(len(rs.match_lengths), rs.nrepetitions)
self.assertEqual(rs.match_lengths, self.expected_match_lengths)
@@ -202,49 +216,57 @@ class TestResultSet(unittest.TestCase):
self.assertEqual(length, self.turns)
def test_scores(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.scores, list)
self.assertEqual(len(rs.scores), rs.nplayers)
self.assertEqual(rs.scores, self.expected_scores)
def test_ranking(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.ranking, list)
self.assertEqual(len(rs.ranking), rs.nplayers)
self.assertEqual(rs.ranking, self.expected_ranking)
def test_ranked_names(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.ranked_names, list)
self.assertEqual(len(rs.ranked_names), rs.nplayers)
self.assertEqual(rs.ranked_names, self.expected_ranked_names)
def test_wins(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.wins, list)
self.assertEqual(len(rs.wins), rs.nplayers)
self.assertEqual(rs.wins, self.expected_wins)
def test_normalised_scores(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.normalised_scores, list)
self.assertEqual(len(rs.normalised_scores), rs.nplayers)
self.assertEqual(rs.normalised_scores, self.expected_normalised_scores)
def test_payoffs(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.payoffs, list)
self.assertEqual(len(rs.payoffs), rs.nplayers)
self.assertEqual(rs.payoffs, self.expected_payoffs)
def test_payoff_matrix(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.payoff_matrix, list)
self.assertEqual(len(rs.payoff_matrix), rs.nplayers)
self.assertEqual(rs.payoff_matrix, self.expected_payoff_matrix)
def test_score_diffs(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.score_diffs, list)
self.assertEqual(len(rs.score_diffs), rs.nplayers)
for i, row in enumerate(rs.score_diffs):
@@ -254,7 +276,8 @@ class TestResultSet(unittest.TestCase):
self.expected_score_diffs[i][j][k])
def test_payoff_diffs_means(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.payoff_diffs_means, list)
self.assertEqual(len(rs.payoff_diffs_means), rs.nplayers)
for i, row in enumerate(rs.payoff_diffs_means):
@@ -263,68 +286,78 @@ class TestResultSet(unittest.TestCase):
self.expected_payoff_diffs_means[i][j])
def test_payoff_stddevs(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.payoff_stddevs, list)
self.assertEqual(len(rs.payoff_stddevs), rs.nplayers)
self.assertEqual(rs.payoff_stddevs, self.expected_payoff_stddevs)
def test_cooperation(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.cooperation, list)
self.assertEqual(len(rs.cooperation), rs.nplayers)
self.assertEqual(rs.cooperation, self.expected_cooperation)
def test_normalised_cooperation(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.normalised_cooperation, list)
self.assertEqual(len(rs.normalised_cooperation), rs.nplayers)
self.assertEqual(rs.normalised_cooperation,
self.expected_normalised_cooperation)
def test_vengeful_cooperation(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.vengeful_cooperation, list)
self.assertEqual(len(rs.vengeful_cooperation), rs.nplayers)
self.assertEqual(rs.vengeful_cooperation,
self.expected_vengeful_cooperation)
def test_cooperating_rating(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.cooperating_rating, list)
self.assertEqual(len(rs.cooperating_rating), rs.nplayers)
self.assertEqual(rs.cooperating_rating,
self.expected_cooperating_rating)
def test_good_partner_matrix(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.good_partner_matrix, list)
self.assertEqual(len(rs.good_partner_matrix), rs.nplayers)
self.assertEqual(rs.good_partner_matrix,
self.expected_good_partner_matrix)
def test_good_partner_rating(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.good_partner_rating, list)
self.assertEqual(len(rs.good_partner_rating), rs.nplayers)
self.assertEqual(rs.good_partner_rating,
self.expected_good_partner_rating)
def test_eigenjesus_rating(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.eigenjesus_rating, list)
self.assertEqual(len(rs.eigenjesus_rating), rs.nplayers)
for j, rate in enumerate(rs.eigenjesus_rating):
self.assertAlmostEqual(rate, self.expected_eigenjesus_rating[j])
def test_eigenmoses_rating(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertIsInstance(rs.eigenmoses_rating, list)
self.assertEqual(len(rs.eigenmoses_rating), rs.nplayers)
for j, rate in enumerate(rs.eigenmoses_rating):
self.assertAlmostEqual(rate, self.expected_eigenmoses_rating[j])
def test_csv(self):
- rs = axelrod.ResultSet(self.players, self.interactions)
+ rs = axelrod.ResultSet(self.players, self.interactions,
+ progress_bar=False)
self.assertEqual(rs.csv(), self.expected_csv)
@@ -341,7 +374,7 @@ class TestResultSetFromFile(unittest.TestCase):
def test_init(self):
- rs = axelrod.ResultSetFromFile(self.tmp_file.name)
+ rs = axelrod.ResultSetFromFile(self.tmp_file.name, progress_bar=False)
players = ['Cooperator', 'Tit For Tat', 'Defector']
self.assertEqual(rs.players, players)
self.assertEqual(rs.nplayers, len(players))
@@ -354,3 +387,9 @@ class TestResultSetFromFile(unittest.TestCase):
(0, 2): [[('C', 'D'), ('C', 'D')]],
(1, 1): [[('C', 'C'), ('C', 'C')]]}
self.assertEqual(rs.interactions, expected_interactions)
+
+
+class TestDecorator(unittest.TestCase):
+ def test_update_progress_bar(self):
+ method = lambda x: None
+ self.assertEqual(axelrod.result_set.update_progress_bar(method)(1), None)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
-e git+https://github.com/Axelrod-Python/Axelrod.git@c919c39d58552c2db4a2719c817cfa3a3c301f92#egg=Axelrod
coverage==7.8.0
cycler==0.12.1
exceptiongroup==1.2.2
hypothesis==6.130.6
iniconfig==2.1.0
kiwisolver==1.4.7
matplotlib==3.3.4
numpy==2.0.2
packaging==24.2
pillow==11.1.0
pluggy==1.5.0
pyparsing==2.1.1
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
six==1.17.0
sortedcontainers==2.4.0
testfixtures==4.9.1
tomli==2.2.1
tqdm==3.4.0
| name: Axelrod
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- coverage==7.8.0
- cycler==0.12.1
- exceptiongroup==1.2.2
- hypothesis==6.130.6
- iniconfig==2.1.0
- kiwisolver==1.4.7
- matplotlib==3.3.4
- numpy==2.0.2
- packaging==24.2
- pillow==11.1.0
- pluggy==1.5.0
- pyparsing==2.1.1
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- six==1.17.0
- sortedcontainers==2.4.0
- testfixtures==4.9.1
- tomli==2.2.1
- tqdm==3.4.0
prefix: /opt/conda/envs/Axelrod
| [
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_cooperating_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_eigenjesus_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_eigenmoses_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_good_partner_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_good_partner_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_match_lengths",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_normalised_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_normalised_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_null_results_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_diffs_means",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_stddevs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoffs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_ranked_names",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_ranking",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_score_diffs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_vengeful_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_wins",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_with_progress_bar",
"axelrod/tests/unit/test_resultset.py::TestResultSetFromFile::test_init",
"axelrod/tests/unit/test_resultset.py::TestDecorator::test_update_progress_bar"
] | [
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_csv"
] | [] | [] | MIT License | 564 | 2,656 | [
"axelrod/result_set.py",
"axelrod/tournament.py"
] |
|
zalando-stups__pierone-cli-37 | 991c05e9c7496b2aac071d85d0a9ca6b8afcf9dd | 2016-05-31 08:47:53 | 560cae1b4fc185c7a8aa3a1a50e0a96b2c7dd8e7 | diff --git a/pierone/cli.py b/pierone/cli.py
index 1af5790..50dba86 100644
--- a/pierone/cli.py
+++ b/pierone/cli.py
@@ -232,7 +232,8 @@ def get_clair_features(url, layer_id, access_token):
return []
else:
r.raise_for_status()
- return r.json()['Layer']['Features']
+
+ return r.json()['Layer'].get('Features', [])
@cli.command()
| pierone fails with backtrace when the CVE status is COULDNT_FIGURE_OUT
```
Traceback (most recent call last):
File "/usr/local/bin/pierone", line 11, in <module>
sys.exit(main())
File "/usr/local/lib/python3.4/dist-packages/pierone/cli.py", line 485, in main
cli()
File "/usr/local/lib/python3.4/dist-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.4/dist-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.4/dist-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.4/dist-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/click/decorators.py", line 27, in new_func
return f(get_current_context().obj, *args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/pierone/cli.py", line 313, in cves
installed_software = get_clair_features(config.get('clair_url'), artifact_tag.get('clair_id'), token)
File "/usr/local/lib/python3.4/dist-packages/pierone/cli.py", line 235, in get_clair_features
return r.json()['Layer']['Features']
KeyError: 'Features'
``` | zalando-stups/pierone-cli | diff --git a/tests/test_cli.py b/tests/test_cli.py
index 6282253..087d27d 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -221,6 +221,61 @@ def test_cves(monkeypatch, tmpdir):
assert re.match('[^\n]+\n[^\n]+HIGH', result.output), 'Results should be ordered by highest priority'
+def test_no_cves_found(monkeypatch, tmpdir):
+ pierone_service_payload = [
+ # Former pierone payload
+ {
+ 'name': '1.0',
+ 'created_by': 'myuser',
+ 'created': '2015-08-20T08:14:59.432Z'
+ },
+ # New pierone payload with clair but no information about CVEs
+ {
+ "name": "1.1",
+ "created": "2016-05-19T15:23:41.065Z",
+ "created_by": "myuser",
+ "image": "sha256:here",
+ "clair_id": None,
+ "severity_fix_available": None,
+ "severity_no_fix_available": None
+ },
+ # New pierone payload with clair input and info about CVEs
+ {
+ "name": "1.2",
+ "created": "2016-05-23T13:29:17.753Z",
+ "created_by": "myuser",
+ "image": "sha256:here",
+ "clair_id": "sha256:here",
+ "severity_fix_available": "High",
+ "severity_no_fix_available": "Medium"
+ }
+ ]
+
+ no_cves_clair_payload = {
+ "Layer": {
+ "Name": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
+ "NamespaceName": "ubuntu:16.04",
+ "ParentName": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
+ "IndexedByVersion": 2
+ }
+ }
+
+ response = MagicMock()
+ response.json.side_effect = [
+ pierone_service_payload,
+ no_cves_clair_payload
+ ]
+
+ runner = CliRunner()
+ monkeypatch.setattr('stups_cli.config.load_config', lambda x: {'url': 'foobar', 'clair_url': 'barfoo'})
+ monkeypatch.setattr('zign.api.get_token', MagicMock(return_value='tok123'))
+ monkeypatch.setattr('os.path.expanduser', lambda x: x.replace('~', str(tmpdir)))
+ monkeypatch.setattr('pierone.api.session.get', MagicMock(return_value=response))
+ with runner.isolated_filesystem():
+ result = runner.invoke(cli, ['cves', 'myteam', 'myart', '1.2'], catch_exceptions=False)
+ assert re.match('^[^\n]+\n$', result.output), 'No results should be shown'
+
+
def test_latest(monkeypatch, tmpdir):
response = MagicMock()
response.json.return_value = [
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==2.7.0
exceptiongroup==1.2.2
execnet==2.1.1
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
PyYAML==6.0.2
requests==2.32.3
stups-cli-support==1.1.22
-e git+https://github.com/zalando-stups/pierone-cli.git@991c05e9c7496b2aac071d85d0a9ca6b8afcf9dd#egg=stups_pierone
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
| name: pierone-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==2.7.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- requests==2.32.3
- stups-cli-support==1.1.22
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
prefix: /opt/conda/envs/pierone-cli
| [
"tests/test_cli.py::test_no_cves_found"
] | [] | [
"tests/test_cli.py::test_version",
"tests/test_cli.py::test_login",
"tests/test_cli.py::test_login_given_url_option",
"tests/test_cli.py::test_scm_source",
"tests/test_cli.py::test_image",
"tests/test_cli.py::test_tags",
"tests/test_cli.py::test_cves",
"tests/test_cli.py::test_latest",
"tests/test_cli.py::test_latest_not_found",
"tests/test_cli.py::test_url_without_scheme"
] | [] | Apache License 2.0 | 565 | 122 | [
"pierone/cli.py"
] |
|
tableau__document-api-python-15 | 07aad9550d3d36a4d74c4751832c50fe81882a01 | 2016-06-02 00:21:16 | 07aad9550d3d36a4d74c4751832c50fe81882a01 | diff --git a/tableaudocumentapi/datasource.py b/tableaudocumentapi/datasource.py
index 93ebe55..617004a 100644
--- a/tableaudocumentapi/datasource.py
+++ b/tableaudocumentapi/datasource.py
@@ -72,7 +72,7 @@ class Datasource(object):
"""
# save the file
- self._datasourceTree.write(self._filename)
+ self._datasourceTree.write(self._filename, encoding="utf-8", xml_declaration=True)
def save_as(self, new_filename):
"""
@@ -85,7 +85,7 @@ class Datasource(object):
Nothing.
"""
- self._datasourceTree.write(new_filename)
+ self._datasourceTree.write(new_filename, encoding="utf-8", xml_declaration=True)
###########
# name
diff --git a/tableaudocumentapi/workbook.py b/tableaudocumentapi/workbook.py
index 67dbc32..889f746 100644
--- a/tableaudocumentapi/workbook.py
+++ b/tableaudocumentapi/workbook.py
@@ -76,7 +76,7 @@ class Workbook(object):
"""
# save the file
- self._workbookTree.write(self._filename)
+ self._workbookTree.write(self._filename, encoding="utf-8", xml_declaration=True)
def save_as(self, new_filename):
"""
@@ -90,7 +90,7 @@ class Workbook(object):
"""
- self._workbookTree.write(new_filename)
+ self._workbookTree.write(new_filename, encoding="utf-8", xml_declaration=True)
###########################################################################
#
| Tabcmd publish with .twb created via Document API
I can successfully create a .twb file via the Document API, but attempting to publish it to my Tableau Server via Tabcmd results in an unexpected error:
**Bad request
unexpected error occurred opening the packaged workbook.**
Attached is the template workbook created in Tableau Desktop (superstore_sales.twb) and one of the workbooks created from that template via the Document API (superstore_sales_arizona.twb)
[superstore_twbs.zip](https://github.com/tableau/document-api-python/files/285303/superstore_twbs.zip)
| tableau/document-api-python | diff --git a/test.py b/test.py
index fd7d1bd..5606005 100644
--- a/test.py
+++ b/test.py
@@ -17,6 +17,7 @@ TABLEAU_10_WORKBOOK = '''<?xml version='1.0' encoding='utf-8' ?><workbook source
TABLEAU_CONNECTION_XML = ET.fromstring(
'''<connection authentication='sspi' class='sqlserver' dbname='TestV1' odbc-native-protocol='yes' one-time-sql='' server='mssql2012.test.tsi.lan' username=''></connection>''')
+
class HelperMethodTests(unittest.TestCase):
def test_is_valid_file_with_valid_inputs(self):
@@ -39,7 +40,6 @@ class ConnectionParserTests(unittest.TestCase):
self.assertIsInstance(connections[0], Connection)
self.assertEqual(connections[0].dbname, 'TestV1')
-
def test_can_extract_federated_connections(self):
parser = ConnectionParser(ET.fromstring(TABLEAU_10_TDS), '10.0')
connections = parser.get_connections()
@@ -97,6 +97,17 @@ class DatasourceModelTests(unittest.TestCase):
new_tds = Datasource.from_file(self.tds_file.name)
self.assertEqual(new_tds.connections[0].dbname, 'newdb.test.tsi.lan')
+ def test_save_has_xml_declaration(self):
+ original_tds = Datasource.from_file(self.tds_file.name)
+ original_tds.connections[0].dbname = 'newdb.test.tsi.lan'
+
+ original_tds.save()
+
+ with open(self.tds_file.name) as f:
+ first_line = f.readline().strip() # first line should be xml tag
+ self.assertEqual(
+ first_line, "<?xml version='1.0' encoding='utf-8'?>")
+
class WorkbookModelTests(unittest.TestCase):
@@ -122,7 +133,8 @@ class WorkbookModelTests(unittest.TestCase):
original_wb.save()
new_wb = Workbook(self.workbook_file.name)
- self.assertEqual(new_wb.datasources[0].connections[0].dbname, 'newdb.test.tsi.lan')
+ self.assertEqual(new_wb.datasources[0].connections[
+ 0].dbname, 'newdb.test.tsi.lan')
class WorkbookModelV10Tests(unittest.TestCase):
@@ -152,7 +164,19 @@ class WorkbookModelV10Tests(unittest.TestCase):
original_wb.save()
new_wb = Workbook(self.workbook_file.name)
- self.assertEqual(new_wb.datasources[0].connections[0].dbname, 'newdb.test.tsi.lan')
+ self.assertEqual(new_wb.datasources[0].connections[
+ 0].dbname, 'newdb.test.tsi.lan')
+
+ def test_save_has_xml_declaration(self):
+ original_wb = Workbook(self.workbook_file.name)
+ original_wb.datasources[0].connections[0].dbname = 'newdb.test.tsi.lan'
+
+ original_wb.save()
+
+ with open(self.workbook_file.name) as f:
+ first_line = f.readline().strip() # first line should be xml tag
+ self.assertEqual(
+ first_line, "<?xml version='1.0' encoding='utf-8'?>")
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/tableau/document-api-python.git@07aad9550d3d36a4d74c4751832c50fe81882a01#egg=tableaudocumentapi
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: document-api-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/document-api-python
| [
"test.py::DatasourceModelTests::test_save_has_xml_declaration",
"test.py::WorkbookModelV10Tests::test_save_has_xml_declaration"
] | [] | [
"test.py::HelperMethodTests::test_is_valid_file_with_invalid_inputs",
"test.py::HelperMethodTests::test_is_valid_file_with_valid_inputs",
"test.py::ConnectionParserTests::test_can_extract_federated_connections",
"test.py::ConnectionParserTests::test_can_extract_legacy_connection",
"test.py::ConnectionModelTests::test_can_read_attributes_from_connection",
"test.py::ConnectionModelTests::test_can_write_attributes_to_connection",
"test.py::DatasourceModelTests::test_can_extract_connection",
"test.py::DatasourceModelTests::test_can_extract_datasource_from_file",
"test.py::DatasourceModelTests::test_can_save_tds",
"test.py::WorkbookModelTests::test_can_extract_datasource",
"test.py::WorkbookModelTests::test_can_update_datasource_connection_and_save",
"test.py::WorkbookModelV10Tests::test_can_extract_datasourceV10",
"test.py::WorkbookModelV10Tests::test_can_update_datasource_connection_and_saveV10"
] | [] | MIT License | 569 | 397 | [
"tableaudocumentapi/datasource.py",
"tableaudocumentapi/workbook.py"
] |
|
falconry__falcon-820 | 50b1759ee7f7b54a872c01c85152f8648e350399 | 2016-06-07 21:07:32 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | kgriffs: Looks like we accidentally had some overlap in effort between #729 and #811. I attempted to combine the two into a new PR with a few tweeks to param naming and docstrings. Everyone please take a look and provide feedback. Thanks!
codecov-io: ## [Current coverage][cc-pull] is **100%**
> Merging [#820][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #820 diff @@
==========================================
Files 29 29
Lines 1789 1799 +10
Methods 0 0
Messages 0 0
Branches 299 303 +4
==========================================
+ Hits 1789 1799 +10
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last updated by [cf3cb50...d4e630a][cc-compare]
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/falconry/falcon/compare/cf3cb5029a51d4b7c980c7851328a02046db9b3e...d4e630a70cd1774519851a78a1d1fb80a83e8b7e
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/820?src=pr
orcsly: Looks good, thanks!
jmvrbanac: :+1:
qwesda: The parameter name `csv` for the `parse_query_string` could have a more descriptive name. `parse_qs_csv` sounds logical to me, since the option general option is called `auto_parse_qs_csv`.
One evaluation in `parse_query_string` can be short-circuited:
[https://github.com/falconry/falcon/pull/820/files#diff-7d2a078ae72702ba816f18a9aa1c48b9R319](https://github.com/falconry/falcon/pull/820/files#diff-7d2a078ae72702ba816f18a9aa1c48b9R319)
Otherwise it looks good.
kgriffs: @qwesda since the method name already implies we are working with parsing query strings, is it necessary to also include that in the name of the kwarg?
qwesda: @kgriffs: I just thought something more explicit would be more in line with `keep_blank_qs_values`, which is pretty verbose. Having one verbose param and one very non-verbose seemed weird.
kgriffs: @qwesda, ah, good point! I'll switch to `parse_qs_csv` and we can see how that looks.
kgriffs: @qwesda @jmvrbanac @orcsly I think this is ready for final review.
qwesda: @kgriffs looks ok
orcsly: Yup looks good. Thanks! | diff --git a/falcon/request.py b/falcon/request.py
index 597ac80..7359991 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -284,6 +284,7 @@ class Request(object):
self._params = parse_query_string(
self.query_string,
keep_blank_qs_values=self.options.keep_blank_qs_values,
+ parse_qs_csv=self.options.auto_parse_qs_csv,
)
else:
@@ -1153,6 +1154,7 @@ class Request(object):
extra_params = parse_query_string(
body,
keep_blank_qs_values=self.options.keep_blank_qs_values,
+ parse_qs_csv=self.options.auto_parse_qs_csv,
)
self._params.update(extra_params)
@@ -1190,8 +1192,11 @@ class RequestOptions(object):
"""This class is a container for ``Request`` options.
Attributes:
- keep_blank_qs_values (bool): Set to ``True`` in order to retain
- blank values in query string parameters (default ``False``).
+ keep_blank_qs_values (bool): Set to ``True`` to keep query string
+ fields even if they do not have a value (default ``False``).
+ For comma-separated values, this option also determines
+ whether or not empty elements in the parsed list are
+ retained.
auto_parse_form_urlencoded: Set to ``True`` in order to
automatically consume the request stream and merge the
results into the request's query string params when the
@@ -1202,18 +1207,29 @@ class RequestOptions(object):
Note:
The character encoding for fields, before
percent-encoding non-ASCII bytes, is assumed to be
- UTF-8. The special `_charset_` field is ignored if present.
+ UTF-8. The special `_charset_` field is ignored if
+ present.
Falcon expects form-encoded request bodies to be
encoded according to the standard W3C algorithm (see
also http://goo.gl/6rlcux).
+ auto_parse_qs_csv: Set to ``False`` to treat commas in a query
+ string value as literal characters, rather than as a comma-
+ separated list (default ``True``). When this option is
+ enabled, the value will be split on any non-percent-encoded
+ commas. Disable this option when encoding lists as multiple
+ occurrences of the same parameter, and when values may be
+ encoded in alternative formats in which the comma character
+ is significant.
"""
__slots__ = (
'keep_blank_qs_values',
'auto_parse_form_urlencoded',
+ 'auto_parse_qs_csv',
)
def __init__(self):
self.keep_blank_qs_values = False
self.auto_parse_form_urlencoded = False
+ self.auto_parse_qs_csv = True
diff --git a/falcon/util/misc.py b/falcon/util/misc.py
index 5b02f05..12eb481 100644
--- a/falcon/util/misc.py
+++ b/falcon/util/misc.py
@@ -148,7 +148,7 @@ def http_date_to_dt(http_date, obs_date=False):
raise ValueError('time data %r does not match known formats' % http_date)
-def to_query_str(params):
+def to_query_str(params, comma_delimited_lists=True):
"""Converts a dictionary of params to a query string.
Args:
@@ -157,6 +157,10 @@ def to_query_str(params):
something that can be converted into a ``str``. If `params`
is a ``list``, it will be converted to a comma-delimited string
of values (e.g., 'thing=1,2,3')
+ comma_delimited_lists (bool, default ``True``):
+ If set to ``False`` encode lists by specifying multiple instances
+ of the parameter (e.g., 'thing=1&thing=2&thing=3')
+
Returns:
str: A URI query string including the '?' prefix, or an empty string
@@ -175,7 +179,20 @@ def to_query_str(params):
elif v is False:
v = 'false'
elif isinstance(v, list):
- v = ','.join(map(str, v))
+ if comma_delimited_lists:
+ v = ','.join(map(str, v))
+ else:
+ for list_value in v:
+ if list_value is True:
+ list_value = 'true'
+ elif list_value is False:
+ list_value = 'false'
+ else:
+ list_value = str(list_value)
+
+ query_str += k + '=' + list_value + '&'
+
+ continue
else:
v = str(v)
diff --git a/falcon/util/uri.py b/falcon/util/uri.py
index 2f68ec9..63ca45e 100644
--- a/falcon/util/uri.py
+++ b/falcon/util/uri.py
@@ -246,11 +246,12 @@ else:
return decoded_uri.decode('utf-8', 'replace')
-def parse_query_string(query_string, keep_blank_qs_values=False):
+def parse_query_string(query_string, keep_blank_qs_values=False,
+ parse_qs_csv=True):
"""Parse a query string into a dict.
Query string parameters are assumed to use standard form-encoding. Only
- parameters with values are parsed. for example, given 'foo=bar&flag',
+ parameters with values are returned. For example, given 'foo=bar&flag',
this function would ignore 'flag' unless the `keep_blank_qs_values` option
is set.
@@ -269,8 +270,16 @@ def parse_query_string(query_string, keep_blank_qs_values=False):
Args:
query_string (str): The query string to parse.
- keep_blank_qs_values (bool): If set to ``True``, preserves boolean
- fields and fields with no content as blank strings.
+ keep_blank_qs_values (bool): Set to ``True`` to return fields even if
+ they do not have a value (default ``False``). For comma-separated
+ values, this option also determines whether or not empty elements
+ in the parsed list are retained.
+ parse_qs_csv: Set to ``False`` in order to disable splitting query
+ parameters on ``,`` (default ``True``). Depending on the user agent,
+ encoding lists as multiple occurrences of the same parameter might
+ be preferable. In this case, setting `parse_qs_csv` to ``False``
+ will cause the framework to treat commas as literal characters in
+ each occurring parameter value.
Returns:
dict: A dictionary of (*name*, *value*) pairs, one per query
@@ -309,7 +318,7 @@ def parse_query_string(query_string, keep_blank_qs_values=False):
params[k] = [old_value, decode(v)]
else:
- if ',' in v:
+ if parse_qs_csv and ',' in v:
# NOTE(kgriffs): Falcon supports a more compact form of
# lists, in which the elements are comma-separated and
# assigned to a single param instance. If it turns out that
| Add option to opt-out from comma separated value parsing
I'm porting a project to Falcon and I stumbled upon an issue regarding its parsing of CSV values inside URIs. Let's say I have filtering engine that accepts queries such as this:
http://great.dude/api/cars?query=added:yesterday,today+spoilers:red
I obviously want to make `req.get_param('query')` return `'added:yesterday,today spoilers:red'`, and not `['added:yesterday', 'today spoilers:red']`.
Right now this [isn't really configurable](https://github.com/falconry/falcon/blob/35987b2be85456f431bbda509e884a8b0b20ed11/falcon/util/uri.py#L312-L328) and I need to check if `get_param()` returns a `list` and then join it back if needed, which looks sort of silly. Fortunately, the ability to use custom request classes alleviates the issue to some extent.
I see a few ways to improve things upstream:
1. Offer explicit `get_param_as_string` that will possibly do `','.join(...)` under the hood.
2. Add an option to disable this mechanism as an additional option to `Api`.
3. Add an option to disable this mechanism as an additional option to `add_route()`.
4. Change `get_param` to always return string.
Option 4 makes the most sense to me, but it breaks BC. If option 4 is not feasible, I'd went with option 1. | falconry/falcon | diff --git a/tests/test_options.py b/tests/test_options.py
index b3d9812..a3b8b72 100644
--- a/tests/test_options.py
+++ b/tests/test_options.py
@@ -1,16 +1,32 @@
+import ddt
+
from falcon.request import RequestOptions
import falcon.testing as testing
[email protected]
class TestRequestOptions(testing.TestBase):
- def test_correct_options(self):
+ def test_option_defaults(self):
options = RequestOptions()
+
self.assertFalse(options.keep_blank_qs_values)
- options.keep_blank_qs_values = True
- self.assertTrue(options.keep_blank_qs_values)
- options.keep_blank_qs_values = False
- self.assertFalse(options.keep_blank_qs_values)
+ self.assertFalse(options.auto_parse_form_urlencoded)
+ self.assertTrue(options.auto_parse_qs_csv)
+
+ @ddt.data(
+ 'keep_blank_qs_values',
+ 'auto_parse_form_urlencoded',
+ 'auto_parse_qs_csv',
+ )
+ def test_options_toggle(self, option_name):
+ options = RequestOptions()
+
+ setattr(options, option_name, True)
+ self.assertTrue(getattr(options, option_name))
+
+ setattr(options, option_name, False)
+ self.assertFalse(getattr(options, option_name))
def test_incorrect_options(self):
options = RequestOptions()
diff --git a/tests/test_query_params.py b/tests/test_query_params.py
index c588f23..62c906d 100644
--- a/tests/test_query_params.py
+++ b/tests/test_query_params.py
@@ -65,6 +65,60 @@ class _TestQueryParams(testing.TestBase):
self.assertEqual(req.get_param_as_list('id', int), [23, 42])
self.assertEqual(req.get_param('q'), u'\u8c46 \u74e3')
+ def test_option_auto_parse_qs_csv_simple_false(self):
+ self.api.req_options.auto_parse_qs_csv = False
+
+ query_string = 'id=23,42,,&id=2'
+ self.simulate_request('/', query_string=query_string)
+
+ req = self.resource.req
+
+ self.assertEqual(req.params['id'], [u'23,42,,', u'2'])
+ self.assertIn(req.get_param('id'), [u'23,42,,', u'2'])
+ self.assertEqual(req.get_param_as_list('id'), [u'23,42,,', u'2'])
+
+ def test_option_auto_parse_qs_csv_simple_true(self):
+ self.api.req_options.auto_parse_qs_csv = True
+
+ query_string = 'id=23,42,,&id=2'
+ self.simulate_request('/', query_string=query_string)
+
+ req = self.resource.req
+
+ self.assertEqual(req.params['id'], [u'23', u'42', u'2'])
+ self.assertIn(req.get_param('id'), [u'23', u'42', u'2'])
+ self.assertEqual(req.get_param_as_list('id', int), [23, 42, 2])
+
+ def test_option_auto_parse_qs_csv_complex_false(self):
+ self.api.req_options.auto_parse_qs_csv = False
+
+ encoded_json = '%7B%22msg%22:%22Testing%201,2,3...%22,%22code%22:857%7D'
+ decoded_json = '{"msg":"Testing 1,2,3...","code":857}'
+
+ query_string = ('colors=red,green,blue&limit=1'
+ '&list-ish1=f,,x&list-ish2=,0&list-ish3=a,,,b'
+ '&empty1=&empty2=,&empty3=,,'
+ '&thing=' + encoded_json)
+
+ self.simulate_request('/', query_string=query_string)
+
+ req = self.resource.req
+
+ self.assertIn(req.get_param('colors'), 'red,green,blue')
+ self.assertEqual(req.get_param_as_list('colors'), [u'red,green,blue'])
+
+ self.assertEqual(req.get_param_as_list('limit'), ['1'])
+
+ self.assertEqual(req.get_param_as_list('empty1'), None)
+ self.assertEqual(req.get_param_as_list('empty2'), [u','])
+ self.assertEqual(req.get_param_as_list('empty3'), [u',,'])
+
+ self.assertEqual(req.get_param_as_list('list-ish1'), [u'f,,x'])
+ self.assertEqual(req.get_param_as_list('list-ish2'), [u',0'])
+ self.assertEqual(req.get_param_as_list('list-ish3'), [u'a,,,b'])
+
+ self.assertEqual(req.get_param('thing'), decoded_json)
+
def test_bad_percentage(self):
query_string = 'x=%%20%+%&y=peregrine&z=%a%z%zz%1%20e'
self.simulate_request('/', query_string=query_string)
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 957a959..6b5f75d 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -128,6 +128,16 @@ class TestFalconUtils(testtools.TestCase):
falcon.to_query_str({'things': ['a', 'b']}),
'?things=a,b')
+ expected = ('?things=a&things=b&things=&things=None'
+ '&things=true&things=false&things=0')
+
+ actual = falcon.to_query_str(
+ {'things': ['a', 'b', '', None, True, False, 0]},
+ comma_delimited_lists=False
+ )
+
+ self.assertEqual(actual, expected)
+
def test_pack_query_params_several(self):
garbage_in = {
'limit': 17,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@50b1759ee7f7b54a872c01c85152f8648e350399#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_options.py::TestRequestOptions::test_option_defaults",
"tests/test_options.py::TestRequestOptions::test_options_toggle_3_auto_parse_qs_csv",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_one"
] | [
"tests/test_utils.py::TestFalconUtils::test_deprecated_decorator"
] | [
"tests/test_options.py::TestRequestOptions::test_incorrect_options",
"tests/test_options.py::TestRequestOptions::test_options_toggle_1_keep_blank_qs_values",
"tests/test_options.py::TestRequestOptions::test_options_toggle_2_auto_parse_form_urlencoded",
"tests/test_query_params.py::_TestQueryParams::test_allowed_names",
"tests/test_query_params.py::_TestQueryParams::test_bad_percentage",
"tests/test_query_params.py::_TestQueryParams::test_blank",
"tests/test_query_params.py::_TestQueryParams::test_boolean",
"tests/test_query_params.py::_TestQueryParams::test_boolean_blank",
"tests/test_query_params.py::_TestQueryParams::test_get_date_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_date_store",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_store",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_valid",
"tests/test_query_params.py::_TestQueryParams::test_int",
"tests/test_query_params.py::_TestQueryParams::test_int_neg",
"tests/test_query_params.py::_TestQueryParams::test_list_transformer",
"tests/test_query_params.py::_TestQueryParams::test_list_type",
"tests/test_query_params.py::_TestQueryParams::test_list_type_blank",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::_TestQueryParams::test_none",
"tests/test_query_params.py::_TestQueryParams::test_param_property",
"tests/test_query_params.py::_TestQueryParams::test_percent_encoded",
"tests/test_query_params.py::_TestQueryParams::test_required_1_get_param",
"tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::_TestQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParams::test_allowed_names",
"tests/test_query_params.py::PostQueryParams::test_bad_percentage",
"tests/test_query_params.py::PostQueryParams::test_blank",
"tests/test_query_params.py::PostQueryParams::test_boolean",
"tests/test_query_params.py::PostQueryParams::test_boolean_blank",
"tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse",
"tests/test_query_params.py::PostQueryParams::test_get_date_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_date_store",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::PostQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_dict_store",
"tests/test_query_params.py::PostQueryParams::test_get_dict_valid",
"tests/test_query_params.py::PostQueryParams::test_int",
"tests/test_query_params.py::PostQueryParams::test_int_neg",
"tests/test_query_params.py::PostQueryParams::test_list_transformer",
"tests/test_query_params.py::PostQueryParams::test_list_type",
"tests/test_query_params.py::PostQueryParams::test_list_type_blank",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::PostQueryParams::test_non_ascii",
"tests/test_query_params.py::PostQueryParams::test_none",
"tests/test_query_params.py::PostQueryParams::test_param_property",
"tests/test_query_params.py::PostQueryParams::test_percent_encoded",
"tests/test_query_params.py::PostQueryParams::test_required_1_get_param",
"tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::PostQueryParams::test_simple",
"tests/test_query_params.py::GetQueryParams::test_allowed_names",
"tests/test_query_params.py::GetQueryParams::test_bad_percentage",
"tests/test_query_params.py::GetQueryParams::test_blank",
"tests/test_query_params.py::GetQueryParams::test_boolean",
"tests/test_query_params.py::GetQueryParams::test_boolean_blank",
"tests/test_query_params.py::GetQueryParams::test_get_date_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_date_store",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::GetQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_dict_store",
"tests/test_query_params.py::GetQueryParams::test_get_dict_valid",
"tests/test_query_params.py::GetQueryParams::test_int",
"tests/test_query_params.py::GetQueryParams::test_int_neg",
"tests/test_query_params.py::GetQueryParams::test_list_transformer",
"tests/test_query_params.py::GetQueryParams::test_list_type",
"tests/test_query_params.py::GetQueryParams::test_list_type_blank",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::GetQueryParams::test_none",
"tests/test_query_params.py::GetQueryParams::test_param_property",
"tests/test_query_params.py::GetQueryParams::test_percent_encoded",
"tests/test_query_params.py::GetQueryParams::test_required_1_get_param",
"tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::GetQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default",
"tests/test_utils.py::TestFalconUtils::test_dt_to_http",
"tests/test_utils.py::TestFalconUtils::test_get_http_status",
"tests/test_utils.py::TestFalconUtils::test_http_date_to_dt",
"tests/test_utils.py::TestFalconUtils::test_http_now",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_none",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_several",
"tests/test_utils.py::TestFalconUtils::test_parse_host",
"tests/test_utils.py::TestFalconUtils::test_parse_query_string",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_decode_models_stdlib_unquote_plus",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_models_stdlib_quote",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_value_models_stdlib_quote_safe_tilde",
"tests/test_utils.py::TestFalconUtils::test_uri_decode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode_value",
"tests/test_utils.py::TestFalconTesting::test_decode_empty_result",
"tests/test_utils.py::TestFalconTesting::test_httpnow_alias_for_backwards_compat",
"tests/test_utils.py::TestFalconTesting::test_none_header_value_in_create_environ",
"tests/test_utils.py::TestFalconTesting::test_path_escape_chars_in_create_environ",
"tests/test_utils.py::TestFalconTestCase::test_cached_text_in_result",
"tests/test_utils.py::TestFalconTestCase::test_path_must_start_with_slash",
"tests/test_utils.py::TestFalconTestCase::test_query_string",
"tests/test_utils.py::TestFalconTestCase::test_query_string_in_path",
"tests/test_utils.py::TestFalconTestCase::test_query_string_no_question",
"tests/test_utils.py::TestFalconTestCase::test_simple_resource_body_json_xor",
"tests/test_utils.py::TestFalconTestCase::test_status",
"tests/test_utils.py::TestFalconTestCase::test_wsgi_iterable_not_closeable",
"tests/test_utils.py::FancyTestCase::test_something"
] | [] | Apache License 2.0 | 578 | 1,655 | [
"falcon/request.py",
"falcon/util/misc.py",
"falcon/util/uri.py"
] |
cdent__gabbi-153 | 0a8a3b8faf9a900fd132d9b147f67a851d52f178 | 2016-06-12 20:11:11 | 0a8a3b8faf9a900fd132d9b147f67a851d52f178 | cdent: @jd and @EmilienM, this good for you guys?
EmilienM: :+1: | diff --git a/gabbi/driver.py b/gabbi/driver.py
index 33c0a98..49088fa 100644
--- a/gabbi/driver.py
+++ b/gabbi/driver.py
@@ -39,7 +39,8 @@ from gabbi import utils
def build_tests(path, loader, host=None, port=8001, intercept=None,
test_loader_name=None, fixture_module=None,
- response_handlers=None, prefix='', require_ssl=False):
+ response_handlers=None, prefix='', require_ssl=False,
+ url=None):
"""Read YAML files from a directory to create tests.
Each YAML file represents an ordered sequence of HTTP requests.
@@ -54,6 +55,7 @@ def build_tests(path, loader, host=None, port=8001, intercept=None,
:param response_handers: ResponseHandler classes.
:type response_handlers: List of ResponseHandler classes.
:param prefix: A URL prefix for all URLs that are not fully qualified.
+ :param url: A full URL to test against. Replaces host, port and prefix.
:param require_ssl: If ``True``, make all tests default to using SSL.
:rtype: TestSuite containing multiple TestSuites (one for each YAML file).
"""
@@ -63,6 +65,12 @@ def build_tests(path, loader, host=None, port=8001, intercept=None,
if not bool(host) ^ bool(intercept):
raise AssertionError('must specify exactly one of host or intercept')
+ # If url is being used, reset host, port and prefix.
+ if url:
+ host, port, prefix, force_ssl = utils.host_info_from_target(url)
+ if force_ssl and not require_ssl:
+ require_ssl = force_ssl
+
if test_loader_name is None:
test_loader_name = inspect.stack()[1]
test_loader_name = os.path.splitext(os.path.basename(
@@ -97,7 +105,7 @@ def build_tests(path, loader, host=None, port=8001, intercept=None,
def py_test_generator(test_dir, host=None, port=8001, intercept=None,
prefix=None, test_loader_name=None,
fixture_module=None, response_handlers=None,
- require_ssl=False):
+ require_ssl=False, url=None):
"""Generate tests cases for py.test
This uses build_tests to create TestCases and then yields them in
@@ -110,7 +118,8 @@ def py_test_generator(test_dir, host=None, port=8001, intercept=None,
test_loader_name=test_loader_name,
fixture_module=fixture_module,
response_handlers=response_handlers,
- prefix=prefix, require_ssl=require_ssl)
+ prefix=prefix, require_ssl=require_ssl,
+ url=url)
for test in tests:
if hasattr(test, '_tests'):
diff --git a/gabbi/runner.py b/gabbi/runner.py
index 3411dbe..d4e79d5 100644
--- a/gabbi/runner.py
+++ b/gabbi/runner.py
@@ -17,8 +17,6 @@ from importlib import import_module
import sys
import unittest
-from six.moves.urllib import parse as urlparse
-
from gabbi import case
from gabbi import handlers
from gabbi.reporter import ConciseTestRunner
@@ -93,7 +91,7 @@ def run():
)
args = parser.parse_args()
- host, port, prefix, force_ssl = process_target_args(
+ host, port, prefix, force_ssl = utils.host_info_from_target(
args.target, args.prefix)
# Initialize response handlers.
@@ -113,31 +111,6 @@ def run():
sys.exit(not result.wasSuccessful())
-def process_target_args(target, prefix):
- """Turn the argparse args into a host, port and prefix."""
- force_ssl = False
- split_url = urlparse.urlparse(target)
-
- if split_url.scheme:
- if split_url.scheme == 'https':
- force_ssl = True
- return split_url.hostname, split_url.port, split_url.path, force_ssl
- else:
- target = target
- prefix = prefix
-
- if ':' in target and '[' not in target:
- host, port = target.rsplit(':', 1)
- elif ']:' in target:
- host, port = target.rsplit(':', 1)
- else:
- host = target
- port = None
- host = host.replace('[', '').replace(']', '')
-
- return host, port, prefix, force_ssl
-
-
def initialize_handlers(response_handlers):
custom_response_handlers = []
for import_path in response_handlers or []:
diff --git a/gabbi/utils.py b/gabbi/utils.py
index 3de040d..172b4bf 100644
--- a/gabbi/utils.py
+++ b/gabbi/utils.py
@@ -126,6 +126,31 @@ def not_binary(content_type):
content_type.startswith('application/json'))
+def host_info_from_target(target, prefix=None):
+ """Turn url or host:port and target into test destination."""
+ force_ssl = False
+ split_url = urlparse.urlparse(target)
+
+ if split_url.scheme:
+ if split_url.scheme == 'https':
+ force_ssl = True
+ return split_url.hostname, split_url.port, split_url.path, force_ssl
+ else:
+ target = target
+ prefix = prefix
+
+ if ':' in target and '[' not in target:
+ host, port = target.rsplit(':', 1)
+ elif ']:' in target:
+ host, port = target.rsplit(':', 1)
+ else:
+ host = target
+ port = None
+ host = host.replace('[', '').replace(']', '')
+
+ return host, port, prefix, force_ssl
+
+
def _colorize(color, message):
"""Add a color to the message."""
try:
| In 'live' testing scenarios argument passing to build_tests is convoluted and SSL may not work
If you want to use `build_tests` to create real TestCases against a live server, it's likely you know the URL, and that would be the most convenient thing to pass instead of having to parse out the host, port and prefix (script_name) and then pass those.
In addition, if you have a URL you know whether your server uses SSL, but the tests may not have been written to use SSL (with an `ssl: true` entry). Because of the test-building process this is a bit awkward at the moment. It would be better to be able to say "yeah, this is SSL" for the whole run. | cdent/gabbi
index 0b2ce0a..8f6bca0 100644
--- a/gabbi/tests/test_driver.py
+++ b/gabbi/tests/test_driver.py
@@ -70,3 +70,20 @@ class DriverTest(unittest.TestCase):
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
self.assertEqual('http://localhost:8001/', full_url)
+
+ def test_build_url_target(self):
+ suite = driver.build_tests(self.test_dir, self.loader,
+ host='localhost', port='999',
+ url='https://example.com:1024/theend')
+ first_test = suite._tests[0]._tests[0]
+ full_url = first_test._parse_url(first_test.test_data['url'])
+ self.assertEqual('https://example.com:1024/theend/', full_url)
+
+ def test_build_url_target_forced_ssl(self):
+ suite = driver.build_tests(self.test_dir, self.loader,
+ host='localhost', port='999',
+ url='http://example.com:1024/theend',
+ require_ssl=True)
+ first_test = suite._tests[0]._tests[0]
+ full_url = first_test._parse_url(first_test.test_data['url'])
+ self.assertEqual('https://example.com:1024/theend/', full_url)
diff --git a/gabbi/tests/test_runner.py b/gabbi/tests/test_runner.py
index 3c132b1..a854cf9 100644
--- a/gabbi/tests/test_runner.py
+++ b/gabbi/tests/test_runner.py
@@ -229,93 +229,6 @@ class RunnerTest(unittest.TestCase):
self._stderr.write(sys.stderr.read())
-class RunnerHostArgParse(unittest.TestCase):
-
- def _test_hostport(self, url_or_host, expected_host,
- provided_prefix=None, expected_port=None,
- expected_prefix=None, expected_ssl=False):
- host, port, prefix, ssl = runner.process_target_args(
- url_or_host, provided_prefix)
-
- # normalize hosts, they are case insensitive
- self.assertEqual(expected_host.lower(), host.lower())
- # port can be a string or int depending on the inputs
- self.assertEqual(expected_port, port)
- self.assertEqual(expected_prefix, prefix)
- self.assertEqual(expected_ssl, ssl)
-
- def test_plain_url_no_port(self):
- self._test_hostport('http://foobar.com/news',
- 'foobar.com',
- expected_port=None,
- expected_prefix='/news')
-
- def test_plain_url_with_port(self):
- self._test_hostport('http://foobar.com:80/news',
- 'foobar.com',
- expected_port=80,
- expected_prefix='/news')
-
- def test_ssl_url(self):
- self._test_hostport('https://foobar.com/news',
- 'foobar.com',
- expected_prefix='/news',
- expected_ssl=True)
-
- def test_ssl_port80_url(self):
- self._test_hostport('https://foobar.com:80/news',
- 'foobar.com',
- expected_prefix='/news',
- expected_port=80,
- expected_ssl=True)
-
- def test_ssl_port_url(self):
- self._test_hostport('https://foobar.com:999/news',
- 'foobar.com',
- expected_prefix='/news',
- expected_port=999,
- expected_ssl=True)
-
- def test_simple_hostport(self):
- self._test_hostport('foobar.com:999',
- 'foobar.com',
- expected_port='999')
-
- def test_simple_hostport_with_prefix(self):
- self._test_hostport('foobar.com:999',
- 'foobar.com',
- provided_prefix='/news',
- expected_port='999',
- expected_prefix='/news')
-
- def test_ipv6_url_long(self):
- self._test_hostport(
- 'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:999/news',
- 'FEDC:BA98:7654:3210:FEDC:BA98:7654:3210',
- expected_port=999,
- expected_prefix='/news')
-
- def test_ipv6_url_localhost(self):
- self._test_hostport(
- 'http://[::1]:999/news',
- '::1',
- expected_port=999,
- expected_prefix='/news')
-
- def test_ipv6_host_localhost(self):
- # If a user wants to use the hostport form, then they need
- # to hack it with the brackets.
- self._test_hostport(
- '[::1]',
- '::1')
-
- def test_ipv6_hostport_localhost(self):
- self._test_hostport(
- '[::1]:999',
- '::1',
- expected_port='999')
-
-
class HTMLResponseHandler(handlers.ResponseHandler):
test_key_suffix = 'html'
diff --git a/gabbi/tests/test_utils.py b/gabbi/tests/test_utils.py
index 1754dad..d5b8b50 100644
--- a/gabbi/tests/test_utils.py
+++ b/gabbi/tests/test_utils.py
@@ -158,3 +158,90 @@ class CreateURLTest(unittest.TestCase):
'/foo', 'FEDC:BA98:7654:3210:FEDC:BA98:7654:3210', port=999)
self.assertEqual(
'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:999/foo', url)
+
+
+class UtilsHostInfoFromTarget(unittest.TestCase):
+
+ def _test_hostport(self, url_or_host, expected_host,
+ provided_prefix=None, expected_port=None,
+ expected_prefix=None, expected_ssl=False):
+ host, port, prefix, ssl = utils.host_info_from_target(
+ url_or_host, provided_prefix)
+
+ # normalize hosts, they are case insensitive
+ self.assertEqual(expected_host.lower(), host.lower())
+ # port can be a string or int depending on the inputs
+ self.assertEqual(expected_port, port)
+ self.assertEqual(expected_prefix, prefix)
+ self.assertEqual(expected_ssl, ssl)
+
+ def test_plain_url_no_port(self):
+ self._test_hostport('http://foobar.com/news',
+ 'foobar.com',
+ expected_port=None,
+ expected_prefix='/news')
+
+ def test_plain_url_with_port(self):
+ self._test_hostport('http://foobar.com:80/news',
+ 'foobar.com',
+ expected_port=80,
+ expected_prefix='/news')
+
+ def test_ssl_url(self):
+ self._test_hostport('https://foobar.com/news',
+ 'foobar.com',
+ expected_prefix='/news',
+ expected_ssl=True)
+
+ def test_ssl_port80_url(self):
+ self._test_hostport('https://foobar.com:80/news',
+ 'foobar.com',
+ expected_prefix='/news',
+ expected_port=80,
+ expected_ssl=True)
+
+ def test_ssl_port_url(self):
+ self._test_hostport('https://foobar.com:999/news',
+ 'foobar.com',
+ expected_prefix='/news',
+ expected_port=999,
+ expected_ssl=True)
+
+ def test_simple_hostport(self):
+ self._test_hostport('foobar.com:999',
+ 'foobar.com',
+ expected_port='999')
+
+ def test_simple_hostport_with_prefix(self):
+ self._test_hostport('foobar.com:999',
+ 'foobar.com',
+ provided_prefix='/news',
+ expected_port='999',
+ expected_prefix='/news')
+
+ def test_ipv6_url_long(self):
+ self._test_hostport(
+ 'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:999/news',
+ 'FEDC:BA98:7654:3210:FEDC:BA98:7654:3210',
+ expected_port=999,
+ expected_prefix='/news')
+
+ def test_ipv6_url_localhost(self):
+ self._test_hostport(
+ 'http://[::1]:999/news',
+ '::1',
+ expected_port=999,
+ expected_prefix='/news')
+
+ def test_ipv6_host_localhost(self):
+ # If a user wants to use the hostport form, then they need
+ # to hack it with the brackets.
+ self._test_hostport(
+ '[::1]',
+ '::1')
+
+ def test_ipv6_hostport_localhost(self):
+ self._test_hostport(
+ '[::1]:999',
+ '::1',
+ expected_port='999')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 3
} | 1.21 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
colorama==0.4.5
decorator==5.1.1
-e git+https://github.com/cdent/gabbi.git@0a8a3b8faf9a900fd132d9b147f67a851d52f178#egg=gabbi
importlib-metadata==4.8.3
iniconfig==1.1.1
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- colorama==0.4.5
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_driver.py::DriverTest::test_build_url_target",
"gabbi/tests/test_driver.py::DriverTest::test_build_url_target_forced_ssl",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ipv6_host_localhost",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ipv6_hostport_localhost",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ipv6_url_localhost",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ipv6_url_long",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_plain_url_no_port",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_plain_url_with_port",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_simple_hostport",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_simple_hostport_with_prefix",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ssl_port80_url",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ssl_port_url",
"gabbi/tests/test_utils.py::UtilsHostInfoFromTarget::test_ssl_url"
] | [] | [
"gabbi/tests/test_driver.py::DriverTest::test_build_require_ssl",
"gabbi/tests/test_driver.py::DriverTest::test_build_requires_host_or_intercept",
"gabbi/tests/test_driver.py::DriverTest::test_driver_loads_two_tests",
"gabbi/tests/test_driver.py::DriverTest::test_driver_prefix",
"gabbi/tests/test_runner.py::RunnerTest::test_custom_response_handler",
"gabbi/tests/test_runner.py::RunnerTest::test_exit_code",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing_standard_port",
"gabbi/tests/test_utils.py::BinaryTypesTest::test_binary",
"gabbi/tests/test_utils.py::BinaryTypesTest::test_not_binary",
"gabbi/tests/test_utils.py::ExtractContentTypeTest::test_extract_content_type_bad_params",
"gabbi/tests/test_utils.py::ExtractContentTypeTest::test_extract_content_type_default_both",
"gabbi/tests/test_utils.py::ExtractContentTypeTest::test_extract_content_type_default_charset",
"gabbi/tests/test_utils.py::ExtractContentTypeTest::test_extract_content_type_multiple_params",
"gabbi/tests/test_utils.py::ExtractContentTypeTest::test_extract_content_type_with_charset",
"gabbi/tests/test_utils.py::ColorizeTest::test_colorize_missing_color",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ipv6_already_bracket",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ipv6_full",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ipv6_ssl",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ipv6_ssl_weird_port",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_no_double_colon",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_not_ssl_on_443",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_port",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_port_and_ssl",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_prefix",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_preserve_query",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_simple",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ssl",
"gabbi/tests/test_utils.py::CreateURLTest::test_create_url_ssl_on_80"
] | [] | Apache License 2.0 | 582 | 1,372 | [
"gabbi/driver.py",
"gabbi/runner.py",
"gabbi/utils.py"
] |
mozilla__bleach-205 | 2235b8fcadc8abef3a2845bb0ce67206982f3489 | 2016-06-14 16:16:47 | edd91a00e1c50cebbc512c7db61897ad3d0ba00a | diff --git a/bleach/__init__.py b/bleach/__init__.py
index 3092cb7..ac163d1 100644
--- a/bleach/__init__.py
+++ b/bleach/__init__.py
@@ -315,7 +315,7 @@ def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_pre=False,
if node.tag == ETREE_TAG('pre') and skip_pre:
linkify_nodes(node, False)
elif not (node in _seen):
- linkify_nodes(node, True)
+ linkify_nodes(node, parse_text)
current_child += 1
| Children of <pre> tags should not be linkified when skip_pre=True (patch attached)
The children of `pre` tags should not be linkified when `skip_pre` is on
```
diff --git a/bleach/__init__.py b/bleach/__init__.py
index 48b6512..4c2dd1b 100644
--- a/bleach/__init__.py
+++ b/bleach/__init__.py
@@ -300,7 +300,7 @@ def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_pre=False,
if node.tag == ETREE_TAG('pre') and skip_pre:
linkify_nodes(node, False)
elif not (node in _seen):
- linkify_nodes(node, True)
+ linkify_nodes(node, parse_text)
current_child += 1
diff --git a/bleach/tests/test_links.py b/bleach/tests/test_links.py
index 62da8d1..ae0fba7 100644
--- a/bleach/tests/test_links.py
+++ b/bleach/tests/test_links.py
@@ -314,6 +314,13 @@ def test_skip_pre():
eq_(nofollowed, linkify(already_linked))
eq_(nofollowed, linkify(already_linked, skip_pre=True))
+def test_skip_pre_child():
+ # Don't linkify the children of pre tags.
+ intext = '<pre><code>http://foo.com</code></pre>http://bar.com'
+ expect = '<pre><code>http://foo.com</code></pre><a href="http://bar.com" rel="nofollow">http://bar.com</a>'
+ output = linkify(intext, skip_pre=True)
+ eq_(expect, output)
+
def test_libgl():
"""libgl.so.1 should not be linkified."""
``` | mozilla/bleach | diff --git a/bleach/tests/test_links.py b/bleach/tests/test_links.py
index 62da8d1..2958f5e 100644
--- a/bleach/tests/test_links.py
+++ b/bleach/tests/test_links.py
@@ -314,6 +314,13 @@ def test_skip_pre():
eq_(nofollowed, linkify(already_linked))
eq_(nofollowed, linkify(already_linked, skip_pre=True))
+ eq_(
+ linkify('<pre><code>http://example.com</code></pre>http://example.com',
+ skip_pre=True),
+ ('<pre><code>http://example.com</code></pre>'
+ '<a href="http://example.com" rel="nofollow">http://example.com</a>')
+ )
+
def test_libgl():
"""libgl.so.1 should not be linkified."""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"flake8",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
-e git+https://github.com/mozilla/bleach.git@2235b8fcadc8abef3a2845bb0ce67206982f3489#egg=bleach
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.4.5
cryptography==40.0.2
distlib==0.3.9
docutils==0.17.1
filelock==3.4.1
flake8==5.0.4
html5lib==0.9999999
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
jeepney==0.7.1
Jinja2==3.0.3
keyring==23.4.1
MarkupSafe==2.0.1
mccabe==0.7.0
nose==1.3.7
ordereddict==1.1
packaging==21.3
pkginfo==1.10.0
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytz==2025.2
readme-renderer==34.0
requests==2.27.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml==0.10.2
tomli==1.2.3
tox==3.28.0
tqdm==4.64.1
twine==3.8.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.16.2
webencodings==0.5.1
zipp==3.6.0
| name: bleach
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.4.5
- cryptography==40.0.2
- distlib==0.3.9
- docutils==0.17.1
- filelock==3.4.1
- flake8==5.0.4
- html5lib==0.9999999
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jeepney==0.7.1
- jinja2==3.0.3
- keyring==23.4.1
- markupsafe==2.0.1
- mccabe==0.7.0
- nose==1.3.7
- ordereddict==1.1
- packaging==21.3
- pkginfo==1.10.0
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytz==2025.2
- readme-renderer==34.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- tqdm==4.64.1
- twine==3.8.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.16.2
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/bleach
| [
"bleach/tests/test_links.py::test_skip_pre"
] | [] | [
"bleach/tests/test_links.py::test_empty",
"bleach/tests/test_links.py::test_simple_link",
"bleach/tests/test_links.py::test_trailing_slash",
"bleach/tests/test_links.py::test_mangle_link",
"bleach/tests/test_links.py::test_mangle_text",
"bleach/tests/test_links.py::test_set_attrs",
"bleach/tests/test_links.py::test_only_proto_links",
"bleach/tests/test_links.py::test_stop_email",
"bleach/tests/test_links.py::test_tlds",
"bleach/tests/test_links.py::test_escaping",
"bleach/tests/test_links.py::test_nofollow_off",
"bleach/tests/test_links.py::test_link_in_html",
"bleach/tests/test_links.py::test_links_https",
"bleach/tests/test_links.py::test_add_rel_nofollow",
"bleach/tests/test_links.py::test_url_with_path",
"bleach/tests/test_links.py::test_link_ftp",
"bleach/tests/test_links.py::test_link_query",
"bleach/tests/test_links.py::test_link_fragment",
"bleach/tests/test_links.py::test_link_entities",
"bleach/tests/test_links.py::test_escaped_html",
"bleach/tests/test_links.py::test_link_http_complete",
"bleach/tests/test_links.py::test_non_url",
"bleach/tests/test_links.py::test_javascript_url",
"bleach/tests/test_links.py::test_unsafe_url",
"bleach/tests/test_links.py::test_libgl",
"bleach/tests/test_links.py::test_end_of_clause",
"bleach/tests/test_links.py::test_sarcasm",
"bleach/tests/test_links.py::test_parentheses_with_removing",
"bleach/tests/test_links.py::test_tokenizer",
"bleach/tests/test_links.py::test_ignore_bad_protocols",
"bleach/tests/test_links.py::test_max_recursion_depth",
"bleach/tests/test_links.py::test_link_emails_and_urls",
"bleach/tests/test_links.py::test_links_case_insensitive",
"bleach/tests/test_links.py::test_elements_inside_links",
"bleach/tests/test_links.py::test_remove_first_childlink"
] | [] | Apache License 2.0 | 584 | 151 | [
"bleach/__init__.py"
] |
|
juju-solutions__charms.reactive-73 | 04663e45f3683d4c497f43526d3ac26593ee10a2 | 2016-06-15 01:33:48 | 59b07bd9447d8a4cb027ea2515089216b8d20549 | diff --git a/charms/reactive/bus.py b/charms/reactive/bus.py
index 885e498..853571a 100644
--- a/charms/reactive/bus.py
+++ b/charms/reactive/bus.py
@@ -229,6 +229,7 @@ class Handler(object):
self._action = action
self._args = []
self._predicates = []
+ self._post_callbacks = []
self._states = set()
def id(self):
@@ -255,6 +256,12 @@ class Handler(object):
hookenv.log(' Adding predicate for %s: %s' % (self.id(), _predicate), level=hookenv.DEBUG)
self._predicates.append(predicate)
+ def add_post_callback(self, callback):
+ """
+ Add a callback to be run after the action is invoked.
+ """
+ self._post_callbacks.append(callback)
+
def test(self):
"""
Check the predicate(s) and return True if this handler should be invoked.
@@ -278,6 +285,8 @@ class Handler(object):
"""
args = self._get_args()
self._action(*args)
+ for callback in self._post_callbacks:
+ callback()
def register_states(self, states):
"""
diff --git a/charms/reactive/decorators.py b/charms/reactive/decorators.py
index 7918106..e89332f 100644
--- a/charms/reactive/decorators.py
+++ b/charms/reactive/decorators.py
@@ -205,18 +205,18 @@ def not_unless(*desired_states):
return _decorator
-def only_once(action):
+def only_once(action=None):
"""
- Ensure that the decorated function is only executed the first time it is called.
+ Register the decorated function to be run once, and only once.
- This can be used on reactive handlers to ensure that they are only triggered
- once, even if their conditions continue to match on subsequent calls, even
- across hook invocations.
+ This decorator will never cause arguments to be passed to the handler.
"""
- @wraps(action)
- def wrapper(*args, **kwargs):
- action_id = _action_id(action)
- if not was_invoked(action_id):
- action(*args, **kwargs)
- mark_invoked(action_id)
- return wrapper
+ if action is None:
+ # allow to be used as @only_once or @only_once()
+ return only_once
+
+ action_id = _action_id(action)
+ handler = Handler.get(action)
+ handler.add_predicate(lambda: not was_invoked(action_id))
+ handler.add_post_callback(partial(mark_invoked, action_id))
+ return action
| only_once help unclear
_From @jacekn on January 27, 2016 17:29_
I was trying to execute certain function only once in my code.
This does not work:
```
@only_once()
def basenode():
print("in basenode")
```
```
TypeError: only_once() missing 1 required positional argument: 'action'
```
I tried like this but it also did not work:
```
@only_once("basenode")
def basenode():
print("in basenode")
```
```
AttributeError: 'str' object has no attribute '__code__'
```
Can documentation be clarified to show correct use of this decorator?
_Copied from original issue: juju/charm-tools#94_ | juju-solutions/charms.reactive | diff --git a/tests/test_decorators.py b/tests/test_decorators.py
index 4691a30..2599b53 100644
--- a/tests/test_decorators.py
+++ b/tests/test_decorators.py
@@ -241,11 +241,28 @@ class TestReactiveDecorators(unittest.TestCase):
calls = []
@reactive.decorators.only_once
- def test(num):
- calls.append(num)
+ def test():
+ calls.append(len(calls)+1)
+
+ handler = reactive.bus.Handler.get(test)
- test(1)
- test(2)
+ assert handler.test()
+ handler.invoke()
+ assert not handler.test()
+ self.assertEquals(calls, [1])
+
+ def test_only_once_parens(self):
+ calls = []
+
+ @reactive.decorators.only_once()
+ def test():
+ calls.append(len(calls)+1)
+
+ handler = reactive.bus.Handler.get(test)
+
+ assert handler.test()
+ handler.invoke()
+ assert not handler.test()
self.assertEquals(calls, [1])
def test_multi(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"flake8",
"ipython",
"ipdb",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backcall==0.2.0
certifi==2021.5.30
charmhelpers==1.2.1
-e git+https://github.com/juju-solutions/charms.reactive.git@04663e45f3683d4c497f43526d3ac26593ee10a2#egg=charms.reactive
coverage==6.2
decorator==5.1.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
ipdb==0.13.13
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
netaddr==0.10.1
nose==1.3.7
packaging==21.3
parso==0.7.1
pbr==6.1.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pyaml==23.5.8
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
six==1.17.0
tomli==1.2.3
traitlets==4.3.3
typing_extensions==4.1.1
wcwidth==0.2.13
zipp==3.6.0
| name: charms.reactive
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backcall==0.2.0
- charmhelpers==1.2.1
- coverage==6.2
- decorator==5.1.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipdb==0.13.13
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- netaddr==0.10.1
- nose==1.3.7
- packaging==21.3
- parso==0.7.1
- pbr==6.1.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pyaml==23.5.8
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- tomli==1.2.3
- traitlets==4.3.3
- typing-extensions==4.1.1
- wcwidth==0.2.13
- zipp==3.6.0
prefix: /opt/conda/envs/charms.reactive
| [
"tests/test_decorators.py::TestReactiveDecorators::test_only_once",
"tests/test_decorators.py::TestReactiveDecorators::test_only_once_parens"
] | [] | [
"tests/test_decorators.py::TestReactiveDecorators::test_hook",
"tests/test_decorators.py::TestReactiveDecorators::test_multi",
"tests/test_decorators.py::TestReactiveDecorators::test_not_unless",
"tests/test_decorators.py::TestReactiveDecorators::test_when",
"tests/test_decorators.py::TestReactiveDecorators::test_when_all",
"tests/test_decorators.py::TestReactiveDecorators::test_when_any",
"tests/test_decorators.py::TestReactiveDecorators::test_when_file_changed",
"tests/test_decorators.py::TestReactiveDecorators::test_when_none",
"tests/test_decorators.py::TestReactiveDecorators::test_when_not",
"tests/test_decorators.py::TestReactiveDecorators::test_when_not_all"
] | [] | Apache License 2.0 | 587 | 656 | [
"charms/reactive/bus.py",
"charms/reactive/decorators.py"
] |
|
cdent__gabbi-157 | 1b9a0be830dac86865bee85c33886d3b2fb4d37b | 2016-06-16 12:05:42 | 1b9a0be830dac86865bee85c33886d3b2fb4d37b | diff --git a/gabbi/driver.py b/gabbi/driver.py
index 9cb88fe..22a48c4 100644
--- a/gabbi/driver.py
+++ b/gabbi/driver.py
@@ -29,8 +29,10 @@ import os
import unittest
from unittest import suite
import uuid
+import warnings
from gabbi import case
+from gabbi import exception
from gabbi import handlers
from gabbi import reporter
from gabbi import suitemaker
@@ -83,6 +85,10 @@ def build_tests(path, loader, host=None, port=8001, intercept=None,
top_suite = suite.TestSuite()
for test_file in glob.iglob('%s/*.yaml' % path):
+ if '_' in os.path.basename(test_file):
+ warnings.warn(exception.GabbiSyntaxWarning(
+ "'_' in test filename %s. This can break suite grouping."
+ % test_file))
if intercept:
host = str(uuid.uuid4())
suite_dict = utils.load_yaml(yaml_file=test_file)
@@ -134,7 +140,6 @@ def py_test_generator(test_dir, host=None, port=8001, intercept=None,
def test_suite_from_yaml(loader, test_base_name, test_yaml, test_directory,
host, port, fixture_module, intercept, prefix=''):
"""Legacy wrapper retained for backwards compatibility."""
- import warnings
with warnings.catch_warnings(): # ensures warnings filter is restored
warnings.simplefilter('default', DeprecationWarning)
diff --git a/gabbi/exception.py b/gabbi/exception.py
index 3d4ef45..2bc93e4 100644
--- a/gabbi/exception.py
+++ b/gabbi/exception.py
@@ -16,3 +16,8 @@
class GabbiFormatError(ValueError):
"""An exception to encapsulate poorly formed test data."""
pass
+
+
+class GabbiSyntaxWarning(SyntaxWarning):
+ """A warning about syntax that is not desirable."""
+ pass
| Q: What characters are legal / recommended for test name in YAML?
I suspect plain alphanum + spaces is recommended, but I might sometimes want to do hyphen or parens. So, just being thorough.
Thanks!
| cdent/gabbi | diff --git a/gabbi/tests/gabbits_intercept/json_extensions.yaml b/gabbi/tests/gabbits_intercept/json-extensions.yaml
similarity index 100%
rename from gabbi/tests/gabbits_intercept/json_extensions.yaml
rename to gabbi/tests/gabbits_intercept/json-extensions.yaml
diff --git a/gabbi/tests/gabbits_intercept/last_url.yaml b/gabbi/tests/gabbits_intercept/last-url.yaml
similarity index 100%
rename from gabbi/tests/gabbits_intercept/last_url.yaml
rename to gabbi/tests/gabbits_intercept/last-url.yaml
diff --git a/gabbi/tests/gabbits_intercept/method_shortcut.yaml b/gabbi/tests/gabbits_intercept/method-shortcut.yaml
similarity index 100%
rename from gabbi/tests/gabbits_intercept/method_shortcut.yaml
rename to gabbi/tests/gabbits_intercept/method-shortcut.yaml
diff --git a/gabbi/tests/test_syntax_warning.py b/gabbi/tests/test_syntax_warning.py
new file mode 100644
index 0000000..529dbf6
--- /dev/null
+++ b/gabbi/tests/test_syntax_warning.py
@@ -0,0 +1,41 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Test that the driver warns on bad yaml name."""
+
+import os
+import unittest
+import warnings
+
+from gabbi import driver
+from gabbi import exception
+
+
+TESTS_DIR = 'warning_gabbits'
+
+
+class DriverTest(unittest.TestCase):
+
+ def setUp(self):
+ super(DriverTest, self).setUp()
+ self.loader = unittest.defaultTestLoader
+ self.test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
+
+ def test_driver_warngs_on_files(self):
+ with warnings.catch_warnings(record=True) as the_warnings:
+ driver.build_tests(
+ self.test_dir, self.loader, host='localhost', port=8001)
+ self.assertEqual(1, len(the_warnings))
+ the_warning = the_warnings[-1]
+ self.assertEqual(
+ the_warning.category, exception.GabbiSyntaxWarning)
+ self.assertIn("'_' in test filename", str(the_warning.message))
diff --git a/gabbi/tests/warning_gabbits/underscore_sample.yaml b/gabbi/tests/warning_gabbits/underscore_sample.yaml
new file mode 100644
index 0000000..185e378
--- /dev/null
+++ b/gabbi/tests/warning_gabbits/underscore_sample.yaml
@@ -0,0 +1,6 @@
+
+tests:
+ - name: one
+ url: /
+ - name: two
+ url: http://example.com/moo
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | 1.22 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock",
"testrepository",
"coverage",
"hacking",
"sphinx",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
decorator==5.1.1
docutils==0.18.1
extras==1.0.0
fixtures==4.0.1
flake8==3.8.4
-e git+https://github.com/cdent/gabbi.git@1b9a0be830dac86865bee85c33886d3b2fb4d37b#egg=gabbi
hacking==4.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
MarkupSafe==2.0.1
mccabe==0.6.1
mock==5.2.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testrepository==0.0.21
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- decorator==5.1.1
- docutils==0.18.1
- extras==1.0.0
- fixtures==4.0.1
- flake8==3.8.4
- hacking==4.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- markupsafe==2.0.1
- mccabe==0.6.1
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pycodestyle==2.6.0
- pyflakes==2.2.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testrepository==0.0.21
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_syntax_warning.py::DriverTest::test_driver_warngs_on_files"
] | [] | [] | [] | Apache License 2.0 | 589 | 474 | [
"gabbi/driver.py",
"gabbi/exception.py"
] |
|
ifosch__accloudtant-88 | 79e3cf915208ffd58a63412ffc87bd48f8bfb2dd | 2016-06-24 12:10:46 | 33f90ff0bc1639c9fe793afd837eee80170caf3e | diff --git a/accloudtant/aws/instance.py b/accloudtant/aws/instance.py
index d83c3dc..f360c03 100644
--- a/accloudtant/aws/instance.py
+++ b/accloudtant/aws/instance.py
@@ -28,6 +28,9 @@ class Instance(object):
'best': 0.0,
}
+ def __repr__(self):
+ return "<accloudtant.aws.instance.Instance id={}>".format(self.id)
+
@property
def current(self):
return self._prices['current']
diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py
index 0bbbeb9..e8f2fc9 100644
--- a/accloudtant/aws/reports.py
+++ b/accloudtant/aws/reports.py
@@ -25,9 +25,26 @@ class Reports(object):
def __init__(self):
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
+ instances_filters = [{
+ 'Name': 'instance-state-name',
+ 'Values': ['running', ],
+ }, ]
+ reserved_instances_filters = [{
+ 'Name': 'state',
+ 'Values': ['active', ],
+ }, ]
try:
- self.instances = [Instance(i) for i in ec2.instances.all()]
- self.reserved_instances = ec2_client.describe_reserved_instances()
+ self.instances = [
+ Instance(i)
+ for i in ec2.instances.filter(Filters=instances_filters)
+ ]
+ # self.instances = [Instance(i) for i in ec2.instances.all()]
+ self.reserved_instances = ec2_client.\
+ describe_reserved_instances(
+ Filters=reserved_instances_filters
+ )
+ # self.reserved_instances = ec2_client
+ # .describe_reserved_instances()
except exceptions.NoCredentialsError:
print("Error: no AWS credentials found", file=sys.stderr)
sys.exit(1)
| Iterate over appropriate subsets for performance improvement
When generating reports, the code iterates over all instances, and all reserved instances, to get the links between these, the report should iterate over running instances and active reserved instances, only. | ifosch/accloudtant | diff --git a/tests/aws/conftest.py b/tests/aws/conftest.py
index 0594830..5a97b58 100644
--- a/tests/aws/conftest.py
+++ b/tests/aws/conftest.py
@@ -65,6 +65,14 @@ def ec2_resource():
for instance in self.instances:
yield MockEC2Instance(instance)
+ def filter(self, Filters=None):
+ if Filters is None:
+ self.all()
+ if Filters[0]['Name'] == 'instance-state-name':
+ for instance in self.instances:
+ if instance['state']['Name'] in Filters[0]['Values']:
+ yield MockEC2Instance(instance)
+
class MockEC2Resource(object):
def __init__(self, responses):
self.responses = responses
@@ -94,7 +102,19 @@ def ec2_client():
def describe_instances(self):
return self.instances
- def describe_reserved_instances(self):
+ def describe_reserved_instances(self, Filters=None):
+ final_reserved = {'ReservedInstances': []}
+ if Filters is None:
+ final_reserved = self.reserved
+ else:
+ filter = Filters[0]
+ if filter['Name'] == 'state':
+ final_reserved['ReservedInstances'] = [
+ reserved_instance
+ for reserved_instance
+ in self.reserved['ReservedInstances']
+ if reserved_instance['State'] not in filter['Values']
+ ]
return self.reserved
class MockEC2ClientCall(object):
diff --git a/tests/aws/report_running_expected.txt b/tests/aws/report_running_expected.txt
new file mode 100644
index 0000000..befecd0
--- /dev/null
+++ b/tests/aws/report_running_expected.txt
@@ -0,0 +1,8 @@
+Id Name Type AZ OS State Launch time Reserved Current hourly price Renewed hourly price
+---------- --------- ---------- ---------- ------------------------ ------- ------------------- ---------- ---------------------- ----------------------
+i-912a4392 web1 c3.8xlarge us-east-1c Windows running 2015-10-22 14:15:10 Yes 0.5121 0.3894
+i-1840273e app1 r2.8xlarge us-east-1b Red Hat Enterprise Linux running 2015-10-22 14:15:10 Yes 0.3894 0.3794
+i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running 2015-10-22 14:15:10 Yes 0.5225 0.389
+i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
+i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
+i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No 0.767 0.3892
diff --git a/tests/aws/test_reports.py b/tests/aws/test_reports.py
index 35fd236..d0f6793 100644
--- a/tests/aws/test_reports.py
+++ b/tests/aws/test_reports.py
@@ -17,6 +17,10 @@ from dateutil.tz import tzutc
import accloudtant.aws.reports
+def get_future_date(years=1):
+ return datetime.datetime.now() + datetime.timedelta(years)
+
+
def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
instances = {
'instances': [{
@@ -232,16 +236,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
tzinfo=tzutc()
),
'RecurringCharges': [],
- 'End': datetime.datetime(
- 2016,
- 6,
- 5,
- 6,
- 20,
- 10,
- 494000,
- tzinfo=tzutc()
- ),
+ 'End': get_future_date(),
'CurrencyCode': 'USD',
'OfferingType': 'Medium Utilization',
'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
@@ -266,16 +261,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
tzinfo=tzutc()
),
'RecurringCharges': [],
- 'End': datetime.datetime(
- 2016,
- 6,
- 5,
- 6,
- 20,
- 10,
- 494000,
- tzinfo=tzutc()
- ),
+ 'End': get_future_date(),
'CurrencyCode': 'USD',
'OfferingType': 'Medium Utilization',
'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233321',
@@ -300,15 +286,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
tzinfo=tzutc()
),
'RecurringCharges': [],
- 'End': datetime.datetime(
- 2016,
- 6,
- 5,
- 6,
- 20,
- 10,
- tzinfo=tzutc()
- ),
+ 'End': get_future_date(),
'CurrencyCode': 'USD',
'OfferingType': 'Medium Utilization',
'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233322',
@@ -333,15 +311,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
tzinfo=tzutc()
),
'RecurringCharges': [],
- 'End': datetime.datetime(
- 2016,
- 6,
- 5,
- 6,
- 20,
- 10,
- tzinfo=tzutc()
- ),
+ 'End': get_future_date(),
'CurrencyCode': 'USD',
'OfferingType': 'Medium Utilization',
'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
@@ -421,7 +391,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
},
},
},
- 'od': '0.767',
+ 'od': '0.867',
'memoryGiB': '15',
'vCPU': '8',
},
@@ -618,7 +588,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
'best': 0.3892,
},
}
- expected = open('tests/aws/report_expected.txt', 'r').read()
+ expected = open('tests/aws/report_running_expected.txt', 'r').read()
monkeypatch.setattr('boto3.resource', ec2_resource)
ec2_resource.set_responses(instances)
@@ -634,6 +604,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
print(reports)
out, err = capsys.readouterr()
+ assert(len(reports.instances) == 6)
for mock in instances['instances']:
mock['current'] = instances_prices[mock['id']]['current']
mock['best'] = instances_prices[mock['id']]['best']
@@ -641,5 +612,4 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
if instance.id == mock['id']:
assert(instance.current == mock['current'])
assert(instance.best == mock['best'])
- print(out)
assert(out == expected)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ifosch/accloudtant.git@79e3cf915208ffd58a63412ffc87bd48f8bfb2dd#egg=accloudtant
boto3==1.1.4
botocore==1.2.10
click==4.1
docutils==0.21.2
exceptiongroup==1.2.2
futures==2.2.0
iniconfig==2.1.0
jmespath==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.8.1
six==1.17.0
tabulate==0.7.5
tomli==2.2.1
| name: accloudtant
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.1.4
- botocore==1.2.10
- click==4.1
- docutils==0.21.2
- exceptiongroup==1.2.2
- futures==2.2.0
- iniconfig==2.1.0
- jmespath==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.8.1
- six==1.17.0
- tabulate==0.7.5
- tomli==2.2.1
prefix: /opt/conda/envs/accloudtant
| [
"tests/aws/test_reports.py::test_reports"
] | [] | [] | [] | null | 597 | 457 | [
"accloudtant/aws/instance.py",
"accloudtant/aws/reports.py"
] |
|
ifosch__accloudtant-90 | 96ca7fbc89be0344db1af0ec2bc9fdecff6380eb | 2016-06-24 19:51:52 | 33f90ff0bc1639c9fe793afd837eee80170caf3e | diff --git a/accloudtant/aws/instance.py b/accloudtant/aws/instance.py
index f360c03..02ca135 100644
--- a/accloudtant/aws/instance.py
+++ b/accloudtant/aws/instance.py
@@ -94,11 +94,11 @@ class Instance(object):
def match_reserved_instance(self, reserved):
return not (
self.state != 'running' or
- reserved['State'] != 'active' or
- reserved['InstancesLeft'] == 0 or
- reserved['ProductDescription'] != self.operating_system or
- reserved['InstanceType'] != self.size or
- reserved['AvailabilityZone'] != self.availability_zone
+ reserved.state != 'active' or
+ reserved.instances_left == 0 or
+ reserved.product_description != self.operating_system or
+ reserved.instance_type != self.size or
+ reserved.az != self.availability_zone
)
diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py
index e8f2fc9..bcfe9c0 100644
--- a/accloudtant/aws/reports.py
+++ b/accloudtant/aws/reports.py
@@ -17,6 +17,7 @@ import boto3
from botocore import exceptions
from tabulate import tabulate
from accloudtant.aws.instance import Instance
+from accloudtant.aws.reserved_instance import ReservedInstance
from accloudtant.aws.prices import Prices
import sys
@@ -39,10 +40,12 @@ class Reports(object):
for i in ec2.instances.filter(Filters=instances_filters)
]
# self.instances = [Instance(i) for i in ec2.instances.all()]
- self.reserved_instances = ec2_client.\
- describe_reserved_instances(
+ self.reserved_instances = [
+ ReservedInstance(i)
+ for i in ec2_client.describe_reserved_instances(
Filters=reserved_instances_filters
- )
+ )['ReservedInstances']
+ ]
# self.reserved_instances = ec2_client
# .describe_reserved_instances()
except exceptions.NoCredentialsError:
@@ -60,13 +63,11 @@ class Reports(object):
instance.current = 0.0
instance_all_upfront = instance_size['ri']['yrTerm3']['allUpfront']
instance.best = float(instance_all_upfront['effectiveHourly'])
- for reserved in self.reserved_instances['ReservedInstances']:
- if 'InstancesLeft' not in reserved.keys():
- reserved['InstancesLeft'] = reserved['InstanceCount']
+ for reserved in self.reserved_instances:
if instance.match_reserved_instance(reserved):
instance.reserved = 'Yes'
- instance.current = reserved['UsagePrice']
- reserved['InstancesLeft'] -= 1
+ instance.current = reserved.usage_price
+ reserved.link(instance)
break
def __repr__(self):
diff --git a/accloudtant/aws/reserved_instance.py b/accloudtant/aws/reserved_instance.py
new file mode 100644
index 0000000..4073a20
--- /dev/null
+++ b/accloudtant/aws/reserved_instance.py
@@ -0,0 +1,86 @@
+
+# Copyright 2015-2016 See CONTRIBUTORS.md file
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class ReservedInstance(object):
+ def __init__(self, data):
+ self.reserved_instance = data
+ if data['State'] != 'active':
+ self.instances_left = 0
+ else:
+ self.instances_left = self.instance_count
+
+ @property
+ def id(self):
+ return self.reserved_instance['ReservedInstancesId']
+
+ @property
+ def az(self):
+ return self.reserved_instance['AvailabilityZone']
+
+ @property
+ def instance_type(self):
+ return self.reserved_instance['InstanceType']
+
+ @property
+ def product_description(self):
+ return self.reserved_instance['ProductDescription']
+
+ @property
+ def start(self):
+ return self.reserved_instance['Start']
+
+ @property
+ def end(self):
+ return self.reserved_instance['End']
+
+ @property
+ def state(self):
+ return self.reserved_instance['State']
+
+ @property
+ def duration(self):
+ return self.reserved_instance['Duration']
+
+ @property
+ def offering_type(self):
+ return self.reserved_instance['OfferingType']
+
+ @property
+ def usage_price(self):
+ return self.reserved_instance['UsagePrice']
+
+ @property
+ def fixed_price(self):
+ return self.reserved_instance['FixedPrice']
+
+ @property
+ def currency_code(self):
+ return self.reserved_instance['CurrencyCode']
+
+ @property
+ def recurring_charges(self):
+ return self.reserved_instance['RecurringCharges']
+
+ @property
+ def instance_count(self):
+ return self.reserved_instance['InstanceCount']
+
+ @property
+ def instance_tenancy(self):
+ return self.reserved_instance['InstanceTenancy']
+
+ def link(self, instance):
+ self.instances_left -= 1
| Create a Reserved Instance type/class
Reserved instances are currently simple dictionaries. Implementing these as objects might help to use them. | ifosch/accloudtant | diff --git a/tests/aws/test_instance.py b/tests/aws/test_instance.py
index 6f6d73c..fae2e82 100644
--- a/tests/aws/test_instance.py
+++ b/tests/aws/test_instance.py
@@ -16,6 +16,7 @@ import datetime
import pytest
from dateutil.tz import tzutc
import accloudtant.aws.instance
+from accloudtant.aws.reserved_instance import ReservedInstance
from conftest import MockEC2Instance
@@ -261,7 +262,7 @@ def test_match_reserved_instance(benchmark):
),
'console_output': {'Output': 'RHEL Linux', },
}
- reserved_instance = {
+ ri_data = {
'ProductDescription': 'Red Hat Enterprise Linux',
'InstanceTenancy': 'default',
'InstanceCount': 1,
@@ -298,31 +299,36 @@ def test_match_reserved_instance(benchmark):
ec2_instance = MockEC2Instance(instance_data)
instance = accloudtant.aws.instance.Instance(ec2_instance)
- reserved_instance['InstancesLeft'] = reserved_instance['InstanceCount']
+ reserved_instance = ReservedInstance(ri_data)
assert(instance.match_reserved_instance(reserved_instance))
benchmark(instance.match_reserved_instance, reserved_instance)
- reserved_instance['State'] = 'pending'
+ ri_data['State'] = 'pending'
+ reserved_instance = ReservedInstance(ri_data)
assert(not instance.match_reserved_instance(reserved_instance))
- reserved_instance['State'] = 'active'
- reserved_instance['InstancesLeft'] = 0
+ ri_data['State'] = 'active'
+ reserved_instance = ReservedInstance(ri_data)
+ reserved_instance.instances_left = 0
assert(not instance.match_reserved_instance(reserved_instance))
- reserved_instance['InstacesLeft'] = 1
- reserved_instance['ProductDescription'] = 'Windows'
+ ri_data['ProductDescription'] = 'Windows'
+ reserved_instance = ReservedInstance(ri_data)
+ reserved_instance.instances_left = 1
assert(not instance.match_reserved_instance(reserved_instance))
- reserved_instance['ProductionDescription'] = 'Red Hat Enterprise Linux'
- reserved_instance['InstaceType'] = 't1.micro'
+ ri_data['ProductionDescription'] = 'Red Hat Enterprise Linux'
+ ri_data['InstaceType'] = 't1.micro'
+ reserved_instance = ReservedInstance(ri_data)
assert(not instance.match_reserved_instance(reserved_instance))
- reserved_instance['InstaceType'] = 'r2.8xlarge'
- reserved_instance['AvailabilityZone'] = 'us-east-1c'
+ ri_data['InstaceType'] = 'r2.8xlarge'
+ ri_data['AvailabilityZone'] = 'us-east-1c'
+ reserved_instance = ReservedInstance(ri_data)
assert(not instance.match_reserved_instance(reserved_instance))
diff --git a/tests/aws/test_reserved_instance.py b/tests/aws/test_reserved_instance.py
new file mode 100644
index 0000000..9627ebf
--- /dev/null
+++ b/tests/aws/test_reserved_instance.py
@@ -0,0 +1,189 @@
+# Copyright 2015-2016 See CONTRIBUTORS.md file
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import pytest
+from dateutil.tz import tzutc
+import accloudtant.aws.reserved_instance
+from conftest import MockEC2Instance
+from test_reports import get_future_date
+
+
+def test_retired_ri():
+ az = 'us-east-1b'
+ ri_data = {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 29,
+ 'InstanceType': 'm1.large',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df1223331f',
+ 'FixedPrice': 910.0,
+ 'AvailabilityZone': az,
+ 'UsagePrice': 0.12,
+ 'Duration': 31536000,
+ 'State': 'retired',
+ }
+
+ ri = accloudtant.aws.reserved_instance.ReservedInstance(ri_data)
+
+ assert(ri.id == ri_data['ReservedInstancesId'])
+ assert(ri.product_description == ri_data['ProductDescription'])
+ assert(ri.instance_tenancy == ri_data['InstanceTenancy'])
+ assert(ri.instance_count == ri_data['InstanceCount'])
+ assert(ri.instance_type == ri_data['InstanceType'])
+ assert(ri.start == ri_data['Start'])
+ assert(ri.recurring_charges == ri_data['RecurringCharges'])
+ assert(ri.end == ri_data['End'])
+ assert(ri.currency_code == ri_data['CurrencyCode'])
+ assert(ri.offering_type == ri_data['OfferingType'])
+ assert(ri.fixed_price == ri_data['FixedPrice'])
+ assert(ri.az == ri_data['AvailabilityZone'])
+ assert(ri.usage_price == ri_data['UsagePrice'])
+ assert(ri.duration == ri_data['Duration'])
+ assert(ri.state == ri_data['State'])
+ assert(ri.instances_left == 0)
+
+
+def test_active_ri():
+ az = 'us-east-1b'
+ ri_data = {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'm1.large',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df1223331f',
+ 'FixedPrice': 910.0,
+ 'AvailabilityZone': az,
+ 'UsagePrice': 0.12,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }
+
+ ri = accloudtant.aws.reserved_instance.ReservedInstance(ri_data)
+
+ assert(ri.id == ri_data['ReservedInstancesId'])
+ assert(ri.product_description == ri_data['ProductDescription'])
+ assert(ri.instance_tenancy == ri_data['InstanceTenancy'])
+ assert(ri.instance_count == ri_data['InstanceCount'])
+ assert(ri.instance_type == ri_data['InstanceType'])
+ assert(ri.start == ri_data['Start'])
+ assert(ri.recurring_charges == ri_data['RecurringCharges'])
+ assert(ri.end == ri_data['End'])
+ assert(ri.currency_code == ri_data['CurrencyCode'])
+ assert(ri.offering_type == ri_data['OfferingType'])
+ assert(ri.fixed_price == ri_data['FixedPrice'])
+ assert(ri.az == ri_data['AvailabilityZone'])
+ assert(ri.usage_price == ri_data['UsagePrice'])
+ assert(ri.duration == ri_data['Duration'])
+ assert(ri.state == ri_data['State'])
+ assert(ri.instances_left == ri_data['InstanceCount'])
+
+
+def test_ri_link():
+ az = 'us-east-1b'
+ ri_data = {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'm1.large',
+ 'Start': datetime.datetime(
+ 2015,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df1223331f',
+ 'FixedPrice': 910.0,
+ 'AvailabilityZone': az,
+ 'UsagePrice': 0.12,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }
+ instance_data = {
+ 'id': 'i-1840273e',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'app1',
+ }, ],
+ 'instance_type': 'm1.large',
+ 'placement': {
+ 'AvailabilityZone': az,
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'Linux', },
+ }
+
+ ri = accloudtant.aws.reserved_instance.ReservedInstance(ri_data)
+ instance = MockEC2Instance(instance_data)
+
+ assert(ri.instances_left == 1)
+
+ ri.link(instance)
+
+ assert(ri.instances_left == 0)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ifosch/accloudtant.git@96ca7fbc89be0344db1af0ec2bc9fdecff6380eb#egg=accloudtant
boto3==1.1.4
botocore==1.2.10
click==4.1
docutils==0.21.2
exceptiongroup==1.2.2
futures==2.2.0
iniconfig==2.1.0
jmespath==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.8.1
six==1.17.0
tabulate==0.7.5
tomli==2.2.1
| name: accloudtant
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.1.4
- botocore==1.2.10
- click==4.1
- docutils==0.21.2
- exceptiongroup==1.2.2
- futures==2.2.0
- iniconfig==2.1.0
- jmespath==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.8.1
- six==1.17.0
- tabulate==0.7.5
- tomli==2.2.1
prefix: /opt/conda/envs/accloudtant
| [
"tests/aws/test_instance.py::test_instance",
"tests/aws/test_instance.py::test_unnamed_instance",
"tests/aws/test_instance.py::test_guess_os",
"tests/aws/test_reserved_instance.py::test_retired_ri",
"tests/aws/test_reserved_instance.py::test_active_ri",
"tests/aws/test_reserved_instance.py::test_ri_link"
] | [] | [] | [] | null | 598 | 1,323 | [
"accloudtant/aws/instance.py",
"accloudtant/aws/reports.py"
] |
|
falconry__falcon-839 | 2177a7a44c62f9159125049b232eb3cf668a69cd | 2016-06-30 20:36:53 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage][cc-pull] is **100%**
> Merging [#839][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #839 diff @@
====================================
Files 29 29
Lines 1790 1803 +13
Methods 0 0
Messages 0 0
Branches 299 301 +2
====================================
+ Hits 1790 1803 +13
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last updated by [2177a7a...e850a04][cc-compare]
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/falconry/falcon/compare/2177a7a44c62f9159125049b232eb3cf668a69cd...e850a04544bddfb08e0679719620d59b1ecf28e1
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/839?src=pr | diff --git a/falcon/request.py b/falcon/request.py
index 53edeae..597ac80 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -11,6 +11,7 @@
# limitations under the License.
from datetime import datetime
+import json
try:
# NOTE(kgrifs): In Python 2.6 and 2.7, socket._fileobject is a
@@ -1030,7 +1031,7 @@ class Request(object):
try:
date = strptime(param_value, format_string).date()
except ValueError:
- msg = 'The date value does not match the required format'
+ msg = 'The date value does not match the required format.'
raise errors.HTTPInvalidParam(msg, name)
if store is not None:
@@ -1038,6 +1039,45 @@ class Request(object):
return date
+ def get_param_as_dict(self, name, required=False, store=None):
+ """Return the value of a query string parameter as a dict.
+
+ Given a JSON value, parse and return it as a dict.
+
+ Args:
+ name (str): Parameter name, case-sensitive (e.g., 'payload').
+ required (bool, optional): Set to ``True`` to raise
+ ``HTTPBadRequest`` instead of returning ``None`` when the
+ parameter is not found (default ``False``).
+ store (dict, optional): A ``dict``-like object in which to place
+ the value of the param, but only if the param is found (default
+ ``None``).
+
+ Returns:
+ dict: The value of the param if it is found. Otherwise, returns
+ ``None`` unless required is ``True``.
+
+ Raises:
+ HTTPBadRequest: A required param is missing from the request.
+ HTTPInvalidParam: The parameter's value could not be parsed as JSON.
+ """
+
+ param_value = self.get_param(name, required=required)
+
+ if param_value is None:
+ return None
+
+ try:
+ val = json.loads(param_value)
+ except ValueError:
+ msg = 'It could not be parsed as JSON.'
+ raise errors.HTTPInvalidParam(msg, name)
+
+ if store is not None:
+ store[name] = val
+
+ return val
+
def log_error(self, message):
"""Write an error message to the server's log.
| Implement get_param_as_dict()
This helper would deserialize JSON into a `dict`. For example:
`thing=%7B%22name%22%3A%20%22value%22%2C%20%22another%22%3A%20%22field%22%2C%20%22some%22%3A%20%22thing%22%7D`
...would be interpreted by req.get_param_as_dict as:
```json
{"name": "value", "another": "field", "some": "thing"}
``` | falconry/falcon | diff --git a/tests/test_query_params.py b/tests/test_query_params.py
index bb01847..c588f23 100644
--- a/tests/test_query_params.py
+++ b/tests/test_query_params.py
@@ -1,4 +1,5 @@
from datetime import date
+import json
import ddt
@@ -473,6 +474,39 @@ class _TestQueryParams(testing.TestBase):
self.assertRaises(HTTPInvalidParam, req.get_param_as_date,
'thedate', format_string=format_string)
+ def test_get_dict_valid(self):
+ payload_dict = {'foo': 'bar'}
+ query_string = 'payload={0}'.format(json.dumps(payload_dict))
+ self.simulate_request('/', query_string=query_string)
+ req = self.resource.req
+ self.assertEqual(req.get_param_as_dict('payload'),
+ payload_dict)
+
+ def test_get_dict_missing_param(self):
+ payload_dict = {'foo': 'bar'}
+ query_string = 'notthepayload={0}'.format(json.dumps(payload_dict))
+ self.simulate_request('/', query_string=query_string)
+ req = self.resource.req
+ self.assertEqual(req.get_param_as_dict('payload'),
+ None)
+
+ def test_get_dict_store(self):
+ payload_dict = {'foo': 'bar'}
+ query_string = 'payload={0}'.format(json.dumps(payload_dict))
+ self.simulate_request('/', query_string=query_string)
+ req = self.resource.req
+ store = {}
+ req.get_param_as_dict('payload', store=store)
+ self.assertNotEqual(len(store), 0)
+
+ def test_get_dict_invalid(self):
+ payload_dict = 'foobar'
+ query_string = 'payload={0}'.format(payload_dict)
+ self.simulate_request('/', query_string=query_string)
+ req = self.resource.req
+ self.assertRaises(HTTPInvalidParam, req.get_param_as_dict,
+ 'payload')
+
class PostQueryParams(_TestQueryParams):
def before(self):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@2177a7a44c62f9159125049b232eb3cf668a69cd#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_query_params.py::_TestQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_store",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_valid",
"tests/test_query_params.py::PostQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_dict_store",
"tests/test_query_params.py::PostQueryParams::test_get_dict_valid",
"tests/test_query_params.py::GetQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_dict_store",
"tests/test_query_params.py::GetQueryParams::test_get_dict_valid"
] | [] | [
"tests/test_query_params.py::_TestQueryParams::test_allowed_names",
"tests/test_query_params.py::_TestQueryParams::test_bad_percentage",
"tests/test_query_params.py::_TestQueryParams::test_blank",
"tests/test_query_params.py::_TestQueryParams::test_boolean",
"tests/test_query_params.py::_TestQueryParams::test_boolean_blank",
"tests/test_query_params.py::_TestQueryParams::test_get_date_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_date_store",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::_TestQueryParams::test_int",
"tests/test_query_params.py::_TestQueryParams::test_int_neg",
"tests/test_query_params.py::_TestQueryParams::test_list_transformer",
"tests/test_query_params.py::_TestQueryParams::test_list_type",
"tests/test_query_params.py::_TestQueryParams::test_list_type_blank",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::_TestQueryParams::test_none",
"tests/test_query_params.py::_TestQueryParams::test_param_property",
"tests/test_query_params.py::_TestQueryParams::test_percent_encoded",
"tests/test_query_params.py::_TestQueryParams::test_required_1_get_param",
"tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::_TestQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParams::test_allowed_names",
"tests/test_query_params.py::PostQueryParams::test_bad_percentage",
"tests/test_query_params.py::PostQueryParams::test_blank",
"tests/test_query_params.py::PostQueryParams::test_boolean",
"tests/test_query_params.py::PostQueryParams::test_boolean_blank",
"tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse",
"tests/test_query_params.py::PostQueryParams::test_get_date_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_date_store",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::PostQueryParams::test_int",
"tests/test_query_params.py::PostQueryParams::test_int_neg",
"tests/test_query_params.py::PostQueryParams::test_list_transformer",
"tests/test_query_params.py::PostQueryParams::test_list_type",
"tests/test_query_params.py::PostQueryParams::test_list_type_blank",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::PostQueryParams::test_non_ascii",
"tests/test_query_params.py::PostQueryParams::test_none",
"tests/test_query_params.py::PostQueryParams::test_param_property",
"tests/test_query_params.py::PostQueryParams::test_percent_encoded",
"tests/test_query_params.py::PostQueryParams::test_required_1_get_param",
"tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::PostQueryParams::test_simple",
"tests/test_query_params.py::GetQueryParams::test_allowed_names",
"tests/test_query_params.py::GetQueryParams::test_bad_percentage",
"tests/test_query_params.py::GetQueryParams::test_blank",
"tests/test_query_params.py::GetQueryParams::test_boolean",
"tests/test_query_params.py::GetQueryParams::test_boolean_blank",
"tests/test_query_params.py::GetQueryParams::test_get_date_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_date_store",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::GetQueryParams::test_int",
"tests/test_query_params.py::GetQueryParams::test_int_neg",
"tests/test_query_params.py::GetQueryParams::test_list_transformer",
"tests/test_query_params.py::GetQueryParams::test_list_type",
"tests/test_query_params.py::GetQueryParams::test_list_type_blank",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::GetQueryParams::test_none",
"tests/test_query_params.py::GetQueryParams::test_param_property",
"tests/test_query_params.py::GetQueryParams::test_percent_encoded",
"tests/test_query_params.py::GetQueryParams::test_required_1_get_param",
"tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::GetQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default"
] | [] | Apache License 2.0 | 610 | 568 | [
"falcon/request.py"
] |
rthalley__dnspython-188 | 188aa701a6826c607da0624e31a8c4618d0a8017 | 2016-07-03 00:30:41 | 188aa701a6826c607da0624e31a8c4618d0a8017 | bastiak: Yup, I haven't fixed it right
sebix: As binary mode is default for Py2, I think you have to check for non-existance of `t` in mode for py2. `f.mode` is exactly the used parameter, without default values.
bastiak: I'm not sure if use except TypeError is the best practise with handling boolean or textual streams, but it works. Also I' don't trust too much to .decode() method used there. Any suggestions are welcome
sebix: Thanks for all your efforts and explanations! Why is writing files so hard...?
To ensure that all python versions behave as you expect you could add some test checking for this behavior. One for binary and one for textual mode and then check for (not) raised TypeError. | diff --git a/dns/zone.py b/dns/zone.py
index 4a73e1e..1b5dca2 100644
--- a/dns/zone.py
+++ b/dns/zone.py
@@ -19,6 +19,7 @@ from __future__ import generators
import sys
import re
+import os
from io import BytesIO
import dns.exception
@@ -498,18 +499,27 @@ class Zone(object):
@type nl: string or None
"""
- str_type = string_types
+ if isinstance(f, string_types):
+ f = open(f, 'wb')
+ want_close = True
+ else:
+ want_close = False
+
+ # must be in this way, f.encoding may contain None, or even attribute
+ # may not be there
+ file_enc = getattr(f, 'encoding', None)
+ if file_enc is None:
+ file_enc = 'utf-8'
if nl is None:
- opts = 'wb'
+ nl_b = os.linesep.encode(file_enc) # binary mode, '\n' is not enough
+ nl = u'\n'
+ elif isinstance(nl, string_types):
+ nl_b = nl.encode(file_enc)
else:
- opts = 'wb'
+ nl_b = nl
+ nl = nl.decode()
- if isinstance(f, str_type):
- f = open(f, opts)
- want_close = True
- else:
- want_close = False
try:
if sorted:
names = list(self.keys())
@@ -520,11 +530,15 @@ class Zone(object):
l = self[n].to_text(n, origin=self.origin,
relativize=relativize)
if isinstance(l, text_type):
- l = l.encode()
- if nl is None:
- f.write(l)
- f.write('\n')
+ l_b = l.encode(file_enc)
else:
+ l_b = l
+ l = l.decode()
+
+ try:
+ f.write(l_b)
+ f.write(nl_b)
+ except TypeError: # textual mode
f.write(l)
f.write(nl)
finally:
| py3: Zone.to_file failed
Hello,
```
#!/usr/local/bin/python3
import dns.zone
from dns.rdatatype import SOA
zone_obj = dns.zone.Zone(dns.name.from_text('test.zone'))
zone_obj.find_rdataset('@', rdtype=SOA, create=True)
zone_obj.to_file(open('/dev/null', 'w'))
Traceback (most recent call last):
File "./t.py", line 9, in <module>
zone_obj.to_file(open('/dev/null', 'w'))
File "/opt/hosting/software/python3/lib/python3.5/site-packages/dns/zone.py", line 516, in to_file
f.write(l)
TypeError: write() argument must be str, not bytes
zone_obj.to_file(open('/dev/null', 'wb'))
Traceback (most recent call last):
File "./t.py", line 10, in <module>
zone_obj.to_file(open('/dev/null', 'wb'))
File "/opt/hosting/software/python3/lib/python3.5/site-packages/dns/zone.py", line 517, in to_file
f.write('\n')
TypeError: a bytes-like object is required, not 'str'
```
looks like a bug ? | rthalley/dnspython | diff --git a/tests/test_zone.py b/tests/test_zone.py
index 712b590..3d53e93 100644
--- a/tests/test_zone.py
+++ b/tests/test_zone.py
@@ -132,6 +132,59 @@ class ZoneTestCase(unittest.TestCase):
os.unlink(here('example2.out'))
self.failUnless(ok)
+ def testToFileTextualStream(self):
+ z = dns.zone.from_text(example_text, 'example.', relativize=True)
+ f = StringIO()
+ z.to_file(f)
+ out = f.getvalue()
+ f.close()
+ self.assertEqual(out, example_text_output)
+
+ def testToFileBinaryStream(self):
+ z = dns.zone.from_text(example_text, 'example.', relativize=True)
+ f = BytesIO()
+ z.to_file(f)
+ out = f.getvalue()
+ f.close()
+ self.assertEqual(out, example_text_output.encode())
+
+ def testToFileTextual(self):
+ z = dns.zone.from_file(here('example'), 'example')
+ try:
+ f = open(here('example3-textual.out'), 'w')
+ z.to_file(f)
+ f.close()
+ ok = filecmp.cmp(here('example3-textual.out'),
+ here('example3.good'))
+ finally:
+ if not _keep_output:
+ os.unlink(here('example3-textual.out'))
+ self.failUnless(ok)
+
+ def testToFileBinary(self):
+ z = dns.zone.from_file(here('example'), 'example')
+ try:
+ f = open(here('example3-binary.out'), 'wb')
+ z.to_file(f)
+ f.close()
+ ok = filecmp.cmp(here('example3-binary.out'),
+ here('example3.good'))
+ finally:
+ if not _keep_output:
+ os.unlink(here('example3-binary.out'))
+ self.failUnless(ok)
+
+ def testToFileFilename(self):
+ z = dns.zone.from_file(here('example'), 'example')
+ try:
+ z.to_file('example3-filename.out')
+ ok = filecmp.cmp(here('example3-filename.out'),
+ here('example3.good'))
+ finally:
+ if not _keep_output:
+ os.unlink(here('example3-filename.out'))
+ self.failUnless(ok)
+
def testToText(self):
z = dns.zone.from_file(here('example'), 'example')
ok = False
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/rthalley/dnspython.git@188aa701a6826c607da0624e31a8c4618d0a8017#egg=dnspython
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: dnspython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/dnspython
| [
"tests/test_zone.py::ZoneTestCase::testToFileBinaryStream",
"tests/test_zone.py::ZoneTestCase::testToFileTextualStream"
] | [
"tests/test_zone.py::ZoneTestCase::testFromFile1",
"tests/test_zone.py::ZoneTestCase::testFromFile2",
"tests/test_zone.py::ZoneTestCase::testInclude",
"tests/test_zone.py::ZoneTestCase::testToFileBinary",
"tests/test_zone.py::ZoneTestCase::testToFileFilename",
"tests/test_zone.py::ZoneTestCase::testToFileTextual",
"tests/test_zone.py::ZoneTestCase::testToText",
"tests/test_zone.py::ZoneTestCase::testTorture1"
] | [
"tests/test_zone.py::ZoneTestCase::testBadDirective",
"tests/test_zone.py::ZoneTestCase::testDeleteRdataset1",
"tests/test_zone.py::ZoneTestCase::testDeleteRdataset2",
"tests/test_zone.py::ZoneTestCase::testEqual",
"tests/test_zone.py::ZoneTestCase::testFindRRset1",
"tests/test_zone.py::ZoneTestCase::testFindRRset2",
"tests/test_zone.py::ZoneTestCase::testFindRdataset1",
"tests/test_zone.py::ZoneTestCase::testFindRdataset2",
"tests/test_zone.py::ZoneTestCase::testFirstRRStartsWithWhitespace",
"tests/test_zone.py::ZoneTestCase::testFromText",
"tests/test_zone.py::ZoneTestCase::testGetRRset1",
"tests/test_zone.py::ZoneTestCase::testGetRRset2",
"tests/test_zone.py::ZoneTestCase::testGetRdataset1",
"tests/test_zone.py::ZoneTestCase::testGetRdataset2",
"tests/test_zone.py::ZoneTestCase::testIterateAllRdatas",
"tests/test_zone.py::ZoneTestCase::testIterateAllRdatasets",
"tests/test_zone.py::ZoneTestCase::testIterateRdatas",
"tests/test_zone.py::ZoneTestCase::testIterateRdatasets",
"tests/test_zone.py::ZoneTestCase::testNoNS",
"tests/test_zone.py::ZoneTestCase::testNoSOA",
"tests/test_zone.py::ZoneTestCase::testNodeDeleteRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeDeleteRdataset2",
"tests/test_zone.py::ZoneTestCase::testNodeFindRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeFindRdataset2",
"tests/test_zone.py::ZoneTestCase::testNodeGetRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeGetRdataset2",
"tests/test_zone.py::ZoneTestCase::testNotEqual1",
"tests/test_zone.py::ZoneTestCase::testNotEqual2",
"tests/test_zone.py::ZoneTestCase::testNotEqual3",
"tests/test_zone.py::ZoneTestCase::testReplaceRdataset1",
"tests/test_zone.py::ZoneTestCase::testReplaceRdataset2",
"tests/test_zone.py::ZoneTestCase::testTTLs",
"tests/test_zone.py::ZoneTestCase::testZoneOrigin",
"tests/test_zone.py::ZoneTestCase::testZoneOriginNone"
] | [] | ISC License | 611 | 520 | [
"dns/zone.py"
] |
Azure__WALinuxAgent-308 | 92091140c2a7378c1a01fe8526800af912d93c49 | 2016-07-08 22:37:27 | 92091140c2a7378c1a01fe8526800af912d93c49 | diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py
index 635f7e98..1c7d13a9 100644
--- a/azurelinuxagent/ga/update.py
+++ b/azurelinuxagent/ga/update.py
@@ -115,6 +115,7 @@ class UpdateHandler(object):
cmds = shlex.split(agent_cmd)
if cmds[0].lower() == "python":
cmds[0] = get_python_cmd()
+ agent_cmd = " ".join(cmds)
self.child_process = subprocess.Popen(
cmds,
@@ -264,7 +265,7 @@ class UpdateHandler(object):
available_agents = [agent for agent in self.agents if agent.is_available]
return available_agents[0] if len(available_agents) >= 1 else None
- def _ensure_latest_agent(self):
+ def _ensure_latest_agent(self, base_version=CURRENT_VERSION):
# Ignore new agents if updating is disabled
if not conf.get_autoupdate_enabled():
return False
@@ -326,15 +327,14 @@ class UpdateHandler(object):
# Note:
# The code leaves on disk available, but blacklisted, agents so as to preserve the state.
# Otherwise, those agents could be again downloaded and inappropriately retried.
- current_version = FlexibleVersion(AGENT_VERSION)
self._set_agents([GuestAgent(pkg=pkg) for pkg in
[pkg for pkg in pkg_list.versions
- if FlexibleVersion(pkg.version) > current_version]])
+ if FlexibleVersion(pkg.version) > base_version]])
self._purge_agents()
self._filter_blacklisted_agents()
# Return True if agents more recent than the current are available
- return len(self.agents) > 0 and self.agents[0].version > current_version
+ return len(self.agents) > 0 and self.agents[0].version > base_version
def _filter_blacklisted_agents(self):
self.agents = [agent for agent in self.agents if not agent.is_blacklisted]
| [2.1-selfupdate] launched .egg exits
The launched update (.egg package) discovers an update and exits with exitcode=0. This keeps going on forever
When I run it manually:
```
$ python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers
2016/07/08 21:53:58.005925 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:53:58.008335 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:53:58.010850 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:53:58.092766 INFO Check for agent updates
2016/07/08 21:53:58.241843 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:53:58.243852 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:53:58.244492 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:53:58.244589 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
```
waagent.log
```
2016/07/08 21:51:34.771176 INFO Azure Linux Agent Version:2.1.5.rc4
2016/07/08 21:51:34.778980 INFO OS: ubuntu 16.04
2016/07/08 21:51:34.781242 INFO Python: 3.5.1
2016/07/08 21:51:34.784024 INFO Run daemon
2016/07/08 21:51:34.790937 INFO No RDMA handler exists for distro='Ubuntu' version='16.04'
2016/07/08 21:51:34.793643 INFO Clean protocol
2016/07/08 21:51:34.795094 INFO run Ubuntu provision handler
2016/07/08 21:51:34.836768 INFO Detect protocol endpoints
2016/07/08 21:51:34.848651 INFO Clean protocol
2016/07/08 21:51:34.859205 INFO WireServer endpoint is not found. Rerun dhcp handler
2016/07/08 21:51:34.872036 INFO test for route to 168.63.129.16
2016/07/08 21:51:34.882312 INFO route to 168.63.129.16 exists
2016/07/08 21:51:34.891349 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:34.916104 INFO Fabric preferred wire protocol version:2015-04-05
2016/07/08 21:51:34.928396 INFO Wire protocol version:2012-11-30
2016/07/08 21:51:34.937556 WARNING Server prefered version:2015-04-05
2016/07/08 21:51:39.307372 INFO Start env monitor service.
2016/07/08 21:51:39.307245 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/08 21:51:39.321421 INFO Configure routes
2016/07/08 21:51:39.334137 INFO Gateway:None
2016/07/08 21:51:39.361754 INFO Routes:None
2016/07/08 21:51:39.381291 INFO RDMA capabilities are not enabled, skipping
2016/07/08 21:51:39.409449 INFO Agent WALinuxAgent-2.1.5.rc4 launched with command 'python -u /usr/sbin/waagent -run-exthandlers'
2016/07/08 21:51:39.412830 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.rc4 launched with command 'python -u /usr/sbin/waagent -run-exthandlers'
2016/07/08 21:51:39.804282 INFO Agent WALinuxAgent-2.1.5.rc4 is running as the current agent
2016/07/08 21:51:39.822824 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.rc4 is running as the current agent
2016/07/08 21:51:39.857494 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:39.885288 INFO Check for agent updates
2016/07/08 21:51:39.965964 INFO Initiating download of Agent WALinuxAgent-2.1.5.1
2016/07/08 21:51:39.981689 INFO Event: name=WALinuxAgent, op=, message=Initiating download of Agent WALinuxAgent-2.1.5.1
2016/07/08 21:51:40.041793 INFO Unpacking agent package WALinuxAgent-2.1.5.1
2016/07/08 21:51:40.064324 INFO Agent WALinuxAgent-2.1.5.1 successfully unpacked
2016/07/08 21:51:40.077642 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.1 successfully unpacked
2016/07/08 21:51:40.108340 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:40.139217 INFO Agent WALinuxAgent-2.1.5.1 downloaded successfully
2016/07/08 21:51:40.155397 INFO Event: name=WALinuxAgent, op=Install, message=Agent WALinuxAgent-2.1.5.1 downloaded successfully
2016/07/08 21:51:40.178144 INFO Initiating download of Agent WALinuxAgent-2.1.5.3
2016/07/08 21:51:40.195989 INFO Event: name=WALinuxAgent, op=, message=Initiating download of Agent WALinuxAgent-2.1.5.3
2016/07/08 21:51:40.277986 INFO Unpacking agent package WALinuxAgent-2.1.5.3
2016/07/08 21:51:40.294587 INFO Agent WALinuxAgent-2.1.5.3 successfully unpacked
2016/07/08 21:51:40.307226 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 successfully unpacked
2016/07/08 21:51:40.329189 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:40.343945 INFO Agent WALinuxAgent-2.1.5.3 downloaded successfully
2016/07/08 21:51:40.354808 INFO Event: name=WALinuxAgent, op=Install, message=Agent WALinuxAgent-2.1.5.3 downloaded successfully
2016/07/08 21:51:40.377161 INFO Agent WALinuxAgent-2.1.5.rc4 discovered agent update and will exit
2016/07/08 21:51:40.392069 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.rc4 discovered agent update and will exit
2016/07/08 21:51:40.443552 INFO Agent WALinuxAgent-2.1.5.rc4 launched with command 'python -u /usr/sbin/waagent -run-exthandlers' returned 0
2016/07/08 21:51:40.455908 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc4 launched with command 'python -u /usr/sbin/waagent -run-exthandlers' returned 0
2016/07/08 21:51:40.458716 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:40.459940 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:40.518290 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:40.520979 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:41.085353 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:41.093568 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:41.095873 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:41.144559 INFO Check for agent updates
2016/07/08 21:51:41.219800 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:41.222907 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:41.235737 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:41.246668 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:41.292794 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:41.300068 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:41.341243 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:41.362334 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:41.858292 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:41.880601 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:41.909701 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:41.936837 INFO Check for agent updates
2016/07/08 21:51:41.979260 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:41.999360 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:42.027065 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:42.050964 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:42.112336 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:42.135428 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:42.167577 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:42.176380 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:42.765364 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:42.797351 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:42.816600 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:42.850009 INFO Check for agent updates
2016/07/08 21:51:42.901169 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:42.926215 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:42.961311 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:42.991006 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:43.056817 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:43.069516 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:43.142434 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:43.165251 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:43.799678 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:43.802183 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:43.810915 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:43.837580 INFO Check for agent updates
2016/07/08 21:51:43.886126 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:43.888686 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:43.890895 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:43.891648 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:43.951575 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:43.982332 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:44.013181 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:44.038561 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:44.530643 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:44.542035 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:44.544212 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:44.572049 INFO Check for agent updates
2016/07/08 21:51:44.601699 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:44.604319 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:44.614998 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:44.615744 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:44.663500 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:44.683130 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:44.717203 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:44.717801 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:45.327595 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:45.355741 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:45.378140 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:45.425207 INFO Check for agent updates
2016/07/08 21:51:45.511625 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:45.532343 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:45.551889 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:45.572167 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:45.634632 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:45.637357 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:45.730332 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:45.767070 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:46.357507 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:46.365985 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:46.368831 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:46.388904 INFO Check for agent updates
2016/07/08 21:51:46.455008 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:46.457944 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:46.469406 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:46.472261 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:46.533666 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:46.574132 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:46.621227 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:46.622106 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:47.197051 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:47.211052 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:47.228764 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:47.254180 INFO Check for agent updates
2016/07/08 21:51:47.287889 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:47.307351 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:47.323870 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:47.336948 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:47.387282 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:47.389821 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:47.433157 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:47.435597 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:47.918501 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:47.927511 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:47.930193 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:47.955407 INFO Check for agent updates
2016/07/08 21:51:47.997328 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:48.000749 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:48.004041 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:48.011916 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:48.060976 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:48.063291 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:48.117204 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:48.139992 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:48.603085 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:48.605763 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:48.615658 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:48.644782 INFO Check for agent updates
2016/07/08 21:51:48.684577 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:48.687354 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:48.689722 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:48.690430 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:48.743235 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:48.764309 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:48.793241 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:48.825085 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:49.362306 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:49.374656 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:49.395527 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:49.417136 INFO Check for agent updates
2016/07/08 21:51:49.466363 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:49.484546 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:49.502543 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:49.514883 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:49.562007 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:49.565266 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:49.609194 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:49.628816 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:50.168861 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:50.171438 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:50.180856 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:50.213828 INFO Check for agent updates
2016/07/08 21:51:50.248302 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:50.252061 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:50.262508 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:50.263335 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:50.318705 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:50.342626 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:50.377173 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:50.401987 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:50.975030 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:50.987630 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:51.018780 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:51.051524 INFO Check for agent updates
2016/07/08 21:51:51.084308 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:51.108080 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:51.129931 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:51.146710 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:51.208489 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:51.209322 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:51.256133 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:51.258511 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:51.824414 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:51.836174 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:51.853035 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:51.875222 INFO Check for agent updates
2016/07/08 21:51:51.908704 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:51.923015 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:51.938682 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:51.950739 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:51.998146 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:52.000581 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:52.038076 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:52.040270 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:52.586988 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:52.594895 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:52.607576 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:52.628226 INFO Check for agent updates
2016/07/08 21:51:52.665475 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:52.668298 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:52.670476 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:52.680786 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:52.735177 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:52.742582 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:52.805232 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:52.828536 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:53.334569 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:53.342464 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:53.345938 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:53.368600 INFO Check for agent updates
2016/07/08 21:51:53.404426 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:53.407243 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:53.409710 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:53.411951 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:53.466092 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:53.484793 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:53.513155 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:53.549833 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:54.086244 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:54.100320 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:54.118185 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:54.141337 INFO Check for agent updates
2016/07/08 21:51:54.179400 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:54.195961 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:54.212276 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:54.225937 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:54.274542 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:54.277141 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:54.321209 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:54.339802 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:54.820216 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:54.872161 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 is running as the current agent
2016/07/08 21:51:54.876294 INFO Wire server endpoint:168.63.129.16
2016/07/08 21:51:54.912602 INFO Check for agent updates
2016/07/08 21:51:54.947001 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.1/HandlerManifest.json
2016/07/08 21:51:54.955836 INFO Loading Agent manifest from /var/lib/waagent/WALinuxAgent-2.1.5.3/HandlerManifest.json
2016/07/08 21:51:54.970085 INFO Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:54.972439 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 discovered agent update and will exit
2016/07/08 21:51:55.028333 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:55.048616 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers' returned 0
2016/07/08 21:51:55.077202 INFO Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:55.079855 INFO Event: name=WALinuxAgent, op=, message=Agent WALinuxAgent-2.1.5.3 launched with command 'python -u bin/WALinuxAgent-2.1.5.rc3-py2.7.egg -run-exthandlers'
2016/07/08 21:51:55.686370 INFO Agent WALinuxAgent-2.1.5.3 is running as the current agen
``` | Azure/WALinuxAgent | diff --git a/tests/ga/test_update.py b/tests/ga/test_update.py
index 3c81437c..cfa537e7 100644
--- a/tests/ga/test_update.py
+++ b/tests/ga/test_update.py
@@ -577,6 +577,7 @@ class TestUpdate(UpdateTestCase):
def _test_ensure_latest_agent(
self,
+ base_version=FlexibleVersion(AGENT_VERSION),
protocol=None,
versions=None):
@@ -591,7 +592,7 @@ class TestUpdate(UpdateTestCase):
self.update_handler.protocol_util = protocol
conf.get_autoupdate_gafamily = Mock(return_value=protocol.family)
- return self.update_handler._ensure_latest_agent()
+ return self.update_handler._ensure_latest_agent(base_version=base_version)
def test_ensure_latest_agent_returns_true_on_first_use(self):
self.assertEqual(None, self.update_handler.last_etag)
@@ -633,7 +634,13 @@ class TestUpdate(UpdateTestCase):
self.assertFalse(self._test_ensure_latest_agent())
return
- def test_ensure_latest_agent_skips_when_no_new_versions(self):
+ def test_ensure_latest_agent_skips_if_when_no_new_versions(self):
+ self.prepare_agents()
+ base_version = self.agent_versions()[0] + 1
+ self.assertFalse(self._test_ensure_latest_agent(base_version=base_version))
+ return
+
+ def test_ensure_latest_agent_skips_when_no_versions(self):
self.assertFalse(self._test_ensure_latest_agent(protocol=ProtocolMock()))
return
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pyasn1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@92091140c2a7378c1a01fe8526800af912d93c49#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/ga/test_update.py::TestUpdate::test_ensure_lastest_agent_purges_old_agents",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_ignores_old_agents",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_returns_true_on_first_use",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_too_frequent",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_when_no_new_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_etag_matches",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_no_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_updates_are_disabled",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_sorts"
] | [] | [
"tests/ga/test_update.py::TestGuestAgentError::test_clear",
"tests/ga/test_update.py::TestGuestAgentError::test_creation",
"tests/ga/test_update.py::TestGuestAgentError::test_load_preserves_error_state",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure_permanent",
"tests/ga/test_update.py::TestGuestAgentError::test_save",
"tests/ga/test_update.py::TestGuestAgent::test_clear_error",
"tests/ga/test_update.py::TestGuestAgent::test_creation",
"tests/ga/test_update.py::TestGuestAgent::test_download",
"tests/ga/test_update.py::TestGuestAgent::test_download_fail",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_download_skips_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_download_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_load_manifest_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_unpack_fails",
"tests/ga/test_update.py::TestGuestAgent::test_is_available",
"tests/ga/test_update.py::TestGuestAgent::test_is_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_is_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_empty",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_malformed",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_missing",
"tests/ga/test_update.py::TestGuestAgent::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgent::test_unpack",
"tests/ga/test_update.py::TestGuestAgent::test_unpack_fail",
"tests/ga/test_update.py::TestUpdate::test_filter_blacklisted_agents",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_no_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skip_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skips_unavailable",
"tests/ga/test_update.py::TestUpdate::test_load_agents",
"tests/ga/test_update.py::TestUpdate::test_load_agents_does_not_reload",
"tests/ga/test_update.py::TestUpdate::test_load_agents_sorts",
"tests/ga/test_update.py::TestUpdate::test_purge_agents",
"tests/ga/test_update.py::TestUpdate::test_run",
"tests/ga/test_update.py::TestUpdate::test_run_keeps_running",
"tests/ga/test_update.py::TestUpdate::test_run_latest",
"tests/ga/test_update.py::TestUpdate::test_run_latest_defaults_to_current",
"tests/ga/test_update.py::TestUpdate::test_run_latest_exception_blacklists",
"tests/ga/test_update.py::TestUpdate::test_run_latest_forwards_output",
"tests/ga/test_update.py::TestUpdate::test_run_latest_missing_code_marks_failures",
"tests/ga/test_update.py::TestUpdate::test_run_latest_nonzero_code_marks_failures",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_update_available",
"tests/ga/test_update.py::TestUpdate::test_set_agents"
] | [] | Apache License 2.0 | 616 | 479 | [
"azurelinuxagent/ga/update.py"
] |
|
falconry__falcon-843 | 0f64e94a5ab49b58b9efa8462fe8c0369428243d | 2016-07-09 18:14:43 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage][cc-pull] is **100%**
> Merging [#843][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #843 diff @@
====================================
Files 29 29
Lines 1813 1817 +4
Methods 0 0
Messages 0 0
Branches 305 306 +1
====================================
+ Hits 1813 1817 +4
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last updated by [0f64e94...130da74][cc-compare]
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/falconry/falcon/compare/0f64e94a5ab49b58b9efa8462fe8c0369428243d...130da74f772ad772b74f74190cc85510333eefef?src=pr
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/843?src=pr | diff --git a/falcon/api.py b/falcon/api.py
index 3b5539b..557de77 100644
--- a/falcon/api.py
+++ b/falcon/api.py
@@ -40,17 +40,17 @@ class API(object):
class ExampleComponent(object):
def process_request(self, req, resp):
- \"""Process the request before routing it.
+ \"\"\"Process the request before routing it.
Args:
req: Request object that will eventually be
routed to an on_* responder method.
resp: Response object that will be routed to
the on_* responder.
- \"""
+ \"\"\"
def process_resource(self, req, resp, resource, params):
- \"""Process the request and resource *after* routing.
+ \"\"\"Process the request and resource *after* routing.
Note:
This method is only called when the request matches
@@ -69,10 +69,10 @@ class API(object):
template fields, that will be passed to the
resource's responder method as keyword
arguments.
- \"""
+ \"\"\"
def process_response(self, req, resp, resource)
- \"""Post-processing of the response (after routing).
+ \"\"\"Post-processing of the response (after routing).
Args:
req: Request object.
@@ -80,7 +80,7 @@ class API(object):
resource: Resource object to which the request was
routed. May be None if no route was found
for the request.
- \"""
+ \"\"\"
See also :ref:`Middleware <middleware>`.
@@ -249,6 +249,10 @@ class API(object):
def add_route(self, uri_template, resource, *args, **kwargs):
"""Associates a templatized URI path with a resource.
+ Note:
+ The following information describes the behavior of
+ Falcon's default router.
+
A resource is an instance of a class that defines various
"responder" methods, one for each HTTP method the resource
allows. Responder names start with `on_` and are named according to
@@ -272,6 +276,10 @@ class API(object):
field names defined in the template. A field expression consists
of a bracketed field name.
+ Note:
+ Since field names correspond to argument names in responder
+ methods, they must be valid Python identifiers.
+
For example, given the following template::
/user/{name}
@@ -281,8 +289,8 @@ class API(object):
def on_put(self, req, resp, name):
pass
- Individual path segments may contain one or more field expressions.
- For example::
+ Individual path segments may contain one or more field
+ expressions::
/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}
diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py
index 057cf6e..f4e3058 100644
--- a/falcon/routing/compiled.py
+++ b/falcon/routing/compiled.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import keyword
import re
@@ -41,14 +42,18 @@ class CompiledRouter(object):
def add_route(self, uri_template, method_map, resource):
"""Adds a route between URI path template and resource."""
- # Can't start with a number, since these eventually get passed as
- # args to on_* responders
- if re.search('{\d', uri_template):
- raise ValueError('Field names may not start with a digit.')
if re.search('\s', uri_template):
raise ValueError('URI templates may not include whitespace.')
+ # NOTE(kgriffs): Ensure fields are valid Python identifiers,
+ # since they will be passed as kwargs to responders.
+ fields = re.findall('{([^}]*)}', uri_template)
+ for field in fields:
+ is_identifier = re.match('[A-Za-z_][A-Za-z0-9_]+$', field)
+ if not is_identifier or field in keyword.kwlist:
+ raise ValueError('Field names must be valid identifiers.')
+
path = uri_template.strip('/').split('/')
def insert(nodes, path_index=0):
| Add test for hyphens and other non-arg-friendly chars in URI template field names
Currently we only check up-front in `add_route()` for field names that start with a digit or contain whitespace. This can lead to cryptic errors later on when requests are routed to responders. | falconry/falcon | diff --git a/tests/test_default_router.py b/tests/test_default_router.py
index dec8a8e..9f3c6d5 100644
--- a/tests/test_default_router.py
+++ b/tests/test_default_router.py
@@ -137,13 +137,28 @@ class TestComplexRouting(testing.TestBase):
)
@ddt.data(
- '/repos/{org}/{repo}/compare/{simple-vs-complex}',
+ '/repos/{org}/{repo}/compare/{simple_vs_complex}',
'/repos/{complex}.{vs}.{simple}',
'/repos/{org}/{repo}/compare/{complex}:{vs}...{complex2}/full',
)
def test_non_collision(self, template):
self.router.add_route(template, {}, ResourceWithId(-1))
+ @ddt.data(
+ '/{}',
+ '/{9v}',
+ '/{@kgriffs}',
+ '/repos/{simple-thing}/etc',
+ '/repos/{or g}/{repo}/compare/{thing}',
+ '/repos/{org}/{repo}/compare/{}',
+ '/repos/{complex}.{}.{thing}',
+ '/repos/{complex}.{9v}.{thing}/etc',
+ )
+ def test_invalid_field_name(self, template):
+ self.assertRaises(
+ ValueError,
+ self.router.add_route, template, {}, ResourceWithId(-1))
+
def test_dump(self):
print(self.router._src)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"requests",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@0f64e94a5ab49b58b9efa8462fe8c0369428243d#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_1____",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_3____kgriffs_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_6__repos__org___repo__compare___",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_7__repos__complex______thing_"
] | [] | [
"tests/test_default_router.py::TestRegressionCases::test_recipes",
"tests/test_default_router.py::TestRegressionCases::test_versioned_url",
"tests/test_default_router.py::TestComplexRouting::test_collision_1__teams__collision_",
"tests/test_default_router.py::TestComplexRouting::test_collision_2__emojis_signs__id_too_",
"tests/test_default_router.py::TestComplexRouting::test_collision_3__repos__org___repo__compare__complex___vs_____complex2___collision_",
"tests/test_default_router.py::TestComplexRouting::test_complex_1______5_",
"tests/test_default_router.py::TestComplexRouting::test_complex_2____full___10_",
"tests/test_default_router.py::TestComplexRouting::test_complex_3____part___15_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_1______16_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_2____full___17_",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_1__teams",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_2__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_3__gists",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_4__gists_42",
"tests/test_default_router.py::TestComplexRouting::test_dump",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_2___9v_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_4__repos__simple_thing__etc",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_5__repos__or_g___repo__compare__thing_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_8__repos__complex___9v___thing__etc",
"tests/test_default_router.py::TestComplexRouting::test_literal",
"tests/test_default_router.py::TestComplexRouting::test_literal_segment",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_01____teams_default___19_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_02____teams_default_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_03____teams_foo___6_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_04____teams_foo_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_05____gists_first___20_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_06____gists_first_raw___18_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_07____gists_first_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_08____gists_1776_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_09____emojis_signs_78___13_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_10____emojis_signs_78_small___22_",
"tests/test_default_router.py::TestComplexRouting::test_malformed_pattern",
"tests/test_default_router.py::TestComplexRouting::test_multivar",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_1__repos__org___repo__compare__simple_vs_complex_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_2__repos__complex___vs___simple_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_3__repos__org___repo__compare__complex___vs_____complex2__full",
"tests/test_default_router.py::TestComplexRouting::test_not_found_01__this_does_not_exist",
"tests/test_default_router.py::TestComplexRouting::test_not_found_02__user_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_03__repos_racker_falcon_compare_johndoe_master___janedoe_dev_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_04__teams",
"tests/test_default_router.py::TestComplexRouting::test_not_found_05__teams_42_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_06__teams_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_07__teams_42_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_08__teams_default_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_09__teams_default_members_thing_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_10__teams_default_members_thing_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_11__teams_default_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_12__teams_default_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_13__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_not_found_14__emojis_signs_0_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_15__emojis_signs_0_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_16__emojis_signs_0_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_17__emojis_signs_20_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_18__emojis_signs_20_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_19__emojis_signs_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_20__emojis_signs_78_undefined",
"tests/test_default_router.py::TestComplexRouting::test_override",
"tests/test_default_router.py::TestComplexRouting::test_subsegment_not_found",
"tests/test_default_router.py::TestComplexRouting::test_variable"
] | [] | Apache License 2.0 | 618 | 1,025 | [
"falcon/api.py",
"falcon/routing/compiled.py"
] |
lepture__mistune-105 | 715046fe4b5f3642b24edc7fdd62ba8915228498 | 2016-07-11 21:55:35 | 715046fe4b5f3642b24edc7fdd62ba8915228498 | nsfmc: Hello, i'm not sure what the protocol is for this.
I ran into this bug from a strangely formed embed code i got from gyfcat, but i suspect others could run into it too.
the `test_safe_links` test is currently failing in master, but the test that this adds passes.
Let me know if you have any questions!
lepture: @nsfmc a test case failed.
nsfmc: Hi @lepture, I believe that test case is failing in master as well. If there is a branch I can fork off of that is building successfully I'm happy to rebase my pull request off that one. I'm afraid I don't understand well enough the changes needed to fix the link sanitization issues :( | diff --git a/mistune.py b/mistune.py
index b341cf2..67b0880 100644
--- a/mistune.py
+++ b/mistune.py
@@ -74,7 +74,7 @@ def escape(text, quote=False, smart_amp=True):
def escape_link(url, **kwargs):
"""Remove dangerous URL schemes like javascript: and escape afterwards."""
- lower_url = url.lower()
+ lower_url = url.lower().strip('\x00\x1a \n\r\t')
for scheme in _scheme_blacklist:
if lower_url.startswith(scheme):
return ''
@@ -157,7 +157,7 @@ class BlockGrammar(object):
block_html = re.compile(
r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
r'<!--[\s\S]*?-->',
- r'<(%s)((?:%s)*?)>([\s\S]+?)<\/\1>' % (_block_tag, _valid_attr),
+ r'<(%s)((?:%s)*?)\s*>([\s\S]+?)<\/\1>' % (_block_tag, _valid_attr),
r'<%s(?:%s)*?\s*\/?>' % (_block_tag, _valid_attr),
)
)
@@ -447,7 +447,7 @@ class InlineGrammar(object):
inline_html = re.compile(
r'^(?:%s|%s|%s)' % (
r'<!--[\s\S]*?-->',
- r'<(\w+%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (_valid_end, _valid_attr),
+ r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (_valid_end, _valid_attr),
r'<\w+%s(?:%s)*?\s*\/?>' % (_valid_end, _valid_attr),
)
)
| parsing html can fail to close tag correctly
noticed while debugging an issue in https://github.com/lektor/lektor/issues/241#issuecomment-231819694 i noticed that raw html handling can be broken if there is a space before the closing `>` of the opening tag. i.e.
```
In [38]: mistune.markdown('<iframe src="http://gfycat.com" ></iframe>', escape=False)
Out[38]: '<p><iframe src="http://gfycat.com" ></iframe></p>\n'
```
but
```
In [39]: mistune.markdown('<iframe src="http://gfycat.com"></iframe>', escape=False)
Out[39]: '<p><iframe src="http://gfycat.com"></iframe></p>\n'
```
in the case of the iframe, this causes the opening iframe to gobble up the rest of the page content. this seems to hold for any tag, though, e.g.
```
In [40]: mistune.markdown('<a href="http://gfycat.com" ></a>', escape=False)
Out[40]: '<p><a href="http://gfycat.com" ></a></p>\n'
```
| lepture/mistune | diff --git a/tests/test_extra.py b/tests/test_extra.py
index 7318444..07c1ca3 100644
--- a/tests/test_extra.py
+++ b/tests/test_extra.py
@@ -77,6 +77,12 @@ def test_parse_inline_html():
assert 'href' not in ret
+def test_block_html():
+ ret = mistune.markdown(
+ '<div ></div>', escape=False
+ )
+ assert '<div ></div>' in ret
+
def test_parse_block_html():
ret = mistune.markdown(
'<div>**foo**</div>', parse_block_html=True, escape=False
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/lepture/mistune.git@715046fe4b5f3642b24edc7fdd62ba8915228498#egg=mistune
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: mistune
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
prefix: /opt/conda/envs/mistune
| [
"tests/test_extra.py::test_safe_links",
"tests/test_extra.py::test_block_html"
] | [] | [
"tests/test_extra.py::test_escape",
"tests/test_extra.py::test_linebreak",
"tests/test_extra.py::test_skip_style",
"tests/test_extra.py::test_use_xhtml",
"tests/test_extra.py::test_parse_inline_html",
"tests/test_extra.py::test_parse_block_html",
"tests/test_extra.py::test_trigger_more_cases",
"tests/test_extra.py::test_not_escape_block_tags",
"tests/test_extra.py::test_not_escape_inline_tags",
"tests/test_extra.py::test_hard_wrap_renderer"
] | [] | BSD 3-Clause "New" or "Revised" License | 621 | 449 | [
"mistune.py"
] |
dpkp__kafka-python-756 | 7a350e5fcf33f49094c820ba88b9cee4aeae6e12 | 2016-07-12 15:39:23 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/kafka/coordinator/base.py b/kafka/coordinator/base.py
index 168115a..25dd000 100644
--- a/kafka/coordinator/base.py
+++ b/kafka/coordinator/base.py
@@ -50,6 +50,7 @@ class BaseCoordinator(object):
'session_timeout_ms': 30000,
'heartbeat_interval_ms': 3000,
'retry_backoff_ms': 100,
+ 'api_version': (0, 9),
}
def __init__(self, client, **configs):
@@ -194,6 +195,14 @@ class BaseCoordinator(object):
"""
while self.coordinator_unknown():
+ # Prior to 0.8.2 there was no group coordinator
+ # so we will just pick a node at random and treat
+ # it as the "coordinator"
+ if self.config['api_version'] < (0, 8, 2):
+ self.coordinator_id = self._client.least_loaded_node()
+ self._client.ready(self.coordinator_id)
+ continue
+
future = self._send_group_coordinator_request()
self._client.poll(future=future)
diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py
index 196bcc7..083a36a 100644
--- a/kafka/coordinator/consumer.py
+++ b/kafka/coordinator/consumer.py
@@ -100,6 +100,12 @@ class ConsumerCoordinator(BaseCoordinator):
interval = self.config['auto_commit_interval_ms'] / 1000.0
self._auto_commit_task = AutoCommitTask(weakref.proxy(self), interval)
+ # When using broker-coordinated consumer groups, auto-commit will
+ # be automatically enabled on group join (see _on_join_complete)
+ # Otherwise, we should enable now b/c there will be no group join
+ if self.config['api_version'] < (0, 9):
+ self._auto_commit_task.enable()
+
self._sensors = ConsumerCoordinatorMetrics(metrics, metric_group_prefix,
self._subscription)
@@ -293,8 +299,7 @@ class ConsumerCoordinator(BaseCoordinator):
return {}
while True:
- if self.config['api_version'] >= (0, 8, 2):
- self.ensure_coordinator_known()
+ self.ensure_coordinator_known()
# contact coordinator to fetch committed offsets
future = self._send_offset_fetch_request(partitions)
@@ -356,8 +361,7 @@ class ConsumerCoordinator(BaseCoordinator):
return
while True:
- if self.config['api_version'] >= (0, 8, 2):
- self.ensure_coordinator_known()
+ self.ensure_coordinator_known()
future = self._send_offset_commit_request(offsets)
self._client.poll(future=future)
@@ -415,14 +419,10 @@ class ConsumerCoordinator(BaseCoordinator):
log.debug('No offsets to commit')
return Future().success(True)
- if self.config['api_version'] >= (0, 8, 2):
- if self.coordinator_unknown():
- return Future().failure(Errors.GroupCoordinatorNotAvailableError)
- node_id = self.coordinator_id
- else:
- node_id = self._client.least_loaded_node()
- if node_id is None:
- return Future().failure(Errors.NoBrokersAvailable)
+ elif self.coordinator_unknown():
+ return Future().failure(Errors.GroupCoordinatorNotAvailableError)
+
+ node_id = self.coordinator_id
# create the offset commit request
offset_data = collections.defaultdict(dict)
@@ -571,14 +571,10 @@ class ConsumerCoordinator(BaseCoordinator):
if not partitions:
return Future().success({})
- if self.config['api_version'] >= (0, 8, 2):
- if self.coordinator_unknown():
- return Future().failure(Errors.GroupCoordinatorNotAvailableError)
- node_id = self.coordinator_id
- else:
- node_id = self._client.least_loaded_node()
- if node_id is None:
- return Future().failure(Errors.NoBrokersAvailable)
+ elif self.coordinator_unknown():
+ return Future().failure(Errors.GroupCoordinatorNotAvailableError)
+
+ node_id = self.coordinator_id
# Verify node is ready
if not self._client.ready(node_id):
| Support KafkaConsumer auto-commit with 0.8 brokers
kafka 0.8.2 kafka-python 1.1.1
when enable auto_commit, an AutoCommitTask instance will be created
but when Enable the AutoCommitTask instance?
in the code , only find the function _on_join_complete will enable the AutoCommitTask instance
```
def _on_join_complete(self, generation, member_id, protocol,
member_assignment_bytes):
...
# restart the autocommit task if needed
if self._auto_commit_task:
self._auto_commit_task.enable()
```
but api_version < 0.9, the function _on_join_complete never be called | dpkp/kafka-python | diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index 15b915d..735d278 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -425,8 +425,7 @@ def test_send_offset_commit_request_fail(patched_coord, offsets):
((0, 9), OffsetCommitRequest[2])])
def test_send_offset_commit_request_versions(patched_coord, offsets,
api_version, req_type):
- # assuming fixture sets coordinator=0, least_loaded_node=1
- expect_node = 0 if api_version >= (0, 8, 2) else 1
+ expect_node = 0
patched_coord.config['api_version'] = api_version
patched_coord._send_offset_commit_request(offsets)
@@ -522,7 +521,7 @@ def test_send_offset_fetch_request_fail(patched_coord, partitions):
def test_send_offset_fetch_request_versions(patched_coord, partitions,
api_version, req_type):
# assuming fixture sets coordinator=0, least_loaded_node=1
- expect_node = 0 if api_version >= (0, 8, 2) else 1
+ expect_node = 0
patched_coord.config['api_version'] = api_version
patched_coord._send_offset_fetch_request(partitions)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash",
"six"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/dpkp/kafka-python.git@7a350e5fcf33f49094c820ba88b9cee4aeae6e12#egg=kafka_python
lz4tools==1.3.1.2
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
xxhash==3.2.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- cramjam==2.5.0
- lz4tools==1.3.1.2
- mock==5.2.0
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- tomli==1.2.3
- xxhash==3.2.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]"
] | [] | [
"test/test_coordinator.py::test_init",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]",
"test/test_coordinator.py::test_protocol_type",
"test/test_coordinator.py::test_group_protocols",
"test/test_coordinator.py::test_pattern_subscription[api_version0]",
"test/test_coordinator.py::test_pattern_subscription[api_version1]",
"test/test_coordinator.py::test_pattern_subscription[api_version2]",
"test/test_coordinator.py::test_pattern_subscription[api_version3]",
"test/test_coordinator.py::test_lookup_assignor",
"test/test_coordinator.py::test_join_complete",
"test/test_coordinator.py::test_subscription_listener",
"test/test_coordinator.py::test_subscription_listener_failure",
"test/test_coordinator.py::test_perform_assignment",
"test/test_coordinator.py::test_on_join_prepare",
"test/test_coordinator.py::test_need_rejoin",
"test/test_coordinator.py::test_refresh_committed_offsets_if_needed",
"test/test_coordinator.py::test_fetch_committed_offsets",
"test/test_coordinator.py::test_close",
"test/test_coordinator.py::test_commit_offsets_async",
"test/test_coordinator.py::test_commit_offsets_sync",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-False-None-False-False-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-error2-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-True-error3-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-False-True]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-None-True-None-False-False-True-False]",
"test/test_coordinator.py::test_send_offset_commit_request_fail",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]",
"test/test_coordinator.py::test_send_offset_commit_request_failure",
"test/test_coordinator.py::test_send_offset_commit_request_success",
"test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response7-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response8-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response9-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_send_offset_fetch_request_fail",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_failure",
"test/test_coordinator.py::test_send_offset_fetch_request_success",
"test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False-False]",
"test/test_coordinator.py::test_heartbeat"
] | [] | Apache License 2.0 | 623 | 1,025 | [
"kafka/coordinator/base.py",
"kafka/coordinator/consumer.py"
] |
|
simphony__simphony-remote-89 | cbbed1d4e4f29aa4ccc3719d52505679abcd25c3 | 2016-07-12 16:43:10 | 61ec23ffe44463cbc41f6fa54b4247963093ed79 | diff --git a/remoteappmanager/application.py b/remoteappmanager/application.py
index 89ba765..b5131db 100644
--- a/remoteappmanager/application.py
+++ b/remoteappmanager/application.py
@@ -125,7 +125,7 @@ class Application(web.Application, LoggingMixin):
"""Initializes the user at the database level."""
user_name = self.command_line_config.user
user = User(name=user_name)
- user.orm_user = self.db.get_user_by_name(user_name)
+ user.account = self.db.get_user_by_name(user_name)
return user
# Public
diff --git a/remoteappmanager/db/csv_db.py b/remoteappmanager/db/csv_db.py
index 3c00a2e..99227dc 100644
--- a/remoteappmanager/db/csv_db.py
+++ b/remoteappmanager/db/csv_db.py
@@ -158,7 +158,7 @@ class CSVAccounting(ABCAccounting):
Parameters
----------
- user : remoteappmanager.db.csv_db.CSVUser
+ user : CSVUser
Same type as the result of `get_user_by_name`
Returns
diff --git a/remoteappmanager/db/interfaces.py b/remoteappmanager/db/interfaces.py
index e3c7485..4f09018 100644
--- a/remoteappmanager/db/interfaces.py
+++ b/remoteappmanager/db/interfaces.py
@@ -55,7 +55,7 @@ class ABCApplicationPolicy(metaclass=ABCMeta):
class ABCAccounting(metaclass=ABCMeta):
- """ Main accounting interface required by the single User application.
+ """ Main accounting interface required by the single user application.
"""
@abstractmethod
@@ -69,16 +69,17 @@ class ABCAccounting(metaclass=ABCMeta):
Returns
-------
- a User-like object that the Database understands
+ account : opaque-type
+ an object that the database understands
"""
@abstractmethod
- def get_apps_for_user(self, user):
- """ Return an iterable of ApplicationConfig for a given User
+ def get_apps_for_user(self, account):
+ """ Return an iterable of ApplicationConfig for a given account
Parameters
----------
- user : User-like
+ account : opaque-type
Same type as the result of `get_user_by_name`
Returns
diff --git a/remoteappmanager/handlers/home_handler.py b/remoteappmanager/handlers/home_handler.py
index 0ec4c4e..9ba81f6 100644
--- a/remoteappmanager/handlers/home_handler.py
+++ b/remoteappmanager/handlers/home_handler.py
@@ -78,7 +78,7 @@ class HomeHandler(BaseHandler):
mapping_id = options["mapping_id"][0]
all_apps = self.application.db.get_apps_for_user(
- self.current_user.orm_user)
+ self.current_user.account)
choice = [(m_id, app, policy)
for m_id, app, policy in all_apps
@@ -90,9 +90,9 @@ class HomeHandler(BaseHandler):
_, app, policy = choice[0]
container = None
- orm_user = self.current_user.orm_user
+ user_name = self.current_user.name
try:
- container = yield self._start_container(orm_user,
+ container = yield self._start_container(user_name,
app,
policy,
mapping_id)
@@ -172,7 +172,7 @@ class HomeHandler(BaseHandler):
container_manager = self.application.container_manager
apps = self.application.db.get_apps_for_user(
- self.current_user.orm_user)
+ self.current_user.account)
images_info = []
@@ -230,18 +230,16 @@ class HomeHandler(BaseHandler):
return Container.from_docker_containers_dict(container_dict[0])
- # FIXME: The orm_user here requires any database implementation
- # to provide a user object with a name attribute
@gen.coroutine
- def _start_container(self, orm_user, app, policy, mapping_id):
+ def _start_container(self, user_name, app, policy, mapping_id):
"""Start the container. This method is a helper method that
works with low level data and helps in issuing the request to the
data container.
Parameters
----------
- orm_user : User
- database's user object (e.g. current_user.orm_user)
+ user_name : str
+ the user name to be associated with the container
app : ABCApplication
the application to start
@@ -254,7 +252,6 @@ class HomeHandler(BaseHandler):
Container
"""
- user_name = orm_user.name
image_name = app.image
mount_home = policy.allow_home
volume_spec = (policy.volume_source,
diff --git a/remoteappmanager/user.py b/remoteappmanager/user.py
index 50e3428..4820d5b 100644
--- a/remoteappmanager/user.py
+++ b/remoteappmanager/user.py
@@ -8,7 +8,5 @@ class User(HasTraits):
# The username as passed at the config line
name = Unicode()
- # FIXME: orm_user is Any to support other database implementation
-
#: Can be none if the username cannot be found in the database.
- orm_user = Any()
+ account = Any()
| User.orm_user should be renamed as User.account with an opaque type
The `orm_user` attribute of the singleuser app is an opaque object handled by the database's accounting.
It should be renamed as `account` and be given an opaque type.
| simphony/simphony-remote | diff --git a/tests/test_application.py b/tests/test_application.py
index eee2f8b..c51080d 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -44,7 +44,7 @@ class TestApplication(TempMixin, testing.AsyncTestCase):
self.assertIsNotNone(app.container_manager)
self.assertIsNotNone(app.hub)
self.assertEqual(app.user.name, "username")
- self.assertEqual(app.user.orm_user, None)
+ self.assertEqual(app.user.account, None)
# FIXME: Some of these tests are the same and should be refactored
@@ -89,4 +89,4 @@ class TestApplicationWithCSV(TempMixin, testing.AsyncTestCase):
self.assertIsNotNone(app.user)
self.assertEqual(app.user.name, "username")
- self.assertIsInstance(app.user.orm_user, test_csv_db.CSVUser)
+ self.assertIsInstance(app.user.account, test_csv_db.CSVUser)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 5
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@cbbed1d4e4f29aa4ccc3719d52505679abcd25c3#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/test_application.py::TestApplicationWithCSV::test_database_initialization"
] | [
"tests/test_application.py::TestApplication::test_initialization"
] | [
"tests/test_application.py::TestApplicationWithCSV::test_initialization"
] | [] | BSD 3-Clause "New" or "Revised" License | 624 | 1,276 | [
"remoteappmanager/application.py",
"remoteappmanager/db/csv_db.py",
"remoteappmanager/db/interfaces.py",
"remoteappmanager/handlers/home_handler.py",
"remoteappmanager/user.py"
] |
|
googlemaps__google-maps-services-python-139 | 8013de5d7c1b4867dcafb4449b97c1cebab33127 | 2016-07-13 00:34:27 | 2ccf0b2912019341aa60aeb65fc36ca6d9d02a56 | diff --git a/googlemaps/convert.py b/googlemaps/convert.py
index 1c2264e..6206cfa 100644
--- a/googlemaps/convert.py
+++ b/googlemaps/convert.py
@@ -220,9 +220,17 @@ def components(arg):
:rtype: basestring
"""
+
+ # Components may have multiple values per type, here we
+ # expand them into individual key/value items, eg:
+ # {"country": ["US", "AU"], "foo": 1} -> "country:AU", "country:US", "foo:1"
+ def expand(arg):
+ for k, v in arg.items():
+ for item in as_list(v):
+ yield "%s:%s" % (k, item)
+
if isinstance(arg, dict):
- arg = sorted(["%s:%s" % (k, arg[k]) for k in arg])
- return "|".join(arg)
+ return "|".join(sorted(expand(arg)))
raise TypeError(
"Expected a dict for components, "
| Allow user to append several values for same component filter type
It would be nice to allow several values for same component type.
Use case: You may want to filter sublocalities via locality component type ("locality matches against both locality and sublocality types") and in addition you may restrict result set by another locality.
Maybe we should extend converting components by defaultdict containing lists?
| googlemaps/google-maps-services-python | diff --git a/test/test_convert.py b/test/test_convert.py
index 851eda1..090a95f 100644
--- a/test/test_convert.py
+++ b/test/test_convert.py
@@ -91,6 +91,9 @@ class ConvertTest(unittest.TestCase):
c = {"country": "US", "foo": 1}
self.assertEqual("country:US|foo:1", convert.components(c))
+ c = {"country": ["US", "AU"], "foo": 1}
+ self.assertEqual("country:AU|country:US|foo:1", convert.components(c))
+
with self.assertRaises(TypeError):
convert.components("test")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"responses==0.3",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"test_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
-e git+https://github.com/googlemaps/google-maps-services-python.git@8013de5d7c1b4867dcafb4449b97c1cebab33127#egg=googlemaps
idna==3.10
iniconfig==2.1.0
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.10.0
responses==0.3.0
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: google-maps-services-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.10.0
- responses==0.3.0
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/google-maps-services-python
| [
"test/test_convert.py::ConvertTest::test_components"
] | [] | [
"test/test_convert.py::ConvertTest::test_as_list",
"test/test_convert.py::ConvertTest::test_bounds",
"test/test_convert.py::ConvertTest::test_join_list",
"test/test_convert.py::ConvertTest::test_latlng",
"test/test_convert.py::ConvertTest::test_location_list",
"test/test_convert.py::ConvertTest::test_polyline_decode",
"test/test_convert.py::ConvertTest::test_polyline_round_trip",
"test/test_convert.py::ConvertTest::test_time"
] | [] | Apache License 2.0 | 626 | 250 | [
"googlemaps/convert.py"
] |
|
Azure__WALinuxAgent-321 | 9930df6600a061bcc5618b427ffa23cff5943f46 | 2016-07-13 19:17:07 | 9930df6600a061bcc5618b427ffa23cff5943f46 | msftclas: Hi __@brendandixon__, I'm your friendly neighborhood Microsoft Pull Request Bot (You can call me MSBOT). Thanks for your contribution!
<span>You've already signed the contribution license agreement. Thanks!</span>
<p>The agreement was validated by Microsoft and real humans are currently evaluating your PR.</p>
TTYL, MSBOT;
brendandixon: @ahmetalpbalkan As we discussed, what you saw was caused by both of us actively issuing commands on the same VM.
hglkrijger: lgtm | diff --git a/azurelinuxagent/common/version.py b/azurelinuxagent/common/version.py
index 222a7208..357d30ff 100644
--- a/azurelinuxagent/common/version.py
+++ b/azurelinuxagent/common/version.py
@@ -50,7 +50,7 @@ def get_distro():
AGENT_NAME = "WALinuxAgent"
AGENT_LONG_NAME = "Azure Linux Agent"
-AGENT_VERSION = '2.1.5.rc6'
+AGENT_VERSION = '2.1.5.rc7'
AGENT_LONG_VERSION = "{0}-{1}".format(AGENT_NAME, AGENT_VERSION)
AGENT_DESCRIPTION = """\
The Azure Linux Agent supports the provisioning and running of Linux
diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py
index 6dae7aff..e89608a0 100644
--- a/azurelinuxagent/ga/update.py
+++ b/azurelinuxagent/ga/update.py
@@ -53,8 +53,10 @@ from azurelinuxagent.ga.exthandlers import HandlerManifest
AGENT_ERROR_FILE = "error.json" # File name for agent error record
AGENT_MANIFEST_FILE = "HandlerManifest.json"
+CHILD_HEALTH_INTERVAL = 15 * 60
CHILD_LAUNCH_INTERVAL = 5 * 60
CHILD_LAUNCH_RESTART_MAX = 3
+CHILD_POLL_INTERVAL = 60
MAX_FAILURE = 3 # Max failure allowed for agent before blacklisted
@@ -139,27 +141,50 @@ class UpdateHandler(object):
logger.info(u"Agent {0} launched with command '{1}'", agent_name, agent_cmd)
- ret = self.child_process.wait()
- if ret is None:
- ret = 1
+ ret = None
+ start_time = time.time()
+ while (time.time() - start_time) < CHILD_HEALTH_INTERVAL:
+ time.sleep(CHILD_POLL_INTERVAL)
+ ret = self.child_process.poll()
+ if ret is not None:
+ break
- msg = u"Agent {0} launched with command '{1}' returned code: {2}".format(
- agent_name,
- agent_cmd,
- ret)
- add_event(
- AGENT_NAME,
- version=agent_version,
- op=WALAEventOperation.Enable,
- is_success=(ret <= 0),
- message=msg)
+ if ret is None or ret <= 0:
+ msg = u"Agent {0} launched with command '{1}' is successfully running".format(
+ agent_name,
+ agent_cmd)
+ logger.info(msg)
+ add_event(
+ AGENT_NAME,
+ version=agent_version,
+ op=WALAEventOperation.Enable,
+ is_success=True,
+ message=msg)
- if ret > 0:
+ if ret is None:
+ ret = self.child_process.wait()
+
+ else:
+ msg = u"Agent {0} launched with command '{1}' failed with return code: {2}".format(
+ agent_name,
+ agent_cmd,
+ ret)
+ logger.warn(msg)
+ add_event(
+ AGENT_NAME,
+ version=agent_version,
+ op=WALAEventOperation.Enable,
+ is_success=False,
+ message=msg)
+
+ if ret is not None and ret > 0:
+ msg = u"Agent {0} launched with command '{1}' returned code: {2}".format(
+ agent_name,
+ agent_cmd,
+ ret)
logger.warn(msg)
if latest_agent is not None:
latest_agent.mark_failure()
- else:
- logger.info(msg)
except Exception as e:
msg = u"Agent {0} launched with command '{1}' failed with exception: {2}".format(
@@ -311,9 +336,7 @@ class UpdateHandler(object):
# Note:
# The code leaves on disk available, but blacklisted, agents so as to preserve the state.
# Otherwise, those agents could be again downloaded and inappropriately retried.
- self._set_agents([GuestAgent(pkg=pkg) for pkg in
- [pkg for pkg in pkg_list.versions
- if FlexibleVersion(pkg.version) > base_version]])
+ self._set_agents([GuestAgent(pkg=pkg) for pkg in pkg_list.versions])
self._purge_agents()
self._filter_blacklisted_agents()
@@ -469,7 +492,7 @@ class GuestAgent(object):
if is_fatal:
logger.warn(u"Agent {0} is permanently blacklisted", self.name)
except Exception as e:
- logger.warn(u"Agent {0} failed recording error state: {1}", ustr(e))
+ logger.warn(u"Agent {0} failed recording error state: {1}", self.name, ustr(e))
return
def _ensure_downloaded(self):
| [2.1.5.rc6] recycles indefinitely when child process crashes
When I use the 2.1.5.rc6 code (9930df6600a061bcc5618b427ffa23cff5943f46) and pull in the update for 2.1.5.rc6, it starts just fine and stops just fine (so #297 is resolved).
However, when I kill the child process with a SIGSEGV signal (pid 9215 in the process tree below)
```
# pstree -ap|grep py
|-python3,9211 -u /usr/sbin/waagent -daemon
| `-python3,9215 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers
| |-{python3},9234
| `-{python3},9237
```
the parent process remains alive, blacklists the rc6 agent, and tries to bring the original (on-disk) agent back — but that agent exits with code 0 immediately. The parent then keeps relaunching the original agent every second, since each launch exits right away.
<details>
<summary>waagent.log just after `kill -SEGV 9215`</summary>
```
2016/07/13 17:06:59.276675 INFO Agent WALinuxAgent-2.1.5.rc6 forwarding signal 15 to WALinuxAgent-2.1.5.6
2016/07/13 17:07:20.107186 INFO Azure Linux Agent Version:2.1.5.rc6
2016/07/13 17:07:20.119216 INFO OS: ubuntu 16.04
2016/07/13 17:07:20.143949 INFO Python: 3.5.1
2016/07/13 17:07:20.150224 INFO Run daemon
2016/07/13 17:07:20.155990 INFO No RDMA handler exists for distro='Ubuntu' version='16.04'
2016/07/13 17:07:20.170232 INFO Clean protocol
2016/07/13 17:07:20.176382 INFO run Ubuntu provision handler
2016/07/13 17:07:20.183917 INFO RDMA capabilities are not enabled, skipping
2016/07/13 17:07:20.196958 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from disk
2016/07/13 17:07:20.207621 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:07:20.231936 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:07:20.254939 INFO Agent WALinuxAgent-2.1.5.6 was previously downloaded - skipping download
2016/07/13 17:07:20.267972 INFO Agent WALinuxAgent-2.1.5.6 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.6/HandlerManifest.json
2016/07/13 17:07:20.292037 INFO Determined Agent WALinuxAgent-2.1.5.6 to be the latest agent
2016/07/13 17:07:20.312122 INFO Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers'
2016/07/13 17:07:20.825810 INFO Agent WALinuxAgent-2.1.5.6 is running as the goal state agent
2016/07/13 17:07:20.850573 INFO Detect protocol endpoints
2016/07/13 17:07:20.860236 INFO Clean protocol
2016/07/13 17:07:20.861832 INFO WireServer endpoint is not found. Rerun dhcp handler
2016/07/13 17:07:20.863408 INFO test for route to 168.63.129.16
2016/07/13 17:07:20.865187 INFO route to 168.63.129.16 exists
2016/07/13 17:07:20.871366 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:07:20.887742 INFO Fabric preferred wire protocol version:2015-04-05
2016/07/13 17:07:20.890022 INFO Wire protocol version:2012-11-30
2016/07/13 17:07:20.897740 WARNING Server prefered version:2015-04-05
2016/07/13 17:07:25.435991 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:07:25.438363 INFO Start env monitor service.
2016/07/13 17:07:25.490944 INFO Configure routes
2016/07/13 17:07:25.498195 INFO Gateway:None
2016/07/13 17:07:25.512737 INFO Routes:None
2016/07/13 17:07:25.544071 INFO Checking for agent family Test updates
2016/07/13 17:07:25.554014 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:07:25.627459 WARNING Running Agent 2.1.5.6 was not found in the agent manifest - adding to list
2016/07/13 17:07:25.653260 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:07:25.692572 INFO Handle extensions updates for incarnation 7
2016/07/13 17:07:25.704451 INFO [Microsoft.OSTCExtensions.CustomScriptForLinux-1.5.2.0] Expected handler state: uninstall
2016/07/13 17:07:25.711403 INFO [Microsoft.OSTCExtensions.CustomScriptForLinux-1.5.2.0] Current handler state is: NotInstalled
2016/07/13 17:08:07.532646 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' returned code: -11
2016/07/13 17:08:07.536272 INFO Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' returned code: -11
2016/07/13 17:08:07.537055 INFO Determined Agent WALinuxAgent-2.1.5.6 to be the latest agent
2016/07/13 17:08:07.708001 INFO Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers'
2016/07/13 17:08:08.279361 INFO Agent WALinuxAgent-2.1.5.6 is running as the goal state agent
2016/07/13 17:08:08.309054 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:08.325240 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:08.328355 INFO Start env monitor service.
2016/07/13 17:08:08.343749 INFO Configure routes
2016/07/13 17:08:08.348599 INFO Gateway:None
2016/07/13 17:08:08.350375 INFO Routes:None
2016/07/13 17:08:08.372374 INFO Checking for agent family Test updates
2016/07/13 17:08:08.376769 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:08.434136 WARNING Running Agent 2.1.5.6 was not found in the agent manifest - adding to list
2016/07/13 17:08:08.439013 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:08.454443 INFO Handle extensions updates for incarnation 7
2016/07/13 17:08:08.464798 INFO [Microsoft.OSTCExtensions.CustomScriptForLinux-1.5.2.0] Expected handler state: uninstall
2016/07/13 17:08:08.470012 INFO [Microsoft.OSTCExtensions.CustomScriptForLinux-1.5.2.0] Current handler state is: NotInstalled
2016/07/13 17:08:20.249774 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' returned code: -11
2016/07/13 17:08:20.260353 INFO Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' returned code: -11
2016/07/13 17:08:20.261798 INFO Determined Agent WALinuxAgent-2.1.5.6 to be the latest agent
2016/07/13 17:08:20.263587 WARNING Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' failed with exception: Agent WALinuxAgent-2.1.5.6 restarted more than 3 times in 300 seconds
2016/07/13 17:08:20.264827 ERROR Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.6 launched with command 'python3 -u bin/WALinuxAgent-2.1.5.rc6-py2.7.egg -run-exthandlers' failed with exception: Agent WALinuxAgent-2.1.5.6 restarted more than 3 times in 300 seconds
2016/07/13 17:08:20.273802 WARNING Agent WALinuxAgent-2.1.5.6 is permanently blacklisted
2016/07/13 17:08:20.274808 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:20.380124 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:20.663884 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:20.680595 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:20.695598 INFO Start env monitor service.
2016/07/13 17:08:20.695137 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:20.702653 INFO Configure routes
2016/07/13 17:08:20.716589 INFO Gateway:None
2016/07/13 17:08:20.723530 INFO Routes:None
2016/07/13 17:08:20.733594 INFO Checking for agent family Test updates
2016/07/13 17:08:20.746173 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:20.787213 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:20.798081 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:20.812881 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:20.853544 INFO Agent WALinuxAgent-2.1.5.4 downloaded from https://rdfepirv2sg1prdstr03.blob.core.windows.net/7d89d439b79f4452950452399add2c90/Microsoft.OSTCLinuxAgent__Test__2.1.5.4
2016/07/13 17:08:20.879837 INFO Agent WALinuxAgent-2.1.5.4 unpacked successfully to /var/lib/waagent/WALinuxAgent-2.1.5.4
2016/07/13 17:08:20.894670 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:20.910666 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:20.924276 INFO Agent WALinuxAgent-2.1.5.4 downloaded successfully
2016/07/13 17:08:20.934040 INFO Event: name=WALinuxAgent, op=Install, message=Agent WALinuxAgent-2.1.5.4 downloaded successfully
2016/07/13 17:08:20.949440 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:20.959615 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:20.974076 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:21.063149 INFO Agent WALinuxAgent-2.1.5.5 downloaded from https://rdfepirv2sg1prdstr03.blob.core.windows.net/7d89d439b79f4452950452399add2c90/Microsoft.OSTCLinuxAgent__Test__2.1.5.5
2016/07/13 17:08:21.090715 INFO Agent WALinuxAgent-2.1.5.5 unpacked successfully to /var/lib/waagent/WALinuxAgent-2.1.5.5
2016/07/13 17:08:21.105711 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:21.121287 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:21.134870 INFO Agent WALinuxAgent-2.1.5.5 downloaded successfully
2016/07/13 17:08:21.144520 INFO Event: name=WALinuxAgent, op=Install, message=Agent WALinuxAgent-2.1.5.5 downloaded successfully
2016/07/13 17:08:21.159014 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:21.169173 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:21.184254 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:21.194376 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:21.206521 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:21.254857 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:21.259406 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:21.269854 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:21.312086 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:21.571364 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:21.588498 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:21.601277 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:21.601958 INFO Start env monitor service.
2016/07/13 17:08:21.620125 INFO Configure routes
2016/07/13 17:08:21.627243 INFO Gateway:None
2016/07/13 17:08:21.638921 INFO Routes:None
2016/07/13 17:08:21.656949 INFO Checking for agent family Test updates
2016/07/13 17:08:21.661214 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:21.699835 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:21.704155 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:21.706388 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:21.715997 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:21.718021 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:21.727165 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:21.736498 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:21.738249 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:21.747362 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:21.756687 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:21.765777 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:21.772285 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:21.774713 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:21.782079 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:21.784885 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:21.882052 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:21.886683 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:21.887854 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:21.932144 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:22.277177 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:22.295077 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:22.316850 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:22.317995 INFO Start env monitor service.
2016/07/13 17:08:22.341761 INFO Configure routes
2016/07/13 17:08:22.350760 INFO Gateway:None
2016/07/13 17:08:22.357306 INFO Routes:None
2016/07/13 17:08:22.375500 INFO Checking for agent family Test updates
2016/07/13 17:08:22.380437 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:22.447193 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:22.460857 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:22.464918 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:22.473861 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:22.475868 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:22.477640 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:22.491694 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:22.502931 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:22.513748 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:22.518634 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:22.535001 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:22.538571 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:22.543338 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:22.545155 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:22.556719 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:22.671230 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:22.676133 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:22.677285 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:22.720118 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:23.062667 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:23.089944 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:23.102678 INFO Start env monitor service.
2016/07/13 17:08:23.102181 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:23.120768 INFO Configure routes
2016/07/13 17:08:23.129639 INFO Gateway:None
2016/07/13 17:08:23.137087 INFO Routes:None
2016/07/13 17:08:23.153507 INFO Checking for agent family Test updates
2016/07/13 17:08:23.166021 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:23.214892 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:23.226747 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:23.241805 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:23.260478 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:23.281062 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:23.297542 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:23.311880 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:23.325597 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:23.336594 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:23.350936 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:23.366611 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:23.376730 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:23.392365 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:23.401889 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:23.412972 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:23.468398 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:23.512282 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:23.535350 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:23.556142 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:23.872827 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:23.898366 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:23.910665 INFO Start env monitor service.
2016/07/13 17:08:23.910156 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:23.917967 INFO Configure routes
2016/07/13 17:08:23.934350 INFO Gateway:None
2016/07/13 17:08:23.940358 INFO Routes:None
2016/07/13 17:08:23.955267 INFO Checking for agent family Test updates
2016/07/13 17:08:23.965374 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:24.014087 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:24.024226 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:24.038005 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:24.048005 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:24.060269 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:24.075785 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:24.085725 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:24.105973 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:24.116596 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:24.129973 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:24.153601 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:24.163916 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:24.179486 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:24.189392 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:24.200856 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:24.253308 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:24.257751 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:24.259005 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:24.300801 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:24.594689 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:24.611077 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:24.623003 INFO Start env monitor service.
2016/07/13 17:08:24.622590 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:24.630357 INFO Configure routes
2016/07/13 17:08:24.644496 INFO Gateway:None
2016/07/13 17:08:24.650533 INFO Routes:None
2016/07/13 17:08:24.661371 INFO Checking for agent family Test updates
2016/07/13 17:08:24.670163 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:24.713716 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:24.724061 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:24.738570 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:24.748783 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:24.762225 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:24.779150 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:24.789941 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:24.804309 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:24.814366 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:24.828414 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:24.845148 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:24.856241 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:24.872751 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:24.882343 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:24.893729 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:24.946472 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:24.949020 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:24.949469 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:25.000107 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:25.325422 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:25.354466 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:25.376228 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:25.376987 INFO Start env monitor service.
2016/07/13 17:08:25.393857 INFO Configure routes
2016/07/13 17:08:25.401321 INFO Gateway:None
2016/07/13 17:08:25.406316 INFO Routes:None
2016/07/13 17:08:25.428909 INFO Checking for agent family Test updates
2016/07/13 17:08:25.437865 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:25.490074 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:25.494039 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:25.495666 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:25.505548 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:25.507342 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:25.509272 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:25.518613 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:25.529950 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:25.531617 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:25.533388 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:25.543227 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:25.552956 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:25.554213 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:25.555822 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:25.564277 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:25.661233 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:25.665709 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:25.668489 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:25.708122 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:26.010050 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:26.025969 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:26.039163 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:26.039857 INFO Start env monitor service.
2016/07/13 17:08:26.055563 INFO Configure routes
2016/07/13 17:08:26.062943 INFO Gateway:None
2016/07/13 17:08:26.067482 INFO Routes:None
2016/07/13 17:08:26.085504 INFO Checking for agent family Test updates
2016/07/13 17:08:26.089342 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:26.127742 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:26.131606 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:26.133318 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:26.135016 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:26.136908 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:26.138496 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:26.145185 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:26.146747 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:26.148457 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:26.150265 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:26.156888 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:26.158582 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:26.161523 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:26.171334 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:26.174023 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:26.297190 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:26.301479 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:26.302630 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:26.348110 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:26.611096 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:26.645899 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:26.659962 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:26.660695 INFO Start env monitor service.
2016/07/13 17:08:26.681616 INFO Configure routes
2016/07/13 17:08:26.688067 INFO Gateway:None
2016/07/13 17:08:26.696180 INFO Routes:None
2016/07/13 17:08:26.720086 INFO Checking for agent family Test updates
2016/07/13 17:08:26.728702 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:26.767986 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:26.771816 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:26.773444 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:26.778469 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:26.780407 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:26.785876 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:26.788006 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:26.795651 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:26.807868 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:26.809602 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:26.811438 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:26.814694 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:26.821667 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:26.823266 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:26.826265 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:26.958434 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:26.980759 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:26.998089 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:27.026593 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:27.371625 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:27.389267 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:27.409599 INFO Start env monitor service.
2016/07/13 17:08:27.408979 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:27.417052 INFO Configure routes
2016/07/13 17:08:27.431692 INFO Gateway:None
2016/07/13 17:08:27.440122 INFO Routes:None
2016/07/13 17:08:27.456165 INFO Checking for agent family Test updates
2016/07/13 17:08:27.464784 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:27.507617 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:27.518134 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:27.533211 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:27.544631 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:27.556706 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:27.572246 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:27.582146 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:27.595782 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:27.606187 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:27.621114 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:27.637114 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:27.647934 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:27.665323 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:27.675803 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:27.686985 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:27.732140 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:27.736414 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:27.737599 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:27.777746 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:28.063376 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:28.091640 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:28.106084 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:28.106876 INFO Start env monitor service.
2016/07/13 17:08:28.125474 INFO Configure routes
2016/07/13 17:08:28.133395 INFO Gateway:None
2016/07/13 17:08:28.136495 INFO Routes:None
2016/07/13 17:08:28.154338 INFO Checking for agent family Test updates
2016/07/13 17:08:28.164489 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:28.206145 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:28.209980 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:28.211570 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:28.213487 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:28.227965 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:28.235582 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:28.236946 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:28.238379 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:28.239922 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:28.247765 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:28.249302 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:28.251059 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:28.259128 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:28.262239 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:28.270685 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:28.377386 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:28.381594 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:28.382586 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:28.420102 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:28.737361 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:28.764614 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:28.775831 INFO Start env monitor service.
2016/07/13 17:08:28.775419 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:28.782866 INFO Configure routes
2016/07/13 17:08:28.796378 INFO Gateway:None
2016/07/13 17:08:28.801914 INFO Routes:None
2016/07/13 17:08:28.811874 INFO Checking for agent family Test updates
2016/07/13 17:08:28.820192 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:28.867350 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:28.877139 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:28.890430 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:28.899898 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:28.911583 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:28.926866 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:28.937393 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:28.950660 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:28.960036 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:28.971698 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:28.987109 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:28.997675 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:29.013517 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:29.023435 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:29.035427 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:29.082719 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:29.086942 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:29.089727 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:29.132150 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:29.429758 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:29.446503 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:29.455656 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:29.468931 INFO Start env monitor service.
2016/07/13 17:08:29.476813 INFO Configure routes
2016/07/13 17:08:29.483019 INFO Gateway:None
2016/07/13 17:08:29.488580 INFO Routes:None
2016/07/13 17:08:29.498159 INFO Checking for agent family Test updates
2016/07/13 17:08:29.506566 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:29.553213 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:29.563338 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:29.576955 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:29.586403 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:29.598085 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:29.613466 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:29.623208 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:29.636602 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:29.646913 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:29.658556 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:29.673663 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:29.683525 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:29.698301 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:29.707543 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:29.718347 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:29.764137 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:29.768504 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:29.780034 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:29.816108 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:30.099138 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:30.115475 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:30.129022 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:30.129776 INFO Start env monitor service.
2016/07/13 17:08:30.147169 INFO Configure routes
2016/07/13 17:08:30.154849 INFO Gateway:None
2016/07/13 17:08:30.165703 INFO Routes:None
2016/07/13 17:08:30.182106 INFO Checking for agent family Test updates
2016/07/13 17:08:30.191902 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:30.238534 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:30.242457 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:30.243988 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
2016/07/13 17:08:30.245624 INFO Agent WALinuxAgent-2.1.5.4 was previously downloaded - skipping download
2016/07/13 17:08:30.247389 INFO Agent WALinuxAgent-2.1.5.4 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.4/HandlerManifest.json
2016/07/13 17:08:30.257769 INFO Instantiating Agent WALinuxAgent-2.1.5.5 from package
2016/07/13 17:08:30.259229 INFO Agent WALinuxAgent-2.1.5.5 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:30.267368 INFO Ensuring Agent WALinuxAgent-2.1.5.5 is downloaded
2016/07/13 17:08:30.270568 INFO Agent WALinuxAgent-2.1.5.5 was previously downloaded - skipping download
2016/07/13 17:08:30.280087 INFO Agent WALinuxAgent-2.1.5.5 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.5.5/HandlerManifest.json
2016/07/13 17:08:30.281507 INFO Instantiating Agent WALinuxAgent-2.1.5.6 from package
2016/07/13 17:08:30.283152 INFO Agent WALinuxAgent-2.1.5.6 error state: Last Failure: 1468429700.2734947, Total Failures: 1, Fatal: True
2016/07/13 17:08:30.290922 INFO Ensuring Agent WALinuxAgent-2.1.5.6 is downloaded
2016/07/13 17:08:30.294040 INFO Agent WALinuxAgent-2.1.5.6 is blacklisted - skipping download
2016/07/13 17:08:30.296987 INFO Agent WALinuxAgent-2.1.5.rc6 discovered WALinuxAgent-2.1.5.5 as an update and will exit
2016/07/13 17:08:30.412728 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:30.417260 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers' returned code: 0
2016/07/13 17:08:30.418371 INFO Installed Agent WALinuxAgent-2.1.5.rc6 is the most current agent
2016/07/13 17:08:30.460099 INFO Agent WALinuxAgent-2.1.5.rc6 launched with command 'python3 -u /usr/sbin/waagent -run-exthandlers'
2016/07/13 17:08:30.747794 INFO Agent WALinuxAgent-2.1.5.rc6 is running as the goal state agent
2016/07/13 17:08:30.769584 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:30.781105 INFO Start env monitor service.
2016/07/13 17:08:30.780693 INFO Event: name=WALA, op=HeartBeat, message=
2016/07/13 17:08:30.788185 INFO Configure routes
2016/07/13 17:08:30.803017 INFO Gateway:None
2016/07/13 17:08:30.808497 INFO Routes:None
2016/07/13 17:08:30.820408 INFO Checking for agent family Test updates
2016/07/13 17:08:30.829696 INFO Wire server endpoint:168.63.129.16
2016/07/13 17:08:30.878817 INFO Instantiating Agent WALinuxAgent-2.1.5.4 from package
2016/07/13 17:08:30.889227 INFO Agent WALinuxAgent-2.1.5.4 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/07/13 17:08:30.903395 INFO Ensuring Agent WALinuxAgent-2.1.5.4 is downloaded
...
keeps going
```
</details> | Azure/WALinuxAgent | diff --git a/tests/ga/test_update.py b/tests/ga/test_update.py
index 49006606..74804fb9 100644
--- a/tests/ga/test_update.py
+++ b/tests/ga/test_update.py
@@ -627,7 +627,6 @@ class TestUpdate(UpdateTestCase):
if versions is None or len(versions) <= 0:
versions = [latest_version]
- self.update_handler.protocol_util = Mock(return_value=ProtocolMock)
etag = self.update_handler.last_etag if self.update_handler.last_etag is not None else 42
if protocol is None:
protocol = ProtocolMock(etag=etag, versions=versions)
@@ -641,7 +640,7 @@ class TestUpdate(UpdateTestCase):
self.assertTrue(self._test_ensure_latest_agent())
return
- def test_ensure_latest_agent_ignores_old_agents(self):
+ def test_ensure_latest_agent_includes_old_agents(self):
self.prepare_agents()
old_count = FlexibleVersion(AGENT_VERSION).version[-1]
@@ -651,7 +650,7 @@ class TestUpdate(UpdateTestCase):
all_count = len(self.agent_versions())
self.assertTrue(self._test_ensure_latest_agent(versions=self.agent_versions()))
- self.assertEqual(all_count - old_count, len(self.update_handler.agents))
+ self.assertEqual(all_count, len(self.update_handler.agents))
return
def test_ensure_lastest_agent_purges_old_agents(self):
@@ -829,7 +828,7 @@ class TestUpdate(UpdateTestCase):
kept_agents = self.update_handler.agents[1::2]
purged_agents = self.update_handler.agents[::2]
- # Reload and assert only the kept agents remain on disk
+ # Reload and assert only the kept agents remain on disk
self.update_handler.agents = kept_agents
self.update_handler._purge_agents()
self.update_handler._load_agents()
@@ -850,14 +849,19 @@ class TestUpdate(UpdateTestCase):
self.assertTrue(os.path.exists(agent_path + ".zip"))
return
- def _test_run_latest(self, return_value=0, side_effect=None, child_calls=1):
- mock_child = Mock()
- mock_child.wait = Mock(return_value=return_value, side_effect=side_effect)
+ def _test_run_latest(self, mock_child=None, mock_time=None):
+ if mock_child is None:
+ mock_child = ChildMock()
+ if mock_time is None:
+ mock_time = TimeMock()
+
with patch('subprocess.Popen', return_value=mock_child) as mock_popen:
- self.update_handler.run_latest()
- self.assertEqual(child_calls, mock_popen.call_count)
+ with patch('time.time', side_effect=mock_time.time):
+ with patch('time.sleep', return_value=mock_time.sleep):
+ self.update_handler.run_latest()
+ self.assertEqual(1, mock_popen.call_count)
- return mock_popen.call_args
+ return mock_popen.call_args
def test_run_latest(self):
self.prepare_agents()
@@ -873,6 +877,31 @@ class TestUpdate(UpdateTestCase):
self.assertEqual(agent.get_agent_dir(), kwargs['cwd'])
return
+ def test_run_latest_polls_and_waits_for_success(self):
+ mock_child = ChildMock(return_value=None)
+ mock_time = TimeMock(time_increment=CHILD_HEALTH_INTERVAL/3)
+ self._test_run_latest(mock_child=mock_child, mock_time=mock_time)
+ self.assertEqual(2, mock_child.poll.call_count)
+ self.assertEqual(1, mock_child.wait.call_count)
+ return
+
+ def test_run_latest_polling_stops_at_success(self):
+ mock_child = ChildMock(return_value=0)
+ mock_time = TimeMock(time_increment=CHILD_HEALTH_INTERVAL/3)
+ self._test_run_latest(mock_child=mock_child, mock_time=mock_time)
+ self.assertEqual(1, mock_child.poll.call_count)
+ self.assertEqual(0, mock_child.wait.call_count)
+ return
+
+ def test_run_latest_polling_stops_at_failure(self):
+ mock_child = ChildMock(return_value=42)
+ mock_time = TimeMock()
+ self._test_run_latest(mock_child=mock_child, mock_time=mock_time)
+ self.assertEqual(1, mock_child.poll.call_count)
+ self.assertEqual(0, mock_child.wait.call_count)
+ self.assertEqual(2, mock_time.time_call_count)
+ return
+
def test_run_latest_defaults_to_current(self):
self.assertEqual(None, self.update_handler.get_latest_agent())
@@ -894,7 +923,7 @@ class TestUpdate(UpdateTestCase):
saved_stdout, sys.stdout = sys.stdout, stdout
saved_stderr, sys.stderr = sys.stderr, stderr
try:
- self._test_run_latest(side_effect=faux_logger)
+ self._test_run_latest(mock_child=ChildMock(side_effect=faux_logger))
finally:
sys.stdout = saved_stdout
sys.stderr = saved_stderr
@@ -916,23 +945,7 @@ class TestUpdate(UpdateTestCase):
self.assertEqual(0.0, latest_agent.error.last_failure)
self.assertEqual(0, latest_agent.error.failure_count)
- self._test_run_latest(return_value=1)
-
- self.assertTrue(latest_agent.is_available)
- self.assertNotEqual(0.0, latest_agent.error.last_failure)
- self.assertEqual(1, latest_agent.error.failure_count)
- return
-
- def test_run_latest_missing_code_marks_failures(self):
- # logger.add_logger_appender(logger.AppenderType.STDOUT)
- self.prepare_agents()
-
- latest_agent = self.update_handler.get_latest_agent()
- self.assertTrue(latest_agent.is_available)
- self.assertEqual(0.0, latest_agent.error.last_failure)
- self.assertEqual(0, latest_agent.error.failure_count)
-
- self._test_run_latest(return_value=None)
+ self._test_run_latest(mock_child=ChildMock(return_value=1))
self.assertTrue(latest_agent.is_available)
self.assertNotEqual(0.0, latest_agent.error.last_failure)
@@ -948,7 +961,7 @@ class TestUpdate(UpdateTestCase):
self.assertEqual(0.0, latest_agent.error.last_failure)
self.assertEqual(0, latest_agent.error.failure_count)
- self._test_run_latest(side_effect=Exception("Force blacklisting"))
+ self._test_run_latest(mock_child=ChildMock(side_effect=Exception("Force blacklisting")))
self.assertFalse(latest_agent.is_available)
self.assertTrue(latest_agent.error.is_blacklisted)
@@ -1016,11 +1029,18 @@ class TestUpdate(UpdateTestCase):
self._test_run(invocations=0, calls=[], enable_updates=True)
return
- def test_set_agents(self):
+ def test_set_agents_sets_agents(self):
self.prepare_agents()
self.update_handler._set_agents([GuestAgent(path=path) for path in self.agent_dirs()])
+ self.assertTrue(len(self.update_handler.agents) > 0)
self.assertEqual(len(self.agent_dirs()), len(self.update_handler.agents))
+ return
+
+ def test_set_agents_sorts_agents(self):
+ self.prepare_agents()
+
+ self.update_handler._set_agents([GuestAgent(path=path) for path in self.agent_dirs()])
v = FlexibleVersion("100000")
for a in self.update_handler.agents:
@@ -1029,6 +1049,15 @@ class TestUpdate(UpdateTestCase):
return
+class ChildMock(Mock):
+ def __init__(self, return_value=0, side_effect=None):
+ Mock.__init__(self, return_value=return_value, side_effect=side_effect)
+
+ self.poll = Mock(return_value=return_value, side_effect=side_effect)
+ self.wait = Mock(return_value=return_value, side_effect=side_effect)
+ return
+
+
class ProtocolMock(object):
def __init__(self, family="TestAgent", etag=42, versions=None):
self.family = family
@@ -1085,5 +1114,22 @@ class ResponseMock(Mock):
return self.response
+class TimeMock(Mock):
+ def __init__(self, time_increment=1):
+ Mock.__init__(self)
+ self.next_time = time.time()
+ self.time_call_count = 0
+ self.time_increment = time_increment
+
+ self.sleep = Mock(return_value=0)
+ return
+
+ def time(self):
+ self.time_call_count += 1
+ current_time = self.next_time
+ self.next_time += self.time_increment
+ return current_time
+
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_git_commit_hash",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pyasn1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@9930df6600a061bcc5618b427ffa23cff5943f46#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_includes_old_agents",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_failure",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_success",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polls_and_waits_for_success"
] | [] | [
"tests/ga/test_update.py::TestGuestAgentError::test_clear",
"tests/ga/test_update.py::TestGuestAgentError::test_creation",
"tests/ga/test_update.py::TestGuestAgentError::test_load_preserves_error_state",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure_permanent",
"tests/ga/test_update.py::TestGuestAgentError::test_save",
"tests/ga/test_update.py::TestGuestAgentError::test_str",
"tests/ga/test_update.py::TestGuestAgent::test_clear_error",
"tests/ga/test_update.py::TestGuestAgent::test_creation",
"tests/ga/test_update.py::TestGuestAgent::test_download",
"tests/ga/test_update.py::TestGuestAgent::test_download_fail",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_download_skips_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_download_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_load_manifest_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_unpack_fails",
"tests/ga/test_update.py::TestGuestAgent::test_is_available",
"tests/ga/test_update.py::TestGuestAgent::test_is_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_is_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_load_error",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_empty",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_malformed",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_missing",
"tests/ga/test_update.py::TestGuestAgent::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgent::test_unpack",
"tests/ga/test_update.py::TestGuestAgent::test_unpack_fail",
"tests/ga/test_update.py::TestUpdate::test_creation",
"tests/ga/test_update.py::TestUpdate::test_ensure_lastest_agent_purges_old_agents",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_returns_true_on_first_use",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_too_frequent",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_when_no_new_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_etag_matches",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_no_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_updates_are_disabled",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_sorts",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_ignores_installed_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_raises_exception_for_restarting_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_resets_with_new_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_for_long_restarts",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_too_few_restarts",
"tests/ga/test_update.py::TestUpdate::test_filter_blacklisted_agents",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_no_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skip_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skips_unavailable",
"tests/ga/test_update.py::TestUpdate::test_load_agents",
"tests/ga/test_update.py::TestUpdate::test_load_agents_does_not_reload",
"tests/ga/test_update.py::TestUpdate::test_load_agents_sorts",
"tests/ga/test_update.py::TestUpdate::test_purge_agents",
"tests/ga/test_update.py::TestUpdate::test_run",
"tests/ga/test_update.py::TestUpdate::test_run_keeps_running",
"tests/ga/test_update.py::TestUpdate::test_run_latest",
"tests/ga/test_update.py::TestUpdate::test_run_latest_captures_signals",
"tests/ga/test_update.py::TestUpdate::test_run_latest_creates_only_one_signal_handler",
"tests/ga/test_update.py::TestUpdate::test_run_latest_defaults_to_current",
"tests/ga/test_update.py::TestUpdate::test_run_latest_exception_blacklists",
"tests/ga/test_update.py::TestUpdate::test_run_latest_forwards_output",
"tests/ga/test_update.py::TestUpdate::test_run_latest_nonzero_code_marks_failures",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_update_available",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sets_agents",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sorts_agents"
] | [] | Apache License 2.0 | 629 | 1,115 | [
"azurelinuxagent/common/version.py",
"azurelinuxagent/ga/update.py"
] |
ovh__python-ovh-33 | ae2981b26fce2641a9bae5af68a3d5043fdd8b46 | 2016-07-15 09:31:15 | 6d4e840baecd36a568a0bf7049f99fe9fee97db0 | ncrocfer: Yep, it's ok for me :
```python
In [1]: import ovh
In [2]: ovh.Client().get('/hosting/web/ncrocfer.ovh/ovhConfig', historical=False)
Out[2]: [1779827]
```
Thanks :+1:
| diff --git a/ovh/client.py b/ovh/client.py
index fe989cf..82db162 100644
--- a/ovh/client.py
+++ b/ovh/client.py
@@ -289,14 +289,24 @@ class Client(object):
can be prefixed with an underscore. For example, ``from`` argument of
``POST /email/domain/{domain}/redirection`` may be replaced by ``_from``
+ This function also handles Python booleans which should be serialized
+ using solely lowercase to be recognized by the API.
+
:param dict kwargs: input kwargs
:return dict: filtered kawrgs
"""
arguments = {}
for k, v in kwargs.items():
+ # Handle Python keywork collision
if k[0] == '_' and k[1:] in keyword.kwlist:
k = k[1:]
+
+ # Handle Booleans
+ if isinstance(v, bool):
+ v = str(v).lower()
+
+ # Commit
arguments[k] = v
return arguments
| API raises BadParameterError for boolean arguments
Hello,
When an API call requires a boolean argument, we send it as `True` or `False` (valid Python booleans) but the API raises `BadParametersError` because it waits for a string (`'true'` or `'false'`) :
```python
In [1]: import ovh
In [2]: client = ovh.Client()
In [3]: client.get('/hosting/web/ncrocfer.ovh/ovhConfig', historical=False)
---------------------------------------------------------------------------
BadParametersError Traceback (most recent call last)
<ipython-input-3-b9140291291d> in <module>()
----> 1 client.get('/hosting/web/ncrocfer.ovh/ovhConfig', historical=False)
/home/ncrocfer/.virtualenvs/ovh/lib/python3.4/site-packages/ovh/client.py in get(self, _target, _need_auth, **kwargs)
317 _target = '%s?%s' % (_target, query_string)
318
--> 319 return self.call('GET', _target, None, _need_auth)
320
321 def put(self, _target, _need_auth=True, **kwargs):
/home/ncrocfer/.virtualenvs/ovh/lib/python3.4/site-packages/ovh/client.py in call(self, method, path, data, need_auth)
446 raise ResourceNotFoundError(json_result.get('message'))
447 elif status == 400:
--> 448 raise BadParametersError(json_result.get('message'))
449 elif status == 409:
450 raise ResourceConflictError(json_result.get('message'))
BadParametersError: [historical] Given data (False) is not valid for type boolean
In [4]: client.get('/hosting/web/ncrocfer.ovh/ovhConfig', historical='false')
Out[4]: [1779827]
```
Is it possible to translate automatically the boolean argument to be API compatible ?
Thanks. | ovh/python-ovh | diff --git a/tests/test_client.py b/tests/test_client.py
index ddea110..66b300a 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -185,6 +185,12 @@ class testClient(unittest.TestCase):
self.assertEqual(m_call.return_value, api.get(FAKE_URL+'?query=string', param="test"))
m_call.assert_called_once_with('GET', FAKE_URL+'?query=string¶m=test', None, True)
+ # boolean arguments
+ m_call.reset_mock()
+ api = Client(ENDPOINT, APPLICATION_KEY, APPLICATION_SECRET, CONSUMER_KEY)
+ self.assertEqual(m_call.return_value, api.get(FAKE_URL+'?query=string', checkbox=True))
+ m_call.assert_called_once_with('GET', FAKE_URL+'?query=string&checkbox=true', None, True)
+
# keyword calling convention
m_call.reset_mock()
api = Client(ENDPOINT, APPLICATION_KEY, APPLICATION_SECRET, CONSUMER_KEY)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==3.7.1
coveralls==0.4.2
docopt==0.6.2
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==1.0.1
nose==1.3.3
ordereddict==1.0
-e git+https://github.com/ovh/python-ovh.git@ae2981b26fce2641a9bae5af68a3d5043fdd8b46#egg=ovh
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
Sphinx==1.2.2
tomli==2.2.1
urllib3==2.3.0
yanc==0.2.4
| name: python-ovh
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==3.7.1
- coveralls==0.4.2
- docopt==0.6.2
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==1.0.1
- nose==1.3.3
- ordereddict==1.0
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- sphinx==1.2.2
- tomli==2.2.1
- urllib3==2.3.0
- yanc==0.2.4
prefix: /opt/conda/envs/python-ovh
| [
"tests/test_client.py::testClient::test_get"
] | [
"tests/test_client.py::testClient::test_endpoints",
"tests/test_client.py::testClient::test_init_from_custom_config"
] | [
"tests/test_client.py::testClient::test__canonicalize_kwargs",
"tests/test_client.py::testClient::test_call_no_sign",
"tests/test_client.py::testClient::test_call_signature",
"tests/test_client.py::testClient::test_delete",
"tests/test_client.py::testClient::test_init",
"tests/test_client.py::testClient::test_init_from_config",
"tests/test_client.py::testClient::test_new_consumer_key_request",
"tests/test_client.py::testClient::test_post",
"tests/test_client.py::testClient::test_put",
"tests/test_client.py::testClient::test_request_consumerkey",
"tests/test_client.py::testClient::test_time_delta"
] | [] | BSD License | 633 | 251 | [
"ovh/client.py"
] |
cwacek__python-jsonschema-objects-47 | ad1ce37fe82cdfa75a8193566c7c7cdcae07b09c | 2016-07-16 18:15:46 | 03be1567ef25edc27fe36675444bd14da93b0f15 | diff --git a/python_jsonschema_objects/classbuilder.py b/python_jsonschema_objects/classbuilder.py
index c01fdcc..3a5ca41 100644
--- a/python_jsonschema_objects/classbuilder.py
+++ b/python_jsonschema_objects/classbuilder.py
@@ -145,7 +145,7 @@ class ProtocolBase(collections.MutableMapping):
else: # We got nothing
raise validators.ValidationError(
"Unable to instantiate any valid types: \n"
- "\n".join("{0}: {1}".format(k, e) for k, e in validation_errors)
+ "".join("{0}: {1}\n".format(k, e) for k, e in validation_errors)
)
return obj
@@ -314,7 +314,7 @@ class TypeProxy(object):
else: # We got nothing
raise validators.ValidationError(
"Unable to instantiate any valid types: \n"
- "\n".join("{0}: {1}".format(k, e) for k, e in validation_errors)
+ "".join("{0}: {1}\n".format(k, e) for k, e in validation_errors)
)
@@ -474,6 +474,18 @@ class ClassBuilder(object):
**clsdata_copy)
return self.resolved[uri]
+ elif isinstance(clsdata.get('type'), list):
+ types = []
+ for i, item_detail in enumerate(clsdata['type']):
+ subdata = {k: v for k, v in six.iteritems(clsdata) if k != 'type'}
+ subdata['type'] = item_detail
+ types.append(self._build_literal(
+ uri + "_%s" % i,
+ subdata))
+
+ self.resolved[uri] = TypeProxy(types)
+ return self.resolved[uri]
+
elif (clsdata.get('type', None) == 'object' or
clsdata.get('properties', None) is not None or
clsdata.get('additionalProperties', False)):
@@ -770,6 +782,10 @@ def make_property(prop, info, desc=""):
val = info['type'](**util.coerce_for_expansion(val))
val.validate()
+
+ elif isinstance(info['type'], TypeProxy):
+ val = info['type'](val)
+
elif info['type'] is None:
# This is the null value
if val is not None:
| Properties with multiple types are not parsed correctly
http://json-schema.org/latest/json-schema-validation.html#anchor79
Example property
"claimed_by": {
"id": "claimed",
"type": ["string", "null"],
"description": "Robots Only. The human agent that has claimed this robot.",
"required": false
},
Traceback (most recent call last):
File "/home/idanforth/fetch/src/sandbox/fetchcore/test/unit/test_scheduler.py", line 58, in setUp
agent_ns = agent_builder.build_classes()
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/__init__.py", line 83, in build_classes
builder.construct(nm, self.schema)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 288, in construct
ret = self._construct(uri, *args, **kw)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 362, in _construct
parent)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 498, in _build_object
typ = self.construct(uri, detail)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 288, in construct
ret = self._construct(uri, *args, **kw)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 380, in _construct
"no type and no reference".format(clsdata))
NotImplementedError: Unable to parse schema object '{'raw_name': u'claimed_by', u'required': False, u'type': [u'string'
, u'null'], u'id': u'claimed', u'description': u'Robots Only. The human agent that has claimed this robot.'}' with no t
ype and no reference
| cwacek/python-jsonschema-objects | diff --git a/test/test_regression_17.py b/test/test_regression_17.py
new file mode 100644
index 0000000..c13e942
--- /dev/null
+++ b/test/test_regression_17.py
@@ -0,0 +1,33 @@
+import pytest
+
+import python_jsonschema_objects as pjo
+
+
[email protected]
+def test_class():
+ schema = {
+ 'title': 'Example',
+ 'properties': {
+ "claimed_by": {
+ "id": "claimed",
+ "type": ["string", "integer", "null"],
+ "description": "Robots Only. The human agent that has claimed this robot.",
+ "required": False
+ },
+ }
+ }
+
+ builder = pjo.ObjectBuilder(schema)
+ ns = builder.build_classes()
+ return ns
+
+
[email protected]('value', [
+ "Hi", 4, None])
+def test_properties_can_have_multiple_types(test_class, value):
+ test_class.Example(claimed_by=value)
+
[email protected]('value', [2.4])
+def test_multiply_typed_properties_still_validate(test_class, value):
+ with pytest.raises(pjo.ValidationError):
+ test_class.Example(claimed_by=value)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
inflection==0.2.0
iniconfig==1.1.1
jsonschema==2.3.0
Markdown==2.4
packaging==21.3
pandocfilters==1.2
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
-e git+https://github.com/cwacek/python-jsonschema-objects.git@ad1ce37fe82cdfa75a8193566c7c7cdcae07b09c#egg=python_jsonschema_objects
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-jsonschema-objects
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- inflection==0.2.0
- iniconfig==1.1.1
- jsonschema==2.3.0
- markdown==2.4
- packaging==21.3
- pandocfilters==1.2
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-jsonschema-objects
| [
"test/test_regression_17.py::test_properties_can_have_multiple_types[Hi]",
"test/test_regression_17.py::test_properties_can_have_multiple_types[4]",
"test/test_regression_17.py::test_properties_can_have_multiple_types[None]",
"test/test_regression_17.py::test_multiply_typed_properties_still_validate[2.4]"
] | [] | [] | [] | MIT License | 638 | 549 | [
"python_jsonschema_objects/classbuilder.py"
] |
|
dpkp__kafka-python-762 | 3666b66a21776d620f68d2f7ff2fed1bc18b94e5 | 2016-07-16 20:07:03 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/kafka/client.py b/kafka/client.py
index 891ae03..8a34cc4 100644
--- a/kafka/client.py
+++ b/kafka/client.py
@@ -137,7 +137,7 @@ class SimpleClient(object):
kafka.errors.check_error(resp)
# Otherwise return the BrokerMetadata
- return BrokerMetadata(resp.nodeId, resp.host, resp.port)
+ return BrokerMetadata(resp.nodeId, resp.host, resp.port, None)
def _next_id(self):
"""Generate a new correlation id"""
@@ -525,7 +525,7 @@ class SimpleClient(object):
log.debug('Updating broker metadata: %s', resp.brokers)
log.debug('Updating topic metadata: %s', [topic for _, topic, _ in resp.topics])
- self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port))
+ self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port, None))
for nodeId, host, port in resp.brokers])
for error, topic, partitions in resp.topics:
@@ -577,7 +577,7 @@ class SimpleClient(object):
# (not sure how this could happen. server could be in bad state)
else:
self.topics_to_brokers[topic_part] = BrokerMetadata(
- leader, None, None
+ leader, None, None, None
)
def send_metadata_request(self, payloads=[], fail_on_error=True,
diff --git a/kafka/client_async.py b/kafka/client_async.py
index 6fa9434..e064d51 100644
--- a/kafka/client_async.py
+++ b/kafka/client_async.py
@@ -178,7 +178,11 @@ class KafkaClient(object):
time.sleep(next_at - now)
self._last_bootstrap = time.time()
- metadata_request = MetadataRequest[0]([])
+ if self.config['api_version'] is None or self.config['api_version'] < (0, 10):
+ metadata_request = MetadataRequest[0]([])
+ else:
+ metadata_request = MetadataRequest[1](None)
+
for host, port, afi in hosts:
log.debug("Attempting to bootstrap via node at %s:%s", host, port)
cb = functools.partial(self._conn_state_change, 'bootstrap')
@@ -643,10 +647,17 @@ class KafkaClient(object):
topics = list(self._topics)
if self.cluster.need_all_topic_metadata:
- topics = []
+ if self.config['api_version'] < (0, 10):
+ topics = []
+ else:
+ topics = None
if self._can_send_request(node_id):
- request = MetadataRequest[0](topics)
+ if self.config['api_version'] < (0, 10):
+ api_version = 0
+ else:
+ api_version = 1
+ request = MetadataRequest[api_version](topics)
log.debug("Sending metadata request %s to node %s", request, node_id)
future = self.send(node_id, request)
future.add_callback(self.cluster.update_metadata)
diff --git a/kafka/cluster.py b/kafka/cluster.py
index 9aabec1..694e115 100644
--- a/kafka/cluster.py
+++ b/kafka/cluster.py
@@ -34,6 +34,8 @@ class ClusterMetadata(object):
self._lock = threading.Lock()
self.need_all_topic_metadata = False
self.unauthorized_topics = set()
+ self.internal_topics = set()
+ self.controller = None
self.config = copy.copy(self.DEFAULT_CONFIG)
for key in self.config:
@@ -150,13 +152,23 @@ class ClusterMetadata(object):
self._future = Future()
return self._future
- def topics(self):
+ def topics(self, exclude_internal_topics=True):
"""Get set of known topics.
+ Arguments:
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to
+ True the only way to receive records from an internal topic is
+ subscribing to it. Default True
+
Returns:
set: {topic (str), ...}
"""
- return set(self._partitions.keys())
+ topics = set(self._partitions.keys())
+ if exclude_internal_topics:
+ return topics - self.internal_topics
+ else:
+ return topics
def failed_update(self, exception):
"""Update cluster state given a failed MetadataRequest."""
@@ -180,23 +192,41 @@ class ClusterMetadata(object):
# In the common case where we ask for a single topic and get back an
# error, we should fail the future
if len(metadata.topics) == 1 and metadata.topics[0][0] != 0:
- error_code, topic, _ = metadata.topics[0]
+ error_code, topic = metadata.topics[0][:2]
error = Errors.for_code(error_code)(topic)
return self.failed_update(error)
if not metadata.brokers:
log.warning("No broker metadata found in MetadataResponse")
- for node_id, host, port in metadata.brokers:
+ for broker in metadata.brokers:
+ if metadata.API_VERSION == 0:
+ node_id, host, port = broker
+ rack = None
+ else:
+ node_id, host, port, rack = broker
self._brokers.update({
- node_id: BrokerMetadata(node_id, host, port)
+ node_id: BrokerMetadata(node_id, host, port, rack)
})
+ if metadata.API_VERSION == 0:
+ self.controller = None
+ else:
+ self.controller = self._brokers.get(metadata.controller_id)
+
_new_partitions = {}
_new_broker_partitions = collections.defaultdict(set)
_new_unauthorized_topics = set()
+ _new_internal_topics = set()
- for error_code, topic, partitions in metadata.topics:
+ for topic_data in metadata.topics:
+ if metadata.API_VERSION == 0:
+ error_code, topic, partitions = topic_data
+ is_internal = False
+ else:
+ error_code, topic, is_internal, partitions = topic_data
+ if is_internal:
+ _new_internal_topics.add(topic)
error_type = Errors.for_code(error_code)
if error_type is Errors.NoError:
_new_partitions[topic] = {}
@@ -226,6 +256,7 @@ class ClusterMetadata(object):
self._partitions = _new_partitions
self._broker_partitions = _new_broker_partitions
self.unauthorized_topics = _new_unauthorized_topics
+ self.internal_topics = _new_internal_topics
f = None
if self._future:
f = self._future
@@ -272,7 +303,8 @@ class ClusterMetadata(object):
coordinator = BrokerMetadata(
response.coordinator_id,
response.host,
- response.port)
+ response.port,
+ None)
# Assume that group coordinators are just brokers
# (this is true now, but could diverge in future)
@@ -281,12 +313,14 @@ class ClusterMetadata(object):
# If this happens, either brokers have moved without
# changing IDs, or our assumption above is wrong
- elif coordinator != self._brokers[node_id]:
- log.error("GroupCoordinator metadata conflicts with existing"
- " broker metadata. Coordinator: %s, Broker: %s",
- coordinator, self._brokers[node_id])
- self._groups[group] = node_id
- return False
+ else:
+ node = self._brokers[node_id]
+ if coordinator.host != node.host or coordinator.port != node.port:
+ log.error("GroupCoordinator metadata conflicts with existing"
+ " broker metadata. Coordinator: %s, Broker: %s",
+ coordinator, node)
+ self._groups[group] = node_id
+ return False
log.info("Group coordinator for %s is %s", group, coordinator)
self._groups[group] = node_id
diff --git a/kafka/conn.py b/kafka/conn.py
index 38829c6..6028867 100644
--- a/kafka/conn.py
+++ b/kafka/conn.py
@@ -547,6 +547,7 @@ class BrokerConnection(object):
Returns: version tuple, i.e. (0, 10), (0, 9), (0, 8, 2), ...
"""
+
# Monkeypatch the connection request timeout
# Generally this timeout should not get triggered
# but in case it does, we want it to be reasonably short
@@ -574,11 +575,11 @@ class BrokerConnection(object):
log.addFilter(log_filter)
test_cases = [
- ((0, 10), ApiVersionRequest[0]()),
- ((0, 9), ListGroupsRequest[0]()),
- ((0, 8, 2), GroupCoordinatorRequest[0]('kafka-python-default-group')),
- ((0, 8, 1), OffsetFetchRequest[0]('kafka-python-default-group', [])),
- ((0, 8, 0), MetadataRequest[0]([])),
+ ('0.10', ApiVersionRequest[0]()),
+ ('0.9', ListGroupsRequest[0]()),
+ ('0.8.2', GroupCoordinatorRequest[0]('kafka-python-default-group')),
+ ('0.8.1', OffsetFetchRequest[0]('kafka-python-default-group', [])),
+ ('0.8.0', MetadataRequest[0]([])),
]
def connect():
@@ -614,9 +615,9 @@ class BrokerConnection(object):
self._sock.setblocking(False)
if f.succeeded():
- log.info('Broker version identifed as %s', '.'.join(map(str, version)))
- log.info('Set configuration api_version=%s to skip auto'
- ' check_version requests on startup', version)
+ log.info('Broker version identifed as %s', version)
+ log.info("Set configuration api_version='%s' to skip auto"
+ " check_version requests on startup", version)
break
# Only enable strict checking to verify that we understand failure
@@ -633,7 +634,7 @@ class BrokerConnection(object):
# requests (bug...). In this case we expect to see a correlation
# id mismatch
elif (isinstance(f.exception, Errors.CorrelationIdError) and
- version == (0, 10)):
+ version == '0.10'):
pass
elif six.PY2:
assert isinstance(f.exception.args[0], socket.error)
@@ -647,7 +648,7 @@ class BrokerConnection(object):
log.removeFilter(log_filter)
self.config['request_timeout_ms'] = stashed_request_timeout_ms
- return version
+ return tuple(map(int, version.split('.')))
def __repr__(self):
return "<BrokerConnection host=%s/%s port=%d>" % (self.hostname, self.host,
diff --git a/kafka/protocol/metadata.py b/kafka/protocol/metadata.py
index 8063dda..2711abb 100644
--- a/kafka/protocol/metadata.py
+++ b/kafka/protocol/metadata.py
@@ -1,5 +1,5 @@
from .struct import Struct
-from .types import Array, Int16, Int32, Schema, String
+from .types import Array, Boolean, Int16, Int32, Schema, String
class MetadataResponse_v0(Struct):
@@ -22,14 +22,46 @@ class MetadataResponse_v0(Struct):
)
+class MetadataResponse_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ SCHEMA = Schema(
+ ('brokers', Array(
+ ('node_id', Int32),
+ ('host', String('utf-8')),
+ ('port', Int32),
+ ('rack', String('utf-8')))),
+ ('controller_id', Int32),
+ ('topics', Array(
+ ('error_code', Int16),
+ ('topic', String('utf-8')),
+ ('is_internal', Boolean),
+ ('partitions', Array(
+ ('error_code', Int16),
+ ('partition', Int32),
+ ('leader', Int32),
+ ('replicas', Array(Int32)),
+ ('isr', Array(Int32))))))
+ )
+
+
class MetadataRequest_v0(Struct):
API_KEY = 3
API_VERSION = 0
RESPONSE_TYPE = MetadataResponse_v0
SCHEMA = Schema(
- ('topics', Array(String('utf-8')))
+ ('topics', Array(String('utf-8'))) # Empty Array (len 0) for all topics
+ )
+
+
+class MetadataRequest_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ RESPONSE_TYPE = MetadataResponse_v1
+ SCHEMA = Schema(
+ ('topics', Array(String('utf-8'))) # Null Array (len -1) for all topics
)
-MetadataRequest = [MetadataRequest_v0]
-MetadataResponse = [MetadataResponse_v0]
+MetadataRequest = [MetadataRequest_v0, MetadataRequest_v1]
+MetadataResponse = [MetadataResponse_v0, MetadataResponse_v1]
diff --git a/kafka/protocol/types.py b/kafka/protocol/types.py
index 18aaca1..da10326 100644
--- a/kafka/protocol/types.py
+++ b/kafka/protocol/types.py
@@ -99,6 +99,16 @@ class Bytes(AbstractType):
return value
+class Boolean(AbstractType):
+ @classmethod
+ def encode(cls, value):
+ return _pack('>?', value)
+
+ @classmethod
+ def decode(cls, data):
+ return _unpack('>?', data.read(1))
+
+
class Schema(AbstractType):
def __init__(self, *fields):
if fields:
@@ -145,6 +155,8 @@ class Array(AbstractType):
raise ValueError('Array instantiated with no array_of type')
def encode(self, items):
+ if items is None:
+ return Int32.encode(-1)
return b''.join(
[Int32.encode(len(items))] +
[self.array_of.encode(item) for item in items]
@@ -152,7 +164,11 @@ class Array(AbstractType):
def decode(self, data):
length = Int32.decode(data)
+ if length == -1:
+ return None
return [self.array_of.decode(data) for _ in range(length)]
def repr(self, list_of_items):
+ if list_of_items is None:
+ return 'NULL'
return '[' + ', '.join([self.array_of.repr(item) for item in list_of_items]) + ']'
diff --git a/kafka/structs.py b/kafka/structs.py
index 5902930..3188516 100644
--- a/kafka/structs.py
+++ b/kafka/structs.py
@@ -58,7 +58,7 @@ TopicPartition = namedtuple("TopicPartition",
["topic", "partition"])
BrokerMetadata = namedtuple("BrokerMetadata",
- ["nodeId", "host", "port"])
+ ["nodeId", "host", "port", "rack"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
| KAFKA-3306: MetadataRequest v1
Related to KIP-4 | dpkp/kafka-python | diff --git a/test/test_client.py b/test/test_client.py
index 660af61..79ac8be 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -1,6 +1,7 @@
import socket
from mock import ANY, MagicMock, patch
+from operator import itemgetter
import six
from . import unittest
@@ -117,9 +118,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_1', [
@@ -137,7 +139,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 2, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# client loads metadata at init
client = SimpleClient(hosts=['broker_1:4567'])
@@ -167,9 +169,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -179,7 +182,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -197,9 +200,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -209,7 +213,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -230,14 +234,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', [])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -249,7 +254,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 0, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# calling _get_leader_for_partition (from any broker aware request)
# will try loading metadata again for the same topic
@@ -267,15 +272,16 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', []),
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_unknown', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -294,9 +300,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -304,7 +311,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
self.assertDictEqual(
@@ -330,7 +337,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 1, 1, [1, 0], [1, 0])
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
self.assertEqual(brokers[0], client._get_leader_for_partition('topic_noleader', 0))
self.assertEqual(brokers[1], client._get_leader_for_partition('topic_noleader', 1))
@@ -340,9 +347,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -350,7 +358,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -368,14 +376,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_doesnt_exist', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
diff --git a/test/test_client_async.py b/test/test_client_async.py
index dfe11ea..aa91704 100644
--- a/test/test_client_async.py
+++ b/test/test_client_async.py
@@ -53,8 +53,8 @@ def test_bootstrap_success(conn):
conn.connect.assert_called_with()
conn.send.assert_called_once_with(MetadataRequest[0]([]))
assert cli._bootstrap_fails == 0
- assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12),
- BrokerMetadata(1, 'bar', 34)])
+ assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12, None),
+ BrokerMetadata(1, 'bar', 34, None)])
def test_bootstrap_failure(conn):
conn.state = ConnectionStates.DISCONNECTED
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 7
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/dpkp/kafka-python.git@3666b66a21776d620f68d2f7ff2fed1bc18b94e5#egg=kafka_python
lz4tools==1.3.1.2
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
tomli==1.2.3
typing_extensions==4.1.1
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- cramjam==2.5.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- lz4tools==1.3.1.2
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_client.py::TestSimpleClient::test_ensure_topic_exists",
"test/test_client.py::TestSimpleClient::test_get_leader_exceptions_when_noleader",
"test/test_client.py::TestSimpleClient::test_get_leader_for_partitions_reloads_metadata",
"test/test_client.py::TestSimpleClient::test_get_leader_for_unassigned_partitions",
"test/test_client.py::TestSimpleClient::test_has_metadata_for_topic",
"test/test_client.py::TestSimpleClient::test_load_metadata",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_noleader",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_topic_unknown",
"test/test_client_async.py::test_bootstrap_success"
] | [] | [
"test/test_client.py::TestSimpleClient::test_correlation_rollover",
"test/test_client.py::TestSimpleClient::test_init_with_csv",
"test/test_client.py::TestSimpleClient::test_init_with_list",
"test/test_client.py::TestSimpleClient::test_init_with_unicode_csv",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request_fail",
"test/test_client_async.py::test_bootstrap_servers[None-expected_hosts0]",
"test/test_client_async.py::test_bootstrap_servers[foobar:1234-expected_hosts1]",
"test/test_client_async.py::test_bootstrap_servers[fizzbuzz-expected_hosts2]",
"test/test_client_async.py::test_bootstrap_servers[foo:12,bar:34-expected_hosts3]",
"test/test_client_async.py::test_bootstrap_servers[bootstrap4-expected_hosts4]",
"test/test_client_async.py::test_bootstrap_failure",
"test/test_client_async.py::test_can_connect",
"test/test_client_async.py::test_maybe_connect",
"test/test_client_async.py::test_conn_state_change",
"test/test_client_async.py::test_ready",
"test/test_client_async.py::test_is_ready",
"test/test_client_async.py::test_close",
"test/test_client_async.py::test_is_disconnected",
"test/test_client_async.py::test_send",
"test/test_client_async.py::test_poll",
"test/test_client_async.py::test__poll",
"test/test_client_async.py::test_in_flight_request_count",
"test/test_client_async.py::test_least_loaded_node",
"test/test_client_async.py::test_set_topics",
"test/test_client_async.py::test_maybe_refresh_metadata_ttl",
"test/test_client_async.py::test_maybe_refresh_metadata_backoff",
"test/test_client_async.py::test_maybe_refresh_metadata_in_progress",
"test/test_client_async.py::test_maybe_refresh_metadata_update",
"test/test_client_async.py::test_maybe_refresh_metadata_failure",
"test/test_client_async.py::test_schedule",
"test/test_client_async.py::test_unschedule"
] | [] | Apache License 2.0 | 639 | 3,643 | [
"kafka/client.py",
"kafka/client_async.py",
"kafka/cluster.py",
"kafka/conn.py",
"kafka/protocol/metadata.py",
"kafka/protocol/types.py",
"kafka/structs.py"
] |
|
dpkp__kafka-python-766 | 506d023978e7273bd323c0750e3f77af259d257b | 2016-07-17 06:19:25 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/kafka/consumer/group.py b/kafka/consumer/group.py
index 9ebfe02..db0022d 100644
--- a/kafka/consumer/group.py
+++ b/kafka/consumer/group.py
@@ -176,6 +176,10 @@ class KafkaConsumer(six.Iterator):
selector (selectors.BaseSelector): Provide a specific selector
implementation to use for I/O multiplexing.
Default: selectors.DefaultSelector
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to True
+ the only way to receive records from an internal topic is
+ subscribing to it. Requires 0.10+ Default: True
Note:
Configuration parameters are described in more detail at
@@ -222,6 +226,7 @@ class KafkaConsumer(six.Iterator):
'metrics_num_samples': 2,
'metrics_sample_window_ms': 30000,
'selector': selectors.DefaultSelector,
+ 'exclude_internal_topics': True,
}
def __init__(self, *topics, **configs):
diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py
index 083a36a..a18329c 100644
--- a/kafka/coordinator/consumer.py
+++ b/kafka/coordinator/consumer.py
@@ -36,6 +36,7 @@ class ConsumerCoordinator(BaseCoordinator):
'heartbeat_interval_ms': 3000,
'retry_backoff_ms': 100,
'api_version': (0, 9),
+ 'exclude_internal_topics': True,
}
def __init__(self, client, subscription, metrics, metric_group_prefix,
@@ -70,6 +71,10 @@ class ConsumerCoordinator(BaseCoordinator):
using Kafka's group managementment facilities. Default: 30000
retry_backoff_ms (int): Milliseconds to backoff when retrying on
errors. Default: 100.
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to
+ True the only way to receive records from an internal topic is
+ subscribing to it. Requires 0.10+. Default: True
"""
super(ConsumerCoordinator, self).__init__(client, **configs)
self.config = copy.copy(self.DEFAULT_CONFIG)
@@ -81,7 +86,8 @@ class ConsumerCoordinator(BaseCoordinator):
assert self.config['assignors'], 'Coordinator requires assignors'
self._subscription = subscription
- self._partitions_per_topic = {}
+ self._metadata_snapshot = {}
+ self._assignment_snapshot = None
self._cluster = client.cluster
self._cluster.request_update()
self._cluster.add_listener(WeakMethod(self._handle_metadata_update))
@@ -131,13 +137,12 @@ class ConsumerCoordinator(BaseCoordinator):
def _handle_metadata_update(self, cluster):
# if we encounter any unauthorized topics, raise an exception
- # TODO
- #if self._cluster.unauthorized_topics:
- # raise TopicAuthorizationError(self._cluster.unauthorized_topics)
+ if cluster.unauthorized_topics:
+ raise Errors.TopicAuthorizationFailedError(cluster.unauthorized_topics)
if self._subscription.subscribed_pattern:
topics = []
- for topic in cluster.topics():
+ for topic in cluster.topics(self.config['exclude_internal_topics']):
if self._subscription.subscribed_pattern.match(topic):
topics.append(topic)
@@ -146,7 +151,7 @@ class ConsumerCoordinator(BaseCoordinator):
# check if there are any changes to the metadata which should trigger
# a rebalance
- if self._subscription_metadata_changed():
+ if self._subscription_metadata_changed(cluster):
if (self.config['api_version'] >= (0, 9)
and self.config['group_id'] is not None):
@@ -159,20 +164,20 @@ class ConsumerCoordinator(BaseCoordinator):
self._subscription.assign_from_subscribed([
TopicPartition(topic, partition)
for topic in self._subscription.subscription
- for partition in self._partitions_per_topic[topic]
+ for partition in self._metadata_snapshot[topic]
])
- def _subscription_metadata_changed(self):
+ def _subscription_metadata_changed(self, cluster):
if not self._subscription.partitions_auto_assigned():
return False
- old_partitions_per_topic = self._partitions_per_topic
- self._partitions_per_topic = {}
+ metadata_snapshot = {}
for topic in self._subscription.group_subscription():
- partitions = self._cluster.partitions_for_topic(topic) or []
- self._partitions_per_topic[topic] = set(partitions)
+ partitions = cluster.partitions_for_topic(topic) or []
+ metadata_snapshot[topic] = set(partitions)
- if self._partitions_per_topic != old_partitions_per_topic:
+ if self._metadata_snapshot != metadata_snapshot:
+ self._metadata_snapshot = metadata_snapshot
return True
return False
@@ -184,8 +189,15 @@ class ConsumerCoordinator(BaseCoordinator):
def _on_join_complete(self, generation, member_id, protocol,
member_assignment_bytes):
+ # if we were the assignor, then we need to make sure that there have
+ # been no metadata updates since the rebalance begin. Otherwise, we
+ # won't rebalance again until the next metadata change
+ if self._assignment_snapshot and self._assignment_snapshot != self._metadata_snapshot:
+ self._subscription.mark_for_reassignment()
+ return
+
assignor = self._lookup_assignor(protocol)
- assert assignor, 'invalid assignment protocol: %s' % protocol
+ assert assignor, 'Coordinator selected invalid assignment protocol: %s' % protocol
assignment = ConsumerProtocol.ASSIGNMENT.decode(member_assignment_bytes)
@@ -235,6 +247,11 @@ class ConsumerCoordinator(BaseCoordinator):
self._subscription.group_subscribe(all_subscribed_topics)
self._client.set_topics(self._subscription.group_subscription())
+ # keep track of the metadata used for assignment so that we can check
+ # after rebalance completion whether anything has changed
+ self._cluster.request_update()
+ self._assignment_snapshot = self._metadata_snapshot
+
log.debug("Performing assignment for group %s using strategy %s"
" with subscriptions %s", self.group_id, assignor.name,
member_metadata)
@@ -264,6 +281,7 @@ class ConsumerCoordinator(BaseCoordinator):
" for group %s failed on_partitions_revoked",
self._subscription.listener, self.group_id)
+ self._assignment_snapshot = None
self._subscription.mark_for_reassignment()
def need_rejoin(self):
| KAFKA-3117: handle metadata updates during consumer rebalance | dpkp/kafka-python | diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index 3435292..280fa70 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -85,7 +85,7 @@ def test_pattern_subscription(coordinator, api_version):
coordinator.config['api_version'] = api_version
coordinator._subscription.subscribe(pattern='foo')
assert coordinator._subscription.subscription == set([])
- assert coordinator._subscription_metadata_changed() is False
+ assert coordinator._subscription_metadata_changed({}) is False
assert coordinator._subscription.needs_partition_assignment is False
cluster = coordinator._client.cluster
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/dpkp/kafka-python.git@506d023978e7273bd323c0750e3f77af259d257b#egg=kafka_python
lz4tools==1.3.1.2
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
xxhash==3.2.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- cramjam==2.5.0
- lz4tools==1.3.1.2
- mock==5.2.0
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- tomli==1.2.3
- xxhash==3.2.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_coordinator.py::test_pattern_subscription[api_version0]",
"test/test_coordinator.py::test_pattern_subscription[api_version1]",
"test/test_coordinator.py::test_pattern_subscription[api_version2]",
"test/test_coordinator.py::test_pattern_subscription[api_version3]"
] | [] | [
"test/test_coordinator.py::test_init",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]",
"test/test_coordinator.py::test_protocol_type",
"test/test_coordinator.py::test_group_protocols",
"test/test_coordinator.py::test_lookup_assignor",
"test/test_coordinator.py::test_join_complete",
"test/test_coordinator.py::test_subscription_listener",
"test/test_coordinator.py::test_subscription_listener_failure",
"test/test_coordinator.py::test_perform_assignment",
"test/test_coordinator.py::test_on_join_prepare",
"test/test_coordinator.py::test_need_rejoin",
"test/test_coordinator.py::test_refresh_committed_offsets_if_needed",
"test/test_coordinator.py::test_fetch_committed_offsets",
"test/test_coordinator.py::test_close",
"test/test_coordinator.py::test_commit_offsets_async",
"test/test_coordinator.py::test_commit_offsets_sync",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-False-None-False-False-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-error6-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-foobar-True-error7-True-True-False-True]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version8-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version9-None-True-None-False-False-True-False]",
"test/test_coordinator.py::test_send_offset_commit_request_fail",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]",
"test/test_coordinator.py::test_send_offset_commit_request_failure",
"test/test_coordinator.py::test_send_offset_commit_request_success",
"test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response7-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response8-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response9-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_send_offset_fetch_request_fail",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_failure",
"test/test_coordinator.py::test_send_offset_fetch_request_success",
"test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False-False]",
"test/test_coordinator.py::test_heartbeat"
] | [] | Apache License 2.0 | 641 | 1,546 | [
"kafka/consumer/group.py",
"kafka/coordinator/consumer.py"
] |
|
simphony__simphony-remote-117 | 61ec23ffe44463cbc41f6fa54b4247963093ed79 | 2016-07-18 09:48:08 | 61ec23ffe44463cbc41f6fa54b4247963093ed79 | diff --git a/remoteappmanager/restresources/container.py b/remoteappmanager/restresources/container.py
index 864e1fd..3191c6b 100644
--- a/remoteappmanager/restresources/container.py
+++ b/remoteappmanager/restresources/container.py
@@ -202,7 +202,7 @@ class Container(Resource):
server_url = "http://{}:{}{}/".format(
container.ip,
container.port,
- url_path_join(self.application.command_line_config.base_url,
+ url_path_join(self.application.command_line_config.base_urlpath,
container.urlpath))
yield _wait_for_http_server_2xx(
| remoteapprest app start error due to unfound `base_url`
```
(simremote)kit@kit-virtual-machine:jupyterhub$ remoteapprest app start 629b1d86d69bfb4b400dfee204f5e3a0
[W 160718 10:37:39 container:154] HOME (None) is not available for kit
[I 160718 10:37:39 container_manager:218] Got container image: simphonyproject/simphonic-mayavi
[I 2016-07-18 10:37:39.576 JupyterHub log:100] 200 GET /hub/api/authorizations/cookie/jupyter-hub-token-kit/[secret] ([email protected]) 8.24ms
[E 160718 10:37:39 container_manager:417] Container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' is gone
[I 160718 10:37:39 container_manager:260] Mounting these volumes:
/appdata/image_name/common -> /appdata
[I 160718 10:37:39 container_manager:293] Created container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' (id: 2dcdd6621f8f736322969b25449d5c75566cf617632cb9481f865aeaf919b863) from image simphonyproject/simphonic-mayavi
[I 160718 10:37:39 container_manager:330] Started container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' (id: 2dcdd6621f8f736322969b25449d5c75566cf617632cb9481f865aeaf919b863). Exported port reachable at 127.0.0.1:32769
[E 160718 10:37:39 rest_handler:71] Internal error during POST operation on containers
Traceback (most recent call last):
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/rest/rest_handler.py", line 60, in post
resource_id = yield res_handler.create(data)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1021, in run
yielded = self.gen.throw(*exc_info)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/restresources/container.py", line 38, in create
yield self._wait_for_container_ready(container)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 285, in wrapper
yielded = next(result)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/restresources/container.py", line 205, in _wait_for_container_ready
url_path_join(self.application.command_line_config.base_url,
AttributeError: 'CommandLineConfig' object has no attribute 'base_url'
``` | simphony/simphony-remote | diff --git a/tests/restmodel/test_container.py b/tests/restmodel/test_container.py
index f9862e9..ba87141 100644
--- a/tests/restmodel/test_container.py
+++ b/tests/restmodel/test_container.py
@@ -29,7 +29,7 @@ class TestContainer(AsyncHTTPTestCase):
app.file_config = Mock()
app.file_config.network_timeout = 5
app.command_line_config = Mock()
- app.command_line_config.base_url = "http://127.0.0.1:8000/"
+ app.command_line_config.base_urlpath = "/"
app.reverse_proxy = Mock()
app.reverse_proxy.add_container = mock_coro_factory()
app.reverse_proxy.remove_container = mock_coro_factory()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@61ec23ffe44463cbc41f6fa54b4247963093ed79#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/restmodel/test_container.py::TestContainer::test_create"
] | [] | [
"tests/restmodel/test_container.py::TestContainer::test_delete",
"tests/restmodel/test_container.py::TestContainer::test_items",
"tests/restmodel/test_container.py::TestContainer::test_retrieve"
] | [] | BSD 3-Clause "New" or "Revised" License | 645 | 152 | [
"remoteappmanager/restresources/container.py"
] |
|
simphony__simphony-remote-123 | ae2c07cdf3906952600c57b4439d57d7ff4b2cc1 | 2016-07-18 14:47:10 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/handlers/home_handler.py b/remoteappmanager/handlers/home_handler.py
index c21824b..7d5ce97 100644
--- a/remoteappmanager/handlers/home_handler.py
+++ b/remoteappmanager/handlers/home_handler.py
@@ -131,8 +131,10 @@ class HomeHandler(BaseHandler):
container_manager = self.application.container_manager
container = yield container_manager.container_from_url_id(url_id)
if not container:
- self.finish("Unable to view the application")
- return
+ self.log.warning("Could not find container for url_id {}".format(
+ url_id
+ ))
+ raise ValueError("Unable to view container for specified url_id")
# make sure the container is actually running and working
yield self._wait_for_container_ready(container)
@@ -158,8 +160,10 @@ class HomeHandler(BaseHandler):
container = yield container_manager.container_from_url_id(url_id)
if not container:
- self.finish("Unable to view the application")
- return
+ self.log.warning("Could not find container for url_id {}".format(
+ url_id
+ ))
+ raise ValueError("Unable to view container for specified url_id")
urlpath = url_path_join(
self.application.command_line_config.base_urlpath,
| HomeHandler should not 'finish' in View and Stop but raise
Let view and stop raise and then the HomeHandler.post will handle the logging.
https://github.com/simphony/simphony-remote/blob/98ee374756694ef1855a9d38d78d3561ec6cc54e/remoteappmanager/handlers/home_handler.py#L155
https://github.com/simphony/simphony-remote/blob/98ee374756694ef1855a9d38d78d3561ec6cc54e/remoteappmanager/handlers/home_handler.py#L132 | simphony/simphony-remote | diff --git a/tests/handlers/test_home_handler.py b/tests/handlers/test_home_handler.py
index 596b8db..7a49d7f 100644
--- a/tests/handlers/test_home_handler.py
+++ b/tests/handlers/test_home_handler.py
@@ -184,3 +184,34 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
self.assertTrue(self._app.reverse_proxy.register.called)
self.assertTrue(redirect.called)
+
+ def test_container_manager_does_not_return_container(self):
+ self._app.container_manager.container_from_url_id = \
+ utils.mock_coro_factory(None)
+ res = self.fetch(
+ "/user/username/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=urllib.parse.urlencode({
+ 'action': 'view',
+ 'url_id': '12345'
+ })
+ )
+
+ self.assertIn("ValueError", str(res.body))
+
+ res = self.fetch(
+ "/user/username/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=urllib.parse.urlencode({
+ 'action': 'stop',
+ 'url_id': '12345'
+ })
+ )
+
+ self.assertIn("ValueError", str(res.body))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@ae2c07cdf3906952600c57b4439d57d7ff4b2cc1#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_container_manager_does_not_return_container"
] | [] | [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_failed_auth",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_home",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_failed_auth",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_start",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_stop",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_view"
] | [] | BSD 3-Clause "New" or "Revised" License | 649 | 311 | [
"remoteappmanager/handlers/home_handler.py"
] |
|
simphony__simphony-remote-134 | af0203df6cb3232a28b50fa1023baf73e234aa6d | 2016-07-20 16:23:49 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/file_config.py b/remoteappmanager/file_config.py
index 2f93478..8966ed8 100644
--- a/remoteappmanager/file_config.py
+++ b/remoteappmanager/file_config.py
@@ -3,7 +3,6 @@ import os
import tornado.options
from docker import tls
from traitlets import HasTraits, Int, Unicode, Bool, Dict
-from traitlets.utils.sentinel import Sentinel
from remoteappmanager import paths
from remoteappmanager.traitlets import set_traits_from_dict
@@ -110,12 +109,7 @@ class FileConfig(HasTraits):
for traitlet_name, traitlet in self.traits().items():
# tornado.OptionParser defines an option with a Python type
# and performs type validation.
- # traitlet.default_value may be a Sentinel value (e.g. Tuple,
- # Dict, Instance), in which case we use the repr
- default_value = traitlet.default_value
-
- if type(default_value) is Sentinel:
- default_value = eval(traitlet.default_value_repr())
+ default_value = getattr(self, traitlet_name)
file_line_parser.define(
traitlet_name,
| FileConfig with tls=True or tls_verify=False only, leads to docker TLSParameterError
On Linux, if my file config for the `remoteappmanager_config.py` ONLY contains the following:
```
$ cat remoteappmanager_config.py
tls = True
```
or
```
$ cat remoteappmanager_config.py
tls_verify = True
```
Then starting remoteappmanager gives this error
```
$ remoteappmanager --user=kit --port=45707 --cookie-name=jupyter-hub-token-kit --base-urlpath=/user/kit --hub-host= --hub-prefix=/hub/ --hub-api-url=http://172.16.253.129:8081/hub/api --ip=127.0.0.1 --proxy-api-url=http://127.0.0.1:8001/api/routes/ --config-file=remoteappmanager_config.py
...
[E 160720 14:33:43 web:1548] Uncaught exception GET /user/kit/ (127.0.0.1)
HTTPServerRequest(protocol='http', host='127.0.0.1:8000', method='GET', uri='/user/kit/', version='HTTP/1.1', remote_ip='127.0.0.1', headers={'X-Forwarded-Proto': 'https', 'Accept-Language': 'en-US,en;q=0.5', 'X-Forwarded-For': '127.0.0.1', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'If-None-Match': '"dfba4089bef9b2e40bbd2a75ba5413bb8edbdd83"', 'Host': '127.0.0.1:8000', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0', 'Referer': 'https://127.0.0.1:8000/hub/login', 'Accept-Encoding': 'gzip, deflate', 'Connection': 'close', 'X-Forwarded-Port': '8000', 'Cookie': 'jupyter-hub-token-kit="2|1:0|10:1469021623|21:jupyter-hub-token-kit|44:M2UzMjRlOGNhNDBjNGQ5ZWEwYjg3Njk0N2U0ODY4MmE=|1ec021fdad25b335a6b11b22ff198ce0860a0742a4d95c46a33a6d659d5f4e2d"'})
Traceback (most recent call last):
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 501, in get
value = obj._trait_values[self.name]
KeyError: 'container_manager'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/web.py", line 1469, in _execute
result = yield result
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1021, in run
yielded = self.gen.throw(*exc_info)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/handlers/home_handler.py", line 21, in get
images_info = yield self._get_images_info()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 285, in wrapper
yielded = next(result)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/handlers/home_handler.py", line 184, in _get_images_info
container_manager = self.application.container_manager
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 529, in __get__
return self.get(obj, cls)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 508, in get
value = self._validate(obj, dynamic_default())
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/application.py", line 81, in _container_manager_default
docker_config=self.file_config.docker_config()
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/file_config.py", line 155, in docker_config
assert_hostname=True,
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/docker/tls.py", line 47, in __init__
'Path to a certificate and key files must be provided'
docker.errors.TLSParameterError: Path to a certificate and key files must be provided through the client_config param. TLS configurations should map the Docker CLI client configurations. See https://docs.docker.com/engine/articles/https/ for API details.
```
| simphony/simphony-remote | diff --git a/tests/test_file_config.py b/tests/test_file_config.py
index 22ce879..484d1b9 100644
--- a/tests/test_file_config.py
+++ b/tests/test_file_config.py
@@ -151,3 +151,15 @@ class TestFileConfig(TempMixin, unittest.TestCase):
config = FileConfig(tls=True)
self.assertNotEqual(config.tls_key, '')
self.assertNotEqual(config.tls_cert, '')
+
+ def test_file_parsing_not_overriding_bug_131(self):
+ docker_config = textwrap.dedent('''
+ tls = True
+ ''')
+ with open(self.config_file, 'w') as fhandle:
+ print(docker_config, file=fhandle)
+
+ config = FileConfig()
+ config.parse_config(self.config_file)
+ self.assertNotEqual(config.tls_key, '')
+ self.assertNotEqual(config.tls_cert, '')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_git_commit_hash"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"apt-get install -y docker.io"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@af0203df6cb3232a28b50fa1023baf73e234aa6d#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/test_file_config.py::TestFileConfig::test_file_parsing_not_overriding_bug_131"
] | [] | [
"tests/test_file_config.py::TestFileConfig::test_initialization_on_local_docker_machine",
"tests/test_file_config.py::TestFileConfig::test_initialization_on_nonlocal_docker_machine",
"tests/test_file_config.py::TestFileConfig::test_initialization_with_default_accounting",
"tests/test_file_config.py::TestFileConfig::test_initialization_with_good_accounting",
"tests/test_file_config.py::TestFileConfig::test_overriding",
"tests/test_file_config.py::TestFileConfig::test_tls_init",
"tests/test_file_config.py::TestFileConfig::test_tls_no_verify"
] | [] | BSD 3-Clause "New" or "Revised" License | 653 | 288 | [
"remoteappmanager/file_config.py"
] |
|
simphony__simphony-remote-141 | 3ceb572ce98207102f14d1b11491fc2edd85aaac | 2016-07-21 11:17:02 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/db/orm.py b/remoteappmanager/db/orm.py
index 7111d0b..f877d5a 100644
--- a/remoteappmanager/db/orm.py
+++ b/remoteappmanager/db/orm.py
@@ -1,5 +1,6 @@
import contextlib
import hashlib
+import os
from sqlalchemy import (
Column, Integer, Boolean, Unicode, ForeignKey, create_engine, Enum,
@@ -157,6 +158,21 @@ class AppAccounting(ABCAccounting):
def __init__(self, url, **kwargs):
self.db = Database(url, **kwargs)
+ self.check_database_readable()
+
+ def check_database_readable(self):
+ ''' Raise IOError if the database url points to a sqlite database
+ that is not readable
+
+ TODO: may extend for validating databases in other dialects?
+ '''
+ db_url = self.db.url
+
+ if db_url.startswith('sqlite:///'):
+ file_path = os.path.abspath(db_url[10:])
+ if not os.access(file_path, os.R_OK):
+ raise IOError(
+ 'Sqlite database {} is not readable'.format(file_path))
def get_user_by_name(self, user_name):
""" Return an orm.User given a user name. Return None
| Sqlite database file created if it does not exist
Similar to #113, when `remoteappmanager` is started with its default accounting setting and that the default sqlite database does not exist, an empty file `remoteappmanager.db` would be created.
| simphony/simphony-remote | diff --git a/tests/db/test_interfaces.py b/tests/db/test_interfaces.py
index 02ced52..4ed20d0 100644
--- a/tests/db/test_interfaces.py
+++ b/tests/db/test_interfaces.py
@@ -8,7 +8,7 @@ from collections import namedtuple
from remoteappmanager.db.interfaces import (
ABCApplication, ABCApplicationPolicy, ABCAccounting)
-from .abc_test_interfaces import ABCTestDatabaseInterface
+from tests.db.abc_test_interfaces import ABCTestDatabaseInterface
User = namedtuple('User', ('name',))
diff --git a/tests/db/test_orm.py b/tests/db/test_orm.py
index 2869b94..6c1052c 100644
--- a/tests/db/test_orm.py
+++ b/tests/db/test_orm.py
@@ -199,3 +199,12 @@ class TestOrmAppAccounting(TempMixin, ABCTestDatabaseInterface,
self.assertEqual(actual_app, expected_config[0][0])
self.assertEqual(actual_policy, expected_config[0][1])
+
+ def test_no_file_creation_if_sqlite_database_not_exist(self):
+ temp_file_path = os.path.join(self.tempdir, 'some.db')
+
+ with self.assertRaises(IOError):
+ AppAccounting(
+ url="sqlite:///"+temp_file_path)
+
+ self.assertFalse(os.path.exists(temp_file_path))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_issue_reference",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
escapism==1.0.1
exceptiongroup==1.2.2
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
numpy==2.0.2
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
pandas==2.2.3
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@3ceb572ce98207102f14d1b11491fc2edd85aaac#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- escapism==1.0.1
- exceptiongroup==1.2.2
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- numpy==2.0.2
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- pandas==2.2.3
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/db/test_orm.py::TestOrmAppAccounting::test_no_file_creation_if_sqlite_database_not_exist"
] | [
"tests/db/test_orm.py::TestOrm::test_apps_for_user",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user_across_sessions",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user_mapping_id_rest_compliant"
] | [
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_apps_for_user",
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_apps_for_user_mapping_id_rest_compliant",
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_user_by_name",
"tests/db/test_orm.py::TestOrm::test_database_init_and_session",
"tests/db/test_orm.py::TestOrm::test_orm_objects",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_user_by_name"
] | [] | BSD 3-Clause "New" or "Revised" License | 655 | 308 | [
"remoteappmanager/db/orm.py"
] |
|
falconry__falcon-851 | a9f1813bbd85ae58dd3d81aaea53a6db58032c3b | 2016-07-23 20:26:05 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage][cc-pull] is 100% (diff: 100%)
> Merging [#851][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #851 diff @@
====================================
Files 29 29
Lines 1848 1846 -2
Methods 0 0
Messages 0 0
Branches 307 305 -2
====================================
- Hits 1848 1846 -2
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [a9f1813...8e1cb96][cc-compare]
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/falconry/falcon/compare/a9f1813bbd85ae58dd3d81aaea53a6db58032c3b...8e1cb96cb4f92f3abf961e3ac78c0a71edcc833f?src=pr
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/851?src=pr | diff --git a/falcon/http_error.py b/falcon/http_error.py
index 1e9143e..6b7cd1e 100644
--- a/falcon/http_error.py
+++ b/falcon/http_error.py
@@ -40,8 +40,7 @@ class HTTPError(Exception):
returns ``True``, but child classes may override it
in order to return ``False`` when an empty HTTP body is desired.
See also the ``falcon.http_error.NoRepresentation`` mixin.
- title (str): Error title to send to the client. Will be ``None`` if
- the error should result in an HTTP response with an empty body.
+ title (str): Error title to send to the client.
description (str): Description of the error to send to the client.
headers (dict): Extra headers to add to the response.
link (str): An href that the client can provide to the user for
@@ -53,7 +52,8 @@ class HTTPError(Exception):
status (str): HTTP status code and text, such as "400 Bad Request"
Keyword Args:
- title (str): Human-friendly error title (default ``None``).
+ title (str): Human-friendly error title. If not provided, defaults
+ to the HTTP status line as determined by the ``status`` argument.
description (str): Human-friendly description of the error, along with
a helpful suggestion or two (default ``None``).
headers (dict or list): A ``dict`` of header names and values
@@ -97,7 +97,13 @@ class HTTPError(Exception):
def __init__(self, status, title=None, description=None, headers=None,
href=None, href_text=None, code=None):
self.status = status
- self.title = title
+
+ # TODO(kgriffs): HTTP/2 does away with the "reason phrase". Eventually
+ # we'll probably switch over to making everything code-based to more
+ # easily support HTTP/2. When that happens, should we continue to
+ # include the reason phrase in the title?
+ self.title = title or status
+
self.description = description
self.headers = headers
self.code = code
@@ -133,8 +139,7 @@ class HTTPError(Exception):
obj = obj_type()
- if self.title is not None:
- obj['title'] = self.title
+ obj['title'] = self.title
if self.description is not None:
obj['description'] = self.description
@@ -171,8 +176,7 @@ class HTTPError(Exception):
error_element = et.Element('error')
- if self.title is not None:
- et.SubElement(error_element, 'title').text = self.title
+ et.SubElement(error_element, 'title').text = self.title
if self.description is not None:
et.SubElement(error_element, 'description').text = self.description
| Error title should default to HTTP status
When the title is set to `None`, `HTTPError` should default to simply reusing the description passed in with the code in the `status` argument, i.e.:
```py
self.title = status_code[4:]
``` | falconry/falcon | diff --git a/tests/test_httperror.py b/tests/test_httperror.py
index 9cb39b4..14f0eef 100644
--- a/tests/test_httperror.py
+++ b/tests/test_httperror.py
@@ -259,15 +259,15 @@ class TestHTTPError(testing.TestBase):
def test_no_description_json(self):
body = self.simulate_request('/fail', method='PATCH')
self.assertEqual(self.srmock.status, falcon.HTTP_400)
- self.assertEqual(body, [b'{}'])
+ self.assertEqual(body, [b'{\n "title": "400 Bad Request"\n}'])
def test_no_description_xml(self):
body = self.simulate_request('/fail', method='PATCH',
headers={'Accept': 'application/xml'})
self.assertEqual(self.srmock.status, falcon.HTTP_400)
- expected_xml = (b'<?xml version="1.0" encoding="UTF-8"?>'
- b'<error />')
+ expected_xml = (b'<?xml version="1.0" encoding="UTF-8"?><error>'
+ b'<title>400 Bad Request</title></error>')
self.assertEqual(body, [expected_xml])
@@ -550,6 +550,7 @@ class TestHTTPError(testing.TestBase):
self.assertEqual(self.srmock.status, falcon.HTTP_404)
self.assertNotEqual(response, [])
expected_body = {
+ u'title': u'404 Not Found',
u'description': u'Not Found'
}
self.assertEqual(json.loads(response), expected_body)
@@ -590,6 +591,7 @@ class TestHTTPError(testing.TestBase):
self.assertEqual(self.srmock.status, falcon.HTTP_405)
self.assertNotEqual(response, [])
expected_body = {
+ u'title': u'405 Method Not Allowed',
u'description': u'Not Allowed'
}
self.assertEqual(json.loads(response), expected_body)
@@ -777,3 +779,14 @@ class TestHTTPError(testing.TestBase):
needs_title=False)
self._misc_test(falcon.HTTPInternalServerError, falcon.HTTP_500)
self._misc_test(falcon.HTTPBadGateway, falcon.HTTP_502)
+
+ def test_title_default_message_if_none(self):
+ headers = {
+ 'X-Error-Status': falcon.HTTP_503
+ }
+
+ body = self.simulate_request('/fail', headers=headers, decode='utf-8')
+ body_json = json.loads(body)
+
+ self.assertEqual(self.srmock.status, headers['X-Error-Status'])
+ self.assertEqual(body_json['title'], headers['X-Error-Status'])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"requests",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@a9f1813bbd85ae58dd3d81aaea53a6db58032c3b#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_httperror.py::TestHTTPError::test_404_with_body",
"tests/test_httperror.py::TestHTTPError::test_405_with_body",
"tests/test_httperror.py::TestHTTPError::test_no_description_json",
"tests/test_httperror.py::TestHTTPError::test_no_description_xml",
"tests/test_httperror.py::TestHTTPError::test_title_default_message_if_none"
] | [
"tests/test_httperror.py::TestHTTPError::test_custom_new_error_serializer",
"tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer"
] | [
"tests/test_httperror.py::TestHTTPError::test_401",
"tests/test_httperror.py::TestHTTPError::test_404_without_body",
"tests/test_httperror.py::TestHTTPError::test_405_without_body",
"tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers",
"tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers_double_check",
"tests/test_httperror.py::TestHTTPError::test_411",
"tests/test_httperror.py::TestHTTPError::test_413",
"tests/test_httperror.py::TestHTTPError::test_416",
"tests/test_httperror.py::TestHTTPError::test_429",
"tests/test_httperror.py::TestHTTPError::test_429_datetime",
"tests/test_httperror.py::TestHTTPError::test_429_no_retry_after",
"tests/test_httperror.py::TestHTTPError::test_503_datetime_retry_after",
"tests/test_httperror.py::TestHTTPError::test_503_integer_retry_after",
"tests/test_httperror.py::TestHTTPError::test_base_class",
"tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_anything",
"tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_json_or_xml",
"tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer_no_body",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_json",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_1_text_xml",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_2_application_xml",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_3_application_vnd_company_system_project_resource_xml_v_1_1",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_4_application_atom_xml",
"tests/test_httperror.py::TestHTTPError::test_forbidden_1_application_json",
"tests/test_httperror.py::TestHTTPError::test_forbidden_2_application_vnd_company_system_project_resource_json_v_1_1",
"tests/test_httperror.py::TestHTTPError::test_forbidden_3_application_json_patch_json",
"tests/test_httperror.py::TestHTTPError::test_invalid_header",
"tests/test_httperror.py::TestHTTPError::test_invalid_param",
"tests/test_httperror.py::TestHTTPError::test_misc",
"tests/test_httperror.py::TestHTTPError::test_missing_header",
"tests/test_httperror.py::TestHTTPError::test_missing_param",
"tests/test_httperror.py::TestHTTPError::test_temporary_413_datetime_retry_after",
"tests/test_httperror.py::TestHTTPError::test_temporary_413_integer_retry_after",
"tests/test_httperror.py::TestHTTPError::test_unicode_json",
"tests/test_httperror.py::TestHTTPError::test_unicode_xml"
] | [] | Apache License 2.0 | 659 | 667 | [
"falcon/http_error.py"
] |
zalando-stups__senza-278 | 5e02aa336c41af3199acab430a9cf97440d2aac3 | 2016-07-26 07:31:01 | 35b73f49b8cb58e7892908413bdf2a61cfe3058e | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index 6140440..347b515 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -23,34 +23,7 @@ def get_load_balancer_name(stack_name: str, stack_version: str):
return '{}-{}'.format(stack_name[:l], stack_version)
-def component_elastic_load_balancer(definition, configuration, args, info, force, account_info):
- lb_name = configuration["Name"]
-
- # domains pointing to the load balancer
- subdomain = ''
- main_zone = None
- for name, domain in configuration.get('Domains', {}).items():
- name = '{}{}'.format(lb_name, name)
- definition["Resources"][name] = {
- "Type": "AWS::Route53::RecordSet",
- "Properties": {
- "Type": "CNAME",
- "TTL": 20,
- "ResourceRecords": [
- {"Fn::GetAtt": [lb_name, "DNSName"]}
- ],
- "Name": "{0}.{1}".format(domain["Subdomain"], domain["Zone"]),
- "HostedZoneName": "{0}".format(domain["Zone"])
- },
- }
-
- if domain["Type"] == "weighted":
- definition["Resources"][name]["Properties"]['Weight'] = 0
- definition["Resources"][name]["Properties"]['SetIdentifier'] = "{0}-{1}".format(info["StackName"],
- info["StackVersion"])
- subdomain = domain['Subdomain']
- main_zone = domain['Zone'] # type: str
-
+def get_listeners(subdomain, main_zone, configuration):
ssl_cert = configuration.get('SSLCertificateId')
if ACMCertificate.arn_is_acm_certificate(ssl_cert):
@@ -95,6 +68,46 @@ def component_elastic_load_balancer(definition, configuration, args, info, force
'SSL certificate for "{}"'.format(name))
else:
fatal_error('Could not find any SSL certificate')
+ return [
+ {
+ "PolicyNames": [],
+ "SSLCertificateId": ssl_cert,
+ "Protocol": "HTTPS",
+ "InstancePort": configuration["HTTPPort"],
+ "LoadBalancerPort": 443
+ }
+ ]
+
+
+def component_elastic_load_balancer(definition, configuration, args, info, force, account_info):
+ lb_name = configuration["Name"]
+
+ # domains pointing to the load balancer
+ subdomain = ''
+ main_zone = None
+ for name, domain in configuration.get('Domains', {}).items():
+ name = '{}{}'.format(lb_name, name)
+ definition["Resources"][name] = {
+ "Type": "AWS::Route53::RecordSet",
+ "Properties": {
+ "Type": "CNAME",
+ "TTL": 20,
+ "ResourceRecords": [
+ {"Fn::GetAtt": [lb_name, "DNSName"]}
+ ],
+ "Name": "{0}.{1}".format(domain["Subdomain"], domain["Zone"]),
+ "HostedZoneName": "{0}".format(domain["Zone"])
+ },
+ }
+
+ if domain["Type"] == "weighted":
+ definition["Resources"][name]["Properties"]['Weight'] = 0
+ definition["Resources"][name]["Properties"]['SetIdentifier'] = "{0}-{1}".format(info["StackName"],
+ info["StackVersion"])
+ subdomain = domain['Subdomain']
+ main_zone = domain['Zone'] # type: str
+
+ listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration)
health_check_protocol = "HTTP"
allowed_health_check_protocols = ("HTTP", "TCP", "UDP", "SSL")
@@ -157,15 +170,7 @@ def component_elastic_load_balancer(definition, configuration, args, info, force
"Timeout": "5",
"Target": health_check_target
},
- "Listeners": [
- {
- "PolicyNames": [],
- "SSLCertificateId": ssl_cert,
- "Protocol": "HTTPS",
- "InstancePort": configuration["HTTPPort"],
- "LoadBalancerPort": 443
- }
- ],
+ "Listeners": listeners,
"ConnectionDrainingPolicy": {
"Enabled": True,
"Timeout": 60
| Can't Deploy ZMON appliance (without SSL Certificate)
Senza is returning an error, when I'm trying to deploy the ZMON appliance:
```
senza --region=eu-central-1 create zmon-appliance-planetexpress-qa.yaml cd14c1 cd14
Generating Cloud Formation template.. EXCEPTION OCCURRED: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-
Unknown Error: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-.
Please create an issue with the content of /var/folders/cv/08715ldx7qx_76kkn3kmdmdnlc10w4/T/senza-traceback-l_pvs53y
```
Contents of this file are the following:
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 80, in get_by_name
certificate = certificates[0]
IndexError: list index out of range
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/error_handling.py", line 69, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 555, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 638, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 239, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 67, in component_elastic_load_balancer
certificate = IAMServerCertificate.get_by_name(ssl_cert)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 82, in get_by_name
raise error
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 71, in get_by_name
response = client.get_server_certificate(ServerCertificateName=name)
File "/usr/local/lib/python3.5/site-packages/botocore/client.py", line 278, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.5/site-packages/botocore/client.py", line 572, in _make_api_call
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-`
```
The LB-part of zmon-appliance-planetexpress-qa.yaml (autogenerated by Stups' script) looks like this:
```
ZmonApplianceLoadBalancer:
Type: Senza::ElasticLoadBalancer
HTTPPort: 9090
HealthCheckPath: /health
HealthCheckPort: 9090
SecurityGroups:
app-zmon-{{AccountInfo.TeamID}}
Scheme: internal
SSLCertificateId: arn:none # hack to disable SSL
Listeners:
LoadBalancerPort: 9090
Protocol: HTTP
InstancePort: 9090
InstanceProtocol: HTTP
```
(had to remove the '-' signs for Markup to work).
Am I right to assume SSLCertificateId: arn:none is causing the issue?
What can I do? | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 7112492..4b3330f 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -136,6 +136,28 @@ def test_component_load_balancer_idletimeout(monkeypatch):
assert 'HTTPPort' not in result["Resources"]["test_lb"]["Properties"]
+def test_component_load_balancer_http_only(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999",
+ "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
+ "Listeners": [{"Foo": "Bar"}]
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer.resolve_security_groups', mock_string_result)
+
+ result = component_elastic_load_balancer(definition, configuration, args, info, False, MagicMock())
+ assert 'Bar' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["Foo"]
+
+
def test_component_load_balancer_namelength(monkeypatch):
configuration = {
"Name": "test_lb",
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@5e02aa336c41af3199acab430a9cf97440d2aac3#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_load_balancer_http_only"
] | [
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version"
] | [] | Apache License 2.0 | 661 | 1,048 | [
"senza/components/elastic_load_balancer.py"
] |
|
docker__docker-py-1143 | 2d3bda84de39a75e560fc79512143d43e5d61226 | 2016-07-28 01:43:06 | a44d65be370c28abd666a299456b83659dd1a1df | diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 4d218692..1cfc8acc 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -22,8 +22,8 @@ import tarfile
import tempfile
import warnings
from distutils.version import StrictVersion
-from fnmatch import fnmatch
from datetime import datetime
+from fnmatch import fnmatch
import requests
import six
@@ -33,6 +33,10 @@ from .. import errors
from .. import tls
from .types import Ulimit, LogConfig
+if six.PY2:
+ from urllib import splitnport
+else:
+ from urllib.parse import splitnport
DEFAULT_HTTP_HOST = "127.0.0.1"
DEFAULT_UNIX_SOCKET = "http+unix://var/run/docker.sock"
@@ -387,7 +391,6 @@ def parse_repository_tag(repo_name):
# Protocol translation: tcp -> http, unix -> http+unix
def parse_host(addr, is_win32=False, tls=False):
proto = "http+unix"
- host = DEFAULT_HTTP_HOST
port = None
path = ''
@@ -427,32 +430,27 @@ def parse_host(addr, is_win32=False, tls=False):
)
proto = "https" if tls else "http"
- if proto != "http+unix" and ":" in addr:
- host_parts = addr.split(':')
- if len(host_parts) != 2:
- raise errors.DockerException(
- "Invalid bind address format: {0}".format(addr)
- )
- if host_parts[0]:
- host = host_parts[0]
+ if proto in ("http", "https"):
+ address_parts = addr.split('/', 1)
+ host = address_parts[0]
+ if len(address_parts) == 2:
+ path = '/' + address_parts[1]
+ host, port = splitnport(host)
- port = host_parts[1]
- if '/' in port:
- port, path = port.split('/', 1)
- path = '/{0}'.format(path)
- try:
- port = int(port)
- except Exception:
+ if port is None:
raise errors.DockerException(
"Invalid port: {0}".format(addr)
)
- elif proto in ("http", "https") and ':' not in addr:
- raise errors.DockerException(
- "Bind address needs a port: {0}".format(addr))
+ if not host:
+ host = DEFAULT_HTTP_HOST
else:
host = addr
+ if proto in ("http", "https") and port == -1:
+ raise errors.DockerException(
+ "Bind address needs a port: {0}".format(addr))
+
if proto == "http+unix" or proto == 'npipe':
return "{0}://{1}".format(proto, host)
return "{0}://{1}:{2}{3}".format(proto, host, port, path)
| Support IPv6 addresses in DOCKER_HOST
Raised in https://github.com/docker/compose/issues/2879.
See https://github.com/docker/docker/pull/16950 for the Engine implementation. | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 68484fe5..0f7a58c9 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -404,10 +404,18 @@ class ParseHostTest(base.BaseTestCase):
'https://kokia.jp:2375': 'https://kokia.jp:2375',
'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock',
'unix://': 'http+unix://var/run/docker.sock',
+ '12.234.45.127:2375/docker/engine': (
+ 'http://12.234.45.127:2375/docker/engine'
+ ),
'somehost.net:80/service/swarm': (
'http://somehost.net:80/service/swarm'
),
'npipe:////./pipe/docker_engine': 'npipe:////./pipe/docker_engine',
+ '[fd12::82d1]:2375': 'http://[fd12::82d1]:2375',
+ 'https://[fd12:5672::12aa]:1090': 'https://[fd12:5672::12aa]:1090',
+ '[fd12::82d1]:2375/docker/engine': (
+ 'http://[fd12::82d1]:2375/docker/engine'
+ ),
}
for host in invalid_hosts:
@@ -415,7 +423,7 @@ class ParseHostTest(base.BaseTestCase):
parse_host(host, None)
for host, expected in valid_hosts.items():
- self.assertEqual(parse_host(host, None), expected, msg=host)
+ assert parse_host(host, None) == expected
def test_parse_host_empty_value(self):
unix_socket = 'http+unix://var/run/docker.sock'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"coverage",
"pytest-cov",
"flake8"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@2d3bda84de39a75e560fc79512143d43e5d61226#egg=docker_py
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::ParseHostTest::test_parse_host"
] | [] | [
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
] | [] | Apache License 2.0 | 668 | 697 | [
"docker/utils/utils.py"
] |
|
terryyin__lizard-144 | 1933addc0f0d4febb8b2273048f81556c0062d61 | 2016-08-03 13:54:59 | 1933addc0f0d4febb8b2273048f81556c0062d61 | rakhimov: @terryyin This PR is ready to go. | diff --git a/lizard_languages/clike.py b/lizard_languages/clike.py
index 1134e96..a17fb03 100644
--- a/lizard_languages/clike.py
+++ b/lizard_languages/clike.py
@@ -235,7 +235,7 @@ class CLikeStates(CodeStateMachine):
self.context.add_to_long_function_name(token)
def _state_dec_to_imp(self, token):
- if token == 'const' or token == 'noexcept':
+ if token in ('const', 'noexcept', '&', '&&'):
self.context.add_to_long_function_name(" " + token)
elif token == 'throw':
self._state = self._state_throw
| Bug: C++11 ref qualified functions
Lizard misses C++11 ref qualified member functions.
These functions don't appear in the report or the result database.
```cpp
struct A {
void foo() & {};
void foo() const & {};
void foo() && {};
void foo() const && {};
}; | terryyin/lizard | diff --git a/test/testCyclomaticComplexity.py b/test/testCyclomaticComplexity.py
index 346117e..d6efefa 100644
--- a/test/testCyclomaticComplexity.py
+++ b/test/testCyclomaticComplexity.py
@@ -79,3 +79,13 @@ class TestCppCyclomaticComplexity(unittest.TestCase):
""")
self.assertEqual(4, result[0].cyclomatic_complexity)
+ def test_ref_qualifiers(self):
+ """C++11 rvalue ref qualifiers look like AND operator."""
+ result = get_cpp_function_list(
+ "struct A { void foo() && { return bar() && baz(); } };")
+ self.assertEqual(1, len(result))
+ self.assertEqual(2, result[0].cyclomatic_complexity)
+ result = get_cpp_function_list(
+ "struct A { void foo() const && { return bar() && baz(); } };")
+ self.assertEqual(1, len(result))
+ self.assertEqual(2, result[0].cyclomatic_complexity)
diff --git a/test/test_languages/testCAndCPP.py b/test/test_languages/testCAndCPP.py
index 0928b15..b175fcd 100644
--- a/test/test_languages/testCAndCPP.py
+++ b/test/test_languages/testCAndCPP.py
@@ -423,6 +423,7 @@ class Test_c_cpp_lizard(unittest.TestCase):
result = get_cpp_function_list('''int fun(struct a){}''')
self.assertEqual(1, len(result))
+
def test_trailing_return_type(self):
"""C++11 trailing return type for functions."""
result = get_cpp_function_list("auto foo() -> void {}")
@@ -432,6 +433,21 @@ class Test_c_cpp_lizard(unittest.TestCase):
self.assertEqual(1, len(result))
self.assertEqual("foo", result[0].name)
+ def test_ref_qualifiers(self):
+ """C++11 ref qualifiers for member functions."""
+ result = get_cpp_function_list("struct A { void foo() & {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() const & {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() && {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() const && {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+
class Test_Preprocessing(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt",
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
dill==0.3.9
exceptiongroup==1.2.2
iniconfig==2.1.0
isort==6.0.1
-e git+https://github.com/terryyin/lizard.git@1933addc0f0d4febb8b2273048f81556c0062d61#egg=lizard
mccabe==0.7.0
mock==5.2.0
nose==1.3.7
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pylint==3.3.6
pytest==8.3.5
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: lizard
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- dill==0.3.9
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pylint==3.3.6
- pytest==8.3.5
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/lizard
| [
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_ref_qualifiers",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_ref_qualifiers"
] | [] | [
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_and",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_else_if",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_forever_loop",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_no_condition",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_non_r_value_ref_in_body",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_one_condition",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_question_mark",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_r_value_ref_in_body",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_r_value_ref_in_parameter",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_statement_no_curly_brackets",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_typedef",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_sharp_if_and_sharp_elif_counts_in_cc_number",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_two_function_with_non_r_value_ref_in_body",
"test/test_languages/testCAndCPP.py::Test_C_Token_extension::test_connecting_marcro",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_1",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_braket_that_is_not_a_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_class_with_inheritance",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_complicated_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initialization_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initialization_list_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initializer_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_uniform_initialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_destructor_implementation",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_double_nested_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_double_slash_within_string",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_empty",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_followed_with_one_word_is_ok",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_with_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_with_throw",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_declaration_is_not_counted",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_name_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_operator",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_that_returns_function_pointers",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_1_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_content",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_no_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_strang_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_strang_param2",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_global_var_constructor",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_inline_operator",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_less_then_is_not_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_namespace_alias",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_class_middle",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_template_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_unnamed_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_no_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_non_function_initializer_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_non_function_uniform_initialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_not_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_old_style_c_function_has_semicolon",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_in_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_const",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_macro_in_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_only_word_can_be_function_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading_shift",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading_with_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_with_complicated_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_parentheses_before_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_pre_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_struct_in_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_struct_in_return_type",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_stupid_macro_before_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_as_part_of_function_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_as_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class_full_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class_partial_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_function_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_pointer",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_reference_as_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_trailing_return_type",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_two_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_two_simplest_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_typedef_is_not_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_underscore",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_content_macro_should_be_ignored",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_preprocessor_is_not_function",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_preprocessors_should_be_ignored_outside_function_implementation",
"test/test_languages/testCAndCPP.py::Test_Big::test_trouble"
] | [] | MIT License | 679 | 165 | [
"lizard_languages/clike.py"
] |
setokinto__slack-shogi-67 | 3f692f2862e7bc2fb9fbbe8d73310c2696653e4d | 2016-08-06 17:40:57 | f5175be50a09220713ceb5547cd04f80f43f84fb | diff --git a/app/kifu.py b/app/kifu.py
new file mode 100644
index 0000000..a0a0c5d
--- /dev/null
+++ b/app/kifu.py
@@ -0,0 +1,13 @@
+
+
+class Kifu:
+
+ def __init__(self):
+ self.kifu = []
+
+ def add(self, from_x, from_y, to_x, to_y, promote):
+ self.kifu.append((from_x, from_y, to_x, to_y, promote))
+
+ def pop(self):
+ return self.kifu.pop()
+
diff --git a/app/modules/shogi_input.py b/app/modules/shogi_input.py
index 58bea57..7996f02 100644
--- a/app/modules/shogi_input.py
+++ b/app/modules/shogi_input.py
@@ -6,6 +6,7 @@ from app.slack_utils.user import User as UserFinder
from app.modules.shogi import Shogi as ShogiModule
from app.modules.parse_input import ParseInput
from app.validator import BasicUserValidator, AllPassUserValidator
+from app.kifu import Kifu
class UserDifferentException(Exception):
@@ -136,6 +137,13 @@ class ShogiInput:
"_shogi": shogi,
}
+ @staticmethod
+ def matta(channel_id, user_id):
+ shogi = ShogiInput.manager.get_shogi(channel_id)
+ if not shogi.validate(shogi, user_id):
+ raise UserDifferentException()
+ shogi.matta()
+
class Shogi:
@@ -150,9 +158,11 @@ class Shogi:
self.second_user_name = users[1]["name"]
self.id = uuid.uuid4().hex
self._validator = validator
+ self.kifu = Kifu()
def move(self, from_x, from_y, to_x, to_y, promote):
self.shogi.move(from_x, from_y, to_x, to_y, promote)
+ self.kifu.add(from_x, from_y, to_x, to_y, promote)
def drop(self, koma, to_x, to_y):
self.shogi.drop(koma, to_x, to_y)
@@ -172,6 +182,15 @@ class Shogi:
def set_validator(self, validator):
self._validator = validator
+ def matta(self):
+ if len(self.kifu.kifu) == 0:
+ raise KomaCannotMoveException
+ self.kifu.pop()
+ self.shogi = ShogiModule()
+ for kifu in self.kifu.kifu:
+ from_x, from_y, to_x, to_y, promote = kifu
+ self.shogi.move(from_x, from_y, to_x, to_y, promote)
+
@property
def first(self):
return self.shogi.first
diff --git a/app/shogi.py b/app/shogi.py
index c28afea..3173412 100644
--- a/app/shogi.py
+++ b/app/shogi.py
@@ -113,3 +113,19 @@ def resign(channel, message):
message.send(board_str)
ShogiInput.clear(channel.channel_id)
+@respond_to("待った")
+@channel_info
+@should_exist_shogi
+def matta(channel, message):
+ try:
+ ShogiInput.matta(channel.channel_id, channel.own_id)
+ message.send("mattaed")
+ except UserDifferentException:
+ message.reply("You cannot matta because *it's not your turn*")
+ except KomaCannotMoveException:
+ message.reply("You cannot matta because koma not moved")
+ finally:
+ board = ShogiInput.get_shogi_board(channel.channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
+
| 待った に対応する
あえて 待った をありにしたい。 | setokinto/slack-shogi | diff --git a/test/modules/shogi_input_test.py b/test/modules/shogi_input_test.py
index 58dca13..272b547 100644
--- a/test/modules/shogi_input_test.py
+++ b/test/modules/shogi_input_test.py
@@ -4,7 +4,6 @@ from app.modules.shogi_input import ShogiInput, UserDifferentException, KomaCann
from app.modules.shogi import Koma
-
class ShogiTest(unittest.TestCase):
def setUp(self):
@@ -107,3 +106,48 @@ class ShogiTest(unittest.TestCase):
ShogiInput.setAllMode(channel_id)
ShogiInput.move("34歩", channel_id, shogi.first_user_id)
+ def test_matta(self):
+ channel_id = "test_matta"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+ ShogiInput.matta(channel_id, shogi.second_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.empty)
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+
+ def test_matta_for_UserDifferentException(self):
+ channel_id = "test_matta_for_UserDifferentException"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+ with self.assertRaises(UserDifferentException):
+ ShogiInput.matta(channel_id, shogi.first_user_id)
+ ShogiInput.move("34歩", channel_id, shogi.second_user_id)
+ with self.assertRaises(UserDifferentException):
+ ShogiInput.matta(channel_id, shogi.second_user_id)
+
+ def test_matta_for_KomaCannotMoveException(self):
+ channel_id = "test_matta_for_KomaCannotMoveException"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ with self.assertRaises(KomaCannotMoveException):
+ ShogiInput.matta(channel_id, shogi.first_user_id)
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
six==1.17.0
-e git+https://github.com/setokinto/slack-shogi.git@3f692f2862e7bc2fb9fbbe8d73310c2696653e4d#egg=Slack_Shogi
slackbot==1.0.5
slacker==0.14.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
websocket-client==1.6.0
| name: slack-shogi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- idna==3.10
- requests==2.32.3
- six==1.17.0
- slackbot==1.0.5
- slacker==0.14.0
- urllib3==2.3.0
- websocket-client==1.6.0
prefix: /opt/conda/envs/slack-shogi
| [
"test/modules/shogi_input_test.py::ShogiTest::test_matta",
"test/modules/shogi_input_test.py::ShogiTest::test_matta_for_KomaCannotMoveException",
"test/modules/shogi_input_test.py::ShogiTest::test_matta_for_UserDifferentException"
] | [] | [
"test/modules/shogi_input_test.py::ShogiTest::test_clear_for_non_exists_channnel",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_raise_KomaCannotMoveException",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_raise_UserDifferentException",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_work",
"test/modules/shogi_input_test.py::ShogiTest::test_set_any_user_validator",
"test/modules/shogi_input_test.py::ShogiTest::test_shogi_input_is_initable"
] | [] | MIT License | 685 | 906 | [
"app/modules/shogi_input.py",
"app/shogi.py"
] |
|
zalando-stups__senza-301 | 6bd1f85283e4252aa58aa38fd74936dd978dcded | 2016-08-08 12:43:52 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/auto_scaling_group.py b/senza/components/auto_scaling_group.py
index ebffa4b..17d8974 100644
--- a/senza/components/auto_scaling_group.py
+++ b/senza/components/auto_scaling_group.py
@@ -20,11 +20,9 @@ def component_auto_scaling_group(definition, configuration, args, info, force, a
}
}
- if 'BlockDeviceMappings' in configuration:
- definition['Resources'][config_name]['Properties']['BlockDeviceMappings'] = configuration['BlockDeviceMappings']
-
- if "IamInstanceProfile" in configuration:
- definition["Resources"][config_name]["Properties"]["IamInstanceProfile"] = configuration["IamInstanceProfile"]
+ for key in set(["BlockDeviceMappings", "IamInstanceProfile", "SpotPrice"]):
+ if key in configuration:
+ definition['Resources'][config_name]['Properties'][key] = configuration[key]
if 'IamRoles' in configuration:
logical_id = configuration['Name'] + 'InstanceProfile'
| Senza ignores "SpotPrice" property on auto scaling group
```
InstanceType: m4.xlarge
SpotPrice: 0.250
```
According to cloudformation specs this will create an ASG with spot instances. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 7989398..1191bae 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -504,6 +504,33 @@ def test_component_auto_scaling_group_configurable_properties():
assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["EvaluationPeriods"] == "1"
assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["AlarmDescription"] == expected_desc
+def test_component_auto_scaling_group_configurable_properties():
+ definition = {"Resources": {}}
+ configuration = {
+ 'Name': 'Foo',
+ 'InstanceType': 't2.micro',
+ 'Image': 'foo',
+ 'SpotPrice': 0.250
+ }
+
+ args = MagicMock()
+ args.region = "foo"
+
+ info = {
+ 'StackName': 'FooStack',
+ 'StackVersion': 'FooVersion'
+ }
+
+ result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+ assert result["Resources"]["FooConfig"]["Properties"]["SpotPrice"] == 0.250
+
+ del configuration["SpotPrice"]
+
+ result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+ assert "SpotPrice" not in result["Resources"]["FooConfig"]["Properties"]
+
def test_component_auto_scaling_group_metric_type():
definition = {"Resources": {}}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@6bd1f85283e4252aa58aa38fd74936dd978dcded#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties"
] | [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name"
] | [] | Apache License 2.0 | 689 | 237 | [
"senza/components/auto_scaling_group.py"
] |
|
scikit-build__scikit-build-118 | dd9814474b79009b7ac6523b4c14bbedd418f33f | 2016-08-08 21:40:24 | 3484eb1047c9883a33d26838dc207df5526d7e18 | codecov-io: ## [Current coverage](https://codecov.io/gh/scikit-build/scikit-build/pull/118?src=pr) is 64.22% (diff: 54.00%)
> Merging [#118](https://codecov.io/gh/scikit-build/scikit-build/pull/118?src=pr) into [master](https://codecov.io/gh/scikit-build/scikit-build/branch/master?src=pr) will decrease coverage by **2.64%**
```diff
@@ master #118 diff @@
==========================================
Files 17 18 +1
Lines 486 506 +20
Methods 0 0
Messages 0 0
Branches 93 96 +3
==========================================
Hits 325 325
- Misses 124 141 +17
- Partials 37 40 +3
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [93aca7b...dac50f6](https://codecov.io/gh/scikit-build/scikit-build/compare/93aca7b67f854ab2e9dfdcaaf99cdeaea0280413...dac50f68dde49069c66abcab94512547f3ed14f3?src=pr)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.59 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.59) (commit https://github.com/scikit-build/scikit-build/commit/847f829829 by @msmolens)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.63 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.63) (commit https://github.com/scikit-build/scikit-build/commit/331ee1542f by @msmolens)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.70 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.70) (commit https://github.com/scikit-build/scikit-build/commit/2cc9425fc2 by @msmolens) | diff --git a/skbuild/cmaker.py b/skbuild/cmaker.py
index cdefc46..02a8e10 100644
--- a/skbuild/cmaker.py
+++ b/skbuild/cmaker.py
@@ -27,7 +27,7 @@ def pop_arg(arg, a, default=None):
"""Pops an arg(ument) from an argument list a and returns the new list
and the value of the argument if present and a default otherwise.
"""
- parser = argparse.ArgumentParser()
+ parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(arg)
ns, a = parser.parse_known_args(a)
ns = tuple(vars(ns).items())
@@ -365,7 +365,7 @@ class CMaker(object):
"make?").format(CMAKE_BUILD_DIR))
cmd = ["cmake", "--build", source_dir,
- "--target", "install", "--config", config]
+ "--target", "install", "--config", config, "--"]
cmd.extend(clargs)
cmd.extend(
filter(bool,
diff --git a/skbuild/command/egg_info.py b/skbuild/command/egg_info.py
new file mode 100644
index 0000000..3944aa3
--- /dev/null
+++ b/skbuild/command/egg_info.py
@@ -0,0 +1,14 @@
+
+import os
+
+from setuptools.command.egg_info import egg_info as _egg_info
+
+
+class egg_info(_egg_info):
+ def finalize_options(self):
+ if self.egg_base is not None:
+ script_path = os.path.abspath(self.distribution.script_name)
+ script_dir = os.path.dirname(script_path)
+ self.egg_base = os.path.join(script_dir, self.egg_base)
+
+ _egg_info.finalize_options(self)
diff --git a/skbuild/setuptools_wrap.py b/skbuild/setuptools_wrap.py
index 54efdb3..9a45d50 100644
--- a/skbuild/setuptools_wrap.py
+++ b/skbuild/setuptools_wrap.py
@@ -8,7 +8,7 @@ import sys
import argparse
from . import cmaker
-from .command import build, install, clean, bdist, bdist_wheel
+from .command import build, install, clean, bdist, bdist_wheel, egg_info
from .exceptions import SKBuildError
try:
@@ -17,24 +17,42 @@ except ImportError:
from distutils.core import setup as upstream_setup
-def move_arg(arg, a, b, newarg=None, f=lambda x: x, concatenate_value=False):
- """Moves an argument from a list to b list, possibly giving it a new name
- and/or performing a transformation on the value. Returns a and b. The arg
- need not be present in a.
+def create_skbuild_argparser():
+ """Create and return a scikit-build argument parser.
"""
- newarg = newarg or arg
- parser = argparse.ArgumentParser()
- parser.add_argument(arg)
- ns, a = parser.parse_known_args(a)
- ns = tuple(vars(ns).items())
- if len(ns) > 0 and ns[0][1] is not None:
- key, value = ns[0]
- newargs = [newarg, value]
- if concatenate_value:
- b.append("=".join(newargs))
- elif value is not None:
- b.extend(newargs)
- return a, b
+ parser = argparse.ArgumentParser(add_help=False)
+ parser.add_argument(
+ '--build-type', default='Release', metavar='',
+ help='specify the CMake build type (e.g. Debug or Release)')
+ parser.add_argument(
+ '-G', '--generator', metavar='',
+ help='specify the CMake build system generator')
+ parser.add_argument(
+ '-j', metavar='N', type=int, dest='jobs',
+ help='allow N build jobs at once')
+ return parser
+
+
+def parse_skbuild_args(args, cmake_args, build_tool_args):
+ """
+ Parse arguments in the scikit-build argument set. Convert specified
+ arguments to proper format and append to cmake_args and build_tool_args.
+ Returns remaining arguments.
+ """
+ parser = create_skbuild_argparser()
+ ns, remaining_args = parser.parse_known_args(args)
+
+ # Construct CMake argument list
+ cmake_args.append('-DCMAKE_BUILD_TYPE:STRING=' + ns.build_type)
+ if ns.generator is not None:
+ cmake_args.extend(['-G', ns.generator])
+
+ # Construct build tool argument list
+ build_tool_args.extend(['--config', ns.build_type])
+ if ns.jobs is not None:
+ build_tool_args.extend(['-j', str(ns.jobs)])
+
+ return remaining_args
def parse_args():
@@ -43,31 +61,20 @@ def parse_args():
make = []
argsets = [dutils, cmake, make]
i = 0
+ separator = '--'
- argv = list(sys.argv)
- try:
- argv.index("--build-type")
- except ValueError:
- argv.append("--build-type")
- argv.append("Release")
-
- for arg in argv:
- if arg == '--':
+ for arg in sys.argv:
+ if arg == separator:
i += 1
+ if i >= len(argsets):
+ sys.exit(
+ "ERROR: Too many \"{}\" separators provided "
+ "(expected at most {}).".format(separator,
+ len(argsets) - 1))
else:
argsets[i].append(arg)
- # handle argument transformations
- dutils, cmake = move_arg('--build-type', dutils, cmake,
- newarg='-DCMAKE_BUILD_TYPE:STRING',
- concatenate_value=True)
- dutils, cmake = move_arg('-G', dutils, cmake)
- dutils, make = move_arg('-j', dutils, make)
-
- def absappend(x):
- return os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), x)
-
- dutils, dutils = move_arg('--egg-base', dutils, dutils, f=absappend)
+ dutils = parse_skbuild_args(dutils, cmake, make)
return dutils, cmake, make
@@ -79,6 +86,30 @@ def setup(*args, **kw):
"""
sys.argv, cmake_args, make_args = parse_args()
+ # Skip running CMake when user requests help
+ help_parser = argparse.ArgumentParser(add_help=False)
+ help_parser.add_argument('-h', '--help', action='store_true')
+ help_parser.add_argument('--help-commands', action='store_true')
+ ns = help_parser.parse_known_args()[0]
+ if ns.help_commands:
+ return upstream_setup(*args, **kw)
+ if ns.help:
+ # Prepend scikit-build help. Generate option descriptions using
+ # argparse.
+ skbuild_parser = create_skbuild_argparser()
+ arg_descriptions = [line
+ for line in skbuild_parser.format_help().split('\n')
+ if line.startswith(' ')]
+ print('scikit-build options:')
+ print('\n'.join(arg_descriptions))
+ print()
+ print('Arguments following a "--" are passed directly to CMake '
+ '(e.g. -DMY_VAR:BOOL=TRUE).')
+ print('Arguments following a second "--" are passed directly to the '
+ 'build tool.')
+ print()
+ return upstream_setup(*args, **kw)
+
packages = kw.get('packages', [])
package_dir = kw.get('package_dir', {})
package_data = kw.get('package_data', {}).copy()
@@ -172,6 +203,7 @@ def setup(*args, **kw):
cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
cmdclass['bdist_wheel'] = cmdclass.get(
'bdist_wheel', bdist_wheel.bdist_wheel)
+ cmdclass['egg_info'] = cmdclass.get('egg_info', egg_info.egg_info)
kw['cmdclass'] = cmdclass
return upstream_setup(*args, **kw)
| Improve python setup.py build --help output
- [ ] Document the different build type option
- [ ] Document how to set CMake variables
- [ ] Document how to set the CMake build system generator | scikit-build/scikit-build | diff --git a/tests/test_command_line.py b/tests/test_command_line.py
new file mode 100644
index 0000000..f48b68b
--- /dev/null
+++ b/tests/test_command_line.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""test_command_line
+----------------------------------
+
+Tests for various command line functionality.
+"""
+
+from . import project_setup_py_test, push_dir
+
+
+@project_setup_py_test(("samples", "hello"), ["--help"])
+def test_help(capsys):
+ out, err = capsys.readouterr()
+ assert "scikit-build options" in out
+ assert "--build-type" in out
+ assert "Global options:" in out
+ assert "usage:" in out
+
+
+def test_no_command():
+ with push_dir():
+
+ @project_setup_py_test(("samples", "hello"), [""])
+ def run():
+ pass
+
+ failed = False
+ try:
+ run()
+ except SystemExit as e:
+ failed = e.args[0].startswith('invalid command name')
+
+ assert failed
+
+
+def test_too_many_separators():
+ with push_dir():
+
+ @project_setup_py_test(("samples", "hello"), ["--"] * 3)
+ def run():
+ pass
+
+ failed = False
+ try:
+ run()
+ except SystemExit as e:
+ failed = e.args[0].startswith('ERROR: Too many')
+
+ assert failed
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"tox"
],
"pre_install": [
"apt-get update",
"apt-get install -y build-essential"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
-e git+https://github.com/scikit-build/scikit-build.git@dd9814474b79009b7ac6523b4c14bbedd418f33f#egg=scikit_build
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: scikit-build
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- setuptools==25.1.6
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.16.2
- wheel==0.29.0
- zipp==3.6.0
prefix: /opt/conda/envs/scikit-build
| [
"tests/test_command_line.py::test_help",
"tests/test_command_line.py::test_too_many_separators"
] | [
"tests/test_command_line.py::test_no_command"
] | [] | [] | MIT License | 690 | 1,881 | [
"skbuild/cmaker.py",
"skbuild/setuptools_wrap.py"
] |
zalando-stups__senza-304 | 87feda79265966aa5d6a67f3a652e2f0d7961e64 | 2016-08-09 13:36:53 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/route53.py b/senza/manaus/route53.py
index 0075eb0..6aab215 100644
--- a/senza/manaus/route53.py
+++ b/senza/manaus/route53.py
@@ -112,8 +112,9 @@ class Route53HostedZone:
'ResourceRecordSet': record.boto_dict}
change_batch['Changes'].append(change)
- client.change_resource_record_sets(HostedZoneId=self.id,
- ChangeBatch=change_batch)
+ if change_batch['Changes']:
+ client.change_resource_record_sets(HostedZoneId=self.id,
+ ChangeBatch=change_batch)
return change_batch
| Only call API for Route53 config if there is changes to be made
Only call AWS API when there is actual changes to be made.
Users reporting this exception:
```
raise ParamValidationError(report=report.generate_report())
botocore.exceptions.ParamValidationError: Parameter validation failed:
Invalid length for parameter ChangeBatch.Changes, value: 0, valid range: 1-inf
```
Error comes from https://github.com/zalando-stups/senza/blob/master/senza/manaus/route53.py#L114-L115 | zalando-stups/senza | diff --git a/tests/test_manaus/test_route53.py b/tests/test_manaus/test_route53.py
index 2441ba1..24c5441 100644
--- a/tests/test_manaus/test_route53.py
+++ b/tests/test_manaus/test_route53.py
@@ -209,6 +209,12 @@ def test_hosted_zone_upsert(monkeypatch):
ChangeBatch={'Changes': expected_changes,
'Comment': 'test'})
+ m_client.change_resource_record_sets.reset_mock()
+ change_batch2 = hosted_zone.upsert([], comment="test")
+ assert change_batch2['Comment'] == "test"
+ assert change_batch2['Changes'] == []
+ m_client.change_resource_record_sets.assert_not_called()
+
def test_hosted_zone_create(monkeypatch):
m_client = MagicMock()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@87feda79265966aa5d6a67f3a652e2f0d7961e64#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_route53.py::test_hosted_zone_upsert"
] | [] | [
"tests/test_manaus/test_route53.py::test_hosted_zone_from_boto_dict",
"tests/test_manaus/test_route53.py::test_record_from_boto_dict",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones_paginated",
"tests/test_manaus/test_route53.py::test_get_records",
"tests/test_manaus/test_route53.py::test_route53_record_boto_dict",
"tests/test_manaus/test_route53.py::test_hosted_zone_create",
"tests/test_manaus/test_route53.py::test_hosted_zone_delete",
"tests/test_manaus/test_route53.py::test_to_alias",
"tests/test_manaus/test_route53.py::test_convert_domain_records_to_alias",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_domain_name",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_id",
"tests/test_manaus/test_route53.py::test_get_by_domain_name"
] | [] | Apache License 2.0 | 692 | 173 | [
"senza/manaus/route53.py"
] |
|
zalando-stups__senza-305 | cdd45d357b6767742393d4b2aa9af68715e6dd5e | 2016-08-09 14:56:30 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/stups_auto_configuration.py b/senza/components/stups_auto_configuration.py
index 5969330..415e380 100644
--- a/senza/components/stups_auto_configuration.py
+++ b/senza/components/stups_auto_configuration.py
@@ -28,12 +28,13 @@ def find_taupage_image(region: str):
def component_stups_auto_configuration(definition, configuration, args, info, force, account_info):
ec2 = boto3.resource('ec2', args.region)
+ vpc_id = configuration.get('VpcId', account_info.VpcID)
availability_zones = configuration.get('AvailabilityZones')
server_subnets = []
lb_subnets = []
lb_internal_subnets = []
- for subnet in ec2.subnets.filter(Filters=[{'Name': 'vpc-id', 'Values': [account_info.VpcID]}]):
+ for subnet in ec2.subnets.filter(Filters=[{'Name': 'vpc-id', 'Values': [vpc_id]}]):
name = get_tag(subnet.tags, 'Name', '')
if availability_zones and subnet.availability_zone not in availability_zones:
# skip subnet as it's not in one of the given AZs
| Support multiple VPCs
Senza's STUPS components currently assume a single VPC per region (Seven Seconds only configures a single VPC). There might be valid reasons to have multiple VPCs (e.g. a special VPC with VPN tunnel to some 3rd party location), so Senza needs to support that, too. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 1191bae..eceb9da 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -220,6 +220,42 @@ def test_component_stups_auto_configuration(monkeypatch):
assert {'myregion': {'Subnets': ['sn-3']}} == result['Mappings']['ServerSubnets']
+def test_component_stups_auto_configuration_vpc_id(monkeypatch):
+ args = MagicMock()
+ args.region = 'myregion'
+
+ configuration = {
+ 'Name': 'Config',
+ 'VpcId': 'vpc-123'
+ }
+
+ sn1 = MagicMock()
+ sn1.id = 'sn-1'
+ sn1.tags = [{'Key': 'Name', 'Value': 'dmz-1'}]
+ sn1.availability_zone = 'az-1'
+ sn2 = MagicMock()
+ sn2.id = 'sn-2'
+ sn2.tags = [{'Key': 'Name', 'Value': 'dmz-2'}]
+ sn2.availability_zone = 'az-2'
+ sn3 = MagicMock()
+ sn3.id = 'sn-3'
+ sn3.tags = [{'Key': 'Name', 'Value': 'internal-3'}]
+ sn3.availability_zone = 'az-1'
+ ec2 = MagicMock()
+ def get_subnets(Filters):
+ assert Filters == [{'Name': 'vpc-id', 'Values': ['vpc-123']}]
+ return [sn1, sn2, sn3]
+ ec2.subnets.filter = get_subnets
+ image = MagicMock()
+ ec2.images.filter.return_value = [image]
+ monkeypatch.setattr('boto3.resource', lambda x, y: ec2)
+
+ result = component_stups_auto_configuration({}, configuration, args, MagicMock(), False, MagicMock())
+
+ assert {'myregion': {'Subnets': ['sn-1', 'sn-2']}} == result['Mappings']['LoadBalancerSubnets']
+ assert {'myregion': {'Subnets': ['sn-3']}} == result['Mappings']['ServerSubnets']
+
+
def test_component_redis_node(monkeypatch):
mock_string = "foo"
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@cdd45d357b6767742393d4b2aa9af68715e6dd5e#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id"
] | [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name"
] | [] | Apache License 2.0 | 693 | 283 | [
"senza/components/stups_auto_configuration.py"
] |
|
fabiobatalha__chess_master-2 | 041556a8017679512fb37b2fe73ccb226eadf125 | 2016-08-10 04:17:25 | 041556a8017679512fb37b2fe73ccb226eadf125 | diff --git a/chess.py b/chess.py
index 8c57bc4..c9d7287 100644
--- a/chess.py
+++ b/chess.py
@@ -101,6 +101,19 @@ class Bishop(Pieces):
def __str__(self):
return self.NAME
+
+ def threatening_zone(self, max_size):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
+ """
+
+ import pdb; pdb.set_trace()
+
+ self.position
class Kinight(Pieces):
@@ -110,6 +123,14 @@ class Kinight(Pieces):
return self.NAME
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class King(Pieces):
NAME = 'king'
@@ -118,6 +139,14 @@ class King(Pieces):
return self.NAME
+ def threatening_zone(self):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class Pawn(Pieces):
NAME = 'pawn'
@@ -126,6 +155,24 @@ class Pawn(Pieces):
return self.NAME
+ def threatening_zone(self, max_size):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
+ """
+ zone = []
+
+ x, y = self.position
+
+ zone.append((x+1, y+1))
+ zone.append((x-1, y+1))
+
+ return [(x, y) for x, y in zone if x in range(max_size) and y in range(max_size)]
+
+
class Queen(Pieces):
NAME = 'queen'
@@ -134,6 +181,14 @@ class Queen(Pieces):
return self.NAME
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class Rook(Pieces):
NAME = 'rook'
@@ -141,3 +196,11 @@ class Rook(Pieces):
def __str__(self):
return self.NAME
+
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
| Implement threatening zone for panws
Implement a method to delivery the threatening zone for panws | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 24ff9a6..acb7e6d 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -89,6 +89,84 @@ class TestsChessMasterPiece(unittest.TestCase):
with self.assertRaises(ValueError):
pawn.set_position((1, 2, 4,))
+
+ def test_pawn_threatening_zone(self):
+ """
+ Testing pawn when the piece is able to threatening other piece in both
+ sides.
+ """
+
+ pawn = chess.Pawn((4,0))
+
+ expected = [
+ (3,1),
+ (5,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+
+ def test_pawn_threatening_x_boundary_left(self):
+ """
+ Testing boundary where the pawn can not move to the left
+ """
+
+ pawn = chess.Pawn((0,0))
+
+ expected = [
+ (1,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_x_boundary_right(self):
+ """
+ Testing boundary where the pawn can not move to the right
+ """
+
+ pawn = chess.Pawn((7,0))
+
+ expected = [
+ (6,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_zone_y_boundary(self):
+ """
+ Testing boundary where the pawn can not move forward
+ """
+
+ pawn = chess.Pawn((4,7))
+
+ expected = []
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_zone_y_boundary_last_move(self):
+ """
+ Testing boundary where the pawn can make your last move forward
+ """
+
+ pawn = chess.Pawn((4,6))
+
+ expected = [
+ (3,7),
+ (5,7)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
class TestsChessMasterBoard(unittest.TestCase):
def test_put_1_piece(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@041556a8017679512fb37b2fe73ccb226eadf125#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move"
] | [] | [
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
] | [] | MIT License | 695 | 675 | [
"chess.py"
] |
|
fabiobatalha__chess_master-4 | dabbee5c27437b805262f12e0181aceae7066bcb | 2016-08-10 21:26:01 | dabbee5c27437b805262f12e0181aceae7066bcb | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 63f5aab..35ba524 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -79,6 +79,74 @@ class Pieces(object):
self.set_position(position or (0, 0))
+ def _se_positions(self, max_size):
+ """
+ Retrieve the south east positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ y -= 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _ne_positions(self, max_size):
+ """
+ Retrieve the north east positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ y += 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _nw_positions(self, max_size):
+ """
+ Retrieve the south weast positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ y += 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _sw_positions(self, max_size):
+ """
+ Retrieve the south weast positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ y -= 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
def set_position(self, position):
"""
Set the x,y position of the piece on the board.
@@ -113,7 +181,14 @@ class Bishop(Pieces):
Arguments:
max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ zone += self._se_positions(max_size)
+ zone += self._ne_positions(max_size)
+ zone += self._nw_positions(max_size)
+ zone += self._sw_positions(max_size)
+
+ return zone
class Kinight(Pieces):
| Implement threatening zone for bishops | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index c70e5aa..a6db443 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import unittest
from masterchess import chess
@@ -164,6 +163,92 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(pawn.threatening_zone(8)), sorted(expected)
)
+ def test_se_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 3),
+ (5, 2),
+ (6, 1),
+ (7, 0),
+ ]
+
+ self.assertEqual(
+ sorted(piece._se_positions(8)), sorted(expected)
+ )
+
+ def test_ne_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 5),
+ (5, 6),
+ (6, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._ne_positions(8)), sorted(expected)
+ )
+
+ def test_nw_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 5),
+ (1, 6),
+ (0, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._nw_positions(8)), sorted(expected)
+ )
+
+ def test_sw_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 3),
+ (1, 2),
+ (0, 1)
+ ]
+
+ self.assertEqual(
+ sorted(piece._sw_positions(8)), sorted(expected)
+ )
+
+
+ def test_bishop_threatening_zone(self):
+ """
+ Testing bishop moves when the piece is able to threatening other pieces
+ in all directions.
+ """
+
+ bishop = chess.Bishop((3, 4))
+
+ expected = [
+ (0, 1),
+ (0, 7),
+ (1, 2),
+ (1, 6),
+ (2, 3),
+ (2, 5),
+ (4, 3),
+ (4, 5),
+ (5, 2),
+ (5, 6),
+ (6, 1),
+ (6, 7),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(bishop.threatening_zone(8)), sorted(expected)
+ )
+
class TestsChessMasterBoard(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@dabbee5c27437b805262f12e0181aceae7066bcb#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions"
] | [] | [
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
] | [] | MIT License | 696 | 648 | [
"masterchess/chess.py"
] |
|
fabiobatalha__chess_master-6 | 9fe35b2b4029e1eeedeb69b941eba6cb955182a3 | 2016-08-10 21:46:26 | 9fe35b2b4029e1eeedeb69b941eba6cb955182a3 | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 35ba524..91432b1 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -79,9 +79,73 @@ class Pieces(object):
self.set_position(position or (0, 0))
+ def _w_positions(self, max_size):
+ """
+ Retrieve the west positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ if x not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _l_positions(self, max_size):
+ """
+ Retrieve the lest positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ if x not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _n_positions(self, max_size):
+ """
+ Retrieve the south positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ y += 1
+ if y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _s_positions(self, max_size):
+ """
+ Retrieve the south positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ y -= 1
+ if y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
def _se_positions(self, max_size):
"""
- Retrieve the south east positions of as given position
+ Retrieve the south east positions of a given position
"""
ne_positions = []
@@ -98,7 +162,7 @@ class Pieces(object):
def _ne_positions(self, max_size):
"""
- Retrieve the north east positions of as given position
+ Retrieve the north east positions of a given position
"""
ne_positions = []
@@ -115,7 +179,7 @@ class Pieces(object):
def _nw_positions(self, max_size):
"""
- Retrieve the south weast positions of as given position
+ Retrieve the south weast positions of a given position
"""
ne_positions = []
@@ -132,7 +196,7 @@ class Pieces(object):
def _sw_positions(self, max_size):
"""
- Retrieve the south weast positions of as given position
+ Retrieve the south weast positions of a given position
"""
ne_positions = []
@@ -276,10 +340,21 @@ class Rook(Pieces):
return self.NAME
- def threatening_zone():
+ def threatening_zone(self, max_size):
"""
Get the current position of the piece and produce a list of threathening
places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ zone += self._s_positions(max_size)
+ zone += self._n_positions(max_size)
+ zone += self._l_positions(max_size)
+ zone += self._w_positions(max_size)
+
+ return zone
+
| Implement threatening zone for rooks | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index a6db443..e48b78b 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -163,6 +163,64 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(pawn.threatening_zone(8)), sorted(expected)
)
+ def test_w_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 4),
+ (1, 4),
+ (0, 4)
+ ]
+
+ self.assertEqual(
+ sorted(piece._w_positions(8)), sorted(expected)
+ )
+
+ def test_l_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 4),
+ (5, 4),
+ (6, 4),
+ (7, 4)
+ ]
+
+ self.assertEqual(
+ sorted(piece._l_positions(8)), sorted(expected)
+ )
+
+ def test_n_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (3, 5),
+ (3, 6),
+ (3, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._n_positions(8)), sorted(expected)
+ )
+
+ def test_s_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (3, 3),
+ (3, 2),
+ (3, 1),
+ (3, 0),
+ ]
+
+ self.assertEqual(
+ sorted(piece._s_positions(8)), sorted(expected)
+ )
+
def test_se_positions(self):
piece = chess.Pieces((3, 4))
@@ -223,8 +281,7 @@ class TestsChessMasterPiece(unittest.TestCase):
def test_bishop_threatening_zone(self):
"""
- Testing bishop moves when the piece is able to threatening other pieces
- in all directions.
+ Testing gather the bishop allowed moves
"""
bishop = chess.Bishop((3, 4))
@@ -249,6 +306,33 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(bishop.threatening_zone(8)), sorted(expected)
)
+ def test_rook_threatening_zone(self):
+ """
+ Testing gather the rook allowed moves
+ """
+
+ rook = chess.Rook((3, 4))
+
+ expected = [
+ (0, 4),
+ (1, 4),
+ (2, 4),
+ (4, 4),
+ (5, 4),
+ (6, 4),
+ (7, 4),
+ (3, 0),
+ (3, 1),
+ (3, 2),
+ (3, 3),
+ (3, 5),
+ (3, 6),
+ (3, 7)
+ ]
+
+ self.assertEqual(
+ sorted(rook.threatening_zone(8)), sorted(expected)
+ )
class TestsChessMasterBoard(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@9fe35b2b4029e1eeedeb69b941eba6cb955182a3#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
- pytest-cov==6.0.0
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_l_positions",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions"
] | [] | [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
] | [] | MIT License | 697 | 857 | [
"masterchess/chess.py"
] |
|
fabiobatalha__chess_master-12 | e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb | 2016-08-11 00:33:22 | e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb | diff --git a/masterchess/chess.py b/masterchess/chess.py
index f2bd2ad..146c8d9 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -263,13 +263,31 @@ class Kinight(Pieces):
return self.NAME
- def threatening_zone():
+ def threatening_zone(self, max_size):
"""
Get the current position of the piece and produce a list of threathening
places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ x, y = self.position
+
+ zone.append((x-1, y+2))
+ zone.append((x+1, y+2))
+ zone.append((x-2, y+1))
+ zone.append((x+2, y+1))
+ zone.append((x-2, y-1))
+ zone.append((x+2, y-1))
+ zone.append((x-1, y-2))
+ zone.append((x+1, y-2))
+
+ rg = range(max_size)
+
+ return [(x, y) for x, y in zone if x in rg and y in rg]
class King(Pieces):
@@ -292,6 +310,7 @@ class King(Pieces):
zone = []
x, y = self.position
+
zone.append((x-1, y+1))
zone.append((x, y+1))
zone.append((x+1, y+1))
| Implement threatening zone for kinights | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 71bb655..2837ed0 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -375,6 +375,692 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(queen.threatening_zone(8)), sorted(expected)
)
+ def test_kinight_threatening_zone(self):
+ """
+ Testing gather the kinight allowed moves
+ """
+
+ kinight = chess.Kinight((3, 4))
+
+ expected = [
+ (2, 6),
+ (4, 6),
+ (1, 5),
+ (5, 5),
+ (1, 3),
+ (5, 3),
+ (2, 2),
+ (4, 2)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 0))
+
+ expected = [
+ (1, 2),
+ (2, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 0))
+
+ expected = [
+ (0, 2),
+ (2, 2),
+ (3, 1),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 0))
+
+ expected = [
+ (1, 2),
+ (3, 2),
+ (0, 1),
+ (4, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 0))
+
+ expected = [
+ (6, 2),
+ (5, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 0))
+
+ expected = [
+ (5, 2),
+ (7, 2),
+ (4, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 0))
+
+ expected = [
+ (4, 2),
+ (6, 2),
+ (3, 1),
+ (7, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 1))
+
+ expected = [
+ (1, 3),
+ (2, 2),
+ (2, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 1))
+
+ expected = [
+ (0, 3),
+ (2, 3),
+ (3, 2),
+ (3, 0),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 1))
+
+ expected = [
+ (1, 3),
+ (3, 3),
+ (0, 2),
+ (4, 2),
+ (0, 0),
+ (4, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 1))
+
+ expected = [
+ (6, 3),
+ (5, 2),
+ (5, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 1))
+
+ expected = [
+ (5, 3),
+ (7, 3),
+ (4, 2),
+ (4, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 1))
+
+ expected = [
+ (4, 3),
+ (6, 3),
+ (3, 2),
+ (7, 2),
+ (3, 0),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 2))
+
+ expected = [
+ (1, 4),
+ (2, 3),
+ (2, 1),
+ (1, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 2))
+
+ expected = [
+ (0, 4),
+ (2, 4),
+ (3, 3),
+ (3, 1),
+ (2, 0),
+ (0, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 2))
+
+ expected = [
+ (1, 4),
+ (3, 4),
+ (0, 3),
+ (4, 3),
+ (0, 1),
+ (4, 1),
+ (1, 0),
+ (3, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 2))
+
+ expected = [
+ (6, 4),
+ (5, 3),
+ (5, 1),
+ (6, 0),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 2))
+
+ expected = [
+ (5, 4),
+ (7, 4),
+ (4, 3),
+ (4, 1),
+ (5, 0),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 2))
+
+ expected = [
+ (4, 4),
+ (6, 4),
+ (3, 3),
+ (7, 3),
+ (3, 1),
+ (7, 1),
+ (4, 0),
+ (6, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 7))
+
+ expected = [
+ (2, 6),
+ (1, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 7))
+
+ expected = [
+ (3, 6),
+ (0, 5),
+ (2, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 7))
+
+ expected = [
+ (0, 6),
+ (4, 6),
+ (1, 5),
+ (3, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 7))
+
+ expected = [
+ (5, 6),
+ (6, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 7))
+
+ expected = [
+ (4, 6),
+ (5, 5),
+ (7, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 7))
+
+ expected = [
+ (3, 6),
+ (7, 6),
+ (4, 5),
+ (6, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 6))
+
+ expected = [
+ (2, 7),
+ (2, 5),
+ (1, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 6))
+
+ expected = [
+ (3, 7),
+ (3, 5),
+ (0, 4),
+ (2, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 6))
+
+ expected = [
+ (0, 7),
+ (4, 7),
+ (0, 5),
+ (4, 5),
+ (1, 4),
+ (3, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 6))
+
+ expected = [
+ (5, 7),
+ (5, 5),
+ (6, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 6))
+
+ expected = [
+ (4, 7),
+ (4, 5),
+ (5, 4),
+ (7, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 6))
+
+ expected = [
+ (3, 7),
+ (7, 7),
+ (3, 5),
+ (7, 5),
+ (4, 4),
+ (6, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 5))
+
+ expected = [
+ (1, 7),
+ (2, 6),
+ (2, 4),
+ (1, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 5))
+
+ expected = [
+ (0, 7),
+ (2, 7),
+ (3, 6),
+ (3, 4),
+ (0, 3),
+ (2, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 5))
+
+ expected = [
+ (1, 7),
+ (3, 7),
+ (0, 6),
+ (4, 6),
+ (0, 4),
+ (4, 4),
+ (1, 3),
+ (3, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 5))
+
+ expected = [
+ (6, 7),
+ (5, 6),
+ (5, 4),
+ (6, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 5))
+
+ expected = [
+ (5, 7),
+ (7, 7),
+ (4, 6),
+ (4, 4),
+ (5, 3),
+ (7, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 5))
+
+ expected = [
+ (4, 7),
+ (6, 7),
+ (3, 6),
+ (7, 6),
+ (3, 4),
+ (7, 4),
+ (4, 3),
+ (6, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
def test_king_threatening_zone(self):
"""
Testing gather the king allowed moves
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_7"
] | [] | [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_e_positions",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_right",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_right",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_queen_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
] | [] | MIT License | 699 | 382 | [
"masterchess/chess.py"
] |
|
falconry__falcon-869 | e0f059378b113dcc40e7de7fb16b4f4a104a74f1 | 2016-08-16 01:20:09 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/869?src=pr) is 100% (diff: 100%)
> Merging [#869](https://codecov.io/gh/falconry/falcon/pull/869?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #869 diff @@
====================================
Files 30 30
Lines 1864 1874 +10
Methods 0 0
Messages 0 0
Branches 305 309 +4
====================================
+ Hits 1864 1874 +10
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [9499207...901718d](https://codecov.io/gh/falconry/falcon/compare/94992071b1b9c6f828bb827b8be092146a514200...901718d0d5f4618a704f341a9bb9b923187c4b3a?src=pr)
kgriffs: [rebased]
kgriffs: [rebased]
fxfitz: LGTM
jmvrbanac: :+1: | diff --git a/falcon/util/uri.py b/falcon/util/uri.py
index fc1acd9..47726da 100644
--- a/falcon/util/uri.py
+++ b/falcon/util/uri.py
@@ -60,6 +60,7 @@ def _create_char_encoder(allowed_chars):
def _create_str_encoder(is_value):
allowed_chars = _UNRESERVED if is_value else _ALL_ALLOWED
+ allowed_chars_plus_percent = allowed_chars + '%'
encode_char = _create_char_encoder(allowed_chars)
def encoder(uri):
@@ -67,10 +68,32 @@ def _create_str_encoder(is_value):
if not uri.rstrip(allowed_chars):
return uri
+ if not uri.rstrip(allowed_chars_plus_percent):
+ # NOTE(kgriffs): There's a good chance the string has already
+ # been escaped. Do one more check to increase our certainty.
+ tokens = uri.split('%')
+ for token in tokens[1:]:
+ hex_octet = token[:2]
+
+ if not len(hex_octet) == 2:
+ break
+
+ if not (hex_octet[0] in _HEX_DIGITS and
+ hex_octet[1] in _HEX_DIGITS):
+ break
+ else:
+ # NOTE(kgriffs): All percent-encoded sequences were
+ # valid, so assume that the string has already been
+ # encoded.
+ return uri
+
+ # NOTE(kgriffs): At this point we know there is at least
+ # one unallowed percent character. We are going to assume
+ # that everything should be encoded. If the string is
+ # partially encoded, the caller will need to normalize it
+ # before passing it in here.
+
# Convert to a byte array if it is not one already
- #
- # NOTE(kgriffs): Code coverage disabled since in Py3K the uri
- # is always a text type, so we get a failure for that tox env.
if isinstance(uri, six.text_type):
uri = uri.encode('utf-8')
| resp.location double-encodes urlencoded strings
I've got a URL which contains a url-encoded URL as a parameter, and when setting resp.location, that results in it being double-encoded:
resp.location = "http://something?redirect_uri=http%3A%2F%2Fsite"
This should result in:
Location: http://something?redirect_uri=http%3A%2F%2Fsite...
But what actually happens is:
Location: http://something?redirect_uri=http%253A%252F%252Fsite
I worked around by raising a HTTPStatus() object, but this doesn't seem like the ideal situation. | falconry/falcon | diff --git a/tests/test_utils.py b/tests/test_utils.py
index 6b5f75d..de32f0a 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -177,6 +177,35 @@ class TestFalconUtils(testtools.TestCase):
'?limit=3&e%C3%A7ho=true')
self.assertEqual(uri.encode(url), expected)
+ def test_uri_encode_double(self):
+ url = 'http://example.com/v1/fiz bit/messages'
+ expected = 'http://example.com/v1/fiz%20bit/messages'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = u'http://example.com/v1/fizbit/messages?limit=3&e\u00e7ho=true'
+ expected = ('http://example.com/v1/fizbit/messages'
+ '?limit=3&e%C3%A7ho=true')
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = 'http://example.com/v1/fiz%bit/mess%ages/%'
+ expected = 'http://example.com/v1/fiz%25bit/mess%25ages/%25'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = 'http://example.com/%%'
+ expected = 'http://example.com/%25%25'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ # NOTE(kgriffs): Specific example cited in GH issue
+ url = 'http://something?redirect_uri=http%3A%2F%2Fsite'
+ self.assertEqual(uri.encode(url), url)
+
+ hex_digits = 'abcdefABCDEF0123456789'
+ for c1 in hex_digits:
+ for c2 in hex_digits:
+ url = 'http://example.com/%' + c1 + c2
+ encoded = uri.encode(uri.encode(url))
+ self.assertEqual(encoded, url)
+
def test_uri_encode_value(self):
self.assertEqual(uri.encode_value('abcd'), 'abcd')
self.assertEqual(uri.encode_value(u'abcd'), u'abcd')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"ddt",
"pyyaml",
"requests",
"testtools",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@e0f059378b113dcc40e7de7fb16b4f4a104a74f1#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_utils.py::TestFalconUtils::test_uri_encode_double"
] | [
"tests/test_utils.py::TestFalconUtils::test_deprecated_decorator"
] | [
"tests/test_utils.py::TestFalconUtils::test_dt_to_http",
"tests/test_utils.py::TestFalconUtils::test_get_http_status",
"tests/test_utils.py::TestFalconUtils::test_http_date_to_dt",
"tests/test_utils.py::TestFalconUtils::test_http_now",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_none",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_one",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_several",
"tests/test_utils.py::TestFalconUtils::test_parse_host",
"tests/test_utils.py::TestFalconUtils::test_parse_query_string",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_decode_models_stdlib_unquote_plus",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_models_stdlib_quote",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_value_models_stdlib_quote_safe_tilde",
"tests/test_utils.py::TestFalconUtils::test_uri_decode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode_value",
"tests/test_utils.py::TestFalconTesting::test_decode_empty_result",
"tests/test_utils.py::TestFalconTesting::test_httpnow_alias_for_backwards_compat",
"tests/test_utils.py::TestFalconTesting::test_none_header_value_in_create_environ",
"tests/test_utils.py::TestFalconTesting::test_path_escape_chars_in_create_environ",
"tests/test_utils.py::TestFalconTestCase::test_cached_text_in_result",
"tests/test_utils.py::TestFalconTestCase::test_path_must_start_with_slash",
"tests/test_utils.py::TestFalconTestCase::test_query_string",
"tests/test_utils.py::TestFalconTestCase::test_query_string_in_path",
"tests/test_utils.py::TestFalconTestCase::test_query_string_no_question",
"tests/test_utils.py::TestFalconTestCase::test_simple_resource_body_json_xor",
"tests/test_utils.py::TestFalconTestCase::test_status",
"tests/test_utils.py::TestFalconTestCase::test_wsgi_iterable_not_closeable",
"tests/test_utils.py::FancyTestCase::test_something"
] | [] | Apache License 2.0 | 705 | 485 | [
"falcon/util/uri.py"
] |
andir__isc-dhcp-filter-2 | efc868102f47329f7280b87a21b5fd9e9defcd64 | 2016-08-16 14:13:07 | efc868102f47329f7280b87a21b5fd9e9defcd64 | diff --git a/isc_dhcp_filter/__init__.py b/isc_dhcp_filter/__init__.py
index d769b6d..70524c3 100644
--- a/isc_dhcp_filter/__init__.py
+++ b/isc_dhcp_filter/__init__.py
@@ -1,6 +1,6 @@
+from isc_dhcp_leases import IscDhcpLeases
from isc_dhcp_leases import Lease
from isc_dhcp_leases import Lease6
-from isc_dhcp_leases import IscDhcpLeases
def parse(*files):
@@ -119,6 +119,13 @@ class Leases:
return Leases(g)
+ def count(self):
+ """
+ Returns the count of leases in the current set of leases
+ :return: int count of leases
+ """
+ return len(self)
+
def __iter__(self):
"""
Returns an iterator for the current set of leases
@@ -128,3 +135,16 @@ class Leases:
yield from iter(self._leases)
elif self._iter:
yield from self._iter()
+
+ def __len__(self):
+ """
+ Implements __len__
+ If we are dealing with a generator we will expand it into `_leases`
+ :return:
+ """
+ if type(self._leases) is list:
+ return len(self._leases)
+ else:
+ l = list(iter(self))
+ self._leases = l
+ return len(l)
| Add `.count()` method as shortcut for len(list(leases))
Sometime the only intresting part about the lease db is how many are actually in a given state. Currently you've to write `len(list(leases.active))` to get the count of active leases. `leases.active.count()` and also implementing `__len__` would probably be handy. | andir/isc-dhcp-filter | diff --git a/tests/__init__.py b/tests/__init__.py
index e924baf..426d90b 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,11 +1,11 @@
import os
from unittest import TestCase
+from freezegun import freeze_time
from isc_dhcp_leases import Lease
from isc_dhcp_leases.iscdhcpleases import BaseLease
from isc_dhcp_filter import parse, Leases
-from freezegun import freeze_time
class LeaseLoaderMixin:
@@ -26,6 +26,15 @@ class BaseLeaseTester:
self.assertEqual(len(active_valid), len(valid_active))
self.assertEqual(len(active_valid), len(list(self.leases.current)))
+ def test_list_twice(self):
+ a = list(self.leases)
+ b = list(self.leases)
+
+ self.assertEqual(a, b)
+
+ def test_len(self):
+ self.assertEqual(len(self.leases), self.leases.count())
+
def test_v4_filter(self):
for lease in self.leases.v4:
self.assertIsInstance(lease, Lease)
@@ -65,7 +74,7 @@ class BaseLeaseTester:
def test_filter_combine(self):
combined = Leases(self.leases.v4, self.leases.v6)
l = len(list(combined))
- self.assertEqual(l, len(list(self.leases)))
+ self.assertEqual(l, len(self.leases))
class TestDhcpd6(LeaseLoaderMixin, BaseLeaseTester, TestCase):
@@ -74,18 +83,20 @@ class TestDhcpd6(LeaseLoaderMixin, BaseLeaseTester, TestCase):
def test_dhcpv6_active(self):
leases = self.leases
- self.assertEqual(len(list(leases)), 4)
- self.assertEqual(len(list(leases.active)), 4)
+ self.assertEqual(len(leases), 4)
+ self.assertEqual(len(leases.active), 4)
+ self.assertEqual(len(leases.active), 4)
@freeze_time("2015-07-6 8:15:0")
def test_dhcpv6_active_valid(self):
leases = self.leases
- active_valid = list(leases.active.valid)
- valid_active = list(leases.valid.active)
+ active_valid = leases.active.valid
+ valid_active = leases.valid.active
self.assertEqual(len(active_valid), len(valid_active))
- self.assertEqual(len(active_valid), len(list(leases.current)))
+ self.assertEqual(len(active_valid), len(leases.current))
+ self.assertEqual(sorted(active_valid, key=id), sorted(valid_active, key=id))
def test_dhcpv6_invalid(self):
leases = self.leases
@@ -115,6 +126,11 @@ class TestDebian7(LeaseLoaderMixin, BaseLeaseTester, TestCase):
leases2 = list(self.leases.where_eq('vendor-class-identifier', 'Some Vendor Identifier'))
self.assertEqual(leases1, leases2)
+
class TestEmptyLease(BaseLeaseTester, TestCase):
def setUp(self):
- self.leases = Leases()
\ No newline at end of file
+ self.leases = Leases()
+
+ def test_lease_count_zero(self):
+ self.assertEqual(self.leases.count(), 0)
+ self.assertEqual(len(self.leases), 0)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"freezegun"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
freezegun==1.5.1
iniconfig==2.1.0
-e git+https://github.com/andir/isc-dhcp-filter.git@efc868102f47329f7280b87a21b5fd9e9defcd64#egg=isc_dhcp_filter
isc-dhcp-leases==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tomli==2.2.1
| name: isc-dhcp-filter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- freezegun==1.5.1
- iniconfig==2.1.0
- isc-dhcp-leases==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/isc-dhcp-filter
| [
"tests/__init__.py::TestDhcpd6::test_dhcpv6_active",
"tests/__init__.py::TestDhcpd6::test_dhcpv6_active_valid",
"tests/__init__.py::TestDhcpd6::test_filter_combine",
"tests/__init__.py::TestDhcpd6::test_len",
"tests/__init__.py::TestDebian7::test_filter_combine",
"tests/__init__.py::TestDebian7::test_len",
"tests/__init__.py::TestEmptyLease::test_filter_combine",
"tests/__init__.py::TestEmptyLease::test_lease_count_zero",
"tests/__init__.py::TestEmptyLease::test_len"
] | [] | [
"tests/__init__.py::TestDhcpd6::test_active_filter",
"tests/__init__.py::TestDhcpd6::test_active_valid_current",
"tests/__init__.py::TestDhcpd6::test_current_filter",
"tests/__init__.py::TestDhcpd6::test_dhcpv6_invalid",
"tests/__init__.py::TestDhcpd6::test_filter_func",
"tests/__init__.py::TestDhcpd6::test_inactive_filter",
"tests/__init__.py::TestDhcpd6::test_invalid_filter",
"tests/__init__.py::TestDhcpd6::test_list_twice",
"tests/__init__.py::TestDhcpd6::test_v4_filter",
"tests/__init__.py::TestDhcpd6::test_v6_filter",
"tests/__init__.py::TestDhcpd6::test_valid_filter",
"tests/__init__.py::TestDhcpd6::test_where_eq",
"tests/__init__.py::TestDebian7::test_active_filter",
"tests/__init__.py::TestDebian7::test_active_valid_current",
"tests/__init__.py::TestDebian7::test_current_filter",
"tests/__init__.py::TestDebian7::test_filter_func",
"tests/__init__.py::TestDebian7::test_inactive_filter",
"tests/__init__.py::TestDebian7::test_invalid_filter",
"tests/__init__.py::TestDebian7::test_list_twice",
"tests/__init__.py::TestDebian7::test_v4_filter",
"tests/__init__.py::TestDebian7::test_v6_filter",
"tests/__init__.py::TestDebian7::test_valid_filter",
"tests/__init__.py::TestDebian7::test_vendor_class_identifier",
"tests/__init__.py::TestEmptyLease::test_active_filter",
"tests/__init__.py::TestEmptyLease::test_active_valid_current",
"tests/__init__.py::TestEmptyLease::test_current_filter",
"tests/__init__.py::TestEmptyLease::test_filter_func",
"tests/__init__.py::TestEmptyLease::test_inactive_filter",
"tests/__init__.py::TestEmptyLease::test_invalid_filter",
"tests/__init__.py::TestEmptyLease::test_list_twice",
"tests/__init__.py::TestEmptyLease::test_v4_filter",
"tests/__init__.py::TestEmptyLease::test_v6_filter",
"tests/__init__.py::TestEmptyLease::test_valid_filter"
] | [] | MIT License | 706 | 354 | [
"isc_dhcp_filter/__init__.py"
] |
|
grabbles__grabbit-6 | afe361809ca5c040a46caa9f8a9bae017bcc706e | 2016-08-18 05:30:39 | afe361809ca5c040a46caa9f8a9bae017bcc706e | diff --git a/grabbit/core.py b/grabbit/core.py
index 84009db..a2a87be 100644
--- a/grabbit/core.py
+++ b/grabbit/core.py
@@ -197,7 +197,7 @@ class Layout(object):
return_type (str): Type of result to return. Valid values:
'tuple': returns a list of namedtuples containing file name as
well as attribute/value pairs for all named entities.
- 'file': returns a list of File instances.
+ 'file': returns a list of matching filenames.
'dir': returns a list of directories.
'id': returns a list of unique IDs. Must be used together with
a valid target.
@@ -222,7 +222,7 @@ class Layout(object):
result.append(file)
if return_type == 'file':
- return result
+ return natural_sort([f.path for f in result])
if return_type == 'tuple':
result = [r.as_named_tuple() for r in result]
| Redefining File class is confusing
Returning File objects which are something different that python build in [file object](https://docs.python.org/3/glossary.html#term-file-object) | grabbles/grabbit | diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py
index 0c92377..11da286 100644
--- a/grabbit/tests/test_core.py
+++ b/grabbit/tests/test_core.py
@@ -127,6 +127,8 @@ class TestLayout:
result = layout.get(target='subject', return_type='dir')
assert os.path.exists(result[0])
assert os.path.isdir(result[0])
+ result = layout.get(target='subject', type='phasediff', return_type='file')
+ assert all([os.path.exists(f) for f in result])
def test_unique_and_count(self, layout):
result = layout.unique('subject')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/grabbles/grabbit.git@afe361809ca5c040a46caa9f8a9bae017bcc706e#egg=grabbit
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: grabbit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/grabbit
| [
"grabbit/tests/test_core.py::TestLayout::test_querying"
] | [] | [
"grabbit/tests/test_core.py::TestFile::test_init",
"grabbit/tests/test_core.py::TestFile::test_matches",
"grabbit/tests/test_core.py::TestFile::test_named_tuple",
"grabbit/tests/test_core.py::TestEntity::test_init",
"grabbit/tests/test_core.py::TestEntity::test_matches",
"grabbit/tests/test_core.py::TestEntity::test_unique_and_count",
"grabbit/tests/test_core.py::TestEntity::test_add_file",
"grabbit/tests/test_core.py::TestLayout::test_init",
"grabbit/tests/test_core.py::TestLayout::test_absolute_paths",
"grabbit/tests/test_core.py::TestLayout::test_dynamic_getters",
"grabbit/tests/test_core.py::TestLayout::test_unique_and_count"
] | [] | MIT License | 707 | 240 | [
"grabbit/core.py"
] |
|
XD-embedded__xd-docker-52 | 00db236f2385d476cd3f32e3fd4bf68fd1dd8a35 | 2016-08-27 14:53:21 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/xd/docker/client.py b/xd/docker/client.py
index 3d2fe2e..ec42404 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -484,3 +484,27 @@ class DockerClient(object):
return False
raise e
return True
+
+ def container_wait(self,
+ container: Union[Container, ContainerName, str]) -> int:
+ """Block until container stops.
+
+ Block until container stops, then returns the exit code.
+
+ Arguments:
+ container: The container to remove (id or name).
+
+ Returns:
+ Container exit code.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ r = self._post('/containers/{}/wait'.format(id_or_name))
+ return r.json()['StatusCode']
| client.container_wait()
Client API command to wait for a container to stop. | XD-embedded/xd-docker | diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 5b2eccd..0f1f130 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1245,3 +1245,41 @@ class container_start_tests(ContextClientTestCase):
with pytest.raises(ClientError) as clienterror:
self.client.container_start('foobar')
assert clienterror.value.code == 404
+
+
+class container_wait_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_0(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait("foobar") == 0
+
+ @mock.patch('requests.post')
+ def test_42(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 42}), 200)
+ assert self.client.container_wait("foobar") == 42
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 42}), 200)
+ post_mock.return_value = requests_mock.Response(
+ "No such container", 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_wait('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait(ContainerName("foobar")) == 0
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait(Container(
+ self.client,name="foobar")) == 0
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@00db236f2385d476cd3f32e3fd4bf68fd1dd8a35#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container"
] | [] | [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str"
] | [] | MIT License | 724 | 250 | [
"xd/docker/client.py"
] |
|
enthought__okonomiyaki-229 | ecbef5d20e17783436532cce856d201df280eb09 | 2016-08-27 19:13:21 | ecbef5d20e17783436532cce856d201df280eb09 | diff --git a/okonomiyaki/platforms/abi.py b/okonomiyaki/platforms/abi.py
index a69af1f..a3947aa 100644
--- a/okonomiyaki/platforms/abi.py
+++ b/okonomiyaki/platforms/abi.py
@@ -44,7 +44,7 @@ def _default_cpython_abi(platform, implementation_version):
abi = u"msvc2008"
elif implementation_version.minor <= 4:
abi = u"msvc2010"
- elif implementation_version.minor == 5:
+ elif implementation_version.minor <= 6:
abi = u"msvc2015"
if abi is None:
diff --git a/setup.py b/setup.py
index fe8f301..47e2270 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ IS_RELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
INSTALL_REQUIRES = [
- "attrs >= 16.0.0",
+ "attrs < 16.1.0", # Needed to support Python 2.6
"jsonschema >= 2.5.1",
"six >= 1.9.0",
"zipfile2 >= 0.0.12",
| Update _default_cpython_abi() to support Python 3.6
In okonomiyaki/platforms/abi.py#L47, we do not specify a default platform abi for the upcoming Python 3.6. This is needed to set the cp36 tag as indexable in https://github.com/enthought/brood/issues/1189. Otherwise, the following error is raised:
```
okonomiyaki.errors.OkonomiyakiError: Unsupported platform/version combo for cpython: Platform(os='windows', name='windows', family='windows', arch='x86', machine='x86')/<okonomiyaki.versions.runtime_version.RuntimeVersion object at 0x7f8bc32327f0>
```
Is it safe to assume that CPython 3.6 is also using MSVC 2015? We can also add the cp36 tag as indexable in a later migration once CPython 3.6 is released. | enthought/okonomiyaki | diff --git a/okonomiyaki/file_formats/tests/test__egg_info.py b/okonomiyaki/file_formats/tests/test__egg_info.py
index c7962dc..af539c9 100644
--- a/okonomiyaki/file_formats/tests/test__egg_info.py
+++ b/okonomiyaki/file_formats/tests/test__egg_info.py
@@ -708,6 +708,35 @@ class TestGuessPlatformAbi(unittest.TestCase):
# Then
self.assertEqual(abi, "msvc2015")
+ def test_python_36(self):
+ # Given
+ platform = EPDPlatform.from_epd_string("rh5-64")
+ python_tag = "cp36"
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "gnu")
+
+ # Given
+ platform = EPDPlatform.from_epd_string("osx-64")
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "darwin")
+
+ # Given
+ platform = EPDPlatform.from_epd_string("win-64")
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "msvc2015")
+
def test_no_platform(self):
# Given
platform = None
diff --git a/okonomiyaki/platforms/tests/test_abi.py b/okonomiyaki/platforms/tests/test_abi.py
index 5138054..3528d24 100644
--- a/okonomiyaki/platforms/tests/test_abi.py
+++ b/okonomiyaki/platforms/tests/test_abi.py
@@ -18,6 +18,7 @@ class TestDefaultABI(unittest.TestCase):
(("win_x86", "cpython", "2.7.10+1"), u"msvc2008"),
(("win_x86", "cpython", "3.4.3+1"), u"msvc2010"),
(("win_x86", "cpython", "3.5.0+1"), u"msvc2015"),
+ (("win_x86", "cpython", "3.6.0+1"), u"msvc2015"),
(("osx_x86_64", "pypy", "2.6.1+1"), u"darwin"),
(("rh5_x86_64", "pypy", "2.6.1+1"), u"gnu"),
(("win_x86", "pypy", "2.6.1+1"), u"msvc2008"),
@@ -34,7 +35,7 @@ class TestDefaultABI(unittest.TestCase):
def test_non_supported(self):
# Given
args = (
- ("win_x86", "cpython", "3.6.0+1"),
+ ("win_x86", "cpython", "3.7.0+1"),
("win_x86", "pypy", "4.1.0+1"),
("rh5_x86_64", "r", "3.0.0+1"),
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"flake8",
"mock",
"testfixtures",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==5.2.0
-e git+https://github.com/enthought/okonomiyaki.git@ecbef5d20e17783436532cce856d201df280eb09#egg=okonomiyaki
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
referencing==0.36.2
rpds-py==0.24.0
six==1.17.0
testfixtures==8.3.0
tomli==2.2.1
typing_extensions==4.13.0
zipfile2==0.0.12
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- referencing==0.36.2
- rpds-py==0.24.0
- six==1.17.0
- testfixtures==8.3.0
- tomli==2.2.1
- typing-extensions==4.13.0
- zipfile2==0.0.12
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_36",
"okonomiyaki/platforms/tests/test_abi.py::TestDefaultABI::test_basics"
] | [] | [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_spec_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_str",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_error_python_to_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_3",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_4",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_missing_spec_depend",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_unsupported_metadata_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_windows_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_extension_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg_windows",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg_pypi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_all_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_python_implementation",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_27",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_34",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_35",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name_invalid",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_invalid_spec_strings",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_unsupported",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_dependencies",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_blacklisted",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_fixed_requirement",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_cross_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_json_dict",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_mkl_roundtrip",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_no_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_platform_abi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_platform_abi_no_python",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_simple_non_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_strictness",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_support_higher_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_support_lower_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_to_json_dict",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_to_spec_string",
"okonomiyaki/platforms/tests/test_abi.py::TestDefaultABI::test_non_supported"
] | [] | BSD License | 726 | 333 | [
"okonomiyaki/platforms/abi.py",
"setup.py"
] |
|
XD-embedded__xd-docker-57 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | 2016-08-28 09:44:51 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/xd/docker/client.py b/xd/docker/client.py
index a0c7398..fdff2db 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -17,7 +17,7 @@ from typing import Optional, Union, Sequence, Dict, Tuple, List
from xd.docker.container import Container
from xd.docker.image import Image
from xd.docker.parameters import ContainerConfig, HostConfig, ContainerName, \
- Repository, RegistryAuthConfig, VolumeMount, json_update
+ Repository, RegistryAuthConfig, VolumeMount, Signal, json_update
import logging
log = logging.getLogger(__name__)
@@ -572,3 +572,29 @@ class DockerClient(object):
self._post('/containers/{}/restart'.format(id_or_name),
params=params)
+
+ def container_kill(self,
+ container: Union[Container, ContainerName, str],
+ signal: Optional[Signal]=None):
+ """Kill container.
+
+ Send signal to container, and (maybe) wait for the container to exit.
+
+ Arguments:
+ container: The container to remove (id or name).
+ signal: Signal to send to container.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ params = {}
+ if signal is not None:
+ params['signal'] = signal
+
+ self._post('/containers/{}/kill'.format(id_or_name), params=params)
| client.container_kill()
Client API command to kill a container. | XD-embedded/xd-docker | diff --git a/tests/integration/container_kill_test.py b/tests/integration/container_kill_test.py
new file mode 100644
index 0000000..bb9dfbc
--- /dev/null
+++ b/tests/integration/container_kill_test.py
@@ -0,0 +1,38 @@
+import pytest
+import os
+
+from xd.docker.client import *
+
+
+def test_kill(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 10")
+ docker.container_kill('xd-docker-test')
+
+
+def test_already_stopped(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest true")
+ # Prior to Docker 1.8, kill silently ignores stopped containers, and
+ # beginning with 1.8, they return HTTP 500 (ServerError)
+ if docker.api_version > (1, 19):
+ with pytest.raises(ServerError) as servererror:
+ docker.container_kill('xd-docker-test')
+ else:
+ docker.container_kill('xd-docker-test')
+
+
+def test_not_started(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest true")
+ # Prior to Docker 1.8, kill silently ignores stopped containers, and
+ # beginning with 1.8, they return HTTP 500 (ServerError)
+ if docker.api_version > (1, 19):
+ with pytest.raises(ServerError) as servererror:
+ docker.container_kill('xd-docker-test')
+ else:
+ docker.container_kill('xd-docker-test')
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_kill('xd-docker-test')
+ assert clienterror.value.code == 404
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index e890e2d..c2f4f95 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1354,3 +1354,46 @@ class container_restart_tests(ContextClientTestCase):
params = post_mock.call_args[1]['params']
assert 't' in params
assert params['t'] == 42
+
+
+class container_kill_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_normal(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar")
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_kill('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill(ContainerName("foobar"))
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill(Container(self.client,name="foobar"))
+
+ @mock.patch('requests.post')
+ def test_sigint(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar", signal='SIGINT')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'signal' in params
+ assert params['signal'] == 'SIGINT'
+
+ @mock.patch('requests.post')
+ def test_sighup(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar", signal='SIGHUP')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'signal' in params
+ assert params['signal'] == 'SIGHUP'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_kill_tests::test_container",
"tests/unit/client_test.py::container_kill_tests::test_containername",
"tests/unit/client_test.py::container_kill_tests::test_no_such_container",
"tests/unit/client_test.py::container_kill_tests::test_normal",
"tests/unit/client_test.py::container_kill_tests::test_sighup",
"tests/unit/client_test.py::container_kill_tests::test_sigint"
] | [
"tests/integration/container_kill_test.py::test_kill",
"tests/integration/container_kill_test.py::test_already_stopped",
"tests/integration/container_kill_test.py::test_not_started",
"tests/integration/container_kill_test.py::test_no_such_container"
] | [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str",
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_already_stopped",
"tests/unit/client_test.py::container_stop_tests::test_container",
"tests/unit/client_test.py::container_stop_tests::test_containername",
"tests/unit/client_test.py::container_stop_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_normal",
"tests/unit/client_test.py::container_stop_tests::test_timeout",
"tests/unit/client_test.py::container_restart_tests::test_container",
"tests/unit/client_test.py::container_restart_tests::test_containername",
"tests/unit/client_test.py::container_restart_tests::test_no_such_container",
"tests/unit/client_test.py::container_restart_tests::test_normal",
"tests/unit/client_test.py::container_restart_tests::test_timeout"
] | [] | MIT License | 728 | 376 | [
"xd/docker/client.py"
] |
|
zalando-stups__senza-332 | 4993fb81ebcc9c8a5c6773af14eaa3cb0e069010 | 2016-08-29 09:53:21 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/configuration.py b/senza/components/configuration.py
index e83b419..6712f03 100644
--- a/senza/components/configuration.py
+++ b/senza/components/configuration.py
@@ -3,7 +3,7 @@ from senza.utils import ensure_keys, named_value
def format_params(args):
- items = [(key, val) for key, val in args.__dict__.items() if key not in ('region', 'version')]
+ items = [(key, val) for key, val in sorted(args.__dict__.items()) if key not in ('region', 'version')]
return ', '.join(['{}: {}'.format(key, val) for key, val in items])
@@ -27,7 +27,9 @@ def component_configuration(definition, configuration, args, info, force, accoun
if 'Description' not in definition:
# set some sane default stack description
- definition['Description'] = get_default_description(info, args)
+ # we need to truncate at 1024 chars (should be Bytes actually)
+ # see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-description-structure.html
+ definition['Description'] = get_default_description(info, args)[:1024]
# ServerSubnets
for region, subnets in configuration.get('ServerSubnets', {}).items():
| Shrink "description" field to allowed 1024 chars
This seems to pop up more and more, I see no real reason why senza should not just do
description[:1024]
to fix this for everyone. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index fdc377d..652ccfc 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -6,6 +6,7 @@ import pytest
import senza.traffic
from senza.cli import AccountArguments
from senza.components import get_component
+from senza.components.configuration import component_configuration
from senza.components.auto_scaling_group import (component_auto_scaling_group,
normalize_asg_success,
normalize_network_threshold,
@@ -923,3 +924,14 @@ def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
assert result['Resources']['MyLBListener']['Properties']['Certificates'] == [{'CertificateArn': 'arn:aws:42'}]
# test that our custom drain setting works
assert result['Resources']['MyLBTargetGroup']['Properties']['TargetGroupAttributes'] == [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}]
+
+
+def test_max_description_length():
+ definition = {}
+ configuration = {}
+ args = MagicMock()
+ args.__dict__ = {'Param1': 'my param value', 'SecondParam': ('1234567890' * 100)}
+ info = {'StackName': 'My-Stack'}
+ component_configuration(definition, configuration, args, info, False, AccountArguments('dummyregion'))
+ assert definition['Description'].startswith('My Stack (Param1: my param value, SecondParam: 1234567890')
+ assert 0 < len(definition['Description']) <= 1024
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
execnet==2.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@4993fb81ebcc9c8a5c6773af14eaa3cb0e069010#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
typing_extensions==4.13.0
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- typing-extensions==4.13.0
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_max_description_length"
] | [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2"
] | [] | Apache License 2.0 | 729 | 312 | [
"senza/components/configuration.py"
] |
|
napjon__krisk-39 | b7489f45df16b6805b2f576d696dabc1a3bc5235 | 2016-09-02 13:48:46 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/39?src=pr) is 91.69% (diff: 0.00%)
> Merging [#39](https://codecov.io/gh/napjon/krisk/pull/39?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will decrease coverage by **1.95%**
```diff
@@ 0.2-develop #39 diff @@
=============================================
Files 10 10
Lines 299 301 +2
Methods 0 0
Messages 0 0
Branches 39 40 +1
=============================================
- Hits 280 276 -4
- Misses 9 13 +4
- Partials 10 12 +2
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [b7489f4...71201fa](https://codecov.io/gh/napjon/krisk/compare/b7489f45df16b6805b2f576d696dabc1a3bc5235...71201faa3b06c668a6422c677f3fb64dad2253a7?src=pr) | diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index 14e24b8..e702c4a 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -8,7 +8,8 @@ def bar(df,
how='count',
stacked=False,
annotate=None,
- full=False):
+ full=False,
+ trendline=False):
"""
Parameters
----------
@@ -26,10 +27,13 @@ def bar(df,
stacked: Boolean, default to False.
Whether to stacked category on top of the other categories.
annotate: string, {'all',True} default to None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ if True, annotate value on top of the plot element. If stacked is also True, annotate the
+ last category. if 'all' and stacked, annotate all category
full: boolean, default to False.
If true, set to full area stacked chart. Only work if stacked is True.
+ trendline: boolean, default to False.
+ If true, add line that connected the bars. Only work if not category, category but stacked,
+ or not full.
Returns
-------
@@ -39,6 +43,7 @@ def bar(df,
# TODO: add optional argument trendline
return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
+ trendline=trendline,
annotate='top' if annotate == True else annotate)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index 057ed1d..1101415 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -51,6 +51,20 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
if kwargs['annotate'] == 'top':
series[-1]['label'] = d_annotate
+ if kwargs['type'] == 'bar' and kwargs['trendline']:
+ trendline = {'name':'trendline', 'type': 'line'}
+
+ if c and kwargs['stacked']:
+ trendline['data'] = [0] * len(series[-1]['data'])
+ trendline['stack'] = c
+ elif c is None:
+ trendline['data'] = series[0]['data']
+ else:
+ raise AssertionError('Trendline must either stacked category, or not category')
+
+ series.append(trendline)
+
+
# TODO: make annotate receive all kinds supported in echarts.
| Add trendline parameters for bar chart
Add trendline that shows changes for bar chart | napjon/krisk | diff --git a/krisk/tests/data/bar_year_pop_mean_continent_trendline.json b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
new file mode 100644
index 0000000..89aa040
--- /dev/null
+++ b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
@@ -0,0 +1,152 @@
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "title": {
+ "text": ""
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 9279525,
+ 10270856,
+ 11000948,
+ 12760499,
+ 14760787,
+ 17152804,
+ 20033753,
+ 23254956,
+ 26298373,
+ 29072015,
+ 31287142,
+ 33333216
+ ],
+ "name": "Africa"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 17876956,
+ 19610538,
+ 21283783,
+ 22934225,
+ 24779799,
+ 26983828,
+ 29341374,
+ 31620918,
+ 33958947,
+ 36203463,
+ 38331121,
+ 40301927
+ ],
+ "name": "Americas"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 8425333,
+ 9240934,
+ 10267083,
+ 11537966,
+ 13079460,
+ 14880372,
+ 12881816,
+ 13867957,
+ 16317921,
+ 22227415,
+ 25268405,
+ 31889923
+ ],
+ "name": "Asia"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 1282697,
+ 1476505,
+ 1728137,
+ 1984060,
+ 2263554,
+ 2509048,
+ 2780097,
+ 3075321,
+ 3326498,
+ 3428038,
+ 3508512,
+ 3600523
+ ],
+ "name": "Europe"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 8691212,
+ 9712569,
+ 10794968,
+ 11872264,
+ 13177000,
+ 14074100,
+ 15184200,
+ 16257249,
+ 17481977,
+ 18565243,
+ 19546792,
+ 20434176
+ ],
+ "name": "Oceania"
+ },
+ {
+ "stack": "continent",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0
+ ],
+ "type": "line",
+ "name": "trendline"
+ }
+ ],
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/bar_year_pop_mean_trendline.json b/krisk/tests/data/bar_year_pop_mean_trendline.json
new file mode 100644
index 0000000..15ef467
--- /dev/null
+++ b/krisk/tests/data/bar_year_pop_mean_trendline.json
@@ -0,0 +1,68 @@
+{
+ "legend": {
+ "data": []
+ },
+ "title": {
+ "text": ""
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "type": "bar",
+ "data": [
+ 9111144.6,
+ 10062280.4,
+ 11014983.8,
+ 12217802.8,
+ 13612120.0,
+ 15120030.4,
+ 16044248.0,
+ 17615280.2,
+ 19476743.2,
+ 21899234.8,
+ 23588394.4,
+ 25911953.0
+ ],
+ "name": "year"
+ },
+ {
+ "data": [
+ 9111144.6,
+ 10062280.4,
+ 11014983.8,
+ 12217802.8,
+ 13612120.0,
+ 15120030.4,
+ 16044248.0,
+ 17615280.2,
+ 19476743.2,
+ 21899234.8,
+ 23588394.4,
+ 25911953.0
+ ],
+ "type": "line",
+ "name": "trendline"
+ }
+ ],
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 41aed67..bdce2e5 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -77,6 +77,20 @@ def test_full_bar_line(gapminder):
assert bar.option == line.option == true_option
+
+def test_trendline(gapminder):
+
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
+
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
+
+ try:
+ kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent')
+ except AssertionError:
+ pass
+
def test_hist(gapminder):
true_option = read_option_tests('hist_x.json')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@b7489f45df16b6805b2f576d696dabc1a3bc5235#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_trendline"
] | [] | [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_scatter"
] | [] | BSD 3-Clause "New" or "Revised" License | 742 | 670 | [
"krisk/plot/__init__.py",
"krisk/plot/bar_line.py"
] |
zhmcclient__python-zhmcclient-56 | 01674398fed64b49ebbf94ac49236cee4a02b3d3 | 2016-09-08 08:00:52 | 5b90883d9934257d21ead1cbc548423b5fb682f8 | coveralls:
[](https://coveralls.io/builds/7794079)
Coverage increased (+0.4%) to 88.352% when pulling **447c885d4954aacf55f533d18e209b331d2edee9 on andy/create-return-object** into **01674398fed64b49ebbf94ac49236cee4a02b3d3 on master**.
| diff --git a/zhmcclient/_adapter.py b/zhmcclient/_adapter.py
index cf76ac0..615ddc9 100644
--- a/zhmcclient/_adapter.py
+++ b/zhmcclient/_adapter.py
@@ -145,7 +145,9 @@ class AdapterManager(BaseManager):
Returns:
- string: The resource URI of the new adapter.
+ Adapter: The resource object for the new HiperSockets adapter.
+ The object will have its 'object-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -156,7 +158,11 @@ class AdapterManager(BaseManager):
"""
cpc_uri = self.cpc.get_property('object-uri')
result = self.session.post(cpc_uri + '/adapters', body=properties)
- return result['object-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Adapter(self, props['object-uri'], props)
class Adapter(BaseResource):
diff --git a/zhmcclient/_hba.py b/zhmcclient/_hba.py
index 880d3a5..1042c23 100644
--- a/zhmcclient/_hba.py
+++ b/zhmcclient/_hba.py
@@ -79,9 +79,6 @@ class HbaManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
hbas_res = self.partition.get_property('hba-uris')
hba_list = []
if hbas_res:
@@ -105,7 +102,9 @@ class HbaManager(BaseManager):
Returns:
- string: The resource URI of the new HBA.
+ Hba: The resource object for the new HBA.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -116,7 +115,11 @@ class HbaManager(BaseManager):
"""
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/hbas', body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Hba(self, props['element-uri'], props)
class Hba(BaseResource):
diff --git a/zhmcclient/_nic.py b/zhmcclient/_nic.py
index 4cc8779..720ee14 100644
--- a/zhmcclient/_nic.py
+++ b/zhmcclient/_nic.py
@@ -79,9 +79,6 @@ class NicManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
nics_res = self.partition.get_property('nic-uris')
nic_list = []
if nics_res:
@@ -105,7 +102,9 @@ class NicManager(BaseManager):
Returns:
- string: The resource URI of the new NIC.
+ Nic: The resource object for the new NIC.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -116,7 +115,11 @@ class NicManager(BaseManager):
"""
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/nics', body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Nic(self, props['element-uri'], props)
class Nic(BaseResource):
diff --git a/zhmcclient/_partition.py b/zhmcclient/_partition.py
index 697883e..7c21dd7 100644
--- a/zhmcclient/_partition.py
+++ b/zhmcclient/_partition.py
@@ -110,7 +110,9 @@ class PartitionManager(BaseManager):
Returns:
- string: The resource URI of the new partition.
+ Partition: The resource object for the new partition.
+ The object will have its 'object-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -121,7 +123,11 @@ class PartitionManager(BaseManager):
"""
cpc_uri = self.cpc.get_property('object-uri')
result = self.session.post(cpc_uri + '/partitions', body=properties)
- return result['object-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Partition(self, props['object-uri'], props)
class Partition(BaseResource):
diff --git a/zhmcclient/_resource.py b/zhmcclient/_resource.py
index 169da83..4554132 100644
--- a/zhmcclient/_resource.py
+++ b/zhmcclient/_resource.py
@@ -110,9 +110,11 @@ class BaseResource(object):
@property
def full_properties(self):
"""
- A boolean indicating whether the resource properties in this object
- are the full set of resource properties, vs. just the short set of
- resource properties as obtained by list functions.
+ A boolean indicating whether or not the resource properties in this
+ object are the full set of resource properties.
+
+ Note that listing resources and creating new resources produces objects
+ that have less than the full set of properties.
"""
return self._full_properties
@@ -170,7 +172,7 @@ class BaseResource(object):
try:
return self._properties[name]
except KeyError:
- if self.full_properties:
+ if self._full_properties:
raise
self.pull_full_properties()
return self._properties[name]
diff --git a/zhmcclient/_virtual_function.py b/zhmcclient/_virtual_function.py
index 02e3127..f4b75d3 100644
--- a/zhmcclient/_virtual_function.py
+++ b/zhmcclient/_virtual_function.py
@@ -78,9 +78,6 @@ class VirtualFunctionManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
vfs_res = self.partition.get_property('virtual-function-uris')
vf_list = []
if vfs_res:
@@ -105,7 +102,9 @@ class VirtualFunctionManager(BaseManager):
Returns:
- string: The resource URI of the new Virtual Function.
+ VirtualFunction: The resource object for the new virtual function.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -117,7 +116,11 @@ class VirtualFunctionManager(BaseManager):
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/virtual-functions',
body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return VirtualFunction(self, props['element-uri'], props)
class VirtualFunction(BaseResource):
| Return newly created resources as a resource object not as a URI
In the current design, any `create()` methods return the new resource with their URI string.
This is inconsistent with the strategy of the zhmcclient project to encapsulate resources as Python objects.
**Proposal:**
Return a new Python object for the resource, in which only the URI property is set. That creates 3 different degrees of "property presence":
* only the URI property (only for resources that can be created by users)
* the short set of properties (as returned by list operations)
* the full set of properties (as returned by get properties operations)
We are already in the current design hiding the degree of property presence, for example, we are caching the properties after pulling them from the HMC, and `get_property()` pulls the properties as needed, so returning such a resource object seems to be consistent with that strategy.
We need to discuss what the `__str__()` and `__repr__()` functions should show (they will probably show the current state, i.e. make the three degrees of property presence visible to users).
We could drop the external availability of the `full_properties` flag, and make that internal only. | zhmcclient/python-zhmcclient | diff --git a/tests/test_adapter.py b/tests/test_adapter.py
index 7a1a4a3..9cfb62b 100644
--- a/tests/test_adapter.py
+++ b/tests/test_adapter.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Adapter
class AdapterTests(unittest.TestCase):
@@ -184,8 +184,11 @@ class AdapterTests(unittest.TestCase):
}
m.post('/api/cpcs/adapter-cpc-id-1/adapters', json=result)
- status = adapter_mgr.create_hipersocket(properties={})
- self.assertEqual(status, result['object-uri'])
+ adapter = adapter_mgr.create_hipersocket(properties={})
+
+ self.assertTrue(isinstance(adapter, Adapter))
+ self.assertEqual(adapter.properties, result)
+ self.assertEqual(adapter.uri, result['object-uri'])
def test_delete(self):
"""
diff --git a/tests/test_hba.py b/tests/test_hba.py
index c9549fe..e4edf34 100644
--- a/tests/test_hba.py
+++ b/tests/test_hba.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Hba
class HbaTests(unittest.TestCase):
@@ -194,8 +194,11 @@ class HbaTests(unittest.TestCase):
}
m.post('/api/partitions/fake-part-id-1/hbas', json=result)
- status = hba_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ hba = hba_mgr.create(properties={})
+
+ self.assertTrue(isinstance(hba, Hba))
+ self.assertEqual(hba.properties, result)
+ self.assertEqual(hba.uri, result['element-uri'])
def test_delete(self):
"""
diff --git a/tests/test_nic.py b/tests/test_nic.py
index b0a77fe..a5d01e7 100644
--- a/tests/test_nic.py
+++ b/tests/test_nic.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Nic
class NicTests(unittest.TestCase):
@@ -194,8 +194,11 @@ class NicTests(unittest.TestCase):
}
m.post('/api/partitions/fake-part-id-1/nics', json=result)
- status = nic_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ nic = nic_mgr.create(properties={})
+
+ self.assertTrue(isinstance(nic, Nic))
+ self.assertEqual(nic.properties, result)
+ self.assertEqual(nic.uri, result['element-uri'])
def test_delete(self):
"""
diff --git a/tests/test_partition.py b/tests/test_partition.py
index 501fc0a..e7e468e 100644
--- a/tests/test_partition.py
+++ b/tests/test_partition.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Partition
class PartitionTests(unittest.TestCase):
@@ -165,8 +165,11 @@ class PartitionTests(unittest.TestCase):
}
m.post('/api/cpcs/fake-cpc-id-1/partitions', json=result)
- status = partition_mgr.create(properties={})
- self.assertEqual(status, result['object-uri'])
+ partition = partition_mgr.create(properties={})
+
+ self.assertTrue(isinstance(partition, Partition))
+ self.assertEqual(partition.properties, result)
+ self.assertEqual(partition.uri, result['object-uri'])
def test_start(self):
"""
diff --git a/tests/test_virtual_function.py b/tests/test_virtual_function.py
index dbe8ef2..27e8140 100644
--- a/tests/test_virtual_function.py
+++ b/tests/test_virtual_function.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, VirtualFunction
class VirtualFunctionTests(unittest.TestCase):
@@ -206,8 +206,11 @@ class VirtualFunctionTests(unittest.TestCase):
m.post('/api/partitions/fake-part-id-1/virtual-functions',
json=result)
- status = vf_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ vf = vf_mgr.create(properties={})
+
+ self.assertTrue(isinstance(vf, VirtualFunction))
+ self.assertEqual(vf.properties, result)
+ self.assertEqual(vf.uri, result['element-uri'])
def test_delete(self):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 6
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"requests-mock",
"testfixtures"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
decorator==5.2.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-mock==1.12.1
six==1.17.0
testfixtures==8.3.0
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/zhmcclient/python-zhmcclient.git@01674398fed64b49ebbf94ac49236cee4a02b3d3#egg=zhmcclient
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- decorator==5.2.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-mock==1.12.1
- six==1.17.0
- testfixtures==8.3.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/test_adapter.py::AdapterTests::test_create_hipersocket",
"tests/test_hba.py::HbaTests::test_create",
"tests/test_nic.py::NicTests::test_create",
"tests/test_partition.py::PartitionTests::test_create",
"tests/test_virtual_function.py::VirtualFunctionTests::test_create"
] | [] | [
"tests/test_adapter.py::AdapterTests::test_delete",
"tests/test_adapter.py::AdapterTests::test_init",
"tests/test_adapter.py::AdapterTests::test_list_full_ok",
"tests/test_adapter.py::AdapterTests::test_list_short_ok",
"tests/test_adapter.py::AdapterTests::test_update_properties",
"tests/test_hba.py::HbaTests::test_delete",
"tests/test_hba.py::HbaTests::test_init",
"tests/test_hba.py::HbaTests::test_list_full_ok",
"tests/test_hba.py::HbaTests::test_list_short_ok",
"tests/test_hba.py::HbaTests::test_update_properties",
"tests/test_nic.py::NicTests::test_delete",
"tests/test_nic.py::NicTests::test_init",
"tests/test_nic.py::NicTests::test_list_full_ok",
"tests/test_nic.py::NicTests::test_list_short_ok",
"tests/test_nic.py::NicTests::test_update_properties",
"tests/test_partition.py::PartitionTests::test_delete",
"tests/test_partition.py::PartitionTests::test_dump_partition",
"tests/test_partition.py::PartitionTests::test_init",
"tests/test_partition.py::PartitionTests::test_list_full_ok",
"tests/test_partition.py::PartitionTests::test_list_short_ok",
"tests/test_partition.py::PartitionTests::test_mount_iso_image",
"tests/test_partition.py::PartitionTests::test_psw_restart",
"tests/test_partition.py::PartitionTests::test_start",
"tests/test_partition.py::PartitionTests::test_stop",
"tests/test_partition.py::PartitionTests::test_unmount_iso_image",
"tests/test_partition.py::PartitionTests::test_update_properties",
"tests/test_virtual_function.py::VirtualFunctionTests::test_delete",
"tests/test_virtual_function.py::VirtualFunctionTests::test_init",
"tests/test_virtual_function.py::VirtualFunctionTests::test_list_full_ok",
"tests/test_virtual_function.py::VirtualFunctionTests::test_list_short_ok",
"tests/test_virtual_function.py::VirtualFunctionTests::test_update_properties"
] | [] | Apache License 2.0 | 748 | 1,903 | [
"zhmcclient/_adapter.py",
"zhmcclient/_hba.py",
"zhmcclient/_nic.py",
"zhmcclient/_partition.py",
"zhmcclient/_resource.py",
"zhmcclient/_virtual_function.py"
] |
zalando-stups__senza-349 | e0331771ea0cc64d3ba5896f31d954f832a82ba9 | 2016-09-12 07:22:12 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/ec2.py b/senza/manaus/ec2.py
index 9ea2600..6dee960 100644
--- a/senza/manaus/ec2.py
+++ b/senza/manaus/ec2.py
@@ -1,5 +1,5 @@
from collections import OrderedDict
-from typing import Dict, List, Iterator
+from typing import Dict, List, Iterator, Optional
import boto3
@@ -16,9 +16,10 @@ class EC2VPC:
def __init__(self,
vpc_id: str,
is_default: bool,
- tags: List[Dict[str, str]]):
+ tags: Optional[List[Dict[str, str]]]):
self.vpc_id = vpc_id
self.is_default = is_default
+ tags = tags or [] # type: List[Dict[str, str]]
self.tags = OrderedDict([(t['Key'], t['Value']) for t in tags]) # type: Dict[str, str]
self.name = self.tags.get('Name', self.vpc_id)
| Better error message for "create" and VPC tags
When trying to create a stack with a VPC that has no tags the user gets the following message:
```
senza create deploy-definition.yaml 1 0.1
Generating Cloud Formation template.. EXCEPTION OCCURRED: 'NoneType' object is not iterable
Unknown Error: 'NoneType' object is not iterable.
Please create an issue with the content of /var/folders/yd/p61l98fn2g9fffwgjs819gr1sprr6d/T/senza-traceback-xgrqlxbj
```
In /var/folders/yd/p61l98fn2g9fffwgjs819gr1sprr6d/T/senza-traceback-xgrqlxbj:
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/error_handling.py", line 76, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 663, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 746, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 242, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/site-packages/senza/components/stups_auto_configuration.py", line 31, in component_stups_auto_configuration
vpc_id = configuration.get('VpcId', account_info.VpcID)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 329, in VpcID
vpc = ec2.get_default_vpc()
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 71, in get_default_vpc
return EC2VPC.from_boto_vpc(vpc)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 41, in from_boto_vpc
return cls(vpc.vpc_id, vpc.is_default, vpc.tags)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 22, in __init__
self.tags = OrderedDict([(t['Key'], t['Value']) for t in tags]) # type: Dict[str, str]
TypeError: 'NoneType' object is not iterable
```
The error message should be more descriptive. | zalando-stups/senza | diff --git a/tests/test_manaus/test_ec2.py b/tests/test_manaus/test_ec2.py
index 36f1588..4dd7ae6 100644
--- a/tests/test_manaus/test_ec2.py
+++ b/tests/test_manaus/test_ec2.py
@@ -37,6 +37,11 @@ def test_get_default_vpc(monkeypatch):
mock_vpc3.is_default = False
mock_vpc3.tags = []
+ mock_vpc4 = MagicMock()
+ mock_vpc4.vpc_id = 'vpc-id4'
+ mock_vpc4.is_default = True
+ mock_vpc4.tags = None
+
m_resource = MagicMock()
m_resource.return_value = m_resource
monkeypatch.setattr('boto3.resource', m_resource)
@@ -59,11 +64,16 @@ def test_get_default_vpc(monkeypatch):
ec2.get_default_vpc()
assert str(exc_info.value) == "Can't find any VPC!"
- # no vpcs
+ # multiple vpcs
m_resource.vpcs.all.return_value = [mock_vpc2, mock_vpc3]
with pytest.raises(VPCError) as exc_info:
ec2.get_default_vpc()
+ # no tags in vpc return default vpc
+ m_resource.vpcs.all.return_value = [mock_vpc4, mock_vpc2]
+ vpc3 = ec2.get_default_vpc()
+ assert vpc3.vpc_id == 'vpc-id4'
+
assert str(exc_info.value) == ("Multiple VPCs are only supported if one "
"VPC is the default VPC (IsDefault=true)!")
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@e0331771ea0cc64d3ba5896f31d954f832a82ba9#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_ec2.py::test_get_default_vpc"
] | [] | [
"tests/test_manaus/test_ec2.py::test_from_boto_vpc",
"tests/test_manaus/test_ec2.py::test_get_all_vpc"
] | [] | Apache License 2.0 | 752 | 246 | [
"senza/manaus/ec2.py"
] |
|
zalando-stups__senza-353 | 936b62cd98ad20892c30b8771b5db80a14c19aae | 2016-09-15 08:32:40 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/route53.py b/senza/manaus/route53.py
index 6aab215..38c177e 100644
--- a/senza/manaus/route53.py
+++ b/senza/manaus/route53.py
@@ -349,9 +349,22 @@ class Route53:
if name is not None and not name.endswith('.'):
name += '.'
for zone in cls.get_hosted_zones():
- # TODO use paginator
response = client.list_resource_record_sets(HostedZoneId=zone.id)
resources = response["ResourceRecordSets"] # type: List[Dict[str, Any]]
+
+ # If the response includes more than maxitems resource record sets,
+ # the value of the IsTruncated element in the response is true,
+ # and the values of the NextRecordName and NextRecordType elements
+ # in the response identify the first resource record set in the
+ # next group of maxitems resource record sets.
+ while response.get('IsTruncated', False):
+ next_name = response['NextRecordName']
+ next_type = response['NextRecordType']
+ response = client.list_resource_record_sets(HostedZoneId=zone.id,
+ StartRecordName=next_name,
+ StartRecordType=next_type)
+ resources.extend(response['ResourceRecordSets'])
+
for resource in resources:
record = Route53Record.from_boto_dict(resource,
hosted_zone=zone)
| senza traffic displays 0.0% weight for all stacks
When executing `senza traffic [name]` the results displays a weight of 0.0% for all stacks.
`senza domains` shows the weight correctly. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 651b318..e7207a3 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -28,7 +28,8 @@ from senza.components.weighted_dns_elastic_load_balancer import \
from senza.components.weighted_dns_elastic_load_balancer_v2 import \
component_weighted_dns_elastic_load_balancer_v2
-from fixtures import HOSTED_ZONE_ZO_NE_COM, HOSTED_ZONE_ZO_NE_DEV, boto_resource
+from fixtures import (HOSTED_ZONE_ZO_NE_COM, HOSTED_ZONE_ZO_NE_DEV, # noqa: F401
+ boto_resource, boto_client)
def test_invalid_component():
@@ -180,7 +181,7 @@ def test_component_load_balancer_http_only(monkeypatch):
"Name": "test_lb",
"SecurityGroups": "",
"HTTPPort": "9999",
- "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
+ "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
"Listeners": [{"Foo": "Bar"}]
}
info = {'StackName': 'foobar', 'StackVersion': '0.1'}
@@ -281,9 +282,11 @@ def test_component_stups_auto_configuration_vpc_id(monkeypatch):
sn3.tags = [{'Key': 'Name', 'Value': 'internal-3'}]
sn3.availability_zone = 'az-1'
ec2 = MagicMock()
+
def get_subnets(Filters):
assert Filters == [{'Name': 'vpc-id', 'Values': ['vpc-123']}]
return [sn1, sn2, sn3]
+
ec2.subnets.filter = get_subnets
image = MagicMock()
ec2.images.filter.return_value = [image]
@@ -350,7 +353,7 @@ def test_component_redis_cluster(monkeypatch):
assert 'RedisReplicationGroup' in result['Resources']
assert mock_string == result['Resources']['RedisReplicationGroup']['Properties']['SecurityGroupIds']
assert 2 == result['Resources']['RedisReplicationGroup']['Properties']['NumCacheClusters']
- assert True == result['Resources']['RedisReplicationGroup']['Properties']['AutomaticFailoverEnabled']
+ assert result['Resources']['RedisReplicationGroup']['Properties']['AutomaticFailoverEnabled']
assert 'Engine' in result['Resources']['RedisReplicationGroup']['Properties']
assert 'EngineVersion' in result['Resources']['RedisReplicationGroup']['Properties']
assert 'CacheNodeType' in result['Resources']['RedisReplicationGroup']['Properties']
@@ -361,26 +364,15 @@ def test_component_redis_cluster(monkeypatch):
assert 'SubnetIds' in result['Resources']['RedisSubnetGroup']['Properties']
-def test_weighted_dns_load_balancer(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer(monkeypatch, boto_client, boto_resource): # noqa: F811
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
-
configuration = {
"Name": "test_lb",
"SecurityGroups": "",
"HTTPPort": "9999",
- 'MainDomain': 'great.api.zo.ne.com',
- 'VersionDomain': 'version.api.zo.ne.com'
+ 'MainDomain': 'great.api.zo.ne',
+ 'VersionDomain': 'version.api.zo.ne'
}
info = {'StackName': 'foobar', 'StackVersion': '0.1'}
definition = {"Resources": {}}
@@ -408,20 +400,16 @@ def test_weighted_dns_load_balancer(monkeypatch, boto_resource):
assert 'MainDomain' not in result["Resources"]["test_lb"]["Properties"]
-def test_weighted_dns_load_balancer_with_different_domains(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer_with_different_domains(monkeypatch, # noqa: F811
+ boto_client,
+ boto_resource):
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_DEV,
- HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
+ boto_client['route53'].list_hosted_zones.return_value = {
+ 'HostedZones': [HOSTED_ZONE_ZO_NE_DEV,
+ HOSTED_ZONE_ZO_NE_COM],
+ 'IsTruncated': False,
+ 'MaxItems': '100'}
configuration = {
"Name": "test_lb",
@@ -589,8 +577,8 @@ def test_component_auto_scaling_group_custom_tags():
'InstanceType': 't2.micro',
'Image': 'foo',
'Tags': [
- { 'Key': 'Tag1', 'Value': 'alpha' },
- { 'Key': 'Tag2', 'Value': 'beta' }
+ {'Key': 'Tag1', 'Value': 'alpha'},
+ {'Key': 'Tag2', 'Value': 'beta'}
]
}
@@ -619,6 +607,7 @@ def test_component_auto_scaling_group_custom_tags():
assert ts is not None
assert ts["Value"] == 'FooStack-FooVersion'
+
def test_component_auto_scaling_group_configurable_properties2():
definition = {"Resources": {}}
configuration = {
@@ -911,26 +900,15 @@ def test_get_load_balancer_name():
'1') == 'toolong12345678901234567890123-1'
-def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer_v2(monkeypatch, boto_client, boto_resource): # noqa: F811
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
-
configuration = {
"Name": "MyLB",
"SecurityGroups": "",
"HTTPPort": "9999",
- 'MainDomain': 'great.api.zo.ne.com',
- 'VersionDomain': 'version.api.zo.ne.com',
+ 'MainDomain': 'great.api.zo.ne',
+ 'VersionDomain': 'version.api.zo.ne',
# test overwritting specific properties in one of the resources
'TargetGroupAttributes': [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}],
# test that Security Groups are resolved
@@ -961,10 +939,18 @@ def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
assert 'MyLBListener' in result["Resources"]
assert 'MyLBTargetGroup' in result["Resources"]
- assert result['Resources']['MyLBTargetGroup']['Properties']['HealthCheckPort'] == '9999'
- assert result['Resources']['MyLBListener']['Properties']['Certificates'] == [{'CertificateArn': 'arn:aws:42'}]
+ target_group = result['Resources']['MyLBTargetGroup']
+ lb_listener = result['Resources']['MyLBListener']
+
+ assert target_group['Properties']['HealthCheckPort'] == '9999'
+ assert lb_listener['Properties']['Certificates'] == [
+ {'CertificateArn': 'arn:aws:42'}
+ ]
# test that our custom drain setting works
- assert result['Resources']['MyLBTargetGroup']['Properties']['TargetGroupAttributes'] == [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}]
+ assert target_group['Properties']['TargetGroupAttributes'] == [
+ {'Key': 'deregistration_delay.timeout_seconds',
+ 'Value': '123'}
+ ]
assert result['Resources']['MyLB']['Properties']['SecurityGroups'] == ['sg-foo']
diff --git a/tests/test_manaus/test_route53.py b/tests/test_manaus/test_route53.py
index 24c5441..658195a 100644
--- a/tests/test_manaus/test_route53.py
+++ b/tests/test_manaus/test_route53.py
@@ -164,6 +164,46 @@ def test_get_records(monkeypatch):
assert records[0].name == 'domain.example.net.'
+def test_get_records_paginated(monkeypatch):
+ m_client = MagicMock()
+ m_client.return_value = m_client
+ hosted_zone1 = {'Config': {'PrivateZone': False},
+ 'CallerReference': '0000',
+ 'ResourceRecordSetCount': 42,
+ 'Id': '/hostedzone/random1',
+ 'Name': 'example.com.'}
+ mock_records = [{'Name': 'domain.example.com.',
+ 'ResourceRecords': [{'Value': '127.0.0.1'}],
+ 'TTL': 600,
+ 'Type': 'A'},
+ {'Name': 'domain.example.net.',
+ 'ResourceRecords': [{'Value': '127.0.0.1'}],
+ 'TTL': 600,
+ 'Type': 'A'}
+ ]
+ m_client.list_hosted_zones.return_value = {'MaxItems': '100',
+ 'ResponseMetadata': {
+ 'HTTPStatusCode': 200,
+ 'RequestId': 'FakeId'
+ },
+ 'HostedZones': [hosted_zone1],
+ 'IsTruncated': False}
+
+ m_client.list_resource_record_sets.side_effect = [
+ {'ResourceRecordSets': mock_records,
+ 'IsTruncated': True,
+ 'NextRecordName': 'doesnt.matter.example.com',
+ 'NextRecordType': 'A'},
+ {'ResourceRecordSets': mock_records,
+ 'IsTruncated': False},
+ ]
+ monkeypatch.setattr('boto3.client', m_client)
+
+ route53 = Route53()
+ records = list(route53.get_records())
+ assert len(records) == 4
+
+
def test_route53_record_boto_dict():
record1 = Route53Record(name='test1', type='A')
assert record1.boto_dict == {'Name': 'test1',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@936b62cd98ad20892c30b8771b5db80a14c19aae#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_route53.py::test_get_records_paginated"
] | [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_cert_arn",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2",
"tests/test_components.py::test_max_description_length",
"tests/test_components.py::test_component_load_balancer_default_internal_scheme",
"tests/test_components.py::test_component_load_balancer_v2_default_internal_scheme",
"tests/test_manaus/test_route53.py::test_hosted_zone_from_boto_dict",
"tests/test_manaus/test_route53.py::test_record_from_boto_dict",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones_paginated",
"tests/test_manaus/test_route53.py::test_get_records",
"tests/test_manaus/test_route53.py::test_route53_record_boto_dict",
"tests/test_manaus/test_route53.py::test_hosted_zone_upsert",
"tests/test_manaus/test_route53.py::test_hosted_zone_create",
"tests/test_manaus/test_route53.py::test_hosted_zone_delete",
"tests/test_manaus/test_route53.py::test_to_alias",
"tests/test_manaus/test_route53.py::test_convert_domain_records_to_alias",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_domain_name",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_id",
"tests/test_manaus/test_route53.py::test_get_by_domain_name"
] | [] | Apache License 2.0 | 755 | 345 | [
"senza/manaus/route53.py"
] |
|
DataKind-SG__test-driven-data-cleaning-34 | f21e7b563c1e49803a098586930f2e4682eeb06f | 2016-09-17 10:17:14 | d1e67b345b5982b41560817ef859bd0157852913 | diff --git a/tddc/__main__.py b/tddc/__main__.py
index 0749707..208b1c5 100644
--- a/tddc/__main__.py
+++ b/tddc/__main__.py
@@ -1,6 +1,58 @@
+"""Test driven data cleaning
+Usage:
+ tddc summarize <input_file> [--output=<dir>] [--null=<NA>]
+ tddc build_trello <input_file> [--output=<dir>]
+ tddc build <input_file> [--output=<dir>]
+ tddc -h | --help
+ tddc --version
+
+Options:
+ -h --help Show this screen.
+ --version Show version.
+ --output=<dir> Output directory [default: output]
+ --null=<NA> Null string [default: NA]
+"""
+from docopt import docopt
import os
import sys
-import tddc.run_tddc
+
+import tddc
+from tddc import summarize, build_trello, build
+
+
+def get_input_root_dir():
+ return os.getcwd()
+
+
+def get_output_root_dir():
+ return os.getcwd()
+
+
+def execute(cli_args):
+ arguments = docopt(__doc__, cli_args, version=tddc.__version__)
+ if arguments['summarize']:
+ summarize.go(
+ input_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ output_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output'],
+ null_string=arguments['--null'],
+ )
+ elif arguments['build_trello']:
+ build_trello.go(
+ summary_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ trello_summary_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output']
+ )
+ elif arguments['build']:
+ build.go(
+ summaries_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ scripts_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output']
+ )
+
if __name__ == '__main__':
- tddc.run_tddc.execute(sys.argv[1:])
+ execute(sys.argv[1:])
diff --git a/tddc/run_tddc.py b/tddc/run_tddc.py
index 208b1c5..e69de29 100644
--- a/tddc/run_tddc.py
+++ b/tddc/run_tddc.py
@@ -1,58 +0,0 @@
-"""Test driven data cleaning
-Usage:
- tddc summarize <input_file> [--output=<dir>] [--null=<NA>]
- tddc build_trello <input_file> [--output=<dir>]
- tddc build <input_file> [--output=<dir>]
- tddc -h | --help
- tddc --version
-
-Options:
- -h --help Show this screen.
- --version Show version.
- --output=<dir> Output directory [default: output]
- --null=<NA> Null string [default: NA]
-"""
-from docopt import docopt
-import os
-import sys
-
-import tddc
-from tddc import summarize, build_trello, build
-
-
-def get_input_root_dir():
- return os.getcwd()
-
-
-def get_output_root_dir():
- return os.getcwd()
-
-
-def execute(cli_args):
- arguments = docopt(__doc__, cli_args, version=tddc.__version__)
- if arguments['summarize']:
- summarize.go(
- input_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- output_root_dir=get_output_root_dir(),
- output_dir=arguments['--output'],
- null_string=arguments['--null'],
- )
- elif arguments['build_trello']:
- build_trello.go(
- summary_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- trello_summary_root_dir=get_output_root_dir(),
- output_dir=arguments['--output']
- )
- elif arguments['build']:
- build.go(
- summaries_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- scripts_root_dir=get_output_root_dir(),
- output_dir=arguments['--output']
- )
-
-
-if __name__ == '__main__':
- execute(sys.argv[1:])
| run_tddc and __main__ are not both needed
move everything in run_tddc to \_\_main\_\_ | DataKind-SG/test-driven-data-cleaning | diff --git a/tddc/tests/test_run_tddc.py b/tddc/tests/test_main.py
similarity index 83%
rename from tddc/tests/test_run_tddc.py
rename to tddc/tests/test_main.py
index 68ba0d2..a575056 100644
--- a/tddc/tests/test_run_tddc.py
+++ b/tddc/tests/test_main.py
@@ -2,7 +2,7 @@ import pytest
import os
from mock import patch
-from tddc import run_tddc, common
+from tddc import __main__, common
@pytest.fixture(scope='module')
@@ -11,15 +11,15 @@ def root_dir():
def test_get_input_root_dir(root_dir):
- assert run_tddc.get_input_root_dir() == root_dir
+ assert __main__.get_input_root_dir() == root_dir
def test_get_output_root_dir(root_dir):
- assert run_tddc.get_output_root_dir() == root_dir
+ assert __main__.get_output_root_dir() == root_dir
-@patch('tddc.run_tddc.get_input_root_dir')
-@patch('tddc.run_tddc.get_output_root_dir')
+@patch('tddc.__main__.get_input_root_dir')
+@patch('tddc.__main__.get_output_root_dir')
def test_cli_summarize(
mock_output_root_dir, mock_input_root_dir, fixtures_dir, input_filename, null_string, tmpdir
):
@@ -37,12 +37,12 @@ def test_cli_summarize(
# test_summarize.py already tests the content of the file. This just tests that the CLI works properly and
# generates a file at the expected location.
assert not os.path.isfile(expected_output_loc)
- run_tddc.execute(cli_args)
+ __main__.execute(cli_args)
assert os.path.isfile(expected_output_loc)
-@patch('tddc.run_tddc.get_input_root_dir')
-@patch('tddc.run_tddc.get_output_root_dir')
+@patch('tddc.__main__.get_input_root_dir')
+@patch('tddc.__main__.get_output_root_dir')
@patch('tddc.build_trello.Trello.client')
def test_cli_build_trello(
mock_client, mock_output_root_dir, mock_input_root_dir, fixtures_dir, input_filename, tmpdir
@@ -64,5 +64,5 @@ def test_cli_build_trello(
# test_build_trello.py already tests the content of the file. This just tests that the CLI works properly and
# generates a file at the expected location.
assert not os.path.isfile(expected_output_loc)
- run_tddc.execute(cli_args)
+ __main__.execute(cli_args)
assert os.path.isfile(expected_output_loc)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_removed_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backports.functools-lru-cache==1.2.1
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
docopt==0.6.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==5.2.0
oauthlib==3.2.2
packaging==21.3
pluggy==1.0.0
py==1.11.0
py-trello==0.6.1
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==3.12
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
-e git+https://github.com/DataKind-SG/test-driven-data-cleaning.git@f21e7b563c1e49803a098586930f2e4682eeb06f#egg=tddc
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: test-driven-data-cleaning
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backports-functools-lru-cache==1.2.1
- charset-normalizer==2.0.12
- coverage==6.2
- docopt==0.6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==5.2.0
- oauthlib==3.2.2
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- py-trello==0.6.1
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==3.12
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/test-driven-data-cleaning
| [
"tddc/tests/test_main.py::test_get_input_root_dir",
"tddc/tests/test_main.py::test_get_output_root_dir",
"tddc/tests/test_main.py::test_cli_summarize",
"tddc/tests/test_main.py::test_cli_build_trello"
] | [] | [] | [] | MIT License | 758 | 1,049 | [
"tddc/__main__.py",
"tddc/run_tddc.py"
] |
|
drdoctr__doctr-123 | 45305afb454eb5dda06fd0deafce6ce70a0e5cee | 2016-09-20 19:35:45 | 45305afb454eb5dda06fd0deafce6ce70a0e5cee | diff --git a/doctr/__main__.py b/doctr/__main__.py
index 25889d4c..a50c3236 100644
--- a/doctr/__main__.py
+++ b/doctr/__main__.py
@@ -182,14 +182,15 @@ def configure(args, parser):
login_kwargs = {'auth': None, 'headers': None}
build_repo = input("What repo do you want to build the docs for (org/reponame, like 'drdoctr/doctr')? ")
- is_private = check_repo_exists(build_repo, **login_kwargs)
+ is_private = check_repo_exists(build_repo, service='github', **login_kwargs)
+ check_repo_exists(build_repo, service='travis')
deploy_repo = input("What repo do you want to deploy the docs to? [{build_repo}] ".format(build_repo=build_repo))
if not deploy_repo:
deploy_repo = build_repo
if deploy_repo != build_repo:
- check_repo_exists(deploy_repo, **login_kwargs)
+ check_repo_exists(deploy_repo, service='github', **login_kwargs)
N = IncrementingInt(1)
diff --git a/doctr/local.py b/doctr/local.py
index 7fbf9004..8fe0b6ef 100644
--- a/doctr/local.py
+++ b/doctr/local.py
@@ -212,7 +212,7 @@ def generate_ssh_key(note, keypath='github_deploy_key'):
with open(keypath + ".pub") as f:
return f.read()
-def check_repo_exists(deploy_repo, *, auth=None, headers=None):
+def check_repo_exists(deploy_repo, service='github', *, auth=None, headers=None):
"""
Checks that the repository exists on GitHub.
@@ -227,11 +227,19 @@ def check_repo_exists(deploy_repo, *, auth=None, headers=None):
raise RuntimeError('"{deploy_repo}" should be in the form username/repo'.format(deploy_repo=deploy_repo))
user, repo = deploy_repo.split('/')
- REPO_URL = 'https://api.github.com/repos/{user}/{repo}'
+ if service == 'github':
+ REPO_URL = 'https://api.github.com/repos/{user}/{repo}'
+ elif service == 'travis':
+ REPO_URL = 'https://api.travis-ci.org/repos/{user}/{repo}'
+ else:
+ raise RuntimeError('Invalid service specified for repo check (neither "travis" nor "github")')
+
r = requests.get(REPO_URL.format(user=user, repo=repo), auth=auth, headers=headers)
if r.status_code == requests.codes.not_found:
- raise RuntimeError('"{user}/{repo}" not found on GitHub. Exiting'.format(user=user, repo=repo))
+ raise RuntimeError('"{user}/{repo}" not found on {service}. Exiting'.format(user=user,
+ repo=repo,
+ service=service))
r.raise_for_status()
| Check for travis repo before generating keys.
Otherwise it will just fail and you'll have to regenerate.
It's a small optimisation though, that will mostly affect users that make typos.
Note that github is not case sensitive for usernames, travis is so for example, I regularly get into trouble when I write my username lowercase and it involves travis.
| drdoctr/doctr | diff --git a/doctr/tests/test_local.py b/doctr/tests/test_local.py
index ba07bb4b..897cfa27 100644
--- a/doctr/tests/test_local.py
+++ b/doctr/tests/test_local.py
@@ -11,20 +11,33 @@ else:
HEADERS = None
-def test_bad_user():
+def test_github_bad_user():
with raises(RuntimeError):
check_repo_exists('---/invaliduser', headers=HEADERS)
-def test_bad_repo():
+def test_github_bad_repo():
with raises(RuntimeError):
check_repo_exists('drdoctr/---', headers=HEADERS)
-def test_repo_exists():
+def test_github_repo_exists():
assert not check_repo_exists('drdoctr/doctr', headers=HEADERS)
-def test_invalid_repo():
+def test_github_invalid_repo():
with raises(RuntimeError):
check_repo_exists('fdsf', headers=HEADERS)
with raises(RuntimeError):
check_repo_exists('fdsf/fdfs/fd', headers=HEADERS)
+
+def test_travis_bad_user():
+ with raises(RuntimeError):
+ # Travis is case-sensitive
+ check_repo_exists('dRdoctr/doctr', service='travis')
+
+def test_travis_bad_repo():
+ with raises(RuntimeError):
+ # Travis is case-sensitive
+ check_repo_exists('drdoctr/DoCtR', service='travis')
+
+def test_travis_repo_exists():
+ assert not check_repo_exists('drdoctr/doctr', service='travis')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 1.3 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pyflakes"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cryptography==44.0.2
-e git+https://github.com/drdoctr/doctr.git@45305afb454eb5dda06fd0deafce6ce70a0e5cee#egg=doctr
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pycparser==2.22
pyflakes==3.3.2
pytest==8.3.5
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: doctr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pyflakes==3.3.2
- pytest==8.3.5
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/doctr
| [
"doctr/tests/test_local.py::test_travis_bad_user",
"doctr/tests/test_local.py::test_travis_bad_repo"
] | [
"doctr/tests/test_local.py::test_travis_repo_exists"
] | [
"doctr/tests/test_local.py::test_github_bad_user",
"doctr/tests/test_local.py::test_github_bad_repo",
"doctr/tests/test_local.py::test_github_repo_exists",
"doctr/tests/test_local.py::test_github_invalid_repo"
] | [] | MIT License | 763 | 684 | [
"doctr/__main__.py",
"doctr/local.py"
] |
|
scrapy__scrapy-2275 | a19af5b164a23547f0327e1392526c7af67876a5 | 2016-09-21 13:32:49 | a975a50558cd78a1573bee2e957afcb419fd1bd6 | diff --git a/scrapy/http/response/__init__.py b/scrapy/http/response/__init__.py
index 983154001..58ad414f1 100644
--- a/scrapy/http/response/__init__.py
+++ b/scrapy/http/response/__init__.py
@@ -9,6 +9,8 @@ from six.moves.urllib.parse import urljoin
from scrapy.http.headers import Headers
from scrapy.utils.trackref import object_ref
from scrapy.http.common import obsolete_setter
+from scrapy.exceptions import NotSupported
+
class Response(object_ref):
@@ -80,3 +82,22 @@ class Response(object_ref):
"""Join this Response's url with a possible relative url to form an
absolute interpretation of the latter."""
return urljoin(self.url, url)
+
+ @property
+ def text(self):
+ """For subclasses of TextResponse, this will return the body
+ as text (unicode object in Python 2 and str in Python 3)
+ """
+ raise AttributeError("Response content isn't text")
+
+ def css(self, *a, **kw):
+ """Shortcut method implemented only by responses whose content
+ is text (subclasses of TextResponse).
+ """
+ raise NotSupported("Response content isn't text")
+
+ def xpath(self, *a, **kw):
+ """Shortcut method implemented only by responses whose content
+ is text (subclasses of TextResponse).
+ """
+ raise NotSupported("Response content isn't text")
| Idea: warn users when trying to use TextResponse functionality with plain Response
Currently, if we try to use TextResponse functionality like response.text or css()/xpath() methods with a plain Response (e.g. in case of binary content), we get an AttributeError:
```
>>> response.css
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-1-7d6e256164d4> in <module>()
----> 1 response.css
AttributeError: 'Response' object has no attribute 'css'
>>> response.xpath
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-2-4f61f6e9fc6e> in <module>()
----> 1 response.xpath
AttributeError: 'Response' object has no attribute 'xpath'
>>> response.text
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-be6a4a00df5e> in <module>()
----> 1 response.text
AttributeError: 'Response' object has no attribute 'text'
```
Would it make sense to add a few methods/properties to explain what's going on for new users?
I was thinking instead of AttributeError, a better behavior could be a ValueError with a message giving a bit more context.
So, in plain `Response`, we could have:
```
def css(self, *args, **kw):
raise ValueError('Response content is not text')
def xpath(self, *args, **kw):
raise ValueError('Response content is not text')
@property
def text(self, *args, **kw):
raise ValueError('Response content is not text')
```
This would be nice, because we'd had to explain fewer things when teaching people about responses and also about using `.css` and `.xpath` methods.
What do you think?
| scrapy/scrapy | diff --git a/tests/test_http_response.py b/tests/test_http_response.py
index c7f36687a..7624aa4c4 100644
--- a/tests/test_http_response.py
+++ b/tests/test_http_response.py
@@ -7,6 +7,7 @@ from scrapy.http import (Request, Response, TextResponse, HtmlResponse,
XmlResponse, Headers)
from scrapy.selector import Selector
from scrapy.utils.python import to_native_str
+from scrapy.exceptions import NotSupported
class BaseResponseTest(unittest.TestCase):
@@ -127,6 +128,18 @@ class BaseResponseTest(unittest.TestCase):
absolute = 'http://www.example.com/test'
self.assertEqual(joined, absolute)
+ def test_shortcut_attributes(self):
+ r = self.response_class("http://example.com", body=b'hello')
+ if self.response_class == Response:
+ msg = "Response content isn't text"
+ self.assertRaisesRegexp(AttributeError, msg, getattr, r, 'text')
+ self.assertRaisesRegexp(NotSupported, msg, r.css, 'body')
+ self.assertRaisesRegexp(NotSupported, msg, r.xpath, '//body')
+ else:
+ r.text
+ r.css('body')
+ r.xpath('//body')
+
class TextResponseTest(BaseResponseTest):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
coverage==7.8.0
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
execnet==2.1.1
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@a19af5b164a23547f0327e1392526c7af67876a5#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- coverage==7.8.0
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_http_response.py::BaseResponseTest::test_shortcut_attributes"
] | [
"tests/test_http_response.py::TextResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::TextResponseTest::test_selector",
"tests/test_http_response.py::TextResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::TextResponseTest::test_shortcut_attributes",
"tests/test_http_response.py::HtmlResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::HtmlResponseTest::test_selector",
"tests/test_http_response.py::HtmlResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::HtmlResponseTest::test_shortcut_attributes",
"tests/test_http_response.py::XmlResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::XmlResponseTest::test_selector",
"tests/test_http_response.py::XmlResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::XmlResponseTest::test_shortcut_attributes"
] | [
"tests/test_http_response.py::BaseResponseTest::test_copy",
"tests/test_http_response.py::BaseResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::BaseResponseTest::test_copy_meta",
"tests/test_http_response.py::BaseResponseTest::test_immutable_attributes",
"tests/test_http_response.py::BaseResponseTest::test_init",
"tests/test_http_response.py::BaseResponseTest::test_replace",
"tests/test_http_response.py::BaseResponseTest::test_urljoin",
"tests/test_http_response.py::TextResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::TextResponseTest::test_copy",
"tests/test_http_response.py::TextResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::TextResponseTest::test_copy_meta",
"tests/test_http_response.py::TextResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::TextResponseTest::test_encoding",
"tests/test_http_response.py::TextResponseTest::test_immutable_attributes",
"tests/test_http_response.py::TextResponseTest::test_init",
"tests/test_http_response.py::TextResponseTest::test_replace",
"tests/test_http_response.py::TextResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::TextResponseTest::test_unicode_body",
"tests/test_http_response.py::TextResponseTest::test_unicode_url",
"tests/test_http_response.py::TextResponseTest::test_urljoin",
"tests/test_http_response.py::TextResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::TextResponseTest::test_utf16",
"tests/test_http_response.py::HtmlResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::HtmlResponseTest::test_copy",
"tests/test_http_response.py::HtmlResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::HtmlResponseTest::test_copy_meta",
"tests/test_http_response.py::HtmlResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::HtmlResponseTest::test_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_html5_meta_charset",
"tests/test_http_response.py::HtmlResponseTest::test_html_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_immutable_attributes",
"tests/test_http_response.py::HtmlResponseTest::test_init",
"tests/test_http_response.py::HtmlResponseTest::test_replace",
"tests/test_http_response.py::HtmlResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_unicode_body",
"tests/test_http_response.py::HtmlResponseTest::test_unicode_url",
"tests/test_http_response.py::HtmlResponseTest::test_urljoin",
"tests/test_http_response.py::HtmlResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::HtmlResponseTest::test_utf16",
"tests/test_http_response.py::XmlResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::XmlResponseTest::test_copy",
"tests/test_http_response.py::XmlResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::XmlResponseTest::test_copy_meta",
"tests/test_http_response.py::XmlResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::XmlResponseTest::test_encoding",
"tests/test_http_response.py::XmlResponseTest::test_immutable_attributes",
"tests/test_http_response.py::XmlResponseTest::test_init",
"tests/test_http_response.py::XmlResponseTest::test_replace",
"tests/test_http_response.py::XmlResponseTest::test_replace_encoding",
"tests/test_http_response.py::XmlResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::XmlResponseTest::test_unicode_body",
"tests/test_http_response.py::XmlResponseTest::test_unicode_url",
"tests/test_http_response.py::XmlResponseTest::test_urljoin",
"tests/test_http_response.py::XmlResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::XmlResponseTest::test_utf16",
"tests/test_http_response.py::XmlResponseTest::test_xml_encoding"
] | [] | BSD 3-Clause "New" or "Revised" License | 764 | 344 | [
"scrapy/http/response/__init__.py"
] |
|
simphony__tornado-webapi-17 | 5dd0c146089cc0a348a9798e2021447aa4ee311e | 2016-09-21 13:35:16 | 95ad403cd70109fb94727a119545567593d8203d | diff --git a/tornadowebapi/__init__.py b/tornadowebapi/__init__.py
index 12f7bda..9cf510d 100644
--- a/tornadowebapi/__init__.py
+++ b/tornadowebapi/__init__.py
@@ -1,5 +1,4 @@
-from .handler import ResourceHandler, CollectionHandler
-from .utils import url_path_join, with_end_slash
+from . import registry
MAJOR = 0
MINOR = 1
@@ -30,13 +29,4 @@ def api_handlers(base_urlpath, version="v1"):
The current implementation does not support multiple API versions yet.
The version option is only provided for futureproofing.
"""
- return [
- (with_end_slash(
- url_path_join(base_urlpath, "api", version, "(.*)", "(.*)")),
- ResourceHandler
- ),
- (with_end_slash(
- url_path_join(base_urlpath, "api", version, "(.*)")),
- CollectionHandler
- ),
- ]
+ return registry.registry.api_handlers(base_urlpath, version)
diff --git a/tornadowebapi/handler.py b/tornadowebapi/handler.py
index f9c8956..609a1c5 100644
--- a/tornadowebapi/handler.py
+++ b/tornadowebapi/handler.py
@@ -4,11 +4,14 @@ from tornado.log import app_log
from . import exceptions
from .http import httpstatus
from .http.payloaded_http_error import PayloadedHTTPError
-from .registry import registry
from .utils import url_path_join, with_end_slash
class BaseHandler(web.RequestHandler):
+ def initialize(self, registry):
+ """Initialization method for when the class is instantiated."""
+ self._registry = registry
+
@gen.coroutine
def prepare(self):
"""Runs before any specific handler. """
@@ -17,8 +20,8 @@ class BaseHandler(web.RequestHandler):
@property
def registry(self):
- """Returns the global class vs Resource registry"""
- return registry
+ """Returns the class vs Resource registry"""
+ return self._registry
@property
def log(self):
diff --git a/tornadowebapi/registry.py b/tornadowebapi/registry.py
index c5bac97..ee6bfe0 100644
--- a/tornadowebapi/registry.py
+++ b/tornadowebapi/registry.py
@@ -1,3 +1,5 @@
+from .handler import ResourceHandler, CollectionHandler
+from .utils import url_path_join, with_end_slash
from .resource import Resource
from .authenticator import NullAuthenticator
@@ -63,5 +65,36 @@ class Registry:
"""If the registry contains the given item"""
return item in self._registered_types
+ def api_handlers(self, base_urlpath, version="v1"):
+ """Returns the API handlers for the interface.
+ Add these handlers to your application to provide an
+ interface to your Resources.
+
+
+ Parameters
+ ----------
+ base_urlpath: str
+ The base url path to serve
+ version: str
+ A string identifying the version of the API.
+
+ Notes
+ -----
+ The current implementation does not support multiple API versions yet.
+ The version option is only provided for futureproofing.
+ """
+ return [
+ (with_end_slash(
+ url_path_join(base_urlpath, "api", version, "(.*)", "(.*)")),
+ ResourceHandler,
+ dict(registry=self)
+ ),
+ (with_end_slash(
+ url_path_join(base_urlpath, "api", version, "(.*)")),
+ CollectionHandler,
+ dict(registry=self)
+ ),
+ ]
+
#: global registry for registration of the classes.
registry = Registry()
| Support multiple registries
The current global registry is convenient for a single application, but it's a global object. This introduces major problems when testing. We need to be able to handle multiple registers, each one providing its own api.
| simphony/tornado-webapi | diff --git a/tornadowebapi/tests/test_registry.py b/tornadowebapi/tests/test_registry.py
index 1dc5433..0b793df 100644
--- a/tornadowebapi/tests/test_registry.py
+++ b/tornadowebapi/tests/test_registry.py
@@ -42,3 +42,11 @@ class TestRegistry(unittest.TestCase):
reg = Registry()
self.assertIsNotNone(reg.authenticator)
+
+ def test_api_handlers(self):
+ reg = Registry()
+ api_handlers = reg.api_handlers("/foo")
+ self.assertEqual(len(api_handlers), 2)
+
+ self.assertEqual(api_handlers[0][2]["registry"], reg)
+ self.assertEqual(api_handlers[1][2]["registry"], reg)
diff --git a/tornadowebapi/tests/test_webapi.py b/tornadowebapi/tests/test_webapi.py
index d756ece..3a3c8e5 100644
--- a/tornadowebapi/tests/test_webapi.py
+++ b/tornadowebapi/tests/test_webapi.py
@@ -6,6 +6,7 @@ from unittest import mock
import tornadowebapi
from tornadowebapi import registry, exceptions
from tornadowebapi.http import httpstatus
+from tornadowebapi.registry import Registry
from tornadowebapi.resource import Resource
from tornadowebapi.handler import ResourceHandler, CollectionHandler
from tornadowebapi.tests.utils import AsyncHTTPTestCase
@@ -56,6 +57,16 @@ class Student(Resource):
return list(self.collection.keys())
+class Teacher(Resource):
+ @gen.coroutine
+ def retrieve(self, identifier):
+ return {}
+
+ @gen.coroutine
+ def items(self):
+ return []
+
+
class UnsupportAll(Resource):
pass
@@ -399,3 +410,23 @@ class TestRESTFunctions(unittest.TestCase):
self.assertEqual(handlers[0][1], ResourceHandler)
self.assertEqual(handlers[1][0], "/foo/api/v1/(.*)/")
self.assertEqual(handlers[1][1], CollectionHandler)
+
+
+class TestNonGlobalRegistry(AsyncHTTPTestCase):
+ def setUp(self):
+ super().setUp()
+ Student.collection = OrderedDict()
+ Student.id = 0
+
+ def get_app(self):
+ self.registry = Registry()
+ self.registry.register(Teacher)
+ handlers = self.registry.api_handlers('/')
+ app = web.Application(handlers=handlers)
+ return app
+
+ def test_non_global_registry(self):
+ res = self.fetch("/api/v1/teachers/")
+ self.assertEqual(res.code, httpstatus.OK)
+ self.assertEqual(escape.json_decode(res.body),
+ {"items": []})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"tornado"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
tomli==2.2.1
tornado==6.4.2
-e git+https://github.com/simphony/tornado-webapi.git@5dd0c146089cc0a348a9798e2021447aa4ee311e#egg=tornadowebapi
| name: tornado-webapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- tomli==2.2.1
- tornado==6.4.2
prefix: /opt/conda/envs/tornado-webapi
| [
"tornadowebapi/tests/test_registry.py::TestRegistry::test_api_handlers",
"tornadowebapi/tests/test_webapi.py::TestNonGlobalRegistry::test_non_global_registry"
] | [] | [
"tornadowebapi/tests/test_registry.py::TestRegistry::test_authenticator",
"tornadowebapi/tests/test_registry.py::TestRegistry::test_instantiation",
"tornadowebapi/tests/test_webapi.py::TestREST::test_broken",
"tornadowebapi/tests/test_webapi.py::TestREST::test_create",
"tornadowebapi/tests/test_webapi.py::TestREST::test_delete",
"tornadowebapi/tests/test_webapi.py::TestREST::test_items",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_non_json",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_on_resource",
"tornadowebapi/tests/test_webapi.py::TestREST::test_retrieve",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unexistent_resource_type",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unprocessable",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupported_methods",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupports_collections",
"tornadowebapi/tests/test_webapi.py::TestREST::test_update",
"tornadowebapi/tests/test_webapi.py::TestRESTFunctions::test_api_handlers"
] | [] | BSD 3-Clause "New" or "Revised" License | 765 | 900 | [
"tornadowebapi/__init__.py",
"tornadowebapi/handler.py",
"tornadowebapi/registry.py"
] |
|
tornadoweb__tornado-1838 | bec14a9152438fbf1b9584dc3da4500137577308 | 2016-09-21 19:50:00 | ecd8968c5135b810cd607b5902dda2cd32122b39 | diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index cadb4116..d6183176 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -616,10 +616,14 @@ class IOLoop(Configurable):
# result, which should just be ignored.
pass
else:
- self.add_future(ret, lambda f: f.result())
+ self.add_future(ret, self._discard_future_result)
except Exception:
self.handle_callback_exception(callback)
+ def _discard_future_result(self, future):
+ """Avoid unhandled-exception warnings from spawned coroutines."""
+ future.result()
+
def handle_callback_exception(self, callback):
"""This method is called whenever a callback run by the `IOLoop`
throws an exception.
| Excessive CPU load when returning empty collection
The following causes my CPU to stay pinned at 100% usage
```python
from tornado.ioloop import IOLoop
from tornado import gen
from threading import Thread
loop = IOLoop.current()
t = Thread(target=loop.start, daemon=True)
t.start()
@gen.coroutine
def f():
yield gen.sleep(0)
return {}
loop.add_callback(f)
from time import sleep # just to stick around in case you run this in a script
sleep(100)
```
Replacing the empty dict `{}` with an empty list `[]` has the same effect. Replacing with `None` behaves as expected.
Running Python 3.5, Tornado 4.4.1, on Ubuntu 16.04 | tornadoweb/tornado | diff --git a/tornado/test/ioloop_test.py b/tornado/test/ioloop_test.py
index 8570e73f..1bb8ce08 100644
--- a/tornado/test/ioloop_test.py
+++ b/tornado/test/ioloop_test.py
@@ -9,6 +9,7 @@ import socket
import sys
import threading
import time
+import types
from tornado import gen
from tornado.ioloop import IOLoop, TimeoutError, PollIOLoop, PeriodicCallback
@@ -61,6 +62,25 @@ class FakeTimeIOLoop(PollIOLoop):
class TestIOLoop(AsyncTestCase):
+ def test_add_callback_return_sequence(self):
+ # A callback returning {} or [] shouldn't spin the CPU, see Issue #1803.
+ self.calls = 0
+
+ loop = self.io_loop
+ test = self
+ old_add_callback = loop.add_callback
+
+ def add_callback(self, callback, *args, **kwargs):
+ test.calls += 1
+ old_add_callback(callback, *args, **kwargs)
+
+ loop.add_callback = types.MethodType(add_callback, loop)
+ loop.add_callback(lambda: {})
+ loop.add_callback(lambda: [])
+ loop.add_timeout(datetime.timedelta(milliseconds=50), loop.stop)
+ loop.start()
+ self.assertLess(self.calls, 10)
+
@skipOnTravis
def test_add_callback_wakeup(self):
# Make sure that add_callback from inside a running IOLoop
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"sphinx",
"sphinx_rtd_theme",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@bec14a9152438fbf1b9584dc3da4500137577308#egg=tornado
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- jinja2==3.0.3
- markupsafe==2.0.1
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- urllib3==1.26.20
prefix: /opt/conda/envs/tornado
| [
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_return_sequence"
] | [] | [
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_from_signal",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_from_signal_other_thread",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_wakeup",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_wakeup_other_thread",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_while_closing",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_timeout_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_timeout_timedelta",
"tornado/test/ioloop_test.py::TestIOLoop::test_call_at_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_call_later_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_close_file_object",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging_future",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging_native_coro",
"tornado/test/ioloop_test.py::TestIOLoop::test_handle_callback_exception",
"tornado/test/ioloop_test.py::TestIOLoop::test_handler_callback_file_object",
"tornado/test/ioloop_test.py::TestIOLoop::test_mixed_fd_fileobj",
"tornado/test/ioloop_test.py::TestIOLoop::test_multiple_add",
"tornado/test/ioloop_test.py::TestIOLoop::test_read_while_writeable",
"tornado/test/ioloop_test.py::TestIOLoop::test_reentrant",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_handler_from_handler",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_after_fire",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_cleanup",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_from_timeout",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_without_add",
"tornado/test/ioloop_test.py::TestIOLoop::test_spawn_callback",
"tornado/test/ioloop_test.py::TestIOLoop::test_timeout_with_arguments",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_default_current",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_force_current",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_non_current",
"tornado/test/ioloop_test.py::TestIOLoopAddCallback::test_pre_wrap",
"tornado/test/ioloop_test.py::TestIOLoopAddCallback::test_pre_wrap_with_args",
"tornado/test/ioloop_test.py::TestIOLoopAddCallbackFromSignal::test_pre_wrap",
"tornado/test/ioloop_test.py::TestIOLoopAddCallbackFromSignal::test_pre_wrap_with_args",
"tornado/test/ioloop_test.py::TestIOLoopFutures::test_add_future_stack_context",
"tornado/test/ioloop_test.py::TestIOLoopFutures::test_add_future_threads",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_async_exception",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_async_result",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_current",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_native_coroutine",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_sync_exception",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_sync_result",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_timeout",
"tornado/test/ioloop_test.py::TestPeriodicCallback::test_basic",
"tornado/test/ioloop_test.py::TestPeriodicCallback::test_overrun"
] | [] | Apache License 2.0 | 767 | 199 | [
"tornado/ioloop.py"
] |
|
treasure-data__td-client-python-21 | 59f47438514f128cadf945f54cf56d5f311c5338 | 2016-09-23 08:10:45 | 59f47438514f128cadf945f54cf56d5f311c5338 | diff --git a/tdclient/client.py b/tdclient/client.py
index 691b225..85c2258 100644
--- a/tdclient/client.py
+++ b/tdclient/client.py
@@ -527,10 +527,7 @@ class Client(object):
[:class:`tdclient.models.Schedule`]
"""
result = self.api.list_schedules()
- def schedule(m):
- name,cron,query,database,result_url,timezone,delay,next_time,priority,retry_limit,org_name = m
- return models.Schedule(self, name, cron, query, database, result_url, timezone, delay, next_time, priority, retry_limit, org_name)
- return [ schedule(m) for m in result ]
+ return [ models.Schedule(self, m.get("name"), m.get("cron"), m.get("query"), **m) for m in result ]
def update_schedule(self, name, params=None):
"""
diff --git a/tdclient/schedule_api.py b/tdclient/schedule_api.py
index 8d9ec3b..02e7106 100644
--- a/tdclient/schedule_api.py
+++ b/tdclient/schedule_api.py
@@ -50,17 +50,12 @@ class ScheduleAPI(object):
self.raise_error("List schedules failed", res, body)
js = self.checked_json(body, ["schedules"])
def schedule(m):
- name = m.get("name")
- cron = m.get("cron")
- query = m.get("query")
- database = m.get("database")
- result_url = m.get("result")
- timezone = m.get("timezone", "UTC")
- delay = m.get("delay")
- next_time = self._parsedate(self.get_or_else(m, "next_time", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
- priority = m.get("priority")
- retry_limit = m.get("retry_limit")
- return (name, cron, query, database, result_url, timezone, delay, next_time, priority, retry_limit, None) # same as database
+ m = dict(m)
+ if "timezone" not in m:
+ m["timezone"] = "UTC"
+ m["created_at"] = self._parsedate(self.get_or_else(m, "created_at", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
+ m["next_time"] = self._parsedate(self.get_or_else(m, "next_time", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
+ return m
return [ schedule(m) for m in js["schedules"] ]
def update_schedule(self, name, params=None):
diff --git a/tdclient/schedule_model.py b/tdclient/schedule_model.py
index 104550d..888ae08 100644
--- a/tdclient/schedule_model.py
+++ b/tdclient/schedule_model.py
@@ -24,19 +24,27 @@ class Schedule(Model):
"""Schedule on Treasure Data Service
"""
- def __init__(self, client, name, cron, query, database=None, result_url=None, timezone=None, delay=None, next_time=None, priority=None, retry_limit=None, org_name=None):
+ def __init__(self, client, name, cron, query, **kwargs):
super(Schedule, self).__init__(client)
self._name = name
self._cron = cron
+ self._timezone = kwargs.get("timezone")
+ self._delay = kwargs.get("delay")
+ self._created_at = kwargs.get("created_at")
+ self._type = kwargs.get("type")
self._query = query
- self._database = database
- self._result_url = result_url
- self._timezone = timezone
- self._delay = delay
- self._next_time = next_time
- self._priority = priority
- self._retry_limit = retry_limit
- self._org_name = org_name
+ self._database = kwargs.get("database")
+ self._user_name = kwargs.get("user_name")
+ self._priority = kwargs.get("priority")
+ self._retry_limit = kwargs.get("retry_limit")
+ if "result_url" in kwargs:
+ # backward compatibility for td-client-python < 0.6.0
+ # TODO: remove this code if not necessary with fixing test
+ self._result = kwargs.get("result_url")
+ else:
+ self._result = kwargs.get("result")
+ self._next_time = kwargs.get("next_time")
+ self._org_name = kwargs.get("org_name")
@property
def name(self):
@@ -68,7 +76,7 @@ class Schedule(Model):
def result_url(self):
"""The result output configuration in URL form of a scheduled job
"""
- return self._result_url
+ return self._result
@property
def timezone(self):
@@ -88,7 +96,10 @@ class Schedule(Model):
def priority(self):
"""The priority of a scheduled job
"""
- return self._priority
+ if self._priority in Job.JOB_PRIORITY:
+ return Job.JOB_PRIORITY[self._priority]
+ else:
+ return str(self._priority)
@property
def retry_limit(self):
@@ -111,6 +122,27 @@ class Schedule(Model):
"""
return self._next_time
+ @property
+ def created_at(self):
+ """
+ TODO: add docstring
+ """
+ return self._created_at
+
+ @property
+ def type(self):
+ """
+ TODO: add docstring
+ """
+ return self._type
+
+ @property
+ def user_name(self):
+ """
+ TODO: add docstring
+ """
+ return self._user_name
+
def run(self, time, num=None):
"""Run a scheduled job
"""
| Missing created_time and user_name in list_schedules api
Schedule API returns the following for each scheduled.
But, created_time and user_name are missing
```
$ curl -H "AUTHORIZATION: TD1 XXXXX" "http://api.treasuredata.com/v3/schedule/list"
...
{
"name":"xxx",
"cron":null,
"timezone":"UTC",
"delay":0,
"created_at":"2016-08-15T23:03:59Z",
"type":"presto",
"query":"xxxx",
"database":"api_production",
"user_name":"YYYY",
"priority":0,
"retry_limit":0,
"result":"",
"next_time":null
}
```
https://github.com/treasure-data/td-client-python/blob/master/tdclient/schedule_api.py#L52-L63
| treasure-data/td-client-python | diff --git a/tdclient/test/schedule_api_test.py b/tdclient/test/schedule_api_test.py
index b7f18bb..faca3d1 100644
--- a/tdclient/test/schedule_api_test.py
+++ b/tdclient/test/schedule_api_test.py
@@ -70,13 +70,54 @@ def test_delete_schedule_success():
def test_list_schedules_success():
td = api.API("APIKEY")
- # TODO: should be replaced by wire dump
body = b"""
{
"schedules":[
- {"name":"foo","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""},
- {"name":"bar","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""},
- {"name":"baz","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""}
+ {
+ "name": "foo",
+ "cron": null,
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-08-02T17:58:40Z",
+ "type": "presto",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Yuu Yamashita",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": null
+ },
+ {
+ "name": "bar",
+ "cron": "0 0 * * *",
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-08-02T18:01:04Z",
+ "type": "presto",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Kazuki Ota",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": "2016-09-24T00:00:00Z"
+ },
+ {
+ "name": "baz",
+ "cron": "* * * * *",
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-03-02T23:01:59Z",
+ "type": "hive",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Yuu Yamashita",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": "2016-07-06T00:00:00Z"
+ }
]
}
"""
@@ -84,6 +125,22 @@ def test_list_schedules_success():
schedules = td.list_schedules()
td.get.assert_called_with("/v3/schedule/list")
assert len(schedules) == 3
+ next_time = sorted([ schedule.get("next_time") for schedule in schedules if "next_time" in schedule ])
+ assert len(next_time) == 3
+ assert next_time[2].year == 2016
+ assert next_time[2].month == 9
+ assert next_time[2].day == 24
+ assert next_time[2].hour == 0
+ assert next_time[2].minute == 0
+ assert next_time[2].second == 0
+ created_at = sorted([ schedule.get("created_at") for schedule in schedules if "created_at" in schedule ])
+ assert len(created_at) == 3
+ assert created_at[2].year == 2016
+ assert created_at[2].month == 8
+ assert created_at[2].day == 2
+ assert created_at[2].hour == 18
+ assert created_at[2].minute == 1
+ assert created_at[2].second == 4
def test_list_schedules_failure():
td = api.API("APIKEY")
@@ -100,13 +157,59 @@ def test_update_schedule_success():
def test_history_success():
td = api.API("APIKEY")
- # TODO: should be replaced by wire dump
body = b"""
{
"history": [
- {"job_id":"12345"},
- {"job_id":"67890"}
- ]
+ {
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "type": "presto",
+ "priority": 0,
+ "retry_limit": 0,
+ "duration": 1,
+ "status": "success",
+ "cpu_time": null,
+ "result_size": 30,
+ "job_id": "12345",
+ "created_at": "2016-04-13 05:24:59 UTC",
+ "updated_at": "2016-04-13 05:25:02 UTC",
+ "start_at": "2016-04-13 05:25:00 UTC",
+ "end_at": "2016-04-13 05:25:01 UTC",
+ "num_records": 1,
+ "database": "sample_datasets",
+ "user_name": "Ryuta Kamizono",
+ "result": "",
+ "url": "https://console.treasuredata.com/jobs/12345",
+ "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
+ "organization": null,
+ "scheduled_at": ""
+ },
+ {
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "type": "presto",
+ "priority": 0,
+ "retry_limit": 0,
+ "duration": 1,
+ "status": "success",
+ "cpu_time": null,
+ "result_size": 30,
+ "job_id": "67890",
+ "created_at": "2016-04-13 05:24:59 UTC",
+ "updated_at": "2016-04-13 05:25:02 UTC",
+ "start_at": "2016-04-13 05:25:00 UTC",
+ "end_at": "2016-04-13 05:25:01 UTC",
+ "num_records": 1,
+ "database": "sample_datasets",
+ "user_name": "Ryuta Kamizono",
+ "result": "",
+ "url": "https://console.treasuredata.com/jobs/67890",
+ "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
+ "organization": null,
+ "scheduled_at": ""
+ }
+ ],
+ "count": 2,
+ "from": 0,
+ "to": 20
}
"""
td.get = mock.MagicMock(return_value=make_response(200, body))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5.2",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "py.test --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
msgpack-python==0.4.8
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.4.2
six==1.17.0
-e git+https://github.com/treasure-data/td-client-python.git@59f47438514f128cadf945f54cf56d5f311c5338#egg=td_client
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: td-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.4.8
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.4.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/td-client-python
| [
"tdclient/test/schedule_api_test.py::test_list_schedules_success"
] | [] | [
"tdclient/test/schedule_api_test.py::test_create_schedule_success",
"tdclient/test/schedule_api_test.py::test_create_schedule_without_cron_success",
"tdclient/test/schedule_api_test.py::test_delete_schedule_success",
"tdclient/test/schedule_api_test.py::test_list_schedules_failure",
"tdclient/test/schedule_api_test.py::test_update_schedule_success",
"tdclient/test/schedule_api_test.py::test_history_success",
"tdclient/test/schedule_api_test.py::test_run_schedule_success"
] | [] | Apache License 2.0 | 774 | 1,436 | [
"tdclient/client.py",
"tdclient/schedule_api.py",
"tdclient/schedule_model.py"
] |
|
conan-io__conan-508 | 2167f1f59f670b87acb69efd117f79ff506ed99f | 2016-09-27 16:24:01 | 2167f1f59f670b87acb69efd117f79ff506ed99f | diff --git a/conans/client/deps_builder.py b/conans/client/deps_builder.py
index 341d1c41a..83c9c8821 100644
--- a/conans/client/deps_builder.py
+++ b/conans/client/deps_builder.py
@@ -416,7 +416,7 @@ class DepsBuilder(object):
def _create_new_node(self, current_node, dep_graph, requirement, public_deps, name_req):
""" creates and adds a new node to the dependency graph
"""
- conanfile_path = self._retriever.get_conanfile(requirement.conan_reference)
+ conanfile_path = self._retriever.get_recipe(requirement.conan_reference)
output = ScopedOutput(str(requirement.conan_reference), self._output)
dep_conanfile = self._loader.load_conan(conanfile_path, output)
if dep_conanfile:
diff --git a/conans/client/proxy.py b/conans/client/proxy.py
index 57ee121b9..cdecc8855 100644
--- a/conans/client/proxy.py
+++ b/conans/client/proxy.py
@@ -70,17 +70,17 @@ class ConanProxy(object):
remote = self._registry.get_ref(package_reference.conan)
self._manifest_manager.check_package(package_reference, remote)
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference):
output = ScopedOutput(str(conan_reference), self._out)
def _refresh():
- conan_dir_path = self._client_cache.export(conan_reference)
- rmdir(conan_dir_path)
+ export_path = self._client_cache.export(conan_reference)
+ rmdir(export_path)
# It might need to remove shortpath
rmdir(self._client_cache.source(conan_reference), True)
current_remote, _ = self._get_remote(conan_reference)
output.info("Retrieving from remote '%s'..." % current_remote.name)
- self._remote_manager.get_conanfile(conan_reference, current_remote)
+ self._remote_manager.get_recipe(conan_reference, export_path, current_remote)
if self._update:
output.info("Updated!")
else:
@@ -88,7 +88,6 @@ class ConanProxy(object):
# check if it is in disk
conanfile_path = self._client_cache.conanfile(conan_reference)
-
path_exist = path_exists(conanfile_path, self._client_cache.store)
if path_exist:
@@ -122,7 +121,7 @@ class ConanProxy(object):
"to replace it." % (remote.name, conan_reference))
else:
- self._retrieve_conanfile(conan_reference, output)
+ self._retrieve_recipe(conan_reference, output)
if self._manifest_manager:
remote = self._registry.get_ref(conan_reference)
@@ -146,13 +145,14 @@ class ConanProxy(object):
return 0
- def _retrieve_conanfile(self, conan_reference, output):
+ def _retrieve_recipe(self, conan_reference, output):
""" returns the requested conanfile object, retrieving it from
remotes if necessary. Can raise NotFoundException
"""
def _retrieve_from_remote(remote):
output.info("Trying with '%s'..." % remote.name)
- result = self._remote_manager.get_conanfile(conan_reference, remote)
+ export_path = self._client_cache.export(conan_reference)
+ result = self._remote_manager.get_recipe(conan_reference, export_path, remote)
self._registry.set_ref(conan_reference, remote)
return result
@@ -261,7 +261,8 @@ class ConanProxy(object):
def download_packages(self, reference, package_ids):
assert(isinstance(package_ids, list))
remote, _ = self._get_remote(reference)
- self._remote_manager.get_conanfile(reference, remote)
+ export_path = self._client_cache.export(reference)
+ self._remote_manager.get_recipe(reference, export_path, remote)
self._registry.set_ref(reference, remote)
output = ScopedOutput(str(reference), self._out)
for package_id in package_ids:
@@ -280,7 +281,8 @@ class ConanProxy(object):
try:
output.info("Looking for package %s in remote '%s' " % (package_id, remote.name))
# Will raise if not found NotFoundException
- self._remote_manager.get_package(package_reference, remote)
+ package_path = self._client_cache.package(package_reference)
+ self._remote_manager.get_package(package_reference, package_path, remote)
output.success('Package installed %s' % package_id)
return True
except ConanConnectionError:
diff --git a/conans/client/remote_manager.py b/conans/client/remote_manager.py
index 819cfa96c..1d716f8cb 100644
--- a/conans/client/remote_manager.py
+++ b/conans/client/remote_manager.py
@@ -1,16 +1,17 @@
-from conans.errors import ConanException, ConanConnectionError
+import os
+import shutil
+import tarfile
+import time
+import traceback
+
from requests.exceptions import ConnectionError
-from conans.util.files import save, tar_extract, rmdir
+
+from conans.errors import ConanException, ConanConnectionError
+from conans.util.files import tar_extract, rmdir, relative_dirs, mkdir
from conans.util.log import logger
-import traceback
-import os
from conans.paths import PACKAGE_TGZ_NAME, CONANINFO, CONAN_MANIFEST, CONANFILE, EXPORT_TGZ_NAME
-from io import BytesIO
-import tarfile
from conans.util.files import gzopen_without_timestamps
from conans.util.files import touch
-import shutil
-import time
class RemoteManager(object):
@@ -85,35 +86,35 @@ class RemoteManager(object):
returns (ConanDigest, remote_name)"""
return self._call_remote(remote, "get_package_digest", package_reference)
- def get_conanfile(self, conan_reference, remote):
+ def get_recipe(self, conan_reference, dest_folder, remote):
"""
Read the conans from remotes
Will iterate the remotes to find the conans unless remote was specified
- returns (dict relative_filepath:content , remote_name)"""
- export_files = self._call_remote(remote, "get_conanfile", conan_reference)
- export_folder = self._client_cache.export(conan_reference)
- uncompress_files(export_files, export_folder, EXPORT_TGZ_NAME)
+ returns (dict relative_filepath:abs_path , remote_name)"""
+ zipped_files = self._call_remote(remote, "get_recipe", conan_reference, dest_folder)
+ files = unzip_and_get_files(zipped_files, dest_folder, EXPORT_TGZ_NAME)
# Make sure that the source dir is deleted
rmdir(self._client_cache.source(conan_reference), True)
# TODO: Download only the CONANFILE file and only download the rest of files
# in install if needed (not found remote package)
+ return files
- def get_package(self, package_reference, remote):
+ def get_package(self, package_reference, dest_folder, remote):
"""
Read the conans package from remotes
Will iterate the remotes to find the conans unless remote was specified
- returns (dict relative_filepath:content , remote_name)"""
- package_files = self._call_remote(remote, "get_package", package_reference)
- destination_dir = self._client_cache.package(package_reference)
- uncompress_files(package_files, destination_dir, PACKAGE_TGZ_NAME)
-
+ returns (dict relative_filepath:abs_path , remote_name)"""
+ zipped_files = self._call_remote(remote, "get_package", package_reference, dest_folder)
+ files = unzip_and_get_files(zipped_files, dest_folder, PACKAGE_TGZ_NAME)
# Issue #214 https://github.com/conan-io/conan/issues/214
- for dirname, _, files in os.walk(destination_dir):
+ for dirname, _, files in os.walk(dest_folder):
for fname in files:
touch(os.path.join(dirname, fname))
+ return files
+
def search(self, remote, pattern=None, ignorecase=True):
"""
Search exported conans information from remotes
@@ -211,20 +212,27 @@ def compress_files(files, name, excluded, dest_dir):
return ret
-def uncompress_files(files, folder, name):
+def unzip_and_get_files(files, destination_dir, tgz_name):
+ '''Moves all files from package_files, {relative_name: tmp_abs_path}
+ to destination_dir, unzipping the "tgz_name" if found'''
+
+ tgz_file = files.pop(tgz_name, None)
+ if tgz_file:
+ uncompress_file(tgz_file, destination_dir)
+
+ return relative_dirs(destination_dir)
+
+
+def uncompress_file(src_path, dest_folder):
try:
- for file_name, content in files:
- if os.path.basename(file_name) == name:
- # Unzip the file and not keep the tgz
- tar_extract(BytesIO(content), folder)
- else:
- save(os.path.join(folder, file_name), content)
+ with open(src_path, 'rb') as file_handler:
+ tar_extract(file_handler, dest_folder)
except Exception as e:
- error_msg = "Error while downloading/extracting files to %s\n%s\n" % (folder, str(e))
+ error_msg = "Error while downloading/extracting files to %s\n%s\n" % (dest_folder, str(e))
# try to remove the files
try:
- if os.path.exists(folder):
- shutil.rmtree(folder)
+ if os.path.exists(dest_folder):
+ shutil.rmtree(dest_folder)
error_msg += "Folder removed"
except Exception as e:
error_msg += "Folder not removed, files/package might be damaged, remove manually"
diff --git a/conans/client/rest/auth_manager.py b/conans/client/rest/auth_manager.py
index 966a2773d..24cb1f43a 100644
--- a/conans/client/rest/auth_manager.py
+++ b/conans/client/rest/auth_manager.py
@@ -142,12 +142,12 @@ class ConanApiAuthManager(object):
return self._rest_client.get_package_digest(package_reference)
@input_credentials_if_unauthorized
- def get_conanfile(self, conan_reference):
- return self._rest_client.get_conanfile(conan_reference)
+ def get_recipe(self, conan_reference, dest_folder):
+ return self._rest_client.get_recipe(conan_reference, dest_folder)
@input_credentials_if_unauthorized
- def get_package(self, package_reference):
- return self._rest_client.get_package(package_reference)
+ def get_package(self, package_reference, dest_folder):
+ return self._rest_client.get_package(package_reference, dest_folder)
@input_credentials_if_unauthorized
def search(self, pattern, ignorecase):
diff --git a/conans/client/rest/rest_client.py b/conans/client/rest/rest_client.py
index 68e28d8e7..fcf6affe2 100644
--- a/conans/client/rest/rest_client.py
+++ b/conans/client/rest/rest_client.py
@@ -12,6 +12,7 @@ from conans.model.manifest import FileTreeManifest
from conans.client.rest.uploader_downloader import Uploader, Downloader
from conans.model.ref import ConanFileReference
from six.moves.urllib.parse import urlsplit, parse_qs
+import tempfile
def handle_return_deserializer(deserializer=None):
@@ -107,7 +108,7 @@ class RestApiClient(object):
contents = {key: decode_text(value) for key, value in dict(contents).items()}
return FileTreeManifest.loads(contents[CONAN_MANIFEST])
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference, dest_folder):
"""Gets a dict of filename:contents from conans"""
# Get the conanfile snapshot first
url = "%s/conans/%s/download_urls" % (self._remote_api_url, "/".join(conan_reference))
@@ -117,12 +118,10 @@ class RestApiClient(object):
raise NotFoundException("Conan '%s' doesn't have a %s!" % (conan_reference, CONANFILE))
# TODO: Get fist an snapshot and compare files and download only required?
+ file_paths = self.download_files_to_folder(urls, dest_folder, self._output)
+ return file_paths
- # Download the resources
- contents = self.download_files(urls, self._output)
- return contents
-
- def get_package(self, package_reference):
+ def get_package(self, package_reference, dest_folder):
"""Gets a dict of filename:contents from package"""
url = "%s/conans/%s/packages/%s/download_urls" % (self._remote_api_url,
"/".join(package_reference.conan),
@@ -133,8 +132,8 @@ class RestApiClient(object):
# TODO: Get fist an snapshot and compare files and download only required?
# Download the resources
- contents = self.download_files(urls, self._output)
- return contents
+ file_paths = self.download_files_to_folder(urls, dest_folder, self._output)
+ return file_paths
def upload_conan(self, conan_reference, the_files):
"""
@@ -361,6 +360,25 @@ class RestApiClient(object):
output.writeln("")
yield os.path.normpath(filename), contents
+ def download_files_to_folder(self, file_urls, to_folder, output=None):
+ """
+ :param: file_urls is a dict with {filename: abs_path}
+
+ It writes downloaded files to disk (appending to file, only keeps chunks in memory)
+ """
+ downloader = Downloader(self.requester, output, self.VERIFY_SSL)
+ ret = {}
+ for filename, resource_url in file_urls.items():
+ if output:
+ output.writeln("Downloading %s" % filename)
+ auth, _ = self._file_server_capabilities(resource_url)
+ abs_path = os.path.join(to_folder, filename)
+ downloader.download(resource_url, abs_path, auth=auth)
+ if output:
+ output.writeln("")
+ ret[filename] = abs_path
+ return ret
+
def upload_files(self, file_urls, files, output):
t1 = time.time()
failed = {}
diff --git a/conans/client/rest/uploader_downloader.py b/conans/client/rest/uploader_downloader.py
index eec4d8e84..475dfb450 100644
--- a/conans/client/rest/uploader_downloader.py
+++ b/conans/client/rest/uploader_downloader.py
@@ -125,8 +125,10 @@ class Downloader(object):
if self.output:
print_progress(self.output, units)
last_progress = units
-
- return bytes(ret)
+ if not file_path:
+ return bytes(ret)
+ else:
+ return
except Exception as e:
logger.debug(e.__class__)
logger.debug(traceback.format_exc())
diff --git a/conans/server/store/file_manager.py b/conans/server/store/file_manager.py
index a7f95ab65..6b6d4295b 100644
--- a/conans/server/store/file_manager.py
+++ b/conans/server/store/file_manager.py
@@ -17,7 +17,7 @@ class FileManager(object):
self._storage_adapter = storage_adapter
# ############ SNAPSHOTS
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference):
conanfile_path = self.paths.conanfile(conan_reference)
return self._storage_adapter.get_file(conanfile_path)
| Memory error when download very big packages
`conan install node/6.1.0@silkedit/stable -s compiler="Visual Studio" -s compiler.version=14`
Downloader.download method keeps in memory too much data.
```
DEBUG :uploader_downloader.py[74]: <type 'exceptions.MemoryError'> [2016-09-23 15:15:02,983]
DEBUG :uploader_downloader.py[75]: Traceback (most recent call last):
File "c:\python27\lib\site-packages\conans\client\rest\uploader_downloader.py", line 62, in download
ret.extend(data)
MemoryError
``` | conan-io/conan | diff --git a/conans/test/download_test.py b/conans/test/download_test.py
index e1ed5a986..e41c6e29a 100644
--- a/conans/test/download_test.py
+++ b/conans/test/download_test.py
@@ -69,7 +69,7 @@ class DownloadTest(unittest.TestCase):
client2.remote_manager,
"default")
- installer.get_conanfile(conan_ref)
+ installer.get_recipe(conan_ref)
installer.get_package(package_ref, force_build=False)
reg_path = client2.paths.export(ConanFileReference.loads("Hello/1.2.1/frodo/stable"))
diff --git a/conans/test/model/order_libs_test.py b/conans/test/model/order_libs_test.py
index 5e70ef0cc..99dfa93aa 100644
--- a/conans/test/model/order_libs_test.py
+++ b/conans/test/model/order_libs_test.py
@@ -48,7 +48,7 @@ class Retriever(object):
content = base_content % (name, self._reqs(requires), name, self._libs(name))
save(conan_path, content)
- def get_conanfile(self, conan_ref):
+ def get_recipe(self, conan_ref):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
return conan_path
diff --git a/conans/test/model/transitive_reqs_test.py b/conans/test/model/transitive_reqs_test.py
index 441b7f8f5..a8ec49e2e 100644
--- a/conans/test/model/transitive_reqs_test.py
+++ b/conans/test/model/transitive_reqs_test.py
@@ -36,7 +36,7 @@ class Retriever(object):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
save(conan_path, content)
- def get_conanfile(self, conan_ref):
+ def get_recipe(self, conan_ref):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
return conan_path
diff --git a/conans/test/remote_manager_test.py b/conans/test/remote_manager_test.py
index 8e02eaf41..3362245b2 100644
--- a/conans/test/remote_manager_test.py
+++ b/conans/test/remote_manager_test.py
@@ -1,17 +1,20 @@
+import os
+import tempfile
import unittest
-from conans.client.remote_manager import RemoteManager
+
from mock import Mock
+
+from conans.client.client_cache import ClientCache
+from conans.client.remote_manager import RemoteManager
+from conans.client.remote_registry import Remote
from conans.errors import NotFoundException
from conans.model.ref import ConanFileReference, PackageReference
+from conans.model.manifest import FileTreeManifest
+from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO
from conans.test.tools import TestBufferConanOutput, TestClient
from conans.test.utils.test_files import temp_folder
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
-from conans.client.remote_registry import Remote
-from conans.client.client_cache import ClientCache
from conans.util.files import save
-from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO
-import os
-from conans.model.manifest import FileTreeManifest
class MockRemoteClient(object):
@@ -19,8 +22,13 @@ class MockRemoteClient(object):
def __init__(self):
self.upload_package = Mock()
self.get_conan_digest = Mock()
- self.get_conanfile = Mock(return_value=[("one.txt", "ONE")])
- self.get_package = Mock(return_value=[("one.txt", "ONE")])
+ tmp_folder = tempfile.mkdtemp(suffix='conan_download')
+ save(os.path.join(tmp_folder, "one.txt"), "ONE")
+ self.get_recipe = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")})
+
+ tmp_folder = tempfile.mkdtemp(suffix='conan_download')
+ save(os.path.join(tmp_folder, "one.txt"), "ONE")
+ self.get_package = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")})
self.remote_url = None
self.raise_count = 0
@@ -78,10 +86,10 @@ class RemoteManagerTest(unittest.TestCase):
self.manager.get_conan_digest(self.conan_reference, Remote("other", "url"))
self.assertTrue(self.remote_client.get_conan_digest.called)
- self.assertFalse(self.remote_client.get_conanfile.called)
- self.manager.get_conanfile(self.conan_reference, Remote("other", "url"))
- self.assertTrue(self.remote_client.get_conanfile.called)
+ self.assertFalse(self.remote_client.get_recipe.called)
+ self.manager.get_recipe(self.conan_reference, temp_folder(), Remote("other", "url"))
+ self.assertTrue(self.remote_client.get_recipe.called)
self.assertFalse(self.remote_client.get_package.called)
- self.manager.get_package(self.package_reference, Remote("other", "url"))
+ self.manager.get_package(self.package_reference, temp_folder(), Remote("other", "url"))
self.assertTrue(self.remote_client.get_package.called)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 7
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"conans/requirements.txt",
"conans/requirements_server.txt",
"conans/requirements_dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
beautifulsoup4==4.12.3
boto==2.42.0
bottle==0.12.25
certifi==2021.5.30
colorama==0.3.9
-e git+https://github.com/conan-io/conan.git@2167f1f59f670b87acb69efd117f79ff506ed99f#egg=conan
coverage==6.2
fasteners==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==1.3.0
nose==1.3.7
nose-parameterized==0.5.0
packaging==21.3
passlib==1.6.5
patch==1.16
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
PyJWT==1.4.2
pyparsing==3.1.4
pytest==7.0.1
PyYAML==3.12
requests==2.11.1
six==1.17.0
soupsieve==2.3.2.post1
tomli==1.2.3
typing_extensions==4.1.1
waitress==2.0.0
WebOb==1.8.9
WebTest==2.0.35
zipp==3.6.0
| name: conan
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- beautifulsoup4==4.12.3
- boto==2.42.0
- bottle==0.12.25
- colorama==0.3.9
- coverage==6.2
- fasteners==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==1.3.0
- nose==1.3.7
- nose-parameterized==0.5.0
- packaging==21.3
- passlib==1.6.5
- patch==1.16
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyjwt==1.4.2
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==3.12
- requests==2.11.1
- six==1.17.0
- soupsieve==2.3.2.post1
- tomli==1.2.3
- typing-extensions==4.1.1
- waitress==2.0.0
- webob==1.8.9
- webtest==2.0.35
- zipp==3.6.0
prefix: /opt/conda/envs/conan
| [
"conans/test/model/order_libs_test.py::ConanRequirementsTest::test_diamond_no_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic_transitive_option",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_conditional",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_dep_requires_clear",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_options_solved",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_solved",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_no_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_no_conflict_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_propagate_indirect_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_remove_build_requires",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_remove_two_build_requires",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_simple_override",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_diamond_private",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_private",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels_wrong_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_version_requires_change",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_transitive_two_levels_options"
] | [] | [
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic_option",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_basic",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config_remove",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config_remove2",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_errors",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_new_configure",
"conans/test/remote_manager_test.py::RemoteManagerTest::test_no_remotes"
] | [] | MIT License | 781 | 3,621 | [
"conans/client/deps_builder.py",
"conans/client/proxy.py",
"conans/client/remote_manager.py",
"conans/client/rest/auth_manager.py",
"conans/client/rest/rest_client.py",
"conans/client/rest/uploader_downloader.py",
"conans/server/store/file_manager.py"
] |
|
zalando-stups__senza-365 | fe537a4234d2dd978ef0ff04fba8e5507dad203d | 2016-09-28 08:02:15 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/cloudformation.py b/senza/manaus/cloudformation.py
index b8529be..a53b2e9 100644
--- a/senza/manaus/cloudformation.py
+++ b/senza/manaus/cloudformation.py
@@ -124,6 +124,11 @@ class CloudFormationStack:
for resource in resources:
resource_type = resource["ResourceType"]
if resource_type == ResourceType.route53_record_set:
+ physical_resource_id = resource.get('PhysicalResourceId')
+ if physical_resource_id is None:
+ # if there is no Physical Resource Id we can't fetch the
+ # record
+ continue
records = Route53.get_records(name=resource['PhysicalResourceId'])
for record in records:
if (record.set_identifier is None or
| Unknown Error: 'PhysicalResourceId'
I got a `senza delete` error:
Unknown Error: 'PhysicalResourceId'.
Please create an issue with the content of /tmp/senza-traceback-8ecz_cyz
****************************************senza-traceback-8ecz_cyz****************************************************
`Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/senza/error_handling.py", line 82, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 835, in delete
for r in stack.resources:
File "/usr/local/lib/python3.5/dist-packages/senza/manaus/cloudformation.py", line 127, in resources
records = Route53.get_records(name=resource['PhysicalResourceId'])
KeyError: 'PhysicalResourceId'` | zalando-stups/senza | diff --git a/tests/test_manaus/test_cloudformation.py b/tests/test_manaus/test_cloudformation.py
index f700c77..44b868a 100644
--- a/tests/test_manaus/test_cloudformation.py
+++ b/tests/test_manaus/test_cloudformation.py
@@ -99,6 +99,12 @@ def test_cf_resources(monkeypatch):
'PhysicalResourceId': 'myapp1.example.com',
'ResourceStatus': 'CREATE_COMPLETE',
'ResourceType': 'AWS::Route53::RecordSet'},
+ {'LastUpdatedTimestamp': datetime(2016, 7, 20, 7, 3,
+ 45, 70000,
+ tzinfo=timezone.utc),
+ 'LogicalResourceId': 'ThisWillBeIgnored',
+ 'ResourceStatus': 'CREATE_COMPLETE',
+ 'ResourceType': 'AWS::Route53::RecordSet'},
{'LastUpdatedTimestamp': datetime(2016, 7, 20, 7, 3,
43, 871000,
tzinfo=timezone.utc),
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@fe537a4234d2dd978ef0ff04fba8e5507dad203d#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_cloudformation.py::test_cf_resources"
] | [] | [
"tests/test_manaus/test_cloudformation.py::test_get_by_stack_name",
"tests/test_manaus/test_cloudformation.py::test_get_stacks",
"tests/test_manaus/test_cloudformation.py::test_get_by_stack_name_not_found",
"tests/test_manaus/test_cloudformation.py::test_template",
"tests/test_manaus/test_cloudformation.py::test_stack_update"
] | [] | Apache License 2.0 | 782 | 187 | [
"senza/manaus/cloudformation.py"
] |
|
LibraryOfCongress__bagit-python-72 | 5489148ab1f365fcd518bcbc626a9f84b22640a2 | 2016-09-28 19:10:04 | 7684c797b19602c8ea9f36475a0ac26be8903b75 | diff --git a/bagit.py b/bagit.py
index 18c61da..d97b4b5 100755
--- a/bagit.py
+++ b/bagit.py
@@ -47,6 +47,8 @@ from os.path import abspath, isdir, isfile, join
LOGGER = logging.getLogger(__name__)
+VERSION = '1.5.4'
+
# standard bag-info.txt metadata
STANDARD_BAG_INFO_HEADERS = [
'Source-Organization',
@@ -141,7 +143,7 @@ def make_bag(bag_dir, bag_info=None, processes=1, checksum=None):
if 'Bagging-Date' not in bag_info:
bag_info['Bagging-Date'] = date.strftime(date.today(), "%Y-%m-%d")
if 'Bag-Software-Agent' not in bag_info:
- bag_info['Bag-Software-Agent'] = 'bagit.py <http://github.com/libraryofcongress/bagit-python>'
+ bag_info['Bag-Software-Agent'] = 'bagit.py v' + VERSION + ' <http://github.com/libraryofcongress/bagit-python>'
bag_info['Payload-Oxum'] = Oxum
_make_tag_file('bag-info.txt', bag_info)
diff --git a/setup.py b/setup.py
index e91730d..ee8c326 100644
--- a/setup.py
+++ b/setup.py
@@ -2,6 +2,8 @@ from sys import exit, version
from setuptools import setup
+import bagit
+
if version < '2.6.0':
print("python 2.6 or higher is required")
exit(1)
@@ -24,10 +26,11 @@ try:
except:
requirements.append("hashlib")
+version = bagit.VERSION
setup(
name = 'bagit',
- version = '1.5.4',
+ version = version,
url = 'https://libraryofcongress.github.io/bagit-python/',
author = 'Ed Summers',
author_email = '[email protected]',
| Bag-Software-Agent include version?
It would be nice if the default Bag-Software-Agent had a version in it. Currently it looks like:
bagit.py <http://github.com/libraryofcongress/bagit-python>
But maybe something like this would be better, for discovering bags created with (gasp) a version of bagit.py that has a bug?
bagit.py v1.5.3 <http://github.com/libraryofcongress/bagit-python>
| LibraryOfCongress/bagit-python | diff --git a/test.py b/test.py
index 5dc8fbf..285174c 100644
--- a/test.py
+++ b/test.py
@@ -303,7 +303,7 @@ class TestSingleProcessValidation(unittest.TestCase):
info = {'Bagging-Date': '1970-01-01', 'Contact-Email': '[email protected]'}
bag = bagit.make_bag(self.tmpdir, checksum=['sha1'], bag_info=info)
self.assertTrue(os.path.isfile(j(self.tmpdir, 'tagmanifest-sha1.txt')))
- self.assertEqual(bag.entries['bag-info.txt']['sha1'], 'd7f086508df433e5d7464b5a3835d5501df14404')
+ self.assertEqual(bag.entries['bag-info.txt']['sha1'], 'ec70407d895d4e550bc0a7ea40a82ad653d136e5')
def test_validate_unreadable_file(self):
bag = bagit.make_bag(self.tmpdir, checksum=["md5"])
@@ -360,7 +360,7 @@ class TestBag(unittest.TestCase):
self.assertTrue('Contact-Email: [email protected]' in bag_info_txt)
self.assertTrue('Bagging-Date: 1970-01-01' in bag_info_txt)
self.assertTrue('Payload-Oxum: 991765.5' in bag_info_txt)
- self.assertTrue('Bag-Software-Agent: bagit.py <http://github.com/libraryofcongress/bagit-python>' in bag_info_txt)
+ self.assertTrue('Bag-Software-Agent: bagit.py v1.5.4 <http://github.com/libraryofcongress/bagit-python>' in bag_info_txt)
# check tagmanifest-md5.txt
self.assertTrue(os.path.isfile(j(self.tmpdir, 'tagmanifest-md5.txt')))
@@ -368,7 +368,7 @@ class TestBag(unittest.TestCase):
tagmanifest_txt = tm.read()
self.assertTrue('9e5ad981e0d29adc278f6a294b8c2aca bagit.txt' in tagmanifest_txt)
self.assertTrue('a0ce6631a2a6d1a88e6d38453ccc72a5 manifest-md5.txt' in tagmanifest_txt)
- self.assertTrue('6a5090e27cb29d5dda8a0142fbbdf37e bag-info.txt' in tagmanifest_txt)
+ self.assertTrue('bfe59ad8af1a227d27c191b4178c399f bag-info.txt' in tagmanifest_txt)
def test_make_bag_sha1_manifest(self):
bagit.make_bag(self.tmpdir, checksum=['sha1'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
-e git+https://github.com/LibraryOfCongress/bagit-python.git@5489148ab1f365fcd518bcbc626a9f84b22640a2#egg=bagit
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: bagit-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/bagit-python
| [
"test.py::TestSingleProcessValidation::test_sha1_tagfile",
"test.py::TestMultiprocessValidation::test_sha1_tagfile",
"test.py::TestBag::test_make_bag"
] | [
"test.py::TestSingleProcessValidation::test_validate_unreadable_file",
"test.py::TestMultiprocessValidation::test_validate_unreadable_file"
] | [
"test.py::TestSingleProcessValidation::test_allow_extraneous_dirs_in_base",
"test.py::TestSingleProcessValidation::test_allow_extraneous_files_in_base",
"test.py::TestSingleProcessValidation::test_bom_in_bagit_txt",
"test.py::TestSingleProcessValidation::test_handle_directory_end_slash_gracefully",
"test.py::TestSingleProcessValidation::test_make_bag_md5_sha1_sha256_manifest",
"test.py::TestSingleProcessValidation::test_make_bag_md5_sha256_manifest",
"test.py::TestSingleProcessValidation::test_make_bag_sha1_sha256_manifest",
"test.py::TestSingleProcessValidation::test_missing_file",
"test.py::TestSingleProcessValidation::test_missing_manifest_raises_error",
"test.py::TestSingleProcessValidation::test_missing_tagfile_raises_error",
"test.py::TestSingleProcessValidation::test_mixed_case_checksums",
"test.py::TestSingleProcessValidation::test_multiple_oxum_values",
"test.py::TestSingleProcessValidation::test_validate_fast",
"test.py::TestSingleProcessValidation::test_validate_fast_without_oxum",
"test.py::TestSingleProcessValidation::test_validate_flipped_bit",
"test.py::TestSingleProcessValidation::test_validate_optional_tagfile",
"test.py::TestSingleProcessValidation::test_validate_optional_tagfile_in_directory",
"test.py::TestSingleProcessValidation::test_validate_slow_without_oxum_extra_file",
"test.py::TestSingleProcessValidation::test_validation_error_details",
"test.py::TestMultiprocessValidation::test_allow_extraneous_dirs_in_base",
"test.py::TestMultiprocessValidation::test_allow_extraneous_files_in_base",
"test.py::TestMultiprocessValidation::test_bom_in_bagit_txt",
"test.py::TestMultiprocessValidation::test_handle_directory_end_slash_gracefully",
"test.py::TestMultiprocessValidation::test_make_bag_md5_sha1_sha256_manifest",
"test.py::TestMultiprocessValidation::test_make_bag_md5_sha256_manifest",
"test.py::TestMultiprocessValidation::test_make_bag_sha1_sha256_manifest",
"test.py::TestMultiprocessValidation::test_missing_file",
"test.py::TestMultiprocessValidation::test_missing_manifest_raises_error",
"test.py::TestMultiprocessValidation::test_missing_tagfile_raises_error",
"test.py::TestMultiprocessValidation::test_mixed_case_checksums",
"test.py::TestMultiprocessValidation::test_multiple_oxum_values",
"test.py::TestMultiprocessValidation::test_validate_fast",
"test.py::TestMultiprocessValidation::test_validate_fast_without_oxum",
"test.py::TestMultiprocessValidation::test_validate_flipped_bit",
"test.py::TestMultiprocessValidation::test_validate_optional_tagfile",
"test.py::TestMultiprocessValidation::test_validate_optional_tagfile_in_directory",
"test.py::TestMultiprocessValidation::test_validate_slow_without_oxum_extra_file",
"test.py::TestMultiprocessValidation::test_validation_error_details",
"test.py::TestBag::test_bag_class",
"test.py::TestBag::test_bag_constructor",
"test.py::TestBag::test_carriage_return_manifest",
"test.py::TestBag::test_default_bagging_date",
"test.py::TestBag::test_garbage_in_bagit_txt",
"test.py::TestBag::test_has_oxum",
"test.py::TestBag::test_is_valid",
"test.py::TestBag::test_make_bag_multiprocessing",
"test.py::TestBag::test_make_bag_sha1_manifest",
"test.py::TestBag::test_make_bag_sha256_manifest",
"test.py::TestBag::test_make_bag_sha512_manifest",
"test.py::TestBag::test_make_bag_unknown_algorithm",
"test.py::TestBag::test_make_bag_with_data_dir_present",
"test.py::TestBag::test_make_bag_with_newline",
"test.py::TestBag::test_missing_tagmanifest_valid",
"test.py::TestBag::test_multiple_meta_values",
"test.py::TestBag::test_payload_permissions",
"test.py::TestBag::test_save_baginfo",
"test.py::TestBag::test_save_baginfo_with_sha1",
"test.py::TestBag::test_save_manifests",
"test.py::TestBag::test_save_manifests_deleted_files",
"test.py::TestBag::test_save_only_baginfo",
"test.py::TestBag::test_unicode_in_tags"
] | [] | null | 783 | 476 | [
"bagit.py",
"setup.py"
] |
|
Azure__azure-data-lake-store-python-83 | 9143406b26b3dc695c325b4fbf563c2093e9982f | 2016-09-29 04:13:44 | 9143406b26b3dc695c325b4fbf563c2093e9982f | diff --git a/adlfs/cli.py b/adlfs/cli.py
index f8d2a1f..320033f 100644
--- a/adlfs/cli.py
+++ b/adlfs/cli.py
@@ -91,13 +91,13 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
def _parse_ownership(self, ownership):
if ':' in ownership:
- user, group = ownership.split(':')
- if not user:
- user = None
+ owner, group = ownership.split(':')
+ if not owner:
+ owner = None
else:
- user = ownership
+ owner = ownership
group = None
- return user, group
+ return owner, group
def do_chown(self, line):
parser = argparse.ArgumentParser(prog="chown", add_help=False)
@@ -105,10 +105,10 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
parser.add_argument('files', type=str, nargs='+')
args = parser.parse_args(line.split())
- user, group = self._parse_ownership(args.ownership)
+ owner, group = self._parse_ownership(args.ownership)
for f in args.files:
- self._fs.chown(f, user=user, group=group)
+ self._fs.chown(f, owner=owner, group=group)
def help_chown(self):
print("chown owner[:group] file ...")
| SetOwner and SetPermission are now supported, need tests
Please add tests and recordings for these two APIs now that they are supported | Azure/azure-data-lake-store-python | diff --git a/tests/recordings/test_cli/test_chgrp.yaml b/tests/recordings/test_cli/test_chgrp.yaml
new file mode 100644
index 0000000..5a4b97d
--- /dev/null
+++ b/tests/recordings/test_cli/test_chgrp.yaml
@@ -0,0 +1,113 @@
+interactions:
+- request:
+ body: '123456'
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['6']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE']
+ Pragma: [no-cache]
+ Server-Perf: ['[bd1d8cd5-2d95-4f5f-8a53-2c3d201e18a6][ AuthTime::916.891327850232::PostAuthTime::207.840105100379
+ ][S-HdfsGetFileStatusV2 :: 00:00:005 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:061 ms]%0a[S-FsAppendStream
+ :: 00:00:147 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:147
+ ms]%0a[S-FsAppendStream :: 00:00:034 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:268 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bd1d8cd5-2d95-4f5f-8a53-2c3d201e18a6]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[ad84325f-4c08-46a9-bb80-de4087dec9c1][ AuthTime::862.151547083054::PostAuthTime::171.917123971919
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [ad84325f-4c08-46a9-bb80-de4087dec9c1]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"bar","type":"FILE","blockSize":268435456,"accessTime":1475122175622,"modificationTime":1475122175761,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['290']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[886a16e5-6b2d-4dc4-ae05-51f0e7426d8a][ AuthTime::990.876410298956::PostAuthTime::215.538071122432
+ ][S-HdfsListStatus :: 00:00:031 ms]%0a[LISTSTATUS :: 00:00:031 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [886a16e5-6b2d-4dc4-ae05-51f0e7426d8a]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?recursive=False&OP=DELETE
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[ba03af52-3d38-43d3-b915-87fe720fe72b][ AuthTime::844.619155876924::PostAuthTime::280.969511600577
+ ][S-FsDelete :: 00:00:083 ms]%0a[DELETE :: 00:00:091 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [ba03af52-3d38-43d3-b915-87fe720fe72b]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/recordings/test_cli/test_chown.yaml b/tests/recordings/test_cli/test_chown.yaml
new file mode 100644
index 0000000..9b1b17c
--- /dev/null
+++ b/tests/recordings/test_cli/test_chown.yaml
@@ -0,0 +1,165 @@
+interactions:
+- request:
+ body: '123456'
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['6']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:36 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE']
+ Pragma: [no-cache]
+ Server-Perf: ['[a70cb23e-2849-41d0-92d6-ecd4f18309d6][ AuthTime::982.749286031361::PostAuthTime::208.26758150447
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:036 ms]%0a[S-FsAppendStream
+ :: 00:00:196 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:196
+ ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:296 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [a70cb23e-2849-41d0-92d6-ecd4f18309d6]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?owner=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[87b33574-03ce-45bb-97fd-5c35cae5c524][ AuthTime::945.117335675742::PostAuthTime::220.66992995868
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [87b33574-03ce-45bb-97fd-5c35cae5c524]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[bfe54b19-e547-4b94-a69d-f8bcee43e4c8][ AuthTime::932.288884678858::PostAuthTime::213.400070392087
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bfe54b19-e547-4b94-a69d-f8bcee43e4c8]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&owner=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[bba612c2-e2c2-480f-b7f9-4883970dc590][ AuthTime::1009.26643436432::PostAuthTime::203.13625267926
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bba612c2-e2c2-480f-b7f9-4883970dc590]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"bar","type":"FILE","blockSize":268435456,"accessTime":1475122177420,"modificationTime":1475122177607,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['290']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[590d1d73-6cab-46a8-b06f-559563296587][ AuthTime::907.482928030873::PostAuthTime::203.563559727943
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [590d1d73-6cab-46a8-b06f-559563296587]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?recursive=False&OP=DELETE
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[83093651-8421-45c0-a12d-468c16ec2267][ AuthTime::990.019830332411::PostAuthTime::239.058784084587
+ ][S-FsDelete :: 00:00:095 ms]%0a[DELETE :: 00:00:103 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [83093651-8421-45c0-a12d-468c16ec2267]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/recordings/test_core/test_chown.yaml b/tests/recordings/test_core/test_chown.yaml
new file mode 100644
index 0000000..aa4693e
--- /dev/null
+++ b/tests/recordings/test_core/test_chown.yaml
@@ -0,0 +1,164 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 03:59:59 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Pragma: [no-cache]
+ Server-Perf: ['[d869d515-b22a-4605-ac9e-4e0f7c5b6d8c][ AuthTime::897.647641440444::PostAuthTime::207.840282868059
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:044 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:027
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[CREATE :: 00:00:096 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [d869d515-b22a-4605-ac9e-4e0f7c5b6d8c]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1475121600130,"modificationTime":1475121600130,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['288']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[b7c2f7c8-7e37-46e8-8332-33364ce07654][ AuthTime::909.193157359269::PostAuthTime::209.122979279719
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [b7c2f7c8-7e37-46e8-8332-33364ce07654]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&owner=foo
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[c9893c0b-74c5-4731-9d7e-30d5438e89bb][ AuthTime::1052.88411988168::PostAuthTime::266.856088873341
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [c9893c0b-74c5-4731-9d7e-30d5438e89bb]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&group=bar
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[9847e1b4-5dbd-42ca-974e-9dd95db3f30a][ AuthTime::0::PostAuthTime::0
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [9847e1b4-5dbd-42ca-974e-9dd95db3f30a]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&owner=foo&group=bar
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[5e3ddf51-8148-4d3b-a582-5f48a8ba63a9][ AuthTime::946.399490920039::PostAuthTime::216.393195845251
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [5e3ddf51-8148-4d3b-a582-5f48a8ba63a9]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=DELETE&recursive=True
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:00:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[b928f672-0d84-4a07-9932-55e7b00d2e3f][ AuthTime::1142.69292351484::PostAuthTime::316.892012097492
+ ][S-FsDelete :: 00:00:073 ms]%0a[DELETE :: 00:00:082 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [b928f672-0d84-4a07-9932-55e7b00d2e3f]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 20b84f3..1b14260 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -55,6 +55,13 @@ def test_cat(capsys, azure, client):
assert read_stdout(capsys) == '123456'
+@my_vcr.use_cassette
+def test_chgrp(capsys, azure, client):
+ with setup_file(azure) as azurefile:
+ client.onecmd('chgrp foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+
@my_vcr.use_cassette
def test_chmod(capsys, azure, client):
with setup_file(azure) as azurefile:
@@ -68,6 +75,19 @@ def test_chmod(capsys, azure, client):
assert 'permission = 550' in read_stdout(capsys)
+@my_vcr.use_cassette
+def test_chown(capsys, azure, client):
+ with setup_file(azure) as azurefile:
+ client.onecmd('chown foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+ client.onecmd('chown :foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+ client.onecmd('chown foo:foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+
@my_vcr.use_cassette
def test_df(capsys, azure, client):
client.onecmd('df')
diff --git a/tests/test_core.py b/tests/test_core.py
index 8c6216f..a986219 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -605,6 +605,27 @@ def test_chmod(azure):
azure.chmod(test_dir / 'deep', '770')
+@my_vcr.use_cassette
+def test_chown(azure):
+ with azure_teardown(azure):
+ azure.touch(a)
+
+ # Account doesn't have permission to change owner
+ owner = azure.info(a)['owner']
+ azure.chown(a, owner='foo')
+ assert owner == azure.info(a)['owner']
+
+ # Account doesn't have permission to change group
+ group = azure.info(a)['group']
+ azure.chown(a, group='bar')
+ assert group == azure.info(a)['group']
+
+ # Account doesn't have permission to change owner/group
+ azure.chown(a, owner='foo', group='bar')
+ assert owner == azure.info(a)['owner']
+ assert group == azure.info(a)['group']
+
+
@pytest.mark.skipif(sys.platform != 'win32', reason="requires windows")
def test_backslash():
from adlfs.core import AzureDLPath
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"vcrpy"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==1.2.7
-e git+https://github.com/Azure/azure-data-lake-store-python.git@9143406b26b3dc695c325b4fbf563c2093e9982f#egg=adlfs
attrs==22.2.0
azure==4.0.0
azure-applicationinsights==0.1.1
azure-batch==4.1.3
azure-common==1.1.28
azure-core==1.24.2
azure-cosmosdb-nspkg==2.0.2
azure-cosmosdb-table==1.0.6
azure-datalake-store==0.0.53
azure-eventgrid==1.3.0
azure-graphrbac==0.40.0
azure-keyvault==1.1.0
azure-loganalytics==0.1.1
azure-mgmt==4.0.0
azure-mgmt-advisor==1.0.1
azure-mgmt-applicationinsights==0.1.1
azure-mgmt-authorization==0.50.0
azure-mgmt-batch==5.0.1
azure-mgmt-batchai==2.0.0
azure-mgmt-billing==0.2.0
azure-mgmt-cdn==3.1.0
azure-mgmt-cognitiveservices==3.0.0
azure-mgmt-commerce==1.0.1
azure-mgmt-compute==4.6.2
azure-mgmt-consumption==2.0.0
azure-mgmt-containerinstance==1.5.0
azure-mgmt-containerregistry==2.8.0
azure-mgmt-containerservice==4.4.0
azure-mgmt-cosmosdb==0.4.1
azure-mgmt-datafactory==0.6.0
azure-mgmt-datalake-analytics==0.6.0
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.5.0
azure-mgmt-datamigration==1.0.0
azure-mgmt-devspaces==0.1.0
azure-mgmt-devtestlabs==2.2.0
azure-mgmt-dns==2.1.0
azure-mgmt-eventgrid==1.0.0
azure-mgmt-eventhub==2.6.0
azure-mgmt-hanaonazure==0.1.1
azure-mgmt-iotcentral==0.1.0
azure-mgmt-iothub==0.5.0
azure-mgmt-iothubprovisioningservices==0.2.0
azure-mgmt-keyvault==1.1.0
azure-mgmt-loganalytics==0.2.0
azure-mgmt-logic==3.0.0
azure-mgmt-machinelearningcompute==0.4.1
azure-mgmt-managementgroups==0.1.0
azure-mgmt-managementpartner==0.1.1
azure-mgmt-maps==0.1.0
azure-mgmt-marketplaceordering==0.1.0
azure-mgmt-media==1.0.1
azure-mgmt-monitor==0.5.2
azure-mgmt-msi==0.2.0
azure-mgmt-network==2.7.0
azure-mgmt-notificationhubs==2.1.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-policyinsights==0.1.0
azure-mgmt-powerbiembedded==2.0.0
azure-mgmt-rdbms==1.9.0
azure-mgmt-recoveryservices==0.3.0
azure-mgmt-recoveryservicesbackup==0.3.0
azure-mgmt-redis==5.0.0
azure-mgmt-relay==0.1.0
azure-mgmt-reservations==0.2.1
azure-mgmt-resource==2.2.0
azure-mgmt-scheduler==2.0.0
azure-mgmt-search==2.1.0
azure-mgmt-servicebus==0.5.3
azure-mgmt-servicefabric==0.2.0
azure-mgmt-signalr==0.1.1
azure-mgmt-sql==0.9.1
azure-mgmt-storage==2.0.0
azure-mgmt-subscription==0.2.0
azure-mgmt-trafficmanager==0.50.0
azure-mgmt-web==0.35.0
azure-nspkg==3.0.2
azure-servicebus==0.21.1
azure-servicefabric==6.3.0.0
azure-servicemanagement-legacy==0.20.8
azure-storage-blob==1.5.0
azure-storage-common==1.4.2
azure-storage-file==1.4.0
azure-storage-queue==1.4.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
msal==1.27.0
msrest==0.7.1
msrestazure==0.6.4.post1
multidict==5.2.0
oauthlib==3.2.2
packaging==21.3
pathlib2==2.3.7.post1
pluggy==1.0.0
py==1.11.0
pycparser==2.21
PyJWT==2.4.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
vcrpy==4.1.1
wrapt==1.16.0
yarl==1.7.2
zipp==3.6.0
| name: azure-data-lake-store-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==1.2.7
- attrs==22.2.0
- azure==4.0.0
- azure-applicationinsights==0.1.1
- azure-batch==4.1.3
- azure-common==1.1.28
- azure-core==1.24.2
- azure-cosmosdb-nspkg==2.0.2
- azure-cosmosdb-table==1.0.6
- azure-datalake-store==0.0.53
- azure-eventgrid==1.3.0
- azure-graphrbac==0.40.0
- azure-keyvault==1.1.0
- azure-loganalytics==0.1.1
- azure-mgmt==4.0.0
- azure-mgmt-advisor==1.0.1
- azure-mgmt-applicationinsights==0.1.1
- azure-mgmt-authorization==0.50.0
- azure-mgmt-batch==5.0.1
- azure-mgmt-batchai==2.0.0
- azure-mgmt-billing==0.2.0
- azure-mgmt-cdn==3.1.0
- azure-mgmt-cognitiveservices==3.0.0
- azure-mgmt-commerce==1.0.1
- azure-mgmt-compute==4.6.2
- azure-mgmt-consumption==2.0.0
- azure-mgmt-containerinstance==1.5.0
- azure-mgmt-containerregistry==2.8.0
- azure-mgmt-containerservice==4.4.0
- azure-mgmt-cosmosdb==0.4.1
- azure-mgmt-datafactory==0.6.0
- azure-mgmt-datalake-analytics==0.6.0
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.5.0
- azure-mgmt-datamigration==1.0.0
- azure-mgmt-devspaces==0.1.0
- azure-mgmt-devtestlabs==2.2.0
- azure-mgmt-dns==2.1.0
- azure-mgmt-eventgrid==1.0.0
- azure-mgmt-eventhub==2.6.0
- azure-mgmt-hanaonazure==0.1.1
- azure-mgmt-iotcentral==0.1.0
- azure-mgmt-iothub==0.5.0
- azure-mgmt-iothubprovisioningservices==0.2.0
- azure-mgmt-keyvault==1.1.0
- azure-mgmt-loganalytics==0.2.0
- azure-mgmt-logic==3.0.0
- azure-mgmt-machinelearningcompute==0.4.1
- azure-mgmt-managementgroups==0.1.0
- azure-mgmt-managementpartner==0.1.1
- azure-mgmt-maps==0.1.0
- azure-mgmt-marketplaceordering==0.1.0
- azure-mgmt-media==1.0.1
- azure-mgmt-monitor==0.5.2
- azure-mgmt-msi==0.2.0
- azure-mgmt-network==2.7.0
- azure-mgmt-notificationhubs==2.1.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-policyinsights==0.1.0
- azure-mgmt-powerbiembedded==2.0.0
- azure-mgmt-rdbms==1.9.0
- azure-mgmt-recoveryservices==0.3.0
- azure-mgmt-recoveryservicesbackup==0.3.0
- azure-mgmt-redis==5.0.0
- azure-mgmt-relay==0.1.0
- azure-mgmt-reservations==0.2.1
- azure-mgmt-resource==2.2.0
- azure-mgmt-scheduler==2.0.0
- azure-mgmt-search==2.1.0
- azure-mgmt-servicebus==0.5.3
- azure-mgmt-servicefabric==0.2.0
- azure-mgmt-signalr==0.1.1
- azure-mgmt-sql==0.9.1
- azure-mgmt-storage==2.0.0
- azure-mgmt-subscription==0.2.0
- azure-mgmt-trafficmanager==0.50.0
- azure-mgmt-web==0.35.0
- azure-nspkg==3.0.2
- azure-servicebus==0.21.1
- azure-servicefabric==6.3.0.0
- azure-servicemanagement-legacy==0.20.8
- azure-storage-blob==1.5.0
- azure-storage-common==1.4.2
- azure-storage-file==1.4.0
- azure-storage-queue==1.4.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- msal==1.27.0
- msrest==0.7.1
- msrestazure==0.6.4.post1
- multidict==5.2.0
- oauthlib==3.2.2
- packaging==21.3
- pathlib2==2.3.7.post1
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyjwt==2.4.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- vcrpy==4.1.1
- wrapt==1.16.0
- yarl==1.7.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-data-lake-store-python
| [
"tests/test_cli.py::test_chown"
] | [
"tests/test_core.py::test_ls_touch",
"tests/test_core.py::test_rm",
"tests/test_core.py::test_bad_open",
"tests/test_core.py::test_info",
"tests/test_core.py::test_exists",
"tests/test_core.py::test_read_delimited_block"
] | [
"tests/test_cli.py::test_cat",
"tests/test_cli.py::test_chgrp",
"tests/test_cli.py::test_chmod",
"tests/test_cli.py::test_df",
"tests/test_cli.py::test_du",
"tests/test_cli.py::test_exists",
"tests/test_cli.py::test_get",
"tests/test_cli.py::test_head",
"tests/test_cli.py::test_head_bytes",
"tests/test_cli.py::test_info",
"tests/test_cli.py::test_ls",
"tests/test_cli.py::test_ls_detailed",
"tests/test_cli.py::test_mkdir_and_rmdir",
"tests/test_cli.py::test_mv",
"tests/test_cli.py::test_put",
"tests/test_cli.py::test_tail",
"tests/test_cli.py::test_tail_bytes",
"tests/test_cli.py::test_touch_and_rm",
"tests/test_core.py::test_simple",
"tests/test_core.py::test_idempotent_connect",
"tests/test_core.py::test_pickle",
"tests/test_core.py::test_seek",
"tests/test_core.py::test_concat",
"tests/test_core.py::test_errors",
"tests/test_core.py::test_glob_walk",
"tests/test_core.py::test_df",
"tests/test_core.py::test_move",
"tests/test_core.py::test_cat",
"tests/test_core.py::test_full_read",
"tests/test_core.py::test_tail_head",
"tests/test_core.py::test_readline",
"tests/test_core.py::test_touch_exists",
"tests/test_core.py::test_write_in_read_mode",
"tests/test_core.py::test_readlines",
"tests/test_core.py::test_put",
"tests/test_core.py::test_get",
"tests/test_core.py::test_du",
"tests/test_core.py::test_text_bytes",
"tests/test_core.py::test_append",
"tests/test_core.py::test_write_empty",
"tests/test_core.py::test_write_blocks",
"tests/test_core.py::test_gzip",
"tests/test_core.py::test_fooable",
"tests/test_core.py::test_closed",
"tests/test_core.py::test_TextIOWrapper",
"tests/test_core.py::test_array",
"tests/test_core.py::test_delimiters_newline",
"tests/test_core.py::test_delimiters_dash",
"tests/test_core.py::test_chmod",
"tests/test_core.py::test_chown",
"tests/test_core.py::test_forward_slash"
] | [] | MIT License | 784 | 340 | [
"adlfs/cli.py"
] |
|
zalando-stups__senza-372 | 56e263195218e3fe052e95221b2d9528c4343264 | 2016-09-30 08:21:15 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/aws.py b/senza/aws.py
index 1284ba3..c7f0468 100644
--- a/senza/aws.py
+++ b/senza/aws.py
@@ -55,13 +55,12 @@ def is_status_complete(status: str):
def get_security_group(region: str, sg_name: str):
ec2 = boto3.resource('ec2', region)
try:
- sec_groups = list(ec2.security_groups.filter(
- Filters=[{'Name': 'group-name', 'Values': [sg_name]}]
- ))
- if not sec_groups:
- return None
- # FIXME: What if we have 2 VPC, with a SG with the same name?!
- return sec_groups[0]
+ # first try by tag name then by group-name (cannot be changed)
+ for _filter in [{'Name': 'tag:Name', 'Values': [sg_name]}, {'Name': 'group-name', 'Values': [sg_name]}]:
+ sec_groups = list(ec2.security_groups.filter(Filters=[_filter]))
+ if sec_groups:
+ # FIXME: What if we have 2 VPC, with a SG with the same name?!
+ return sec_groups[0]
except ClientError as e:
error_code = extract_client_error_code(e)
if error_code == 'InvalidGroup.NotFound':
| Lookup SecurityGroups by the tag "Name" rather than GroupName
Both AWS API and CloudFormation allow to refer to a security group by its name if the operation runs in EC2 Classic or the default VPC. Unfortunately it uses the `GroupName` attribute that is automatically generated by AWS if the SG is a part of CloudFormation stack.
It would be a good idea to extend Senza to lookup SG during the CF template generation phase and to use the _tag_ `Name` instead. The tag can be set by another ("system") Senza stack definition, thus allowing the cross-stack references.
Another option would be to support the new cross-stack references that are recently introduced by Amazon: https://aws.amazon.com/blogs/aws/aws-cloudformation-update-yaml-cross-stack-references-simplified-substitution/ | zalando-stups/senza | diff --git a/tests/test_aws.py b/tests/test_aws.py
index 4ca762a..8dd5b44 100644
--- a/tests/test_aws.py
+++ b/tests/test_aws.py
@@ -14,6 +14,21 @@ def test_get_security_group(monkeypatch):
assert results == get_security_group('myregion', 'group_inexistant')
+def test_get_security_group_by_tag_name(monkeypatch):
+
+ def mock_filter(Filters):
+ if Filters[0]['Name'] == 'tag:Name' and Filters[0]['Values'] == ['my-sg']:
+ sg = MagicMock()
+ sg.id = 'sg-123'
+ return [sg]
+
+ ec2 = MagicMock()
+ ec2.security_groups.filter = mock_filter
+ monkeypatch.setattr('boto3.resource', MagicMock(return_value=ec2))
+
+ assert get_security_group('myregion', 'my-sg').id == 'sg-123'
+
+
def test_resolve_security_groups(monkeypatch):
ec2 = MagicMock()
ec2.security_groups.filter = MagicMock(side_effect=[
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
clickclick==20.10.2
coverage==6.2
dnspython==1.15.0
dnspython3==1.15.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
jmespath==0.10.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pystache==0.6.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
raven==6.10.0
requests==2.27.1
s3transfer==0.5.2
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@56e263195218e3fe052e95221b2d9528c4343264#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- charset-normalizer==2.0.12
- click==8.0.4
- clickclick==20.10.2
- coverage==6.2
- dnspython==1.15.0
- dnspython3==1.15.0
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jmespath==0.10.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pystache==0.6.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- raven==6.10.0
- requests==2.27.1
- s3transfer==0.5.2
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/senza
| [
"tests/test_aws.py::test_get_security_group_by_tag_name"
] | [] | [
"tests/test_aws.py::test_get_security_group",
"tests/test_aws.py::test_resolve_security_groups",
"tests/test_aws.py::test_create",
"tests/test_aws.py::test_encrypt",
"tests/test_aws.py::test_list_kms_keys",
"tests/test_aws.py::test_get_vpc_attribute",
"tests/test_aws.py::test_get_account_id",
"tests/test_aws.py::test_get_account_alias",
"tests/test_aws.py::test_resolve_referenced_resource",
"tests/test_aws.py::test_resolve_referenced_resource_with_update_complete_status",
"tests/test_aws.py::test_resolve_referenced_output_when_stack_is_in_update_complete_status",
"tests/test_aws.py::test_parse_time",
"tests/test_aws.py::test_required_capabilities",
"tests/test_aws.py::test_resolve_topic_arn",
"tests/test_aws.py::test_matches_any",
"tests/test_aws.py::test_get_tag"
] | [] | Apache License 2.0 | 789 | 314 | [
"senza/aws.py"
] |
|
abh1nav__gnippy-25 | ce2968934c3d25aabda033a544e3abcc0e80775e | 2016-10-06 11:03:51 | ce2968934c3d25aabda033a544e3abcc0e80775e | diff --git a/gnippy/rules.py b/gnippy/rules.py
index 37877f8..be59fcc 100644
--- a/gnippy/rules.py
+++ b/gnippy/rules.py
@@ -31,7 +31,7 @@ def _check_rules_list(rules_list):
if not isinstance(rules_list, list):
fail()
- expected = ("value", "tag")
+ expected = ("value", "tag", "id")
for r in rules_list:
if not isinstance(r, dict):
fail()
@@ -77,6 +77,7 @@ def _post(conf, built_rules):
error_text = "HTTP Response Code: %s, Text: '%s'" % (str(r.status_code), r.text)
raise RuleAddFailedException(error_text)
+
def _generate_delete_url(conf):
"""
Generate the Rules URL for a DELETE request.
@@ -89,6 +90,7 @@ def _generate_delete_url(conf):
else:
return rules_url + "?_method=delete"
+
def _delete(conf, built_rules):
"""
Generate the Delete Rules URL and make a POST request.
@@ -112,6 +114,7 @@ def _delete(conf, built_rules):
error_text = "HTTP Response Code: %s, Text: '%s'" % (str(r.status_code), r.text)
raise RuleDeleteFailedException(error_text)
+
def build(rule_string, tag=None):
"""
Takes a rule string and optional tag and turns it into a "built_rule" that looks like:
| New rule format breaks delete example
The examples in the README for deleting rules are broken due to the addition of the id field in the rules returned by GNIP.
from gnippy import rules
from gnippy.errors import RuleDeleteFailedException, RulesGetFailedException
try:
rules_list = rules.get_rules()
# Suppose I want to delete the first rule in the list
rules.delete_rule(rules_list[0])
# OR ... I want to delete ALL rules
rules.delete_rules(rules_list)
except RuleDeleteFailedException, RulesGetFailedException:
pass
At the moment a user needs to either manually remove the `id` field
del rule['id']
rules.delete_rule(rule)
or rebuild the rule using `rules.build`
rules.delete_rule(rules.build(rule['value'], rule.get('tag')))
| abh1nav/gnippy | diff --git a/gnippy/test/test_rules.py b/gnippy/test/test_rules.py
index cfa1ee4..cbb4202 100644
--- a/gnippy/test/test_rules.py
+++ b/gnippy/test/test_rules.py
@@ -42,7 +42,8 @@ def good_get_no_rules(url, auth):
def good_get_one_rule(url, auth):
- return test_utils.GoodResponse(json={"rules":[{"value": "Hello", "tag": "mytag"}]})
+ return test_utils.GoodResponse(
+ json={"rules": [{"value": "Hello", "tag": "mytag"}]})
def bad_delete(url, auth, data):
@@ -54,7 +55,6 @@ def good_delete(url, auth, data):
class RulesTestCase(unittest.TestCase):
-
rule_string = "Hello OR World"
tag = "my_tag"
@@ -79,45 +79,53 @@ class RulesTestCase(unittest.TestCase):
def test_check_one_rule_ok(self):
""" Check list of one rule. """
- l = [ { "value": "hello" } ]
+ l = [{"value": "hello"}]
rules._check_rules_list(l)
def test_check_many_rules_ok(self):
""" Check list of many rules. """
- l = [ { "value": "hello" }, { "value": "h", "tag": "w" }]
+ l = [
+ {"value": "hello", "id": 3},
+ {"value": "goodbye", "tag": "w", "id": 4},
+ {"value": "hi again", "tag": "x"},
+ {"value": "bye again"}
+ ]
rules._check_rules_list(l)
def test_check_rule_tag_none(self):
""" Check list of rules both with tag and without. """
- l = [ { "value": "hello", "tag": None }, { "value": "h", "tag": "w" }]
+ l = [{"value": "hello", "tag": None}, {"value": "h", "tag": "w"}]
rules._check_rules_list(l)
def test_check_one_rule_typo_values(self):
""" Make sure incorectly formatted rule values fail. """
- l = [ { "values": "hello" } ]
+ l = [{"values": "hello"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_check_one_rule_typo_tag(self):
""" Make sure incorrectly formatted rule tags fail. """
- l = [ { "value": "hello", "tags": "t" } ]
+ l = [{"value": "hello", "tags": "t"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_check_one_rule_extra_stuff_in_rule(self):
""" Make sure rules with unexpected keys fail. """
- l = [ { "value": "hello", "wat": "man" } ]
+ l = [{"value": "hello", "wat": "man"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_build_rule_bad_args(self):
""" Make sure building rules with unexpected args fail. """
@@ -125,7 +133,8 @@ class RulesTestCase(unittest.TestCase):
rules.build(None)
except BadArgumentException:
return
- self.fail("rules.build_rule was supposed to throw a BadArgumentException")
+ self.fail(
+ "rules.build_rule was supposed to throw a BadArgumentException")
def test_build_rule_without_tag(self):
""" Build rule without tag. """
@@ -149,18 +158,21 @@ class RulesTestCase(unittest.TestCase):
rules.add_rule(self.rule_string, self.tag)
except ConfigFileNotFoundException:
return
- self.fail("Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
+ self.fail(
+ "Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
@mock.patch('requests.post', good_post)
def test_add_one_rule_ok(self):
"""Add one rule with config. """
- rules.add_rule(self.rule_string, self.tag, config_file_path=test_utils.test_config_path)
+ rules.add_rule(self.rule_string, self.tag,
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', bad_post)
def test_add_one_rule_not_ok(self):
"""Add one rule with exception thrown. """
try:
- rules.add_rule(self.rule_string, self.tag, config_file_path=test_utils.test_config_path)
+ rules.add_rule(self.rule_string, self.tag,
+ config_file_path=test_utils.test_config_path)
except RuleAddFailedException:
return
self.fail("Rule Add was supposed to fail and throw a RuleAddException")
@@ -173,20 +185,23 @@ class RulesTestCase(unittest.TestCase):
rules.add_rule(self.rule_string, self.tag)
except ConfigFileNotFoundException:
return
- self.fail("Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
+ self.fail(
+ "Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
@mock.patch('requests.post', good_post)
def test_add_many_rules_ok(self):
""" Add many rules. """
rules_list = self._generate_rules_list()
- rules.add_rules(rules_list, config_file_path=test_utils.test_config_path)
+ rules.add_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', bad_post)
def test_add_many_rules_not_ok(self):
""" Add many rules with exception thrown. """
try:
rules_list = self._generate_rules_list()
- rules.add_rules(rules_list, config_file_path=test_utils.test_config_path)
+ rules.add_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
except RuleAddFailedException:
return
self.fail("Rule Add was supposed to fail and throw a RuleAddException")
@@ -226,7 +241,8 @@ class RulesTestCase(unittest.TestCase):
try:
r = rules.get_rules(config_file_path=test_utils.test_config_path)
except RulesGetFailedException as e:
- self.assertTrue("GNIP API response did not return a rules object" in str(e))
+ self.assertTrue(
+ "GNIP API response did not return a rules object" in str(e))
return
self.fail("rules.get() was supposed to throw a RulesGetFailedException")
@@ -245,13 +261,15 @@ class RulesTestCase(unittest.TestCase):
@mock.patch('requests.post', good_delete)
def test_delete_rules_single(self):
""" Delete one rule. """
- rules.delete_rule({"value": "Hello World"}, config_file_path=test_utils.test_config_path)
+ rules.delete_rule({"value": "Hello World"},
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', good_delete)
def test_delete_rules_multiple(self):
""" Delete multiple rules. """
rules_list = [
- { "value": "Hello World" },
- { "value": "Hello", "tag": "mytag" }
+ {"value": "Hello World"},
+ {"value": "Hello", "tag": "mytag"}
]
- rules.delete_rules(rules_list, config_file_path=test_utils.test_config_path)
\ No newline at end of file
+ rules.delete_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
-e git+https://github.com/abh1nav/gnippy.git@ce2968934c3d25aabda033a544e3abcc0e80775e#egg=gnippy
idna==3.10
iniconfig==2.1.0
mock==1.0.1
nose==1.3.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pyproject-api==1.9.0
pytest==8.3.5
requests==2.32.3
six==1.10.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
| name: gnippy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- idna==3.10
- iniconfig==2.1.0
- mock==1.0.1
- nose==1.3.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pyproject-api==1.9.0
- pytest==8.3.5
- requests==2.32.3
- six==1.10.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/gnippy
| [
"gnippy/test/test_rules.py::RulesTestCase::test_check_many_rules_ok"
] | [] | [
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_no_creds",
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_not_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_no_creds",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_not_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_build_post_object",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_bad_args",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_with_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_without_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_extra_stuff_in_rule",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_typo_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_typo_values",
"gnippy/test/test_rules.py::RulesTestCase::test_check_rule_tag_none",
"gnippy/test/test_rules.py::RulesTestCase::test_delete_rules_multiple",
"gnippy/test/test_rules.py::RulesTestCase::test_delete_rules_single",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_bad_json",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_bad_status_code",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_no_rules_field_json",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_requests_get_exception",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_success_no_rules",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_success_one_rule"
] | [] | Apache License 2.0 | 797 | 353 | [
"gnippy/rules.py"
] |
|
kytos__python-openflow-201 | 013e9c24bd4c234f0bf39210686a58af4c586d7d | 2016-10-11 23:39:44 | 013e9c24bd4c234f0bf39210686a58af4c586d7d | diff --git a/pyof/foundation/basic_types.py b/pyof/foundation/basic_types.py
index 297baa2..a3b5461 100644
--- a/pyof/foundation/basic_types.py
+++ b/pyof/foundation/basic_types.py
@@ -130,10 +130,11 @@ class DPID(GenericType):
self._value = ':'.join(bytes)
def pack(self, value=None):
- buffer = b''
- for value in self._value.split(":"):
- buffer += struct.pack('!B', int(value, 16))
- return buffer
+ if isinstance(value, type(self)):
+ return value.pack()
+ if value is None:
+ value = self._value
+ return struct.pack('!8B', *[int(v, 16) for v in value.split(':')])
class Char(GenericType):
diff --git a/pyof/v0x01/controller2switch/features_reply.py b/pyof/v0x01/controller2switch/features_reply.py
index 8dff843..33f5541 100644
--- a/pyof/v0x01/controller2switch/features_reply.py
+++ b/pyof/v0x01/controller2switch/features_reply.py
@@ -62,7 +62,7 @@ class SwitchFeatures(GenericMessage):
Args:
xid (int): xid to be used on the message header.
- datapath_id (int): UBInt64 datapath unique ID.
+ datapath_id (str or :class:`.DPID`): datapath unique ID.
The lower 48-bits are for MAC address, while
the upper 16-bits are implementer-defined.
n_buffers (int): UBInt32 max packets buffered at once.
| Fix tests
I ran bisect assigning zeros to default DPID value and it lead to d207690facd4844557fb7d53aebbd5d2fb66a414 (added a new basic type: dpid). | kytos/python-openflow | diff --git a/tests/v0x01/test_controller2switch/test_features_reply.py b/tests/v0x01/test_controller2switch/test_features_reply.py
index 2728ded..3f56325 100644
--- a/tests/v0x01/test_controller2switch/test_features_reply.py
+++ b/tests/v0x01/test_controller2switch/test_features_reply.py
@@ -1,5 +1,5 @@
"""Echo request message tests."""
-from pyof.foundation.basic_types import HWAddress
+from pyof.foundation.basic_types import HWAddress, DPID
from pyof.v0x01.common.phy_port import PhyPort, PortConfig, PortState
from pyof.v0x01.controller2switch.features_reply import FeaturesReply
from tests.test_struct import TestStruct
@@ -19,8 +19,9 @@ class TestFeaturesReply(TestStruct):
def _get_kwargs():
- return {'xid': 2, 'datapath_id': 1, 'n_buffers': 256, 'n_tables': 254,
- 'capabilities': 0x000000c7, 'actions': 4095, 'ports': _get_ports()}
+ return {'xid': 2, 'datapath_id': DPID('00:00:00:00:00:00:00:01'),
+ 'n_buffers': 256, 'n_tables': 254, 'capabilities': 0x000000c7,
+ 'actions': 4095, 'ports': _get_ports()}
def _get_ports():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_git_commit_hash",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pylama",
"radon"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | colorama==0.4.6
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
mando==0.7.1
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pydocstyle==6.3.0
pyflakes==3.3.2
pylama==8.4.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
-e git+https://github.com/kytos/python-openflow.git@013e9c24bd4c234f0bf39210686a58af4c586d7d#egg=python_openflow
radon==6.0.1
six==1.17.0
snowballstemmer==2.2.0
tomli==2.2.1
typing_extensions==4.13.0
| name: python-openflow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- colorama==0.4.6
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- mando==0.7.1
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pydocstyle==6.3.0
- pyflakes==3.3.2
- pylama==8.4.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- radon==6.0.1
- six==1.17.0
- snowballstemmer==2.2.0
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/python-openflow
| [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_pack",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_unpack"
] | [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_minimum_size",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_pack",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_unpack"
] | [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_minimum_size"
] | [] | MIT License | 805 | 415 | [
"pyof/foundation/basic_types.py",
"pyof/v0x01/controller2switch/features_reply.py"
] |
|
zalando-stups__senza-393 | 5df821c7b09729f59debf89c0ff1e4068340cbc1 | 2016-10-12 12:15:38 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index 1a0fdb4..68972bd 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -76,13 +76,11 @@ def get_ssl_cert(subdomain, main_zone, configuration, account_info: AccountArgum
return ssl_cert
-def get_listeners(subdomain, main_zone, configuration,
- account_info: AccountArguments):
- ssl_cert = get_ssl_cert(subdomain, main_zone, configuration, account_info)
+def get_listeners(configuration):
return [
{
"PolicyNames": [],
- "SSLCertificateId": ssl_cert,
+ "SSLCertificateId": configuration.get('SSLCertificateId'),
"Protocol": "HTTPS",
"InstancePort": configuration["HTTPPort"],
"LoadBalancerPort": 443
@@ -90,6 +88,16 @@ def get_listeners(subdomain, main_zone, configuration,
]
+def resolve_ssl_certificates(listeners, subdomain, main_zone, account_info):
+ new_listeners = []
+ for listener in listeners:
+ if listener.get('Protocol') in ('HTTPS', 'SSL'):
+ ssl_cert = get_ssl_cert(subdomain, main_zone, listener, account_info)
+ listener['SSLCertificateId'] = ssl_cert
+ new_listeners.append(listener)
+ return new_listeners
+
+
def component_elastic_load_balancer(definition,
configuration: dict,
args: TemplateArguments,
@@ -123,7 +131,8 @@ def component_elastic_load_balancer(definition,
subdomain = domain['Subdomain']
main_zone = domain['Zone'] # type: str
- listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration, account_info)
+ listeners = configuration.get('Listeners') or get_listeners(configuration)
+ listeners = resolve_ssl_certificates(listeners, subdomain, main_zone, account_info)
health_check_protocol = configuration.get('HealthCheckProtocol') or 'HTTP'
| Make property overwriting for senza components less destructive
When using a senza component to build a stack, it would be great if it was possible to overwrite properties in a less destructive way.
### Expected Behavior
We are using the default component `WeightedDnsElasticLoadBalancer`. Now we want to open port 80 in addition to port 443 in order to allow our application to redirect users from http to https. We want to keep all of the configuration and extend it with one additional entry.
### Actual Behavior
Currently this is not possible, because we can only use the option to overwrite the `Listeners` property like this:
```
- AppLoadBalancer:
Type: Senza::WeightedDnsElasticLoadBalancer
...
Listeners:
- LoadBalancerPort: 80
Protocol: HTTP
InstancePort: 80
InstanceProtocol: HTTP
- LoadBalancerPort: 443
Protocol: HTTPS
InstancePort: 80
InstanceProtocol: HTTP
SSLCertificateId: "????"
```
Doing this, we completely miss out on the ease of use that the component provides with regard to selecting the correct SSL certificate, because our definition completely overwrites the one senza creates.
--
As a solution it would be nice if we could either flag our `Listeners` entry as "these are additional, not a replacement" or alternatively have a way of tell senza to inject the SSLCertificateId. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index dfbf912..a7fe6f0 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -198,6 +198,31 @@ def test_component_load_balancer_http_only(monkeypatch):
assert 'Bar' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["Foo"]
+def test_component_load_balancer_listeners_ssl(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999",
+ "Listeners": [{"Protocol": "SSL"}]
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer.resolve_security_groups', mock_string_result)
+
+ get_ssl_cert = MagicMock()
+ get_ssl_cert.return_value = 'my-ssl-arn'
+ monkeypatch.setattr('senza.components.elastic_load_balancer.get_ssl_cert', get_ssl_cert)
+
+ result = component_elastic_load_balancer(definition, configuration, args, info, False, MagicMock())
+ assert 'my-ssl-arn' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["SSLCertificateId"]
+
+
def test_component_load_balancer_namelength(monkeypatch):
configuration = {
"Name": "test_lb",
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@5df821c7b09729f59debf89c0ff1e4068340cbc1#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_load_balancer_listeners_ssl"
] | [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
] | [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_cert_arn",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2",
"tests/test_components.py::test_max_description_length",
"tests/test_components.py::test_component_load_balancer_default_internal_scheme",
"tests/test_components.py::test_component_load_balancer_v2_default_internal_scheme"
] | [] | Apache License 2.0 | 807 | 490 | [
"senza/components/elastic_load_balancer.py"
] |
|
napjon__krisk-58 | 8497da2333a8265b2e19c87dcec3bae20b8d4059 | 2016-10-17 08:32:31 | a676433768a62b61f5861c68c127e40970914764 | diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index 924e015..68e3c41 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -43,6 +43,9 @@ def make_chart(df, **kwargs):
if kwargs.get('y', None):
chart.set_ylabel(kwargs['y'])
+ if kwargs['type'] == 'line':
+ chart.set_tooltip_style(trigger='axis',axis_pointer='shadow')
+
if kwargs['type'] in ['bar', 'line', 'hist']:
set_bar_line_chart(chart, df, **kwargs)
| Line Plot should have default trigger axis and axis pointer shadow | napjon/krisk | diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 50435da..13b78e3 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -92,6 +92,8 @@ def test_line(gapminder):
annotate='all')
opt = read_option_tests('line.json')
assert_barline_data(p, opt)
+ assert p.option['tooltip']['axisPointer']['type'] == 'shadow'
+ assert p.option['tooltip']['trigger'] == 'axis'
def test_smooth_line(gapminder):
@@ -134,6 +136,7 @@ def test_sort_bar_line(gapminder):
'name': 'Africa',
'type': 'line'}
+
def test_hist(gapminder):
p1 = kk.hist(gapminder,'lifeExp',bins=10)
opt1 = read_option_tests('hist_x.json')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest"
],
"pre_install": [
"pip install cython"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
Cython==3.0.12
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@8497da2333a8265b2e19c87dcec3bae20b8d4059#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- cython==3.0.12
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_line"
] | [] | [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_sort_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_density",
"krisk/tests/test_plot.py::test_scatter"
] | [] | BSD 3-Clause "New" or "Revised" License | 819 | 167 | [
"krisk/plot/make_chart.py"
] |
|
unnonouno__jqp-26 | 0977f3585147190adec127722e940783209b6ab6 | 2016-10-19 12:55:42 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | coveralls:
[](https://coveralls.io/builds/8409367)
Coverage increased (+1.4%) to 83.721% when pulling **df0ac543dc33b5ad23fb4dd94e5f512793a1f5f0 on import** into **0977f3585147190adec127722e940783209b6ab6 on master**.
coveralls:
[](https://coveralls.io/builds/8409381)
Coverage increased (+1.4%) to 83.721% when pulling **df0ac543dc33b5ad23fb4dd94e5f512793a1f5f0 on import** into **0977f3585147190adec127722e940783209b6ab6 on master**.
| diff --git a/jqp/__init__.py b/jqp/__init__.py
index cdcc02e..90fd8de 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -14,7 +14,15 @@ def _exit(error, return_code, message):
sys.exit(return_code)
-def run(in_io, out_io, cmd):
+def run(in_io, out_io, cmd, imports=[]):
+ environment = {}
+ for mod_name in imports:
+ try:
+ mod = __import__(mod_name)
+ except Exception as e:
+ _exit(e, 5, 'Cannot import module: %s' % mod_name)
+ environment[mod_name] = mod
+
for i, line in enumerate(in_io):
if line.strip() == '':
continue
@@ -26,7 +34,8 @@ def run(in_io, out_io, cmd):
_exit(e, 4, 'Parse error: line %d' % line_no)
try:
- out = eval(cmd, {'j': js})
+ environment['j'] = js
+ out = eval(cmd, environment)
except Exception as e:
_exit(e, 3, 'Cannot execute command: line %d' % line_no)
@@ -44,6 +53,10 @@ def main():
parser.add_argument(
'--version', action='version', version='jqp %s' % __version__,
help='show version and exit')
+ parser.add_argument(
+ '--import', action='append',
+ help='modules to import')
+
args = parser.parse_args()
- run(sys.stdin, sys.stdout, args.cmd)
+ run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'))
| Make import option
I need an option to import modules which I use in commands. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index 31ceb87..8e64641 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -16,6 +16,13 @@ class RunTest(unittest.TestCase):
jqp.run(inputs, outputs, 'j["name"]')
self.assertEqual(outputs.getvalue(), '"Taro"\n')
+ def test_import(self):
+ inputs = StringIO('''{"name": "Taro", "age": 10}
+''')
+ outputs = StringIO()
+ jqp.run(inputs, outputs, 're.sub("a", "A", j["name"])', imports=['re'])
+ self.assertEqual(outputs.getvalue(), '"TAro"\n')
+
def test_parse_error(self):
inputs = StringIO('invalid\n')
outputs = StringIO()
@@ -36,3 +43,10 @@ class RunTest(unittest.TestCase):
with self.assertRaises(SystemExit) as e:
jqp.run(inputs, outputs, 'lambda: 0')
self.assertEqual(e.exception.code, 3)
+
+ def test_import_error(self):
+ inputs = StringIO('1\n')
+ outputs = StringIO()
+ with self.assertRaises(SystemExit) as e:
+ jqp.run(inputs, outputs, 'j', imports=['unknown_module'])
+ self.assertEqual(e.exception.code, 5)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/unnonouno/jqp.git@0977f3585147190adec127722e940783209b6ab6#egg=jqp
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_import",
"tests/run_test.py::RunTest::test_import_error"
] | [] | [
"tests/run_test.py::RunTest::test1",
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_parse_error"
] | [] | MIT License | 822 | 412 | [
"jqp/__init__.py"
] |
gerva__tower-companion-28 | 5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a | 2016-10-24 08:21:41 | 5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a | coveralls:
[](https://coveralls.io/builds/8472505)
Coverage remained the same at 100.0% when pulling **ad80837a4268266baeb9cebaa1d793a4d0bc46fb on michaelgaida:TC-26** into **5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a on gerva:master**.
coveralls:
[](https://coveralls.io/builds/8473235)
Coverage remained the same at 100.0% when pulling **2ac2caa48f1e7562fef046d4e62dc28cfec0ff63 on michaelgaida:TC-26** into **5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a on gerva:master**.
| diff --git a/lib/api.py b/lib/api.py
index 6d76e18..a131931 100644
--- a/lib/api.py
+++ b/lib/api.py
@@ -8,6 +8,7 @@ import json
import requests
import lib.validate as validate
from lib.adhoc import AdHocError
+from lib.configuration import ConfigError
class APIError(Exception):
@@ -27,7 +28,13 @@ class APIv1(object):
# E: Instance of 'LookupDict' has no 'created' member (no-member)
def __init__(self, config):
self.config = config
- self.host = config.get('host')
+ try:
+ self.host = config.get('host')
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
+
self.api_url = "https://{0}/api/v1".format(self.host)
def _authentication(self):
@@ -38,14 +45,25 @@ class APIv1(object):
(tuple) username, password
"""
config = self.config
- return (config.get('username'), config.get('password'))
+ try:
+ return (config.get('username'), config.get('password'))
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
def _verify_ssl(self):
"""
Gets the value of verify_ssl from the actual configuraion
"""
config = self.config
- return config.getboolean('verify_ssl')
+ try:
+ return config.getboolean('verify_ssl')
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
+
def _get(self, url, params, data):
auth = self._authentication()
| When no configuration file is set, `kick` raises a ConfigError
we should intercept the ConfigError and explain what's wrong with the configuration, instead of just printing the full stack trace | gerva/tower-companion | diff --git a/test/test_api.py b/test/test_api.py
index 50b1a85..31d8d27 100644
--- a/test/test_api.py
+++ b/test/test_api.py
@@ -135,6 +135,25 @@ def test_get_json_error(monkeypatch):
with pytest.raises(APIError):
api._get_json(url='', params={}, data={})
+def test_less_configuration():
+ config = Config(None)
+ with pytest.raises(APIError):
+ api = APIv1(config)
+
+ config.update('host', HOST)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._authentication()
+
+ config.update('username', USERNAME)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._authentication()
+
+ config.update('password', PASSWORD)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._verify_ssl()
def test_job_params(monkeypatch):
def mockreturn(*args, **kwargs):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"prospector"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
click==6.6
coverage==6.2
dill==0.3.4
dodgy==0.2.1
flake8==4.0.1
flake8-polyfill==1.0.2
importlib-metadata==4.2.0
iniconfig==1.1.1
isort==5.10.1
lazy-object-proxy==1.7.1
mccabe==0.6.1
packaging==21.3
pep8-naming==0.10.0
platformdirs==2.4.0
pluggy==1.0.0
prospector==1.7.7
py==1.11.0
pycodestyle==2.8.0
pydocstyle==6.3.0
pyflakes==2.4.0
pylint==2.13.9
pylint-celery==0.3
pylint-django==2.5.3
pylint-flask==0.6
pylint-plugin-utils==0.7
pyparsing==3.1.4
pytest==7.0.1
PyYAML==3.12
requests==2.11.1
requirements-detector==0.7
setoptconf-tmp==0.3.1
snowballstemmer==2.2.0
toml==0.10.2
tomli==1.2.3
-e git+https://github.com/gerva/tower-companion.git@5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a#egg=tower_companion
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
zipp==3.6.0
| name: tower-companion
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- click==6.6
- coverage==6.2
- dill==0.3.4
- dodgy==0.2.1
- flake8==4.0.1
- flake8-polyfill==1.0.2
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- mccabe==0.6.1
- packaging==21.3
- pep8-naming==0.10.0
- platformdirs==2.4.0
- pluggy==1.0.0
- prospector==1.7.7
- py==1.11.0
- pycodestyle==2.8.0
- pydocstyle==6.3.0
- pyflakes==2.4.0
- pylint==2.13.9
- pylint-celery==0.3
- pylint-django==2.5.3
- pylint-flask==0.6
- pylint-plugin-utils==0.7
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==3.12
- requests==2.11.1
- requirements-detector==0.7
- setoptconf-tmp==0.3.1
- snowballstemmer==2.2.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/tower-companion
| [
"test/test_api.py::test_less_configuration"
] | [] | [
"test/test_api.py::test_api",
"test/test_api.py::test_verify_ssl",
"test/test_api.py::test_get",
"test/test_api.py::test_get_error",
"test/test_api.py::test_post",
"test/test_api.py::test_post_error",
"test/test_api.py::test_get_json",
"test/test_api.py::test_get_json_error",
"test/test_api.py::test_job_params",
"test/test_api.py::test_get_ids",
"test/test_api.py::test_get_ids_zero_results",
"test/test_api.py::test_launch_template_id",
"test/test_api.py::test_launch_data_to_url",
"test/test_api.py::test_job_stdout",
"test/test_api.py::test_job_status",
"test/test_api.py::test_job_finished",
"test/test_api.py::test_job_started",
"test/test_api.py::test_get_data",
"test/test_api.py::test_ad_hoc_to_api",
"test/test_api.py::test_launch_ad_hoc_job",
"test/test_api.py::test_job_url"
] | [] | Apache License 2.0 | 829 | 485 | [
"lib/api.py"
] |
scrapy__w3lib-77 | 8e19741b6b004d6248fb70b525255a96a1eb1ee6 | 2016-10-25 06:37:29 | e2c7b62ea59104f628c1c5f35333cb406b4e500e | redapple: Can you add tests for this?
Can you provide example websites showing this issue? | diff --git a/w3lib/html.py b/w3lib/html.py
index a4be054..a31d42b 100644
--- a/w3lib/html.py
+++ b/w3lib/html.py
@@ -311,7 +311,7 @@ def get_base_url(
"""
- utext = to_unicode(text, encoding)
+ utext: str = remove_comments(text, encoding=encoding)
m = _baseurl_re.search(utext)
if m:
return urljoin(
| It's not a good idead to parse HTML text using regular expressions
In [`w3lib.html`](https://github.com/scrapy/w3lib/blob/master/w3lib/html.py) regular expressions are used to parse HTML texts:
``` python
_ent_re = re.compile(r'&((?P<named>[a-z\d]+)|#(?P<dec>\d+)|#x(?P<hex>[a-f\d]+))(?P<semicolon>;?)', re.IGNORECASE)
_tag_re = re.compile(r'<[a-zA-Z\/!].*?>', re.DOTALL)
_baseurl_re = re.compile(six.u(r'<base\s[^>]*href\s*=\s*[\"\']\s*([^\"\'\s]+)\s*[\"\']'), re.I)
_meta_refresh_re = re.compile(six.u(r'<meta\s[^>]*http-equiv[^>]*refresh[^>]*content\s*=\s*(?P<quote>["\'])(?P<int>(\d*\.)?\d+)\s*;\s*url=\s*(?P<url>.*?)(?P=quote)'), re.DOTALL | re.IGNORECASE)
_cdata_re = re.compile(r'((?P<cdata_s><!\[CDATA\[)(?P<cdata_d>.*?)(?P<cdata_e>\]\]>))', re.DOTALL)
```
However this is definitely incorrect when it involves commented contents, e.g.
``` python
>>> from w3lib import html
>>> html.get_base_url("""<!-- <base href="http://example.com/" /> -->""")
'http://example.com/'
```
Introducing "heavier" utilities like `lxml` would solve this issue easily, but that might be an awful idea as `w3lib` aims to be lightweight & fast.
Or maybe we could implement some quick parser merely for eliminating the commented parts.
Any ideas?
| scrapy/w3lib | diff --git a/tests/test_html.py b/tests/test_html.py
index d4861ba..1e637b0 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -372,6 +372,30 @@ class GetBaseUrlTest(unittest.TestCase):
get_base_url(text, baseurl.encode("ascii")), "http://example.org/something"
)
+ def test_base_url_in_comment(self):
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/> -->"""), ""
+ )
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/>"""), ""
+ )
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/> --"""), ""
+ )
+ self.assertEqual(
+ get_base_url(
+ """<!-- <!-- <base href="http://example.com/"/> -- --> <base href="http://example_2.com/"/> """
+ ),
+ "http://example_2.com/",
+ )
+
+ self.assertEqual(
+ get_base_url(
+ """<!-- <base href="http://example.com/"/> --> <!-- <base href="http://example_2.com/"/> --> <base href="http://example_3.com/"/>"""
+ ),
+ "http://example_3.com/",
+ )
+
def test_relative_url_with_absolute_path(self):
baseurl = "https://example.org"
text = """\
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/scrapy/w3lib.git@8e19741b6b004d6248fb70b525255a96a1eb1ee6#egg=w3lib
| name: w3lib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/w3lib
| [
"tests/test_html.py::GetBaseUrlTest::test_base_url_in_comment"
] | [] | [
"tests/test_html.py::RemoveEntitiesTest::test_browser_hack",
"tests/test_html.py::RemoveEntitiesTest::test_encoding",
"tests/test_html.py::RemoveEntitiesTest::test_illegal_entities",
"tests/test_html.py::RemoveEntitiesTest::test_keep_entities",
"tests/test_html.py::RemoveEntitiesTest::test_missing_semicolon",
"tests/test_html.py::RemoveEntitiesTest::test_regular",
"tests/test_html.py::RemoveEntitiesTest::test_returns_unicode",
"tests/test_html.py::ReplaceTagsTest::test_replace_tags",
"tests/test_html.py::ReplaceTagsTest::test_replace_tags_multiline",
"tests/test_html.py::ReplaceTagsTest::test_returns_unicode",
"tests/test_html.py::RemoveCommentsTest::test_no_comments",
"tests/test_html.py::RemoveCommentsTest::test_remove_comments",
"tests/test_html.py::RemoveCommentsTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsTest::test_keep_argument",
"tests/test_html.py::RemoveTagsTest::test_remove_empty_tags",
"tests/test_html.py::RemoveTagsTest::test_remove_tags",
"tests/test_html.py::RemoveTagsTest::test_remove_tags_with_attributes",
"tests/test_html.py::RemoveTagsTest::test_remove_tags_without_tags",
"tests/test_html.py::RemoveTagsTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsTest::test_uppercase_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_empty_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsWithContentTest::test_tags_with_shared_prefix",
"tests/test_html.py::RemoveTagsWithContentTest::test_with_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_without_tags",
"tests/test_html.py::ReplaceEscapeCharsTest::test_returns_unicode",
"tests/test_html.py::ReplaceEscapeCharsTest::test_with_escape_chars",
"tests/test_html.py::ReplaceEscapeCharsTest::test_without_escape_chars",
"tests/test_html.py::UnquoteMarkupTest::test_returns_unicode",
"tests/test_html.py::UnquoteMarkupTest::test_unquote_markup",
"tests/test_html.py::GetBaseUrlTest::test_attributes_before_href",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_latin1",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_latin1_percent",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_utf8",
"tests/test_html.py::GetBaseUrlTest::test_no_scheme_url",
"tests/test_html.py::GetBaseUrlTest::test_relative_url_with_absolute_path",
"tests/test_html.py::GetBaseUrlTest::test_tag_name",
"tests/test_html.py::GetMetaRefreshTest::test_commented_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_entities_in_redirect_url",
"tests/test_html.py::GetMetaRefreshTest::test_float_refresh_intervals",
"tests/test_html.py::GetMetaRefreshTest::test_get_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_html_comments_with_uncommented_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_inside_noscript",
"tests/test_html.py::GetMetaRefreshTest::test_inside_script",
"tests/test_html.py::GetMetaRefreshTest::test_leading_newline_in_url",
"tests/test_html.py::GetMetaRefreshTest::test_multiline",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_latin1",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_latin1_query",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_utf8",
"tests/test_html.py::GetMetaRefreshTest::test_redirections_in_different_ordering__in_meta_tag",
"tests/test_html.py::GetMetaRefreshTest::test_relative_redirects",
"tests/test_html.py::GetMetaRefreshTest::test_tag_name",
"tests/test_html.py::GetMetaRefreshTest::test_without_url"
] | [] | BSD 3-Clause "New" or "Revised" License | 830 | 125 | [
"w3lib/html.py"
] |
wireservice__agate-637 | 0d2671358cdea94c83bd8f28b5a6718a9326b033 | 2016-10-30 16:11:15 | 97cb37f673af480f74fef546ceefd3ba24aff93b | diff --git a/agate/aggregations/any.py b/agate/aggregations/any.py
index 70fa702..67a9651 100644
--- a/agate/aggregations/any.py
+++ b/agate/aggregations/any.py
@@ -32,7 +32,7 @@ class Any(Aggregation):
column = table.columns[self._column_name]
data = column.values()
- if isinstance(column.data_type, Boolean):
+ if isinstance(column.data_type, Boolean) and self._test is None:
return any(data)
return any(self._test(d) for d in data)
| agate.All cannot test whether all data is False
If the column data type is boolean, test gets overwritten to search for True values.
| wireservice/agate | diff --git a/tests/test_aggregations.py b/tests/test_aggregations.py
index c3c8fbb..11eefe1 100644
--- a/tests/test_aggregations.py
+++ b/tests/test_aggregations.py
@@ -138,6 +138,7 @@ class TestBooleanAggregation(unittest.TestCase):
table = Table(rows, ['test'], [Boolean()])
Any('test').validate(table)
self.assertEqual(Any('test').run(table), False)
+ self.assertEqual(Any('test', lambda r: not r).run(table), True)
def test_all(self):
rows = [
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"apt-get install -y locales"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/wireservice/agate.git@0d2671358cdea94c83bd8f28b5a6718a9326b033#egg=agate
awesome-slugify==1.6.5
babel==2.17.0
exceptiongroup==1.2.2
iniconfig==2.1.0
isodate==0.7.2
numpy==2.0.2
packaging==24.2
pandas==2.2.3
parsedatetime==2.6
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytimeparse==1.1.8
pytz==2025.2
regex==2024.11.6
six==1.17.0
tomli==2.2.1
tzdata==2025.2
Unidecode==0.4.21
| name: agate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- awesome-slugify==1.6.5
- babel==2.17.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isodate==0.7.2
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- parsedatetime==2.6
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytimeparse==1.1.8
- pytz==2025.2
- regex==2024.11.6
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
- unidecode==0.04.21
prefix: /opt/conda/envs/agate
| [
"tests/test_aggregations.py::TestBooleanAggregation::test_any"
] | [] | [
"tests/test_aggregations.py::TestSimpleAggregation::test_all",
"tests/test_aggregations.py::TestSimpleAggregation::test_any",
"tests/test_aggregations.py::TestSimpleAggregation::test_count",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_column",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_value",
"tests/test_aggregations.py::TestSimpleAggregation::test_has_nulls",
"tests/test_aggregations.py::TestSimpleAggregation::test_summary",
"tests/test_aggregations.py::TestBooleanAggregation::test_all",
"tests/test_aggregations.py::TestDateTimeAggregation::test_max",
"tests/test_aggregations.py::TestDateTimeAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_deciles",
"tests/test_aggregations.py::TestNumberAggregation::test_iqr",
"tests/test_aggregations.py::TestNumberAggregation::test_mad",
"tests/test_aggregations.py::TestNumberAggregation::test_max",
"tests/test_aggregations.py::TestNumberAggregation::test_max_precision",
"tests/test_aggregations.py::TestNumberAggregation::test_mean",
"tests/test_aggregations.py::TestNumberAggregation::test_mean_with_nulls",
"tests/test_aggregations.py::TestNumberAggregation::test_median",
"tests/test_aggregations.py::TestNumberAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_mode",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_population_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_population_variance",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_quintiles",
"tests/test_aggregations.py::TestNumberAggregation::test_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_sum",
"tests/test_aggregations.py::TestNumberAggregation::test_variance",
"tests/test_aggregations.py::TestTextAggregation::test_max_length",
"tests/test_aggregations.py::TestTextAggregation::test_max_length_invalid"
] | [] | MIT License | 839 | 149 | [
"agate/aggregations/any.py"
] |
|
wireservice__agate-638 | 97cb37f673af480f74fef546ceefd3ba24aff93b | 2016-10-30 16:50:31 | 97cb37f673af480f74fef546ceefd3ba24aff93b | diff --git a/agate/aggregations/__init__.py b/agate/aggregations/__init__.py
index e4f40cc..cf82a30 100644
--- a/agate/aggregations/__init__.py
+++ b/agate/aggregations/__init__.py
@@ -21,6 +21,7 @@ from agate.aggregations.all import All # noqa
from agate.aggregations.any import Any # noqa
from agate.aggregations.count import Count # noqa
from agate.aggregations.deciles import Deciles # noqa
+from agate.aggregations.first import First # noqa
from agate.aggregations.has_nulls import HasNulls # noqa
from agate.aggregations.iqr import IQR # noqa
from agate.aggregations.mad import MAD # noqa
diff --git a/agate/aggregations/first.py b/agate/aggregations/first.py
new file mode 100644
index 0000000..37e1695
--- /dev/null
+++ b/agate/aggregations/first.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+from agate.aggregations.base import Aggregation
+from agate.data_types import Boolean
+
+
+class First(Aggregation):
+ """
+ Returns the first value that passes a test.
+
+ If the test is omitted, the aggregation will return the first value in the column.
+
+ If no values pass the test, the aggregation will raise an exception.
+
+ :param column_name:
+ The name of the column to check.
+ :param test:
+ A function that takes a value and returns `True` or `False`. Test may be
+ omitted when checking :class:`.Boolean` data.
+ """
+ def __init__(self, column_name, test=None):
+ self._column_name = column_name
+ self._test = test
+
+ def get_aggregate_data_type(self, table):
+ return table.columns[self._column_name].data_type
+
+ def validate(self, table):
+ column = table.columns[self._column_name]
+ data = column.values()
+
+ if self._test is not None and len([d for d in data if self._test(d)]) == 0:
+ raise ValueError('No values pass the given test.')
+
+ def run(self, table):
+ column = table.columns[self._column_name]
+ data = column.values()
+
+ if self._test is None:
+ return data[0]
+
+ return next((d for d in data if self._test(d)))
| agate.First aggregation
I end up doing this all the time:
```
def pick_first(c):
return c[0]
agate.Summary('Serial_Num', agate.Text(), pick_first)
``` | wireservice/agate | diff --git a/tests/test_aggregations.py b/tests/test_aggregations.py
index 11eefe1..e0dc625 100644
--- a/tests/test_aggregations.py
+++ b/tests/test_aggregations.py
@@ -67,6 +67,17 @@ class TestSimpleAggregation(unittest.TestCase):
self.assertEqual(All('one', lambda d: d != 5).run(self.table), True)
self.assertEqual(All('one', lambda d: d == 2).run(self.table), False)
+ def test_first(self):
+ with self.assertRaises(ValueError):
+ First('one', lambda d: d == 5).validate(self.table)
+
+ First('one', lambda d: d).validate(self.table)
+
+ self.assertIsInstance(First('one').get_aggregate_data_type(self.table), Number)
+ self.assertEqual(First('one').run(self.table), 1)
+ self.assertEqual(First('one', lambda d: d == 2).run(self.table), 2)
+ self.assertEqual(First('one', lambda d: not d).run(self.table), None)
+
def test_count(self):
rows = (
(1, 2, 'a'),
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-py3.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/wireservice/agate.git@97cb37f673af480f74fef546ceefd3ba24aff93b#egg=agate
alabaster==0.7.16
awesome-slugify==1.6.5
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isodate==0.7.2
Jinja2==3.1.6
MarkupSafe==3.0.2
nose==1.3.7
packaging==24.2
parsedatetime==2.6
platformdirs==4.3.7
pluggy==1.5.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytimeparse==1.1.8
pytz==2025.2
regex==2024.11.6
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
Unidecode==0.4.21
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: agate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- awesome-slugify==1.6.5
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isodate==0.7.2
- jinja2==3.1.6
- markupsafe==3.0.2
- nose==1.3.7
- packaging==24.2
- parsedatetime==2.6
- platformdirs==4.3.7
- pluggy==1.5.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytimeparse==1.1.8
- pytz==2025.2
- regex==2024.11.6
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- unidecode==0.04.21
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/agate
| [
"tests/test_aggregations.py::TestSimpleAggregation::test_first"
] | [] | [
"tests/test_aggregations.py::TestSimpleAggregation::test_all",
"tests/test_aggregations.py::TestSimpleAggregation::test_any",
"tests/test_aggregations.py::TestSimpleAggregation::test_count",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_column",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_value",
"tests/test_aggregations.py::TestSimpleAggregation::test_has_nulls",
"tests/test_aggregations.py::TestSimpleAggregation::test_summary",
"tests/test_aggregations.py::TestBooleanAggregation::test_all",
"tests/test_aggregations.py::TestBooleanAggregation::test_any",
"tests/test_aggregations.py::TestDateTimeAggregation::test_max",
"tests/test_aggregations.py::TestDateTimeAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_deciles",
"tests/test_aggregations.py::TestNumberAggregation::test_iqr",
"tests/test_aggregations.py::TestNumberAggregation::test_mad",
"tests/test_aggregations.py::TestNumberAggregation::test_max",
"tests/test_aggregations.py::TestNumberAggregation::test_max_precision",
"tests/test_aggregations.py::TestNumberAggregation::test_mean",
"tests/test_aggregations.py::TestNumberAggregation::test_mean_with_nulls",
"tests/test_aggregations.py::TestNumberAggregation::test_median",
"tests/test_aggregations.py::TestNumberAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_mode",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_population_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_population_variance",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_quintiles",
"tests/test_aggregations.py::TestNumberAggregation::test_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_sum",
"tests/test_aggregations.py::TestNumberAggregation::test_variance",
"tests/test_aggregations.py::TestTextAggregation::test_max_length",
"tests/test_aggregations.py::TestTextAggregation::test_max_length_invalid"
] | [] | MIT License | 840 | 610 | [
"agate/aggregations/__init__.py"
] |
|
scrapy__scrapy-2393 | 451f1474689a18d6a54630915c42172626624ef7 | 2016-11-10 15:28:51 | d7b26edf6b419e379a7a0a425093f02cac2fcf33 | codecov-io: ## [Current coverage](https://codecov.io/gh/scrapy/scrapy/pull/2393?src=pr) is 83.36% (diff: 100%)
> Merging [#2393](https://codecov.io/gh/scrapy/scrapy/pull/2393?src=pr) into [master](https://codecov.io/gh/scrapy/scrapy/branch/master?src=pr) will decrease coverage by **0.01%**
```diff
@@ master #2393 diff @@
==========================================
Files 161 161
Lines 8730 8730
Methods 0 0
Messages 0 0
Branches 1285 1285
==========================================
- Hits 7279 7278 -1
Misses 1201 1201
- Partials 250 251 +1
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [de89b1b...626de8b](https://codecov.io/gh/scrapy/scrapy/compare/de89b1b562196fb8eec400c5b0d0ba9817a0271d...626de8b91c7a5585bf0becaf4841eeae7e38607a?src=pr)
kmike: It looks similar to https://github.com/scrapy/scrapy/pull/2001.
This is quite inconsistent in Scrapy: e.g. cache or ftp handler still won't use body to get response class.
I guess a part of the problem is that ResponseTypes.from_body is not based on any specification, so there was no strong desire to use body consistently - not using body was not seen as a bug because from_body may look like a hack.
It seems the most up-to-date document is https://mimesniff.spec.whatwg.org, especially https://mimesniff.spec.whatwg.org/#identifying-a-resource-with-an-unknown-mime-type.
While this change looks fine, the fix is not complete, and it looks like a part of a larger problem.
redapple: @kmike , what do you mean by the fix not being complete?
Is it because `responsetypes.from_args()` does not follow whatwg?
Or because other uses of `from_args()` do not use the body either?
The aim here was to fix an issue at http decompression where, something else than the default `Response` type if there are hints in the body.
kmike: @redapple the patch looks good because it improves response handling in decompression middleware, so I'm fine with merging it after a rebase.
The logic middleware uses to detect response type is still different from what browsers do, and Scrapy is inconsistent in mime sniffing it performs. I should have opened another ticket for that, but I found it while reviewing this PR, so I wrote it in a comment :)
redapple: Maybe #2145 is so rare we don't need to care. (I have never seen it myself.) | diff --git a/scrapy/downloadermiddlewares/httpcompression.py b/scrapy/downloadermiddlewares/httpcompression.py
index 19d6345e4..eb00d8923 100644
--- a/scrapy/downloadermiddlewares/httpcompression.py
+++ b/scrapy/downloadermiddlewares/httpcompression.py
@@ -38,7 +38,7 @@ class HttpCompressionMiddleware(object):
encoding = content_encoding.pop()
decoded_body = self._decode(response.body, encoding.lower())
respcls = responsetypes.from_args(headers=response.headers, \
- url=response.url)
+ url=response.url, body=decoded_body)
kwargs = dict(cls=respcls, body=decoded_body)
if issubclass(respcls, TextResponse):
# force recalculating the encoding until we make sure the
| Disabling RedirectMiddleware results in HttpCompressionMiddleware errors
I wanted not to redirect `303` responses, but instead retry them.
From the docs, I thought I could achieve it through two settings:
```
REDIRECT_ENABLED = False
RETRY_HTTP_CODES = [301, 302, 307, 308, 500, 502, 503, 504, 408]
```
It ended up giving me errors on `HttpCompressionMiddleware`:
```
Traceback (most recent call last):
File "twisted/internet/defer.py", line 1128, in _inlineCallbacks
result = g.send(result)
File "scrapy/core/downloader/middleware.py", line 53, in process_response
spider=spider)
File "scrapy/downloadermiddlewares/httpcompression.py", line 38, in process_response
response = response.replace(**kwargs)
File "scrapy/http/response/text.py", line 50, in replace
return Response.replace(self, *args, **kwargs)
File "scrapy/http/response/__init__.py", line 77, in replace
return cls(*args, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'encoding'
``` | scrapy/scrapy | diff --git a/tests/test_downloadermiddleware_httpcompression.py b/tests/test_downloadermiddleware_httpcompression.py
index 7924fb3b5..5403e8f52 100644
--- a/tests/test_downloadermiddleware_httpcompression.py
+++ b/tests/test_downloadermiddleware_httpcompression.py
@@ -7,6 +7,7 @@ from scrapy.spiders import Spider
from scrapy.http import Response, Request, HtmlResponse
from scrapy.downloadermiddlewares.httpcompression import HttpCompressionMiddleware, \
ACCEPTED_ENCODINGS
+from scrapy.responsetypes import responsetypes
from tests import tests_datadir
from w3lib.encoding import resolve_encoding
@@ -152,6 +153,20 @@ class HttpCompressionTest(TestCase):
self.assertEqual(newresponse.body, plainbody)
self.assertEqual(newresponse.encoding, resolve_encoding('gb2312'))
+ def test_process_response_no_content_type_header(self):
+ headers = {
+ 'Content-Encoding': 'identity',
+ }
+ plainbody = b"""<html><head><title>Some page</title><meta http-equiv="Content-Type" content="text/html; charset=gb2312">"""
+ respcls = responsetypes.from_args(url="http://www.example.com/index", headers=headers, body=plainbody)
+ response = respcls("http://www.example.com/index", headers=headers, body=plainbody)
+ request = Request("http://www.example.com/index")
+
+ newresponse = self.mw.process_response(request, response, self.spider)
+ assert isinstance(newresponse, respcls)
+ self.assertEqual(newresponse.body, plainbody)
+ self.assertEqual(newresponse.encoding, resolve_encoding('gb2312'))
+
def test_process_response_gzipped_contenttype(self):
response = self._getresponse('gzip')
response.headers['Content-Type'] = 'application/gzip'
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
coverage==7.8.0
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
execnet==2.1.1
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@451f1474689a18d6a54630915c42172626624ef7#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- coverage==7.8.0
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_no_content_type_header"
] | [] | [
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_multipleencodings",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_request",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_encoding_inside_body",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_force_recalculate_encoding",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_gzip",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_gzip_app_octetstream_contenttype",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_gzip_binary_octetstream_contenttype",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_gzipped_contenttype",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_head_request_no_decode_required",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_plain",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_rawdeflate",
"tests/test_downloadermiddleware_httpcompression.py::HttpCompressionTest::test_process_response_zlibdelate"
] | [] | BSD 3-Clause "New" or "Revised" License | 847 | 180 | [
"scrapy/downloadermiddlewares/httpcompression.py"
] |
cdent__gabbi-186 | f27b9aba8590dbdb16749f917cdcf3fffc6218e3 | 2016-11-10 17:05:40 | f27b9aba8590dbdb16749f917cdcf3fffc6218e3 | diff --git a/gabbi/httpclient.py b/gabbi/httpclient.py
index 22869e5..88f873d 100644
--- a/gabbi/httpclient.py
+++ b/gabbi/httpclient.py
@@ -19,6 +19,7 @@ import sys
import urllib3
+from gabbi.handlers import jsonhandler
from gabbi import utils
@@ -138,11 +139,20 @@ class VerboseHttp(Http):
def _print_body(self, headers, content):
"""Output body if not binary."""
- if self._show_body and utils.not_binary(
- utils.extract_content_type(headers)[0]):
+ content_type = utils.extract_content_type(headers)[0]
+ if self._show_body and utils.not_binary(content_type):
+ content = utils.decode_response_content(headers, content)
+ # TODO(cdent): Using the JSONHandler here instead of
+ # just the json module to make it clear that eventually
+ # we could pretty print any printable output by using a
+ # handler's loads() and dumps(). Not doing that now
+ # because it would be pointless (no other interesting
+ # handlers) and this approach may be entirely wrong.
+ if jsonhandler.JSONHandler.accepts(content_type):
+ data = jsonhandler.JSONHandler.loads(content)
+ content = jsonhandler.JSONHandler.dumps(data, pretty=True)
self._verbose_output('')
- self._verbose_output(
- utils.decode_response_content(headers, content))
+ self._verbose_output(content)
def _print_header(self, name, value, prefix='', stream=None):
"""Output one single header."""
| If verbose: True and response content-type is json may as well pretty print the output
It kinda seems like if we know the content-type when being verbose about bodies, we may as well pretty print if it is json. It's not much effort to do so (there's already pretty printing happening in some test failure messages) and it is nice.
Is it too nice?
/cc @FND
| cdent/gabbi | diff --git a/gabbi/tests/gabbits_runner/test_verbose.yaml b/gabbi/tests/gabbits_runner/test_verbose.yaml
new file mode 100644
index 0000000..99b0b0f
--- /dev/null
+++ b/gabbi/tests/gabbits_runner/test_verbose.yaml
@@ -0,0 +1,18 @@
+tests:
+
+- name: POST data with verbose true
+ verbose: true
+ POST: /
+ request_headers:
+ content-type: application/json
+ data:
+ - our text
+
+- name: structured data
+ verbose: true
+ POST: /
+ request_headers:
+ content-type: application/json
+ data:
+ cow: moo
+ dog: bark
diff --git a/gabbi/tests/test_runner.py b/gabbi/tests/test_runner.py
index 2f313a4..bf882ab 100644
--- a/gabbi/tests/test_runner.py
+++ b/gabbi/tests/test_runner.py
@@ -228,6 +228,27 @@ class RunnerTest(unittest.TestCase):
except SystemExit as err:
self.assertSuccess(err)
+ def test_verbose_output_formatting(self):
+ """Confirm that a verbose test handles output properly."""
+ sys.argv = ['gabbi-run', 'http://%s:%s/foo' % (self.host, self.port)]
+
+ sys.argv.append('--')
+ sys.argv.append('gabbi/tests/gabbits_runner/test_verbose.yaml')
+ with self.server():
+ try:
+ runner.run()
+ except SystemExit as err:
+ self.assertSuccess(err)
+
+ sys.stdout.seek(0)
+ output = sys.stdout.read()
+ self.assertIn('"our text"', output)
+ self.assertIn('"cow": "moo"', output)
+ self.assertIn('"dog": "bark"', output)
+ # confirm pretty printing
+ self.assertIn('{\n', output)
+ self.assertIn('}\n', output)
+
def assertSuccess(self, exitError):
errors = exitError.args[0]
if errors:
diff --git a/gabbi/tests/test_verbose.yaml b/gabbi/tests/test_verbose.yaml
deleted file mode 100644
index 29a6c64..0000000
--- a/gabbi/tests/test_verbose.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-tests:
-
- - name: POST data with verbose true
- verbose: true
- POST: /
- request_headers:
- content-type: application/json
- data:
- 'text'
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.27 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt",
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
decorator==5.1.1
docutils==0.18.1
extras==1.0.0
fixtures==4.0.1
flake8==3.8.4
-e git+https://github.com/cdent/gabbi.git@f27b9aba8590dbdb16749f917cdcf3fffc6218e3#egg=gabbi
hacking==4.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
MarkupSafe==2.0.1
mccabe==0.6.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testrepository==0.0.21
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- decorator==5.1.1
- docutils==0.18.1
- extras==1.0.0
- fixtures==4.0.1
- flake8==3.8.4
- hacking==4.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- markupsafe==2.0.1
- mccabe==0.6.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pycodestyle==2.6.0
- pyflakes==2.2.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testrepository==0.0.21
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_runner.py::RunnerTest::test_verbose_output_formatting"
] | [] | [
"gabbi/tests/test_runner.py::RunnerTest::test_custom_response_handler",
"gabbi/tests/test_runner.py::RunnerTest::test_exit_code",
"gabbi/tests/test_runner.py::RunnerTest::test_input_files",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing_standard_port"
] | [] | Apache License 2.0 | 848 | 375 | [
"gabbi/httpclient.py"
] |
|
sigmavirus24__github3.py-650 | e57cdf6b2f3fb3eec9bf235a1b55f2bd4bd41d85 | 2016-11-14 22:39:05 | 785562d89a01545e1efe54efc8aba5e8a15cdd18 | sigmavirus24: You'll need to rebase this on top of develop once #652 merges.
sigmavirus24: Closing & Reopening apparently will trigger a new merged build on Travis.
itsmemattchung: Thanks for submitting the PR. Instead of modifying the existing test case, test_merge, please add a new test case to cover the new use case (i.e not passing in `commit_message`)
broady: I haven't forgotten about this. I'll finish this up in the next couple days. Thanks for the review. | diff --git a/github3/pulls.py b/github3/pulls.py
index e1b843cd..2511c004 100644
--- a/github3/pulls.py
+++ b/github3/pulls.py
@@ -311,7 +311,7 @@ class PullRequest(models.GitHubCore):
return self._iter(int(number), url, IssueComment, etag=etag)
@requires_auth
- def merge(self, commit_message='', sha=None, squash=False):
+ def merge(self, commit_message=None, sha=None, squash=False):
"""Merge this pull request.
:param str commit_message: (optional), message to be used for the
@@ -322,9 +322,11 @@ class PullRequest(models.GitHubCore):
head branch.
:returns: bool
"""
- parameters = {'commit_message': commit_message, 'squash': squash}
+ parameters = {'squash': squash}
if sha:
parameters['sha'] = sha
+ if commit_message is not None:
+ parameters['commit_message'] = commit_message
url = self._build_url('merge', base_url=self._api)
json = self._json(self._put(url, data=dumps(parameters)), 200)
if not json:
| Allow `commit_message` default for merging
To do this, `commit_message` must be omitted from the request (not just empty).
https://github.com/sigmavirus24/github3.py/blob/7ebca532da8dcc7f22be9f7f1b13fdea3e4d34c0/github3/pulls.py#L312-L331
https://developer.github.com/v3/repos/merging/
<bountysource-plugin>
---
Want to back this issue? **[Post a bounty on it!](https://www.bountysource.com/issues/39229921-allow-commit_message-default-for-merging?utm_campaign=plugin&utm_content=tracker%2F183477&utm_medium=issues&utm_source=github)** We accept bounties via [Bountysource](https://www.bountysource.com/?utm_campaign=plugin&utm_content=tracker%2F183477&utm_medium=issues&utm_source=github).
</bountysource-plugin> | sigmavirus24/github3.py | diff --git a/tests/unit/test_pulls.py b/tests/unit/test_pulls.py
index a5b7480b..828c1b29 100644
--- a/tests/unit/test_pulls.py
+++ b/tests/unit/test_pulls.py
@@ -95,7 +95,7 @@ class TestPullRequest(helper.UnitHelper):
self.put_called_with(
url_for('merge'),
- data={"squash": False, "commit_message": ""}
+ data={"squash": False}
)
def test_merge_squash_message(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
betamax==0.8.1
betamax-matchers==0.4.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
distlib==0.3.9
filelock==3.4.1
-e git+https://github.com/sigmavirus24/github3.py.git@e57cdf6b2f3fb3eec9bf235a1b55f2bd4bd41d85#egg=github3.py
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
mock==1.0.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-toolbelt==1.0.0
six==1.17.0
swebench-matterhorn @ file:///swebench_matterhorn
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
uritemplate==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
zipp==3.6.0
| name: github3.py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- betamax==0.8.1
- betamax-matchers==0.4.0
- charset-normalizer==2.0.12
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mock==1.0.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- six==1.17.0
- swebench-matterhorn==0.0.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- uritemplate==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- wheel==0.21.0
- zipp==3.6.0
prefix: /opt/conda/envs/github3.py
| [
"tests/unit/test_pulls.py::TestPullRequest::test_merge"
] | [] | [
"tests/unit/test_pulls.py::TestPullRequest::test_close",
"tests/unit/test_pulls.py::TestPullRequest::test_create_comment",
"tests/unit/test_pulls.py::TestPullRequest::test_create_review_comment",
"tests/unit/test_pulls.py::TestPullRequest::test_diff",
"tests/unit/test_pulls.py::TestPullRequest::test_is_merged_no_requset",
"tests/unit/test_pulls.py::TestPullRequest::test_is_merged_request",
"tests/unit/test_pulls.py::TestPullRequest::test_issue",
"tests/unit/test_pulls.py::TestPullRequest::test_merge_squash_message",
"tests/unit/test_pulls.py::TestPullRequest::test_patch",
"tests/unit/test_pulls.py::TestPullRequest::test_reopen",
"tests/unit/test_pulls.py::TestPullRequest::test_update",
"tests/unit/test_pulls.py::TestPullRequestRequiresAuthentication::test_close",
"tests/unit/test_pulls.py::TestPullRequestRequiresAuthentication::test_create_review_comment",
"tests/unit/test_pulls.py::TestPullRequestRequiresAuthentication::test_merge",
"tests/unit/test_pulls.py::TestPullRequestRequiresAuthentication::test_reopen",
"tests/unit/test_pulls.py::TestPullRequestRequiresAuthentication::test_update",
"tests/unit/test_pulls.py::TestPullRequestIterator::test_commits",
"tests/unit/test_pulls.py::TestPullRequestIterator::test_files",
"tests/unit/test_pulls.py::TestPullRequestIterator::test_issue_comments",
"tests/unit/test_pulls.py::TestPullRequestIterator::test_review_comments",
"tests/unit/test_pulls.py::TestReviewComment::test_reply",
"tests/unit/test_pulls.py::TestReviewComment::test_reply_requires_authentication",
"tests/unit/test_pulls.py::TestPullFile::test_contents"
] | [] | BSD 3-Clause "New" or "Revised" License | 850 | 297 | [
"github3/pulls.py"
] |
F5Networks__f5-common-python-809 | adde820d63f53295a322e093df4438b17c9eeae1 | 2016-11-15 13:55:42 | 83adbc050922c78a30c89c9c39b601a18e798171 | wojtek0806: @pjbreaux or @caphrim007 whenever it finishes testing please merge :) | diff --git a/f5/bigip/resource.py b/f5/bigip/resource.py
index 3bfcfec..018bfe1 100644
--- a/f5/bigip/resource.py
+++ b/f5/bigip/resource.py
@@ -100,6 +100,7 @@ from f5.bigip.mixins import LazyAttributeMixin
from f5.bigip.mixins import ToDictMixin
from f5.sdk_exception import F5SDKError
from f5.sdk_exception import UnsupportedMethod
+from icontrol.exceptions import iControlUnexpectedHTTPError
from requests.exceptions import HTTPError
from six import iteritems
from six import iterkeys
@@ -508,9 +509,18 @@ class ResourceBase(PathElement, ToDictMixin):
data_dict.update(kwargs)
- response = session.put(update_uri, json=data_dict, **requests_params)
- self._meta_data = temp_meta
- self._local_update(response.json())
+ # This is necessary as when we receive exception the returned object
+ # has its _meta_data stripped.
+
+ try:
+ response = session.put(update_uri, json=data_dict,
+ **requests_params)
+ except iControlUnexpectedHTTPError:
+ response = session.get(update_uri, **requests_params)
+ raise
+ finally:
+ self._meta_data = temp_meta
+ self._local_update(response.json())
def update(self, **kwargs):
"""Update the configuration of the resource on the BIG-IP®.
| factor other supported parameters and formats into iapp_parser
The iapp_parser util supports the common formats for iapps but does not support all of them, particularly those that are part of the iapp templates that F5 releases on downloads.f5.com | F5Networks/f5-common-python | diff --git a/f5/bigip/test/unit/test_resource.py b/f5/bigip/test/unit/test_resource.py
index ed022d9..3ef0de4 100644
--- a/f5/bigip/test/unit/test_resource.py
+++ b/f5/bigip/test/unit/test_resource.py
@@ -47,6 +47,7 @@ from f5.bigip.tm.ltm.virtual import Policies_s
from f5.bigip.tm.ltm.virtual import Profiles_s
from f5.bigip.tm.ltm.virtual import Virtual
from f5.sdk_exception import UnsupportedMethod
+from icontrol.exceptions import iControlUnexpectedHTTPError
@pytest.fixture
@@ -330,6 +331,30 @@ class TestResource_update(object):
assert pre_meta == r._meta_data
assert r.raw == r.__dict__
+ def test_meta_data_exception_raised(self):
+ r = Resource(mock.MagicMock())
+ fake_session = mock.MagicMock(name='mock_session')
+ r._meta_data['allowed_lazy_attributes'] = []
+ r._meta_data['uri'] = 'URI'
+ text = 'Unexpected Error: Bad Request for uri: URI'
+ error_response = mock.MagicMock(name='error_mock')
+ error_response.status_code = 400
+ error_response.text = text
+ error = iControlUnexpectedHTTPError(response=error_response)
+ fake_session.get.return_value = MockResponse({u"generation": 0})
+ fake_session.put.side_effect = error
+ r._meta_data['bigip']._meta_data = {'icr_session': fake_session,
+ 'hostname': 'TESTDOMAINNAME',
+ 'uri':
+ 'https://TESTDOMAIN:443/mgmt/tm/'}
+ pre_meta = r._meta_data.copy()
+ with pytest.raises(iControlUnexpectedHTTPError) as err:
+ r.update(a=u"b")
+ assert err.value.response.status_code == 400
+ assert err.value.response.text == text
+ assert pre_meta == r._meta_data
+ assert r.raw == r.__dict__
+
def test_Collection_removal(self):
r = Resource(mock.MagicMock())
r._meta_data['allowed_lazy_attributes'] = []
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 1.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pytest-bdd",
"pytest-benchmark",
"pytest-randomly",
"responses",
"mock",
"hypothesis",
"freezegun",
"trustme",
"requests-mock",
"requests",
"tomlkit"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
execnet==2.1.1
f5-icontrol-rest==1.1.0
-e git+https://github.com/F5Networks/f5-common-python.git@adde820d63f53295a322e093df4438b17c9eeae1#egg=f5_sdk
freezegun==1.5.1
gherkin-official==29.0.0
hypothesis==6.130.6
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
Mako==1.3.9
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
parse==1.20.2
parse_type==0.6.4
pluggy==1.5.0
py-cpuinfo==9.0.0
pycparser==2.22
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-bdd==8.1.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-randomly==3.16.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
responses==0.25.7
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
tomlkit==0.13.2
trustme==1.2.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: f5-common-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- f5-icontrol-rest==1.1.0
- freezegun==1.5.1
- gherkin-official==29.0.0
- hypothesis==6.130.6
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- mako==1.3.9
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- parse==1.20.2
- parse-type==0.6.4
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pycparser==2.22
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-bdd==8.1.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-randomly==3.16.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- responses==0.25.7
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- tomlkit==0.13.2
- trustme==1.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/f5-common-python
| [
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_meta_data_exception_raised"
] | [
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_URICreationCollision",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_reduce_boolean_same_value",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_success",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_reduce_boolean_removes_enabled",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_reduce_boolean_removes_disabled",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_reduce_boolean_removes_nothing",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_command_parameters_fail",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_load_parameters_fail",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_create_parameters_fail",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_exclusive_parameters_fail",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_load_uri_creation_collision",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_reduce_boolean_same_value",
"f5/bigip/test/unit/test_resource.py::TestStats::test_load",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_reduce_boolean_same_value",
"f5/bigip/test/unit/test_resource.py::test_ResourceBase"
] | [
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_requests_params_collision",
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_icontrol_version_set",
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_missing_required_params",
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_icontrol_version_default",
"f5/bigip/test/unit/test_resource.py::TestResource_load::test_success",
"f5/bigip/test/unit/test_resource.py::TestUnnamedResource::test_load",
"f5/bigip/test/unit/test_resource.py::TestUnnamedResource::test_delete_raises",
"f5/bigip/test/unit/test_resource.py::TestUnnamedResource::test_create_raises",
"f5/bigip/test/unit/test_resource.py::TestResource_delete::test_invalid_force",
"f5/bigip/test/unit/test_resource.py::TestResource_delete::test_success",
"f5/bigip/test/unit/test_resource.py::TestCollection_get_collection::test_success",
"f5/bigip/test/unit/test_resource.py::TestCollection_get_collection::test_unregistered_kind",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_KindTypeMismatch",
"f5/bigip/test/unit/test_resource.py::TestResourceCreate::test_missing_required_creation_parameter",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_exclusive_parameters_pass",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_check_exclusive_parameters_empty_attr",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_missing_req_param_false",
"f5/bigip/test/unit/test_resource.py::TestPathElement::test_missing_req_param_true",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_exists_error",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_load_missing_required_params",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_exists_pop_name_id_uri",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_fetch",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_exists_not_found",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_update_raises",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_exists_loadable",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_load_success",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_load_pop_name_id_uri",
"f5/bigip/test/unit/test_resource.py::TestAsmResource::test_delete_success",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_Collection_removal",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_reduce_boolean_removes_nothing",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test__meta_data_state",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test__check_generation_with_mismatch",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_read_only_removal",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_reduce_boolean_removes_disabled",
"f5/bigip/test/unit/test_resource.py::TestResource_update::test_reduce_boolean_removes_enabled",
"f5/bigip/test/unit/test_resource.py::TestStats::test_delete_raises",
"f5/bigip/test/unit/test_resource.py::TestStats::test_modify_raises",
"f5/bigip/test/unit/test_resource.py::TestStats::test_create_raises",
"f5/bigip/test/unit/test_resource.py::TestResource_exists::test_not_found",
"f5/bigip/test/unit/test_resource.py::TestResource_exists::test_error",
"f5/bigip/test/unit/test_resource.py::TestResource_exists::test_loadable",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test__meta_data_state",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_reduce_boolean_removes_disabled",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_read_only_validate",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_reduce_boolean_removes_enabled",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_Collection_removal",
"f5/bigip/test/unit/test_resource.py::TestResource_modify::test_reduce_boolean_removes_nothing",
"f5/bigip/test/unit/test_resource.py::test_Resource_refresh",
"f5/bigip/test/unit/test_resource.py::test_Resource__local_update_IncompatibleKeys",
"f5/bigip/test/unit/test_resource.py::test_Resource__local_update",
"f5/bigip/test/unit/test_resource.py::test_Resource__check_keys_valid_rict",
"f5/bigip/test/unit/test_resource.py::test__activate_URI_no_stats",
"f5/bigip/test/unit/test_resource.py::test__create_with_Collision",
"f5/bigip/test/unit/test_resource.py::test__activate_URI",
"f5/bigip/test/unit/test_resource.py::test_OrganizingCollection",
"f5/bigip/test/unit/test_resource.py::test_collection_s"
] | [] | Apache License 2.0 | 852 | 338 | [
"f5/bigip/resource.py"
] |
springload__draftjs_exporter-21 | fb287b5d9132aa8f01538aec193eccbc940ccc59 | 2016-11-16 03:08:00 | 8805d4ad5665c56f8835a100b24408a42b72df60 | coveralls:
[](https://coveralls.io/builds/8846225)
Coverage decreased (-0.3%) to 99.731% when pulling **19723bdc1a8e5e85f3a5efc3b35c05f04bcfaa58 on fix/cleanup** into **fb287b5d9132aa8f01538aec193eccbc940ccc59 on master**.
loicteixeira: Regarding the 2 TODO comments in `entities.Link`:
- `Attributes will be in an unpredictable sort order` in `render`. How important is it for attributes to be in a certain order in the html?
- `How much do we need to whitelist / blacklist attributes?` should probably just follow the [standard](https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/data-*)
coveralls:
[](https://coveralls.io/builds/8846408)
Coverage decreased (-0.3%) to 99.731% when pulling **318593e73d48f95d1faa95fb7e4778805167c0b6 on fix/cleanup** into **fb287b5d9132aa8f01538aec193eccbc940ccc59 on master**.
coveralls:
[](https://coveralls.io/builds/8846527)
Coverage remained the same at 100.0% when pulling **d7939a2119508d7be23d44d943164b955d25cb17 on fix/cleanup** into **fb287b5d9132aa8f01538aec193eccbc940ccc59 on master**.
| diff --git a/draftjs_exporter/constants.py b/draftjs_exporter/constants.py
index 882f5ee..8e6e18a 100644
--- a/draftjs_exporter/constants.py
+++ b/draftjs_exporter/constants.py
@@ -3,10 +3,13 @@ from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
- def __init__(self, tupleList):
- self.tupleList = tupleList
+ def __init__(self, *elements):
+ self.elements = tuple(elements)
def __getattr__(self, name):
+ if name not in self.elements:
+ raise AttributeError("'Enum' has no attribute '{}'".format(name))
+
return name
@@ -27,6 +30,6 @@ class BLOCK_TYPES:
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
-ENTITY_TYPES = Enum(('LINK', 'IMAGE', 'TOKEN'))
+ENTITY_TYPES = Enum('LINK', 'IMAGE', 'TOKEN')
-INLINE_STYLES = Enum(('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE'))
+INLINE_STYLES = Enum('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE')
diff --git a/draftjs_exporter/dom.py b/draftjs_exporter/dom.py
index 0b76d7b..89ab65d 100644
--- a/draftjs_exporter/dom.py
+++ b/draftjs_exporter/dom.py
@@ -13,11 +13,11 @@ except NameError:
unicode = lambda s: str(s)
-def Soup(str):
+def Soup(raw_str):
"""
Wrapper around BeautifulSoup to keep the code DRY.
"""
- return BeautifulSoup(str, 'html5lib')
+ return BeautifulSoup(raw_str, 'html5lib')
class DOM(object):
@@ -25,11 +25,14 @@ class DOM(object):
Wrapper around our HTML building library to facilitate changes.
"""
@staticmethod
- def create_tag(type, attributes={}):
- return Soup('').new_tag(type, **attributes)
+ def create_tag(type_, attributes=None):
+ if attributes is None:
+ attributes = {}
+
+ return Soup('').new_tag(type_, **attributes)
@staticmethod
- def create_element(type=None, props={}, *children):
+ def create_element(type_=None, props=None, *children):
"""
Signature inspired by React.createElement.
createElement(
@@ -39,15 +42,17 @@ class DOM(object):
)
https://facebook.github.io/react/docs/top-level-api.html#react.createelement
"""
- if not type:
+ if props is None:
+ props = {}
+
+ if not type_:
elt = DOM.create_document_fragment()
else:
attributes = {}
# Map props from React/Draft.js to HTML lingo.
if 'className' in props:
- props['class'] = props.get('className')
- props.pop('className', None)
+ props['class'] = props.pop('className')
for key in props:
prop = props[key]
@@ -56,10 +61,10 @@ class DOM(object):
attributes[key] = prop
# "type" is either an entity with a render method, or a tag name.
- if inspect.isclass(type):
- elt = type().render(attributes)
+ if inspect.isclass(type_):
+ elt = type_().render(attributes)
else:
- elt = DOM.create_tag(type, attributes)
+ elt = DOM.create_tag(type_, attributes)
for child in children:
if child:
diff --git a/draftjs_exporter/entities.py b/draftjs_exporter/entities.py
index e0c15f8..2ee9d77 100644
--- a/draftjs_exporter/entities.py
+++ b/draftjs_exporter/entities.py
@@ -3,18 +3,18 @@ from __future__ import absolute_import, unicode_literals
from draftjs_exporter.dom import DOM
-class Null():
+class Null:
def render(self, props):
return DOM.create_element()
-class Icon():
+class Icon:
def render(self, props):
href = 'icon-%s' % props.get('name', '')
return DOM.create_element('svg', {'class': 'icon'}, DOM.create_element('use', {'xlink:href': href}))
-class Image():
+class Image:
def render(self, props):
data = props.get('data', {})
@@ -26,13 +26,13 @@ class Image():
})
-class Link():
+class Link:
attributes = ['url', 'rel', 'target', 'title']
@staticmethod
def is_valid_attribute(key):
# TODO How much do we need to whitelist / blacklist attributes?
- valid_data_attr = (key.startswith('data-') and key.replace('data-', '') and key.replace('data-', '').islower())
+ valid_data_attr = key.startswith('data-') and len(key) > 5 and key.islower()
return key in Link.attributes or valid_data_attr
def render(self, props):
@@ -48,11 +48,16 @@ class Link():
return DOM.create_element('a', attributes)
-class Button():
+class Button:
def render(self, props):
data = props.get('data', {})
href = data.get('href', '#')
icon = data.get('icon', None)
text = data.get('text', '')
- return DOM.create_element('a', {'class': 'icon-text' if icon else None, 'href': href}, DOM.create_element(Icon, {'name': icon}) if icon else None, DOM.create_element('span', {'class': 'icon-text__text'}, text) if icon else text)
+ return DOM.create_element(
+ 'a',
+ {'class': 'icon-text' if icon else None, 'href': href},
+ DOM.create_element(Icon, {'name': icon}) if icon else None,
+ DOM.create_element('span', {'class': 'icon-text__text'}, text) if icon else text
+ )
diff --git a/draftjs_exporter/entity_state.py b/draftjs_exporter/entity_state.py
index 2da83c6..b284a84 100644
--- a/draftjs_exporter/entity_state.py
+++ b/draftjs_exporter/entity_state.py
@@ -8,7 +8,7 @@ class EntityException(ExporterException):
pass
-class EntityState():
+class EntityState:
def __init__(self, root_element, entity_decorators, entity_map):
self.entity_decorators = entity_decorators
self.entity_map = entity_map
@@ -19,9 +19,9 @@ class EntityState():
self.entity_stack = [(stack_start, {})]
def apply(self, command):
- if (command.name == 'start_entity'):
+ if command.name == 'start_entity':
self.start_command(command)
- elif (command.name == 'stop_entity'):
+ elif command.name == 'stop_entity':
self.stop_command(command)
def current_parent(self):
@@ -37,11 +37,11 @@ class EntityState():
return details
def get_entity_decorator(self, entity_details):
- type = entity_details.get('type')
- decorator = self.entity_decorators.get(type)
+ type_ = entity_details.get('type')
+ decorator = self.entity_decorators.get(type_)
if decorator is None:
- raise EntityException('Decorator "%s" does not exist in entity_decorators' % type)
+ raise EntityException('Decorator "%s" does not exist in entity_decorators' % type_)
return decorator
@@ -52,7 +52,7 @@ class EntityState():
new_element = decorator.render(entity_details)
DOM.append_child(self.current_parent(), new_element)
- self.entity_stack.append([new_element, entity_details])
+ self.entity_stack.append((new_element, entity_details))
def stop_command(self, command):
entity_details = self.get_entity_details(command)
diff --git a/draftjs_exporter/html.py b/draftjs_exporter/html.py
index c7be12d..b0cc16b 100644
--- a/draftjs_exporter/html.py
+++ b/draftjs_exporter/html.py
@@ -7,12 +7,15 @@ from draftjs_exporter.style_state import StyleState
from draftjs_exporter.wrapper_state import WrapperState
-class HTML():
+class HTML:
"""
Entry point of the exporter. Combines entity, wrapper and style state
to generate the right HTML nodes.
"""
- def __init__(self, config={}):
+ def __init__(self, config=None):
+ if config is None:
+ config = {}
+
self.entity_decorators = config.get('entity_decorators', {})
self.wrapper_state = WrapperState(config.get('block_map', BLOCK_MAP))
self.style_state = StyleState(config.get('style_map', STYLE_MAP))
diff --git a/draftjs_exporter/style_state.py b/draftjs_exporter/style_state.py
index bb9ccdf..eed38d6 100644
--- a/draftjs_exporter/style_state.py
+++ b/draftjs_exporter/style_state.py
@@ -10,13 +10,13 @@ _first_cap_re = re.compile(r'(.)([A-Z][a-z]+)')
_all_cap_re = re.compile('([a-z0-9])([A-Z])')
-def camelToDash(camelCasedStr):
- sub2 = _first_cap_re.sub(r'\1-\2', camelCasedStr)
+def camel_to_dash(camel_cased_str):
+ sub2 = _first_cap_re.sub(r'\1-\2', camel_cased_str)
dashed_case_str = _all_cap_re.sub(r'\1-\2', sub2).lower()
return dashed_case_str.replace('--', '-')
-class StyleState():
+class StyleState:
"""
Handles the creation of inline styles on elements.
Receives inline_style commands, and generates the element's `style`
@@ -52,7 +52,7 @@ class StyleState():
css_style = self.style_map.get(style, {})
for prop in css_style.keys():
if prop != 'element':
- rules.append('{0}: {1};'.format(camelToDash(prop), css_style[prop]))
+ rules.append('{0}: {1};'.format(camel_to_dash(prop), css_style[prop]))
return ''.join(sorted(rules))
diff --git a/draftjs_exporter/wrapper_state.py b/draftjs_exporter/wrapper_state.py
index 8bec7e1..2879d99 100644
--- a/draftjs_exporter/wrapper_state.py
+++ b/draftjs_exporter/wrapper_state.py
@@ -8,7 +8,7 @@ class BlockException(ExporterException):
pass
-class WrapperState():
+class WrapperState:
"""
This class does the initial node building for the tree.
It sets elements with the right tag, text content, and attributes.
@@ -25,19 +25,19 @@ class WrapperState():
]
def element_for(self, block):
- type = block.get('type', 'unstyled')
+ type_ = block.get('type', 'unstyled')
depth = block.get('depth', 0)
- block_options = self.get_block_options(type)
+ block_options = self.get_block_options(type_)
# Make an element from the options specified in the block map.
elt_options = self.map_element_options(block_options.get('element'))
elt = DOM.create_element(elt_options[0], elt_options[1])
- parent = self.parent_for(type, depth)
+ parent = self.parent_for(type_, depth)
DOM.append_child(parent, elt)
# At level 0, the element is added to the document.
- if (depth == 0):
+ if depth == 0:
DOM.append_child(self.document, parent)
return elt
@@ -48,8 +48,8 @@ class WrapperState():
def __str__(self):
return '<WrapperState: %s>' % self.to_string()
- def set_wrapper(self, options=[], depth=0):
- if len(options) == 0:
+ def set_wrapper(self, options=None, depth=0):
+ if not options:
element = DOM.create_document_fragment()
else:
element = DOM.create_element(options[0], options[1])
@@ -73,8 +73,8 @@ class WrapperState():
def get_wrapper_options(self, depth=-1):
return self.wrapper_stack[depth][2]
- def parent_for(self, type, depth):
- block_options = self.get_block_options(type)
+ def parent_for(self, type_, depth):
+ block_options = self.get_block_options(type_)
wrapper_options = block_options.get('wrapper', None)
if wrapper_options:
@@ -95,7 +95,7 @@ class WrapperState():
['ul']
['ul', {'className': 'bullet-list'}]
"""
- if (isinstance(opts, list)):
+ if isinstance(opts, list):
tag = opts[0]
attributes = opts[1] if len(opts) > 1 else {}
else:
@@ -104,11 +104,11 @@ class WrapperState():
return [tag, attributes]
- def get_block_options(self, type):
- block_options = self.block_map.get(type)
+ def get_block_options(self, type_):
+ block_options = self.block_map.get(type_)
if block_options is None:
- raise BlockException('Block "%s" does not exist in block_map' % type)
+ raise BlockException('Block "%s" does not exist in block_map' % type_)
return block_options
| Fix code relying on mutable default values
Pointed out by @loicteixeira, this happens a couple of times within the codebase. | springload/draftjs_exporter | diff --git a/tests/test_constants.py b/tests/test_constants.py
index 8d4ad3d..1bf3a41 100644
--- a/tests/test_constants.py
+++ b/tests/test_constants.py
@@ -2,7 +2,21 @@ from __future__ import absolute_import, unicode_literals
import unittest
-from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
+from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
+
+
+class EnumConstants(unittest.TestCase):
+ def test_enum_returns_the_key_if_valid(self):
+ foo_value = 'foo'
+ e = Enum(foo_value)
+
+ self.assertEqual(e.foo, foo_value)
+
+ def test_enum_raises_an_error_for_invalid_keys(self):
+ e = Enum('foo', 'bar')
+
+ with self.assertRaises(AttributeError):
+ e.invalid_key
class TestConstants(unittest.TestCase):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 7
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[testing,docs]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | beautifulsoup4==4.13.3
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
-e git+https://github.com/springload/draftjs_exporter.git@fb287b5d9132aa8f01538aec193eccbc940ccc59#egg=draftjs_exporter
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
flake8==7.2.0
html5lib==0.9999999
iniconfig==2.1.0
isort==6.0.1
mccabe==0.7.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pyproject-api==1.9.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
six==1.17.0
soupsieve==2.6
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: draftjs_exporter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.13.3
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- flake8==7.2.0
- html5lib==0.9999999
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- six==1.17.0
- soupsieve==2.6
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/draftjs_exporter
| [
"tests/test_constants.py::EnumConstants::test_enum_raises_an_error_for_invalid_keys"
] | [] | [
"tests/test_constants.py::EnumConstants::test_enum_returns_the_key_if_valid",
"tests/test_constants.py::TestConstants::test_block_types",
"tests/test_constants.py::TestConstants::test_entity_types",
"tests/test_constants.py::TestConstants::test_inline_styles"
] | [] | MIT License | 854 | 3,162 | [
"draftjs_exporter/constants.py",
"draftjs_exporter/dom.py",
"draftjs_exporter/entities.py",
"draftjs_exporter/entity_state.py",
"draftjs_exporter/html.py",
"draftjs_exporter/style_state.py",
"draftjs_exporter/wrapper_state.py"
] |
blue-yonder__tsfresh-66 | 50b3ee7ee663aacf8d6ae28bcb2bfcc00bd3d9a1 | 2016-11-16 22:19:12 | 211a9f9504c710dd7de1189ff8dcf637e1390428 | coveralls:
[](https://coveralls.io/builds/8863140)
Coverage increased (+0.2%) to 93.674% when pulling **bd41d0dab7dffa94cdc7614bb8d311b16c1cc4bf on nils-braun:count-percentage-of-some-value** into **50b3ee7ee663aacf8d6ae28bcb2bfcc00bd3d9a1 on blue-yonder:master**.
coveralls:
[](https://coveralls.io/builds/8863140)
Coverage increased (+0.2%) to 93.674% when pulling **bd41d0dab7dffa94cdc7614bb8d311b16c1cc4bf on nils-braun:count-percentage-of-some-value** into **50b3ee7ee663aacf8d6ae28bcb2bfcc00bd3d9a1 on blue-yonder:master**.
MaxBenChrist: Awesome @nils-braun !
| diff --git a/tsfresh/feature_extraction/feature_calculators.py b/tsfresh/feature_extraction/feature_calculators.py
index 5882882..d216254 100644
--- a/tsfresh/feature_extraction/feature_calculators.py
+++ b/tsfresh/feature_extraction/feature_calculators.py
@@ -587,6 +587,92 @@ def first_location_of_minimum(x):
return np.argmin(x) / len(x) if len(x) > 0 else np.NaN
+@set_property("fctype", "aggregate")
+@not_apply_to_raw_numbers
+def percentage_of_reoccurring_datapoints_to_all_datapoints(x):
+ """
+ Returns the percentage of unique values, that are present in the time series
+ more than once.
+
+ len(different values occurring more than once) / len(different values)
+
+ This means the percentage is normalized to the number of unique values,
+ in contrast to the percentage_of_reoccurring_values_to_all_values.
+
+ :param x: the time series to calculate the feature of
+ :type x: pandas.Series
+ :return: the value of this feature
+ :return type: float
+ """
+ x = pd.Series(x)
+ return (x.value_counts() > 1).mean()
+
+
+@set_property("fctype", "aggregate")
+@not_apply_to_raw_numbers
+def percentage_of_reoccurring_values_to_all_values(x):
+ """
+ Returns the ratio of unique values, that are present in the time series
+ more than once.
+
+ # of data points occurring more than once / # of all data points
+
+ This means the ratio is normalized to the number of data points in the time series,
+ in contrast to the percentage_of_reoccurring_datapoints_to_all_datapoints.
+
+ :param x: the time series to calculate the feature of
+ :type x: pandas.Series
+ :return: the value of this feature
+ :return type: float
+ """
+ x = pd.Series(x)
+
+ if len(x) == 0:
+ return np.nan
+
+ value_counts = x.value_counts()
+ return 1.0 * value_counts[value_counts > 1].sum() / len(x)
+
+
+@set_property("fctype", "aggregate")
+@not_apply_to_raw_numbers
+def sum_of_reoccurring_values(x):
+ """
+ Returns the sum of all values, that are present in the time series
+ more than once.
+
+ :param x: the time series to calculate the feature of
+ :type x: pandas.Series
+ :return: the value of this feature
+ :return type: float
+ """
+ x = pd.Series(x)
+ value_counts = x.value_counts()
+ doubled_values = value_counts[value_counts > 1]
+ return sum(doubled_values.index * doubled_values)
+
+
+@set_property("fctype", "aggregate")
+@not_apply_to_raw_numbers
+def ratio_value_number_to_time_series_length(x):
+ """
+ Returns a factor which is 1 if all values in the time series occur only once, and below one if this is not the
+ case. In principle, it just returns
+
+ # unique values / # values
+
+ :param x: the time series to calculate the feature of
+ :type x: pandas.Series
+ :return: the value of this feature
+ :return type: float
+ """
+
+ if len(x) == 0:
+ return np.nan
+
+ return 1.0 * len(set(x))/len(x)
+
+
@set_property("fctype", "apply")
@not_apply_to_raw_numbers
def fft_coefficient(x, c, param):
@@ -723,12 +809,12 @@ def cwt_coefficients(x, c, param):
for widths in df_cfg["widths"].unique():
- coeff = df_cfg[df_cfg["widths"] == widths]["coeff"].unique()
# the calculated_cwt will shape (len(widths), len(x)).
calculated_cwt = cwt(x, ricker, widths)
for w in df_cfg[df_cfg["widths"] == widths]["w"].unique():
+ coeff = df_cfg[(df_cfg["widths"] == widths) & (df_cfg["w"] == w)]["coeff"].unique()
i = widths.index(w)
if calculated_cwt.shape[1] < len(coeff): # There are less data points than requested model coefficients
| Add feature class: Count/Percentage same value
I had an idea for a class of features.
1. percentage of data points that occur at least second time
2. sum of data points that occur at least second time
3. percentage of observed values that occur at lest second time
... | blue-yonder/tsfresh | diff --git a/tests/feature_extraction/test_feature_calculations.py b/tests/feature_extraction/test_feature_calculations.py
index e33c376..32cb43f 100644
--- a/tests/feature_extraction/test_feature_calculations.py
+++ b/tests/feature_extraction/test_feature_calculations.py
@@ -258,6 +258,34 @@ class FeatureCalculationTestCase(TestCase):
self.assertAlmostEqualOnAllArrayTypes(first_location_of_minimum, [1], 0.0)
self.assertIsNanOnAllArrayTypes(first_location_of_minimum, [])
+ def test_percentage_of_doubled_datapoints(self):
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_datapoints_to_all_datapoints, [1, 1, 2, 3, 4], 0.25)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_datapoints_to_all_datapoints, [1, 1.5, 2, 3], 0)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_datapoints_to_all_datapoints, [1], 0)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_datapoints_to_all_datapoints, [1.111, -2.45, 1.111, 2.45], 1.0 / 3.0)
+ self.assertIsNanOnAllArrayTypes(percentage_of_reoccurring_datapoints_to_all_datapoints, [])
+
+ def test_ratio_of_doubled_values(self):
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_values_to_all_values, [1, 1, 2, 3, 4], 0.4)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_values_to_all_values, [1, 1.5, 2, 3], 0)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_values_to_all_values, [1], 0)
+ self.assertAlmostEqualOnAllArrayTypes(percentage_of_reoccurring_values_to_all_values, [1.111, -2.45, 1.111, 2.45], 0.5)
+ self.assertIsNanOnAllArrayTypes(percentage_of_reoccurring_values_to_all_values, [])
+
+ def test_sum_of_doubled_values(self):
+ self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_values, [1, 1, 2, 3, 4], 2)
+ self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_values, [1, 1.5, 2, 3], 0)
+ self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_values, [1], 0)
+ self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_values, [1.111, -2.45, 1.111, 2.45], 2.222)
+ self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_values, [], 0)
+
+ def test_uniqueness_factor(self):
+ self.assertAlmostEqualOnAllArrayTypes(ratio_value_number_to_time_series_length, [1, 1, 2, 3, 4], 0.8)
+ self.assertAlmostEqualOnAllArrayTypes(ratio_value_number_to_time_series_length, [1, 1.5, 2, 3], 1)
+ self.assertAlmostEqualOnAllArrayTypes(ratio_value_number_to_time_series_length, [1], 1)
+ self.assertAlmostEqualOnAllArrayTypes(ratio_value_number_to_time_series_length, [1.111, -2.45, 1.111, 2.45], 0.75)
+ self.assertIsNanOnAllArrayTypes(ratio_value_number_to_time_series_length, [])
+
def test_fft_coefficient(self):
pass
# todo: add unit test
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=3.0.2",
"pytest-cov>=2.3.1",
"pytest-xdist>=1.15.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"pip install --upgrade pip",
"pip install numpy",
"pip install scipy"
],
"python": "3.5.2",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
execnet==1.9.0
future==1.0.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
joblib==1.1.1
numpy==1.19.5
packaging==21.3
pandas==1.1.5
patsy==1.0.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
scikit-learn==0.24.2
scipy==1.5.4
six==1.17.0
statsmodels==0.12.2
threadpoolctl==3.1.0
tomli==1.2.3
-e git+https://github.com/blue-yonder/tsfresh.git@50b3ee7ee663aacf8d6ae28bcb2bfcc00bd3d9a1#egg=tsfresh
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: tsfresh
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- execnet==1.9.0
- future==1.0.0
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- joblib==1.1.1
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- patsy==1.0.1
- pip==21.3.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.17.0
- statsmodels==0.12.2
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/tsfresh
| [
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_ratio_of_doubled_values",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_percentage_of_doubled_datapoints",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_sum_of_doubled_values",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_uniqueness_factor"
] | [] | [
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_count_above_mean",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_autocorrelation",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_count_below_mean",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_binned_entropy",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test___get_length_sequences_where",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_abs_energy",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_approximate_entropy",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_cwt_coefficients",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_fft_coefficient",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_absolute_sum_of_changes",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_has_duplicate",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_catch_Numbers",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_kurtosis",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_has_duplicate_max",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_has_duplicate_min",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_first_location_of_maximum",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_ar_coefficient",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_last_location_maximum",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_first_location_of_minimum",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_last_location_of_minimum",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_length",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_longest_strike_above_mean",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_augmented_dickey_fuller",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean_abs_change",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_longest_strike_below_mean",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean_change",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_large_number_of_peaks",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_large_standard_deviation",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mass_quantile",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean_second_derivate_central",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_number_cwt_peaks",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean_abs_change_quantiles",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_median",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_quantile",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_mean_autocorrelation",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_number_peaks",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_standard_deviation",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_skewness",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_range_count",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_sum",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_symmetry_looking",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_time_reversal_asymmetry_statistic",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_spkt_welch_density",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_value_count",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_variance",
"tests/feature_extraction/test_feature_calculations.py::FeatureCalculationTestCase::test_variance_larger_than_standard_deviation"
] | [] | MIT License | 857 | 1,045 | [
"tsfresh/feature_extraction/feature_calculators.py"
] |
clld__clldutils-20 | cebffac35a42bde46fcd32d2db4a0fd5464968a3 | 2016-11-17 10:27:59 | 54679df634b93870ea9fec722a56b75e72017645 | diff --git a/clldutils/misc.py b/clldutils/misc.py
index 7979814..c97f5b3 100644
--- a/clldutils/misc.py
+++ b/clldutils/misc.py
@@ -9,9 +9,6 @@ from string import ascii_letters
from six import PY3, string_types, text_type, binary_type
-# FIXME: add: badge, Stats
-
-
def nfilter(seq):
"""Replacement for python 2's filter(None, seq).
diff --git a/clldutils/path.py b/clldutils/path.py
index 05c0b19..bd6888d 100644
--- a/clldutils/path.py
+++ b/clldutils/path.py
@@ -1,10 +1,13 @@
# coding: utf8
from __future__ import unicode_literals
import os
+import sys
import shutil
import tempfile
import subprocess
import hashlib
+from contextlib import contextmanager
+import importlib
from six import PY3, string_types, binary_type, text_type
@@ -17,6 +20,20 @@ else:
Path = pathlib.Path
+@contextmanager
+def sys_path(p):
+ p = Path(p).as_posix()
+ sys.path.append(p)
+ yield
+ if sys.path[-1] == p:
+ sys.path.pop()
+
+
+def import_module(p):
+ with sys_path(p.parent):
+ return importlib.import_module(p.stem)
+
+
# In python 3, pathlib treats path components and string-like representations or
# attributes of paths (like name and stem) as unicode strings. Unfortunately this is not
# true for pathlib under python 2.7. So as workaround for the case of using non-ASCII
| context manager to temporarily adapt `sys.path`
For several projects we want to be able to import custom code not within a python package. The most convenient (and portable) way to this seems to be via a context manager, adapting `sys.path` appropriately:
```python
@contextmanager
def with_sys_path(d):
p = d.as_posix()
sys.path.append(p)
yield
if sys.path[-1] == p:
sys.path.pop()
```
Alternatively, the complete functionality, i.e. importing a module by filesystem path could be provided:
```python
with with_sys_path(path.parent):
return import_module(path.name)
``` | clld/clldutils | diff --git a/clldutils/tests/test_path.py b/clldutils/tests/test_path.py
index ac10a6d..e208515 100644
--- a/clldutils/tests/test_path.py
+++ b/clldutils/tests/test_path.py
@@ -14,6 +14,21 @@ class Tests(WithTempDir):
fp.write('test')
return path
+ def test_import_module(self):
+ from clldutils.path import import_module
+
+ with self.tmp_path('__init__.py').open('w', encoding='ascii') as fp:
+ fp.write('A = [1, 2, 3]')
+
+ m = import_module(self.tmp_path())
+ self.assertEqual(len(m.A), 3)
+
+ with self.tmp_path('mod.py').open('w', encoding='ascii') as fp:
+ fp.write('A = [1, 2, 3]')
+
+ m = import_module(self.tmp_path('mod.py'))
+ self.assertEqual(len(m.A), 3)
+
def test_non_ascii(self):
from clldutils.path import Path, path_component, as_unicode
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/clld/clldutils.git@cebffac35a42bde46fcd32d2db4a0fd5464968a3#egg=clldutils
configparser==7.2.0
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tabulate==0.9.0
tomli==2.2.1
| name: clldutils
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- configparser==7.2.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tabulate==0.9.0
- tomli==2.2.1
prefix: /opt/conda/envs/clldutils
| [
"clldutils/tests/test_path.py::Tests::test_import_module"
] | [] | [
"clldutils/tests/test_path.py::Tests::test_TemporaryDirectory",
"clldutils/tests/test_path.py::Tests::test_as_posix",
"clldutils/tests/test_path.py::Tests::test_copy",
"clldutils/tests/test_path.py::Tests::test_copytree",
"clldutils/tests/test_path.py::Tests::test_git_describe",
"clldutils/tests/test_path.py::Tests::test_md5",
"clldutils/tests/test_path.py::Tests::test_move",
"clldutils/tests/test_path.py::Tests::test_non_ascii",
"clldutils/tests/test_path.py::Tests::test_remove",
"clldutils/tests/test_path.py::Tests::test_rmtree",
"clldutils/tests/test_path.py::Tests::test_walk"
] | [] | Apache License 2.0 | 858 | 406 | [
"clldutils/misc.py",
"clldutils/path.py"
] |
|
beetbox__beets-2270 | 02bd7946c1f6dd84e0fd28d152f4bca5c09d9e0a | 2016-11-18 00:14:15 | 02bd7946c1f6dd84e0fd28d152f4bca5c09d9e0a | diff --git a/beets/mediafile.py b/beets/mediafile.py
index 87a9d10a6..7d1b07280 100644
--- a/beets/mediafile.py
+++ b/beets/mediafile.py
@@ -920,7 +920,16 @@ class MP3ImageStorageStyle(ListStorageStyle, MP3StorageStyle):
frame.data = image.data
frame.mime = image.mime_type
frame.desc = image.desc or u''
- frame.encoding = 3 # UTF-8 encoding of desc
+
+ # For compatibility with OS X/iTunes prefer latin-1 if possible.
+ # See issue #899
+ try:
+ frame.desc.encode("latin-1")
+ except UnicodeEncodeError:
+ frame.encoding = mutagen.id3.Encoding.UTF16
+ else:
+ frame.encoding = mutagen.id3.Encoding.LATIN1
+
frame.type = image.type_index
return frame
| MediaFile: use older text encodings in ID3v2.3 mode
I am trying to create an auto-tagging configuration in which my tags are saved in ID3v2.3 (as 2.4 lacks compatibility with some players I use) and I like the cover art to be embedded in each music file. However, the cover art of the output files is not recognised by OS X 10.9.4 (i.e. in Finder) or iTunes.
Here is a simple configuration with which the problem occurs:
``` yaml
directory: /mnt/data/home/Music
plugins: mbsync fetchart embedart
per_disc_numbering: yes
id3v23: yes
import:
copy: yes
write: yes
paths:
default: $albumartist/$album%aunique{}/$disc-$track $title
```
When I comment the `id3v23: yes` option the covers of the output files are correctly recognised in Mac.
The cover art of the ID3v2.3 output files is recognised in Windows, so it seems a Mac-specific issue. Strangely enough, the input files I used already have ID3v2.3 tags and embedded cover art and are correctly recognised in Mac. Below you have a diff of the ID3v2.3 tags (sorted by name) between an input and an output file taken with `mid3v2`:
``` diff
1c1,2
< APIC= (image/jpeg, 32205 bytes)
---
> APIC= (image/jpeg, 111083 bytes)
> COMM=iTunNORM='eng'= 00001700 00001700 00003981 00003981 00000000 00000000 00008187 00008187 00000000 00000000
3a5,6
> TBPM=0
> TCMP=0
5a9
> TLAN=eng
12a17,19
> TXXX=Album Artist Credit=The Beatles
> TXXX=ALBUMARTISTSORT=Beatles, The
> TXXX=Artist Credit=The Beatles
14a22
> TXXX=MusicBrainz Album Comment=UK mono
17c25
< TXXX=MusicBrainz Album Status=official
---
> TXXX=MusicBrainz Album Status=Official
27a36
> USLT=[unrepresentable data]
```
| beetbox/beets | diff --git a/test/test_mediafile_edge.py b/test/test_mediafile_edge.py
index 0be177699..ae758f142 100644
--- a/test/test_mediafile_edge.py
+++ b/test/test_mediafile_edge.py
@@ -19,6 +19,7 @@ from __future__ import division, absolute_import, print_function
import os
import shutil
+import mutagen.id3
from test import _common
from test._common import unittest
@@ -375,30 +376,30 @@ class ID3v23Test(unittest.TestCase, TestHelper):
finally:
self._delete_test()
- def test_v24_image_encoding(self):
- mf = self._make_test(id3v23=False)
- try:
- mf.images = [beets.mediafile.Image(b'test data')]
- mf.save()
- frame = mf.mgfile.tags.getall('APIC')[0]
- self.assertEqual(frame.encoding, 3)
- finally:
- self._delete_test()
+ def test_image_encoding(self):
+ """For compatibility with OS X/iTunes.
- @unittest.skip("a bug, see #899")
- def test_v23_image_encoding(self):
- """For compatibility with OS X/iTunes (and strict adherence to
- the standard), ID3v2.3 tags need to use an inferior text
- encoding: UTF-8 is not supported.
+ See https://github.com/beetbox/beets/issues/899#issuecomment-62437773
"""
- mf = self._make_test(id3v23=True)
- try:
- mf.images = [beets.mediafile.Image(b'test data')]
- mf.save()
- frame = mf.mgfile.tags.getall('APIC')[0]
- self.assertEqual(frame.encoding, 1)
- finally:
- self._delete_test()
+
+ for v23 in [True, False]:
+ mf = self._make_test(id3v23=v23)
+ try:
+ mf.images = [
+ beets.mediafile.Image(b'data', desc=u""),
+ beets.mediafile.Image(b'data', desc=u"foo"),
+ beets.mediafile.Image(b'data', desc=u"\u0185"),
+ ]
+ mf.save()
+ apic_frames = mf.mgfile.tags.getall('APIC')
+ encodings = dict([(f.desc, f.encoding) for f in apic_frames])
+ self.assertEqual(encodings, {
+ u"": mutagen.id3.Encoding.LATIN1,
+ u"foo": mutagen.id3.Encoding.LATIN1,
+ u"\u0185": mutagen.id3.Encoding.UTF16,
+ })
+ finally:
+ self._delete_test()
def suite():
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"nose",
"nose-show-skipped",
"coverage",
"responses"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/beetbox/beets.git@02bd7946c1f6dd84e0fd28d152f4bca5c09d9e0a#egg=beets
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
jellyfish==1.1.3
munkres==1.1.4
musicbrainzngs==0.7.1
mutagen==1.47.0
nose==1.3.7
nose-show-skipped==0.1
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
responses==0.25.7
six==1.17.0
tomli==2.2.1
Unidecode==1.3.8
urllib3==2.3.0
| name: beets
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jellyfish==1.1.3
- munkres==1.1.4
- musicbrainzngs==0.7.1
- mutagen==1.47.0
- nose==1.3.7
- nose-show-skipped==0.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- responses==0.25.7
- six==1.17.0
- tomli==2.2.1
- unidecode==1.3.8
- urllib3==2.3.0
prefix: /opt/conda/envs/beets
| [
"test/test_mediafile_edge.py::ID3v23Test::test_image_encoding"
] | [] | [
"test/test_mediafile_edge.py::EdgeTest::test_discc_alternate_field",
"test/test_mediafile_edge.py::EdgeTest::test_emptylist",
"test/test_mediafile_edge.py::EdgeTest::test_old_ape_version_bitrate",
"test/test_mediafile_edge.py::EdgeTest::test_only_magic_bytes_jpeg",
"test/test_mediafile_edge.py::EdgeTest::test_release_time_with_space",
"test/test_mediafile_edge.py::EdgeTest::test_release_time_with_t",
"test/test_mediafile_edge.py::EdgeTest::test_soundcheck_non_ascii",
"test/test_mediafile_edge.py::EdgeTest::test_tempo_with_bpm",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_float_with_dot_only",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_float_with_multiple_dots",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_float_with_no_numbers",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_int_string_to_int",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_int_to_float",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_intstring_to_bool",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_negative_string_to_float",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_special_chars_to_unicode",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_string_to_bool",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_string_to_float",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_string_to_int",
"test/test_mediafile_edge.py::InvalidValueToleranceTest::test_safe_cast_string_with_cruft_to_float",
"test/test_mediafile_edge.py::SafetyTest::test_broken_symlink",
"test/test_mediafile_edge.py::SafetyTest::test_corrupt_flac_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_corrupt_monkeys_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_corrupt_mp3_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_corrupt_mp4_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_corrupt_ogg_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_invalid_extension_raises_filetypeerror",
"test/test_mediafile_edge.py::SafetyTest::test_invalid_ogg_header_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SafetyTest::test_magic_xml_raises_unreadablefileerror",
"test/test_mediafile_edge.py::SideEffectsTest::test_opening_tagless_file_leaves_untouched",
"test/test_mediafile_edge.py::MP4EncodingTest::test_unicode_label_in_m4a",
"test/test_mediafile_edge.py::MP3EncodingTest::test_comment_with_latin1_encoding",
"test/test_mediafile_edge.py::MissingAudioDataTest::test_bitrate_with_zero_length",
"test/test_mediafile_edge.py::TypeTest::test_set_date_to_none",
"test/test_mediafile_edge.py::TypeTest::test_set_replaygain_gain_to_none",
"test/test_mediafile_edge.py::TypeTest::test_set_replaygain_peak_to_none",
"test/test_mediafile_edge.py::TypeTest::test_set_track_to_none",
"test/test_mediafile_edge.py::TypeTest::test_set_year_to_none",
"test/test_mediafile_edge.py::TypeTest::test_year_integer_in_string",
"test/test_mediafile_edge.py::SoundCheckTest::test_decode_handles_unicode",
"test/test_mediafile_edge.py::SoundCheckTest::test_decode_zero",
"test/test_mediafile_edge.py::SoundCheckTest::test_malformatted",
"test/test_mediafile_edge.py::SoundCheckTest::test_round_trip",
"test/test_mediafile_edge.py::SoundCheckTest::test_special_characters",
"test/test_mediafile_edge.py::ID3v23Test::test_v23_on_non_mp3_is_noop",
"test/test_mediafile_edge.py::ID3v23Test::test_v23_year_tag",
"test/test_mediafile_edge.py::ID3v23Test::test_v24_year_tag"
] | [] | MIT License | 859 | 227 | [
"beets/mediafile.py"
] |
|
dask__dask-1799 | 7aa13ca969fde27ab4b81ed6926ef72f1358f11b | 2016-11-21 13:46:08 | aff7284671c9e3d443cec3a07855cafabeba8fc5 | diff --git a/dask/array/ufunc.py b/dask/array/ufunc.py
index c107a2880..90ac357bb 100644
--- a/dask/array/ufunc.py
+++ b/dask/array/ufunc.py
@@ -17,15 +17,16 @@ def __array_wrap__(numpy_ufunc, x, *args, **kwargs):
def wrap_elemwise(numpy_ufunc, array_wrap=False):
""" Wrap up numpy function into dask.array """
- def wrapped(x, *args, **kwargs):
- if hasattr(x, '_elemwise'):
+ def wrapped(*args, **kwargs):
+ dsk = [arg for arg in args if hasattr(arg, '_elemwise')]
+ if len(dsk) > 0:
if array_wrap:
- return x._elemwise(__array_wrap__, numpy_ufunc,
- x, *args, **kwargs)
+ return dsk[0]._elemwise(__array_wrap__, numpy_ufunc,
+ *args, **kwargs)
else:
- return x._elemwise(numpy_ufunc, x, *args, **kwargs)
+ return dsk[0]._elemwise(numpy_ufunc, *args, **kwargs)
else:
- return numpy_ufunc(x, *args, **kwargs)
+ return numpy_ufunc(*args, **kwargs)
# functools.wraps cannot wrap ufunc in Python 2.x
wrapped.__name__ = numpy_ufunc.__name__
| BUG: dask.array.maximum can trigger eager evaluation
The failure case seems to be `maximum(scalar, dask_array)`:
```
In [1]: import dask.array as da
In [2]: x = da.ones((3,), chunks=(3,))
In [3]: da.maximum(0, x)
Out[3]: array([ 1., 1., 1.])
In [4]: da.maximum(x, 0)
Out[4]: dask.array<maximum..., shape=(3,), dtype=float64, chunksize=(3,)>
In [5]: 0 + x
Out[5]: dask.array<add-5f9..., shape=(3,), dtype=float64, chunksize=(3,)>
```
Pretty sure this was introduced by https://github.com/dask/dask/pull/1669
This is causing the xarray test suite to fail: https://github.com/pydata/xarray/issues/1090
CC @sinhrks | dask/dask | diff --git a/dask/array/tests/test_ufunc.py b/dask/array/tests/test_ufunc.py
index 229c3e8c6..ac85d2cd4 100644
--- a/dask/array/tests/test_ufunc.py
+++ b/dask/array/tests/test_ufunc.py
@@ -75,6 +75,19 @@ def test_ufunc_2args(ufunc):
assert isinstance(dafunc(arr1, arr2), np.ndarray)
assert_eq(dafunc(arr1, arr2), npfunc(arr1, arr2))
+ # with scalar
+ assert isinstance(dafunc(darr1, 10), da.Array)
+ assert_eq(dafunc(darr1, 10), npfunc(arr1, 10))
+
+ assert isinstance(dafunc(10, darr1), da.Array)
+ assert_eq(dafunc(10, darr1), npfunc(10, arr1))
+
+ assert isinstance(dafunc(arr1, 10), np.ndarray)
+ assert_eq(dafunc(arr1, 10), npfunc(arr1, 10))
+
+ assert isinstance(dafunc(10, arr1), np.ndarray)
+ assert_eq(dafunc(10, arr1), npfunc(10, arr1))
+
@pytest.mark.parametrize('ufunc', ['isreal', 'iscomplex', 'real', 'imag'])
def test_complex(ufunc):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"coverage",
"flake8",
"moto"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
async-timeout==4.0.2
asynctest==0.13.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
boto3==1.23.10
botocore==1.23.24
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
cloudpickle==2.2.1
coverage==6.2
cryptography==40.0.2
-e git+https://github.com/dask/dask.git@7aa13ca969fde27ab4b81ed6926ef72f1358f11b#egg=dask
dataclasses==0.8
distributed==1.14.3
flake8==5.0.4
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
jmespath==0.10.0
locket==1.0.0
MarkupSafe==2.0.1
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
moto==4.0.13
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
partd==1.2.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
responses==0.17.0
s3fs==2022.1.0
s3transfer==0.5.2
six==1.17.0
tblib==1.7.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
toolz==0.12.0
tornado==6.1
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
Werkzeug==2.0.3
wrapt==1.16.0
xmltodict==0.14.2
yarl==1.7.2
zict==2.1.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- boto3==1.23.10
- botocore==1.23.24
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- cloudpickle==2.2.1
- coverage==6.2
- cryptography==40.0.2
- dataclasses==0.8
- distributed==1.14.3
- flake8==5.0.4
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- importlib-metadata==4.2.0
- jinja2==3.0.3
- jmespath==0.10.0
- locket==1.0.0
- markupsafe==2.0.1
- mccabe==0.7.0
- moto==4.0.13
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- pandas==1.1.5
- partd==1.2.0
- psutil==7.0.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- responses==0.17.0
- s3fs==2022.1.0
- s3transfer==0.5.2
- six==1.17.0
- tblib==1.7.0
- toolz==0.12.0
- tornado==6.1
- urllib3==1.26.20
- werkzeug==2.0.3
- wrapt==1.16.0
- xmltodict==0.14.2
- yarl==1.7.2
- zict==2.1.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_ufunc.py::test_ufunc_2args[logaddexp]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[logaddexp2]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[arctan2]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[hypot]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[copysign]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[nextafter]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[ldexp]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[fmod]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[logical_and]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[logical_or]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[logical_xor]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[maximum]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[minimum]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[fmax]",
"dask/array/tests/test_ufunc.py::test_ufunc_2args[fmin]"
] | [
"dask/array/tests/test_ufunc.py::test_complex[isreal]",
"dask/array/tests/test_ufunc.py::test_complex[real]",
"dask/array/tests/test_ufunc.py::test_complex[imag]"
] | [
"dask/array/tests/test_ufunc.py::test_ufunc_meta",
"dask/array/tests/test_ufunc.py::test_ufunc[conj]",
"dask/array/tests/test_ufunc.py::test_ufunc[exp]",
"dask/array/tests/test_ufunc.py::test_ufunc[log]",
"dask/array/tests/test_ufunc.py::test_ufunc[log2]",
"dask/array/tests/test_ufunc.py::test_ufunc[log10]",
"dask/array/tests/test_ufunc.py::test_ufunc[log1p]",
"dask/array/tests/test_ufunc.py::test_ufunc[expm1]",
"dask/array/tests/test_ufunc.py::test_ufunc[sqrt]",
"dask/array/tests/test_ufunc.py::test_ufunc[square]",
"dask/array/tests/test_ufunc.py::test_ufunc[sin]",
"dask/array/tests/test_ufunc.py::test_ufunc[cos]",
"dask/array/tests/test_ufunc.py::test_ufunc[tan]",
"dask/array/tests/test_ufunc.py::test_ufunc[arctan]",
"dask/array/tests/test_ufunc.py::test_ufunc[sinh]",
"dask/array/tests/test_ufunc.py::test_ufunc[cosh]",
"dask/array/tests/test_ufunc.py::test_ufunc[tanh]",
"dask/array/tests/test_ufunc.py::test_ufunc[arcsinh]",
"dask/array/tests/test_ufunc.py::test_ufunc[arccosh]",
"dask/array/tests/test_ufunc.py::test_ufunc[deg2rad]",
"dask/array/tests/test_ufunc.py::test_ufunc[rad2deg]",
"dask/array/tests/test_ufunc.py::test_ufunc[isfinite]",
"dask/array/tests/test_ufunc.py::test_ufunc[isinf]",
"dask/array/tests/test_ufunc.py::test_ufunc[isnan]",
"dask/array/tests/test_ufunc.py::test_ufunc[signbit]",
"dask/array/tests/test_ufunc.py::test_ufunc[degrees]",
"dask/array/tests/test_ufunc.py::test_ufunc[radians]",
"dask/array/tests/test_ufunc.py::test_ufunc[rint]",
"dask/array/tests/test_ufunc.py::test_ufunc[fabs]",
"dask/array/tests/test_ufunc.py::test_ufunc[sign]",
"dask/array/tests/test_ufunc.py::test_ufunc[absolute]",
"dask/array/tests/test_ufunc.py::test_ufunc[floor]",
"dask/array/tests/test_ufunc.py::test_ufunc[ceil]",
"dask/array/tests/test_ufunc.py::test_ufunc[trunc]",
"dask/array/tests/test_ufunc.py::test_ufunc[logical_not]",
"dask/array/tests/test_ufunc.py::test_complex[iscomplex]",
"dask/array/tests/test_ufunc.py::test_ufunc_2results[frexp]",
"dask/array/tests/test_ufunc.py::test_ufunc_2results[modf]",
"dask/array/tests/test_ufunc.py::test_clip"
] | [] | BSD 3-Clause "New" or "Revised" License | 865 | 347 | [
"dask/array/ufunc.py"
] |
|
jmcnamara__XlsxWriter-396 | 7a48769abe7e68c0f3729c4f7c951ad5ca9a0bf8 | 2016-11-22 18:13:23 | 46ba2b5453f6df3b1a29eda73d6bd66aed8c7746 | diff --git a/xlsxwriter/worksheet.py b/xlsxwriter/worksheet.py
index 3e63ea53..dbe8e63e 100644
--- a/xlsxwriter/worksheet.py
+++ b/xlsxwriter/worksheet.py
@@ -1880,6 +1880,8 @@ class Worksheet(xmlwriter.XMLwriter):
'min_color': True,
'mid_color': True,
'max_color': True,
+ 'min_length': True,
+ 'max_length': True,
'multi_range': True,
'bar_color': 1}
@@ -6026,7 +6028,15 @@ class Worksheet(xmlwriter.XMLwriter):
def _write_data_bar(self, param):
# Write the <dataBar> element.
- self._xml_start_tag('dataBar')
+ attributes = []
+
+ if 'min_length' in param:
+ attributes.append(('minLength', param['min_length']))
+
+ if 'max_length' in param:
+ attributes.append(('maxLength', param['max_length']))
+
+ self._xml_start_tag('dataBar', attributes)
self._write_cfvo(param['min_type'], param['min_value'])
self._write_cfvo(param['max_type'], param['max_value'])
| Feature Request: Add minLength and maxLength to dataBar attributes
Attributes minLength and maxLength determine maximum and minimum histogram length in percentage of cell width. Currently those attributes are not set. Default values are 10 and 90 percent. It would be useful to have possibility of setting them manually.
I've already implemented that. If you're OK I would create a merge request. | jmcnamara/XlsxWriter | diff --git a/xlsxwriter/test/worksheet/test_cond_format21.py b/xlsxwriter/test/worksheet/test_cond_format21.py
new file mode 100644
index 00000000..4428b18e
--- /dev/null
+++ b/xlsxwriter/test/worksheet/test_cond_format21.py
@@ -0,0 +1,141 @@
+###############################################################################
+#
+# Tests for XlsxWriter.
+#
+# Copyright (c), 2013-2016, John McNamara, [email protected]
+#
+
+import unittest
+from ...compatibility import StringIO
+from ..helperfunctions import _xml_to_list
+from ...worksheet import Worksheet
+
+
+class TestAssembleWorksheet(unittest.TestCase):
+ """
+ Test assembling a complete Worksheet file.
+
+ """
+ def test_assemble_xml_file(self):
+ """Test writing a worksheet with conditional formatting."""
+ self.maxDiff = None
+
+ fh = StringIO()
+ worksheet = Worksheet()
+ worksheet._set_filehandle(fh)
+ worksheet.select()
+
+ worksheet.write('A1', 1)
+ worksheet.write('A2', 2)
+ worksheet.write('A3', 3)
+ worksheet.write('A4', 4)
+ worksheet.write('A5', 5)
+ worksheet.write('A6', 6)
+ worksheet.write('A7', 7)
+ worksheet.write('A8', 8)
+ worksheet.write('A9', 9)
+ worksheet.write('A10', 10)
+ worksheet.write('A11', 11)
+ worksheet.write('A12', 12)
+
+ worksheet.conditional_format('A1:A12',
+ {'type': 'data_bar',
+ 'min_value': 5,
+ 'mid_value': 52, # Should be ignored.
+ 'max_value': 90,
+ 'min_length': 5,
+ 'max_length': 95,
+ 'min_type': 'num',
+ 'mid_type': 'percentile', # Should be ignored.
+ 'max_type': 'percent',
+ 'bar_color': '#8DB4E3',
+ })
+
+ worksheet._assemble_xml_file()
+
+ exp = _xml_to_list("""
+ <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+ <worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
+ <dimension ref="A1:A12"/>
+ <sheetViews>
+ <sheetView tabSelected="1" workbookViewId="0"/>
+ </sheetViews>
+ <sheetFormatPr defaultRowHeight="15"/>
+ <sheetData>
+ <row r="1" spans="1:1">
+ <c r="A1">
+ <v>1</v>
+ </c>
+ </row>
+ <row r="2" spans="1:1">
+ <c r="A2">
+ <v>2</v>
+ </c>
+ </row>
+ <row r="3" spans="1:1">
+ <c r="A3">
+ <v>3</v>
+ </c>
+ </row>
+ <row r="4" spans="1:1">
+ <c r="A4">
+ <v>4</v>
+ </c>
+ </row>
+ <row r="5" spans="1:1">
+ <c r="A5">
+ <v>5</v>
+ </c>
+ </row>
+ <row r="6" spans="1:1">
+ <c r="A6">
+ <v>6</v>
+ </c>
+ </row>
+ <row r="7" spans="1:1">
+ <c r="A7">
+ <v>7</v>
+ </c>
+ </row>
+ <row r="8" spans="1:1">
+ <c r="A8">
+ <v>8</v>
+ </c>
+ </row>
+ <row r="9" spans="1:1">
+ <c r="A9">
+ <v>9</v>
+ </c>
+ </row>
+ <row r="10" spans="1:1">
+ <c r="A10">
+ <v>10</v>
+ </c>
+ </row>
+ <row r="11" spans="1:1">
+ <c r="A11">
+ <v>11</v>
+ </c>
+ </row>
+ <row r="12" spans="1:1">
+ <c r="A12">
+ <v>12</v>
+ </c>
+ </row>
+ </sheetData>
+ <conditionalFormatting sqref="A1:A12">
+ <cfRule type="dataBar" priority="1">
+ <dataBar minLength="5" maxLength="95">
+ <cfvo type="num" val="5"/>
+ <cfvo type="percent" val="90"/>
+ <color rgb="FF8DB4E3"/>
+ </dataBar>
+ </cfRule>
+ </conditionalFormatting>
+ <pageMargins left="0.7" right="0.7" top="0.75" bottom="0.75" header="0.3" footer="0.3"/>
+ </worksheet>
+ """)
+
+ got = _xml_to_list(fh.getvalue())
+
+ self.assertEqual(got, exp)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/jmcnamara/XlsxWriter.git@7a48769abe7e68c0f3729c4f7c951ad5ca9a0bf8#egg=XlsxWriter
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: XlsxWriter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/XlsxWriter
| [
"xlsxwriter/test/worksheet/test_cond_format21.py::TestAssembleWorksheet::test_assemble_xml_file"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 868 | 291 | [
"xlsxwriter/worksheet.py"
] |
|
Azure__azure-cli-1423 | c56c791d01985f1209d30849b55922d1adac85b5 | 2016-11-23 01:05:31 | 1576ec67f5029db062579da230902a559acbb9fe | mention-bot: @brendandburns, thanks for your PR! By analyzing the history of the files in this pull request, we identified @yugangw-msft, @tjprescott and @derekbekoe to be potential reviewers.
tjprescott: @brendandburns this issue applies more generally to the CLI. See issue #1419. I think it can fixed more generally by making a change here: https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/application.py#L200
In addition to startswith('@') we can probably have something like `or '=@' in ...`.
derekbekoe: Can we not make use of this?
https://docs.python.org/3/library/argparse.html#fromfile-prefix-chars
cc @tjprescott @johanste
brendandburns: @tjprescott thanks for the tip, I moved the code to that location (and I added unit tests too as a bonus!)
please take another look.
Thanks!
--brendan
tjprescott: @derekbekoe
> then arguments that start with any of the specified characters will be treated as files, and will be replaced by the arguments they contain
This problem is that with `foo=@bar`, the @ is in the middle of the string.
brendandburns: @tjprescott fixed (and added tests to cover it) please take another look.
Thanks
--brendan | diff --git a/src/azure-cli-core/azure/cli/core/application.py b/src/azure-cli-core/azure/cli/core/application.py
index 0bc115f74..caab463a1 100644
--- a/src/azure-cli-core/azure/cli/core/application.py
+++ b/src/azure-cli-core/azure/cli/core/application.py
@@ -6,6 +6,7 @@
from collections import defaultdict
import sys
import os
+import re
import uuid
import argparse
from azure.cli.core.parser import AzCliCommandParser, enable_autocomplete
@@ -196,10 +197,24 @@ class Application(object):
global_group.add_argument('--debug', dest='_log_verbosity_debug', action='store_true',
help='Increase logging verbosity to show all debug logs.')
+ @staticmethod
+ def _maybe_load_file(arg):
+ ix = arg.find('@')
+ if ix == -1:
+ return arg
+
+ if ix == 0:
+ return Application._load_file(arg[1:])
+
+ res = re.match('(\\-\\-?[a-zA-Z0-9]+[\\-a-zA-Z0-9]*\\=)\\"?@([^\\"]*)\\"?', arg)
+ if not res:
+ return arg
+ return res.group(1) + Application._load_file(res.group(2))
+
@staticmethod
def _expand_file_prefixed_files(argv):
return list(
- [Application._load_file(arg[1:]) if arg.startswith('@') else arg for arg in argv]
+ [Application._maybe_load_file(arg) for arg in argv]
)
@staticmethod
diff --git a/src/command_modules/azure-cli-appservice/azure/cli/command_modules/appservice/_params.py b/src/command_modules/azure-cli-appservice/azure/cli/command_modules/appservice/_params.py
index 6a50031c7..2d3464686 100644
--- a/src/command_modules/azure-cli-appservice/azure/cli/command_modules/appservice/_params.py
+++ b/src/command_modules/azure-cli-appservice/azure/cli/command_modules/appservice/_params.py
@@ -53,7 +53,7 @@ register_cli_argument('appservice plan', 'admin_site_name', help='The name of th
register_cli_argument('appservice web', 'slot', help="the name of the slot. Default to the productions slot if not specified")
register_cli_argument('appservice web', 'name', arg_type=name_arg_type, completer=get_resource_name_completion_list('Microsoft.Web/sites'), id_part='name', help='name of the web')
register_cli_argument('appservice web create', 'name', options_list=('--name', '-n'), help='name of the new webapp')
-register_cli_argument('appservice web create', 'plan', options_list=('--plan',), completer=get_resource_name_completion_list('Microsoft.Web/serverFarms'),
+register_cli_argument('appservice web create', 'plan', options_list=('--plan', '-p'), completer=get_resource_name_completion_list('Microsoft.Web/serverFarms'),
help="name or resource id of the app service plan. Use 'appservice plan create' to get one")
register_cli_argument('appservice web deployment user', 'user_name', help='user name')
| CLI inconsistency -- to specify a parameters file, one must not use equal sign
If you use an equal sign between --parameters and the actual file path, you end up with the following:
```
az resource group deployment create --resource-group=jmsdcosrg --template-file=./output/azuredeploy.json --parameters="@./output/azuredeploy.parameters.json"
No JSON object could be decoded
```
Removing the equal sign works:
```
az resource group deployment create --resource-group=jmsdcosrg --template-file=./output/azuredeploy.json --parameters "@./output/azuredeploy.parameters.json"
``` | Azure/azure-cli | diff --git a/src/azure-cli-core/azure/cli/core/tests/test_application.py b/src/azure-cli-core/azure/cli/core/tests/test_application.py
index aef479668..252fd558a 100644
--- a/src/azure-cli-core/azure/cli/core/tests/test_application.py
+++ b/src/azure-cli-core/azure/cli/core/tests/test_application.py
@@ -5,10 +5,14 @@
import unittest
+import os
+import tempfile
+
from six import StringIO
from azure.cli.core.application import Application, Configuration, IterateAction
from azure.cli.core.commands import CliCommand
+from azure.cli.core._util import CLIError
class TestApplication(unittest.TestCase):
@@ -80,5 +84,32 @@ class TestApplication(unittest.TestCase):
self.assertEqual(hellos[1]['hello'], 'sir')
self.assertEqual(hellos[1]['something'], 'else')
+ def test_expand_file_prefixed_files(self):
+ f = tempfile.NamedTemporaryFile(delete=False)
+ f.close()
+
+ with open(f.name, 'w+') as stream:
+ stream.write('foo')
+
+ cases = [
+ [['--bar=baz'], ['--bar=baz']],
+ [['--bar', 'baz'], ['--bar', 'baz']],
+ [['--bar=@{}'.format(f.name)], ['--bar=foo']],
+ [['--bar', '@{}'.format(f.name)], ['--bar', 'foo']],
+ [['--bar', f.name], ['--bar', f.name]],
+ [['--bar="@{}"'.format(f.name)], ['--bar=foo']],
+ [['[email protected]'], ['[email protected]']],
+ [['--bar', '[email protected]'], ['--bar', '[email protected]']],
+ ]
+
+ for test_case in cases:
+ try:
+ args = Application._expand_file_prefixed_files(test_case[0]) #pylint: disable=protected-access
+ self.assertEqual(args, test_case[1], 'Failed for: {}'.format(test_case[0]))
+ except CLIError as ex:
+ self.fail('Unexpected error for {} ({}): {}'.format(test_case[0], args, ex))
+
+ os.remove(f.name)
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.3.0
astroid==1.4.9
attrs==22.2.0
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_context&subdirectory=src/command_modules/azure-cli-context
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@c56c791d01985f1209d30849b55922d1adac85b5#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-graphrbac==0.30.0rc6
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-compute==0.32.1
azure-mgmt-containerregistry==0.1.0
azure-mgmt-dns==0.30.0rc6
azure-mgmt-iothub==0.1.0
azure-mgmt-keyvault==0.30.0
azure-mgmt-network==0.30.0rc6
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-storage==0.30.0rc6
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.30.1
azure-nspkg==3.0.2
azure-storage==0.33.0
bcrypt==4.0.1
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.3.7
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycparser==2.21
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
PyNaCl==1.5.0
pyOpenSSL==16.1.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.16
vcrpy==1.7.4
wrapt==1.16.0
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.3.0
- astroid==1.4.9
- attrs==22.2.0
- azure-common==1.1.4
- azure-graphrbac==0.30.0rc6
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-compute==0.32.1
- azure-mgmt-containerregistry==0.1.0
- azure-mgmt-dns==0.30.0rc6
- azure-mgmt-iothub==0.1.0
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-network==0.30.0rc6
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-storage==0.30.0rc6
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.30.1
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- bcrypt==4.0.1
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.3.7
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycparser==2.21
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pynacl==1.5.0
- pyopenssl==16.1.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.7.4
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/azure-cli-core/azure/cli/core/tests/test_application.py::TestApplication::test_expand_file_prefixed_files"
] | [] | [
"src/azure-cli-core/azure/cli/core/tests/test_application.py::TestApplication::test_application_register_and_call_handlers",
"src/azure-cli-core/azure/cli/core/tests/test_application.py::TestApplication::test_list_value_parameter"
] | [] | MIT License | 869 | 730 | [
"src/azure-cli-core/azure/cli/core/application.py",
"src/command_modules/azure-cli-appservice/azure/cli/command_modules/appservice/_params.py"
] |
cdent__gabbi-191 | 07dc4913eb980dd4a4a6130abfd708a39235d7f2 | 2016-11-27 14:39:00 | 07dc4913eb980dd4a4a6130abfd708a39235d7f2 | diff --git a/gabbi/runner.py b/gabbi/runner.py
index ac98dea..9f45351 100644
--- a/gabbi/runner.py
+++ b/gabbi/runner.py
@@ -14,6 +14,7 @@
import argparse
from importlib import import_module
+import os
import sys
import unittest
@@ -84,8 +85,9 @@ def run():
else:
for input_file in input_files:
with open(input_file, 'r') as fh:
+ data_dir = os.path.dirname(input_file)
success = run_suite(fh, handler_objects, host, port,
- prefix, force_ssl, failfast)
+ prefix, force_ssl, failfast, data_dir)
if not failure: # once failed, this is considered immutable
failure = not success
if failure and failfast:
@@ -95,7 +97,7 @@ def run():
def run_suite(handle, handler_objects, host, port, prefix, force_ssl=False,
- failfast=False):
+ failfast=False, data_dir='.'):
"""Run the tests from the YAML in handle."""
data = utils.load_yaml(handle)
if force_ssl:
@@ -106,7 +108,7 @@ def run_suite(handle, handler_objects, host, port, prefix, force_ssl=False,
loader = unittest.defaultTestLoader
test_suite = suitemaker.test_suite_from_dict(
- loader, 'input', data, '.', host, port, None, None, prefix=prefix,
+ loader, 'input', data, data_dir, host, port, None, None, prefix=prefix,
handlers=handler_objects)
result = ConciseTestRunner(
| Data <@filename isn't relative to the YAML file
The [docs say](https://gabbi.readthedocs.io/en/latest/format.html#data):
> If the value is a string that begins with <@ then the rest of the string is treated as the name of a file to be loaded from the same directory as the YAML file.
But I haven't found it works like this unless I cd into the directory containing the yaml file.
`_load_data_file` says:
```python
path = os.path.join(self.test_directory, os.path.basename(filename))
```
this does a few things:
- makes the path begin with `self.test_directory` (defaults to `.`, the current working directory)
- disguards any folders in the <@ `filename` path
- appends `filename`
This means, if I'm in `/`, and I have a test that says `data: <@cake.jpg`, I cannot run my tests as advised in the docs:
`gabbi-run -- /my/test.yaml /my/other.yaml`
`FileNotFoundError: [Errno 2] No such file or directory: './cake.jpg'`
So in our test running script, we have to start with `cd tests/`.
My preference, would be to make the path relative to the location of the yaml file. That way, I can run my tests from whatever directory.
Obviously this isn't possible when redirecting with `<` as the filename is never seen by gabbi. So I'm happy is discuss other ideas.
---
Furthermore I can't keep my test data in a child directory, unless I cd into that directory like:
```shell
cd yaml_tests/test_data
gabbi-run -- ../test_things.yaml
```
So for that reason, I'd like to allow directories like `<@test_data/filename.txt` to be included in the path.
---
Happy to write the patch, if we come up with an approach here. | cdent/gabbi | diff --git a/gabbi/tests/gabbits_runner/subdir/sample.json b/gabbi/tests/gabbits_runner/subdir/sample.json
new file mode 100644
index 0000000..ddbce20
--- /dev/null
+++ b/gabbi/tests/gabbits_runner/subdir/sample.json
@@ -0,0 +1,1 @@
+{"items": {"house": "blue"}}
diff --git a/gabbi/tests/gabbits_runner/test_data.yaml b/gabbi/tests/gabbits_runner/test_data.yaml
new file mode 100644
index 0000000..35d056a
--- /dev/null
+++ b/gabbi/tests/gabbits_runner/test_data.yaml
@@ -0,0 +1,8 @@
+tests:
+
+- name: POST data from file
+ verbose: true
+ POST: /
+ request_headers:
+ content-type: application/json
+ data: <@subdir/sample.json
diff --git a/gabbi/tests/test_runner.py b/gabbi/tests/test_runner.py
index bf882ab..1b86235 100644
--- a/gabbi/tests/test_runner.py
+++ b/gabbi/tests/test_runner.py
@@ -22,6 +22,7 @@ from wsgi_intercept.interceptor import Urllib3Interceptor
from gabbi import exception
from gabbi.handlers import base
+from gabbi.handlers.jsonhandler import JSONHandler
from gabbi import runner
from gabbi.tests.simple_wsgi import SimpleWsgi
@@ -249,6 +250,28 @@ class RunnerTest(unittest.TestCase):
self.assertIn('{\n', output)
self.assertIn('}\n', output)
+ def test_data_dir_good(self):
+ """Confirm that data dir is the test file's dir."""
+ sys.argv = ['gabbi-run', 'http://%s:%s/foo' % (self.host, self.port)]
+
+ sys.argv.append('--')
+ sys.argv.append('gabbi/tests/gabbits_runner/test_data.yaml')
+ with self.server():
+ try:
+ runner.run()
+ except SystemExit as err:
+ self.assertSuccess(err)
+
+ # Compare the verbose output of tests with pretty printed
+ # data.
+ with open('gabbi/tests/gabbits_runner/subdir/sample.json') as data:
+ data = JSONHandler.loads(data.read())
+ expected_string = JSONHandler.dumps(data, pretty=True)
+
+ sys.stdout.seek(0)
+ output = sys.stdout.read()
+ self.assertIn(expected_string, output)
+
def assertSuccess(self, exitError):
errors = exitError.args[0]
if errors:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.28 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"testrepository",
"coverage",
"hacking",
"sphinx"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
decorator==5.1.1
docutils==0.18.1
extras==1.0.0
fixtures==4.0.1
flake8==3.8.4
-e git+https://github.com/cdent/gabbi.git@07dc4913eb980dd4a4a6130abfd708a39235d7f2#egg=gabbi
hacking==4.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
MarkupSafe==2.0.1
mccabe==0.6.1
mock==5.2.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testrepository==0.0.21
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- decorator==5.1.1
- docutils==0.18.1
- extras==1.0.0
- fixtures==4.0.1
- flake8==3.8.4
- hacking==4.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- markupsafe==2.0.1
- mccabe==0.6.1
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pycodestyle==2.6.0
- pyflakes==2.2.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testrepository==0.0.21
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_runner.py::RunnerTest::test_data_dir_good"
] | [] | [
"gabbi/tests/test_runner.py::RunnerTest::test_custom_response_handler",
"gabbi/tests/test_runner.py::RunnerTest::test_exit_code",
"gabbi/tests/test_runner.py::RunnerTest::test_input_files",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing",
"gabbi/tests/test_runner.py::RunnerTest::test_target_url_parsing_standard_port",
"gabbi/tests/test_runner.py::RunnerTest::test_verbose_output_formatting"
] | [] | Apache License 2.0 | 875 | 392 | [
"gabbi/runner.py"
] |
|
dask__zict-13 | 4621b4c40456b3dd00eab9ce8e9d3742b080833c | 2016-11-28 13:35:08 | 4621b4c40456b3dd00eab9ce8e9d3742b080833c | diff --git a/zict/file.py b/zict/file.py
index c561471..0b45752 100644
--- a/zict/file.py
+++ b/zict/file.py
@@ -3,9 +3,9 @@ from __future__ import absolute_import, division, print_function
import errno
import os
try:
- from urllib.parse import quote
+ from urllib.parse import quote, unquote
except ImportError:
- from urllib import quote
+ from urllib import quote, unquote
from .common import ZictBase
@@ -18,11 +18,21 @@ def _safe_key(key):
return quote(key, safe='')
+def _unsafe_key(key):
+ """
+ Undo the escaping done by _safe_key().
+ """
+ return unquote(key)
+
+
class File(ZictBase):
""" Mutable Mapping interface to a directory
Keys must be strings, values must be bytes
+ Note this shouldn't be used for interprocess persistence, as keys
+ are cached in memory.
+
Parameters
----------
directory: string
@@ -38,44 +48,42 @@ class File(ZictBase):
def __init__(self, directory, mode='a'):
self.directory = directory
self.mode = mode
+ self._keys = set()
if not os.path.exists(self.directory):
os.mkdir(self.directory)
+ else:
+ for n in os.listdir(self.directory):
+ self._keys.add(_unsafe_key(n))
def __str__(self):
- return '<File: %s, mode="%s">' % (self.directory, self.mode)
+ return '<File: %s, mode="%s", %d elements>' % (self.directory, self.mode, len(self))
__repr__ = __str__
def __getitem__(self, key):
- try:
- with open(os.path.join(self.directory, _safe_key(key)), 'rb') as f:
- result = f.read()
- except EnvironmentError as e:
- if e.args[0] != errno.ENOENT:
- raise
+ if key not in self._keys:
raise KeyError(key)
- return result
+ with open(os.path.join(self.directory, _safe_key(key)), 'rb') as f:
+ return f.read()
def __setitem__(self, key, value):
with open(os.path.join(self.directory, _safe_key(key)), 'wb') as f:
f.write(value)
+ self._keys.add(key)
def __contains__(self, key):
- return os.path.exists(os.path.join(self.directory, _safe_key(key)))
+ return key in self._keys
def keys(self):
- return iter(os.listdir(self.directory))
+ return iter(self._keys)
- def __iter__(self):
- return self.keys()
+ __iter__ = keys
def __delitem__(self, key):
- try:
- os.remove(os.path.join(self.directory, _safe_key(key)))
- except EnvironmentError as e:
- if e.args[0] != errno.ENOENT:
- raise
+ if key not in self._keys:
raise KeyError(key)
+ os.remove(os.path.join(self.directory, _safe_key(key)))
+ self._keys.remove(key)
def __len__(self):
- return sum(1 for _ in self.keys())
+ return len(self._keys)
| File.__contains__ is slow
It is convenient in Dask to frequently check if a key is present in the `.data` dictionary. Unfortunately this is slow, due to calls to both `os.path.exists` and `_safe_key`. | dask/zict | diff --git a/zict/tests/test_file.py b/zict/tests/test_file.py
index d88d90b..62fe887 100644
--- a/zict/tests/test_file.py
+++ b/zict/tests/test_file.py
@@ -90,6 +90,19 @@ def test_arbitrary_chars(fn):
z[key]
z[key] = b'foo'
assert z[key] == b'foo'
+ assert list(z) == [key]
+ assert list(z.keys()) == [key]
+ assert list(z.items()) == [(key, b'foo')]
+ assert list(z.values()) == [b'foo']
+
+ zz = File(fn)
+ assert zz[key] == b'foo'
+ assert list(zz) == [key]
+ assert list(zz.keys()) == [key]
+ assert list(zz.items()) == [(key, b'foo')]
+ assert list(zz.values()) == [b'foo']
+ del zz
+
del z[key]
with pytest.raises(KeyError):
z[key]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
HeapDict==1.0.1
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
-e git+https://github.com/dask/zict.git@4621b4c40456b3dd00eab9ce8e9d3742b080833c#egg=zict
| name: zict
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- heapdict==1.0.1
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/zict
| [
"zict/tests/test_file.py::test_arbitrary_chars"
] | [] | [
"zict/tests/test_file.py::test_mapping",
"zict/tests/test_file.py::test_implementation",
"zict/tests/test_file.py::test_str",
"zict/tests/test_file.py::test_setitem_typeerror",
"zict/tests/test_file.py::test_contextmanager",
"zict/tests/test_file.py::test_delitem",
"zict/tests/test_file.py::test_missing_key"
] | [] | BSD 3-Clause "New" or "Revised" License | 879 | 773 | [
"zict/file.py"
] |
|
bear__python-twitter-416 | ae88240b902d857ba099dfd17f820e640c67557d | 2016-11-28 14:30:54 | c28e9fb02680f30c9c56019f6836c2b47fa1d73a | diff --git a/twitter/__init__.py b/twitter/__init__.py
index 87bb718..0534776 100644
--- a/twitter/__init__.py
+++ b/twitter/__init__.py
@@ -23,7 +23,7 @@ __author__ = 'The Python-Twitter Developers'
__email__ = '[email protected]'
__copyright__ = 'Copyright (c) 2007-2016 The Python-Twitter Developers'
__license__ = 'Apache License 2.0'
-__version__ = '3.2'
+__version__ = '3.2.1'
__url__ = 'https://github.com/bear/python-twitter'
__download_url__ = 'https://pypi.python.org/pypi/python-twitter'
__description__ = 'A Python wrapper around the Twitter API'
diff --git a/twitter/twitter_utils.py b/twitter/twitter_utils.py
index 081d1ed..0b2af5b 100644
--- a/twitter/twitter_utils.py
+++ b/twitter/twitter_utils.py
@@ -161,12 +161,13 @@ def calc_expected_status_length(status, short_url_length=23):
Expected length of the status message as an integer.
"""
- replaced_chars = 0
- status_length = len(status)
- match = re.findall(URL_REGEXP, status)
- if len(match) >= 1:
- replaced_chars = len(''.join(match))
- status_length = status_length - replaced_chars + (short_url_length * len(match))
+ status_length = 0
+ for word in re.split(r'\s', status):
+ if is_url(word):
+ status_length += short_url_length
+ else:
+ status_length += len(word)
+ status_length += len(re.findall(r'\s', status))
return status_length
| calc_expected_status_length does not work
calc_expected_status_length is broken in two ways right now.
1. URL_REGEXP only recognizes URLs at the start of a string, which is correct for is_url, but for calc_expected_status_length, all URLs should be detected, not just URLs at the start of the tweet. There should be a different URL_REGEXP for calc_expected_status_length without the start-of-string makers.
2. The URL regex has multiple groups, so findall returns a list of tuples, not strings. If there are matches, replaced_chars = len(''.join(match)) crashes, it should be replaced_chars = len(''.join(map(lambda x: x[0], match))) instead | bear/python-twitter | diff --git a/tests/test_twitter_utils.py b/tests/test_twitter_utils.py
index 3ca619f..b021e34 100644
--- a/tests/test_twitter_utils.py
+++ b/tests/test_twitter_utils.py
@@ -5,6 +5,7 @@ import unittest
import twitter
from twitter.twitter_utils import (
+ calc_expected_status_length,
parse_media_file
)
@@ -58,3 +59,18 @@ class ApiTest(unittest.TestCase):
self.assertRaises(
twitter.TwitterError,
lambda: twitter.twitter_utils.enf_type('test', int, 'hi'))
+
+ def test_calc_expected_status_length(self):
+ status = 'hi a tweet there'
+ len_status = calc_expected_status_length(status)
+ self.assertEqual(len_status, 16)
+
+ def test_calc_expected_status_length_with_url(self):
+ status = 'hi a tweet there example.com'
+ len_status = calc_expected_status_length(status)
+ self.assertEqual(len_status, 40)
+
+ def test_calc_expected_status_length_with_url_and_extra_spaces(self):
+ status = 'hi a tweet there example.com'
+ len_status = calc_expected_status_length(status)
+ self.assertEqual(len_status, 63)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 3.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt",
"requirements.testing.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | build==1.2.2.post1
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
check-manifest==0.50
codecov==2.1.13
colorama==0.4.6
coverage==7.8.0
coveralls==4.0.1
distlib==0.3.9
docopt==0.6.2
exceptiongroup==1.2.2
filelock==3.18.0
future==1.0.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
oauthlib==3.2.2
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pyproject-api==1.9.0
pyproject_hooks==1.2.0
pytest==8.3.5
pytest-cov==6.0.0
pytest-runner==6.0.1
-e git+https://github.com/bear/python-twitter.git@ae88240b902d857ba099dfd17f820e640c67557d#egg=python_twitter
PyYAML==6.0.2
requests==2.32.3
requests-oauthlib==2.0.0
responses==0.25.7
six==1.17.0
tomli==2.2.1
tox==4.25.0
tox-pyenv==1.1.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.30.0
zipp==3.21.0
| name: python-twitter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- build==1.2.2.post1
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- check-manifest==0.50
- codecov==2.1.13
- colorama==0.4.6
- coverage==7.8.0
- coveralls==4.0.1
- distlib==0.3.9
- docopt==0.6.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- future==1.0.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- oauthlib==3.2.2
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyproject-api==1.9.0
- pyproject-hooks==1.2.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-runner==6.0.1
- pyyaml==6.0.2
- requests==2.32.3
- requests-oauthlib==2.0.0
- responses==0.25.7
- six==1.17.0
- tomli==2.2.1
- tox==4.25.0
- tox-pyenv==1.1.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.30.0
- zipp==3.21.0
prefix: /opt/conda/envs/python-twitter
| [
"tests/test_twitter_utils.py::ApiTest::test_calc_expected_status_length_with_url",
"tests/test_twitter_utils.py::ApiTest::test_calc_expected_status_length_with_url_and_extra_spaces"
] | [] | [
"tests/test_twitter_utils.py::ApiTest::test_calc_expected_status_length",
"tests/test_twitter_utils.py::ApiTest::test_parse_media_file_fileobj",
"tests/test_twitter_utils.py::ApiTest::test_parse_media_file_http",
"tests/test_twitter_utils.py::ApiTest::test_parse_media_file_local_file",
"tests/test_twitter_utils.py::ApiTest::test_utils_error_checking"
] | [] | Apache License 2.0 | 880 | 435 | [
"twitter/__init__.py",
"twitter/twitter_utils.py"
] |
|
tableau__server-client-python-109 | e853d7c79f54f232c9f1da07f6c085db399e598a | 2016-12-02 04:38:26 | e853d7c79f54f232c9f1da07f6c085db399e598a | diff --git a/tableauserverclient/models/user_item.py b/tableauserverclient/models/user_item.py
index 1e4f54a..2df6764 100644
--- a/tableauserverclient/models/user_item.py
+++ b/tableauserverclient/models/user_item.py
@@ -119,7 +119,7 @@ class UserItem(object):
@classmethod
def from_response(cls, resp):
- all_user_items = set()
+ all_user_items = []
parsed_response = ET.fromstring(resp)
all_user_xml = parsed_response.findall('.//t:user', namespaces=NAMESPACE)
for user_xml in all_user_xml:
@@ -128,7 +128,7 @@ class UserItem(object):
user_item = cls(name, site_role)
user_item._set_values(id, name, site_role, last_login, external_auth_user_id,
fullname, email, auth_setting, domain_name)
- all_user_items.add(user_item)
+ all_user_items.append(user_item)
return all_user_items
@staticmethod
| Pager with users throws TypeError
I am trying to extract the list of users using the Pager:
`print(*TSC.Pager(tableau.users))`
I get the following error:
` File "metalab_users.py", line 74, in <module>
print(*tableau_users)
File "C:\Program Files\Python35\lib\site-packages\tableauserverclient\server\pager.py", line 30, in __iter__
yield current_item_list.pop(0)
TypeError: pop() takes no arguments (1 given)`
When calling projects with the same code, I get no such error:
`print(*TSC.Pager(tableau.projects))` | tableau/server-client-python | diff --git a/test/test_user.py b/test/test_user.py
index 556cd62..fa83443 100644
--- a/test/test_user.py
+++ b/test/test_user.py
@@ -54,7 +54,7 @@ class UserTests(unittest.TestCase):
all_users, pagination_item = self.server.users.get()
self.assertEqual(0, pagination_item.total_available)
- self.assertEqual(set(), all_users)
+ self.assertEqual([], all_users)
def test_get_before_signin(self):
self.server._auth_token = None
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-subtests",
"requests-mock"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
pytest-subtests==0.14.1
requests==2.11.1
requests-mock==1.12.1
-e git+https://github.com/tableau/server-client-python.git@e853d7c79f54f232c9f1da07f6c085db399e598a#egg=tableauserverclient
tomli==2.2.1
urllib3==2.3.0
| name: server-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-subtests==0.14.1
- requests==2.11.1
- requests-mock==1.12.1
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/server-client-python
| [
"test/test_user.py::UserTests::test_get_empty"
] | [] | [
"test/test_user.py::UserTests::test_add",
"test/test_user.py::UserTests::test_get",
"test/test_user.py::UserTests::test_get_before_signin",
"test/test_user.py::UserTests::test_get_by_id",
"test/test_user.py::UserTests::test_get_by_id_missing_id",
"test/test_user.py::UserTests::test_populate_workbooks",
"test/test_user.py::UserTests::test_populate_workbooks_missing_id",
"test/test_user.py::UserTests::test_remove",
"test/test_user.py::UserTests::test_remove_missing_id",
"test/test_user.py::UserTests::test_update",
"test/test_user.py::UserTests::test_update_missing_id"
] | [] | MIT License | 883 | 243 | [
"tableauserverclient/models/user_item.py"
] |
|
pre-commit__pre-commit-449 | 0e2c3c1ff9b2f870ca00f2cc45c515101b6f96fe | 2016-12-04 21:31:45 | 8837cfa7ffcc419216d4e01392cee0f1ceee9c88 | diff --git a/pre_commit/languages/pcre.py b/pre_commit/languages/pcre.py
index faba1da..314ea09 100644
--- a/pre_commit/languages/pcre.py
+++ b/pre_commit/languages/pcre.py
@@ -1,11 +1,12 @@
from __future__ import unicode_literals
-from sys import platform
+import sys
from pre_commit.xargs import xargs
ENVIRONMENT_DIR = None
+GREP = 'ggrep' if sys.platform == 'darwin' else 'grep'
def install_environment(
@@ -19,10 +20,7 @@ def install_environment(
def run_hook(repo_cmd_runner, hook, file_args):
# For PCRE the entry is the regular expression to match
- cmd = (
- 'ggrep' if platform == 'darwin' else 'grep',
- '-H', '-n', '-P',
- ) + tuple(hook['args']) + (hook['entry'],)
+ cmd = (GREP, '-H', '-n', '-P') + tuple(hook['args']) + (hook['entry'],)
# Grep usually returns 0 for matches, and nonzero for non-matches so we
# negate it here.
diff --git a/pre_commit/parse_shebang.py b/pre_commit/parse_shebang.py
index 438e72e..122750a 100644
--- a/pre_commit/parse_shebang.py
+++ b/pre_commit/parse_shebang.py
@@ -11,7 +11,8 @@ printable = frozenset(string.printable)
class ExecutableNotFoundError(OSError):
- pass
+ def to_output(self):
+ return (1, self.args[0].encode('UTF-8'), b'')
def parse_bytesio(bytesio):
diff --git a/pre_commit/util.py b/pre_commit/util.py
index 18394c3..dc8e478 100644
--- a/pre_commit/util.py
+++ b/pre_commit/util.py
@@ -172,16 +172,16 @@ def cmd_output(*cmd, **kwargs):
try:
cmd = parse_shebang.normalize_cmd(cmd)
except parse_shebang.ExecutableNotFoundError as e:
- returncode, stdout, stderr = (-1, e.args[0].encode('UTF-8'), b'')
+ returncode, stdout, stderr = e.to_output()
else:
popen_kwargs.update(kwargs)
proc = __popen(cmd, **popen_kwargs)
stdout, stderr = proc.communicate()
- if encoding is not None and stdout is not None:
- stdout = stdout.decode(encoding)
- if encoding is not None and stderr is not None:
- stderr = stderr.decode(encoding)
returncode = proc.returncode
+ if encoding is not None and stdout is not None:
+ stdout = stdout.decode(encoding)
+ if encoding is not None and stderr is not None:
+ stderr = stderr.decode(encoding)
if retcode is not None and retcode != returncode:
raise CalledProcessError(
diff --git a/pre_commit/xargs.py b/pre_commit/xargs.py
index e0b8729..eea3acd 100644
--- a/pre_commit/xargs.py
+++ b/pre_commit/xargs.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
+from pre_commit import parse_shebang
from pre_commit.util import cmd_output
@@ -52,6 +53,11 @@ def xargs(cmd, varargs, **kwargs):
stdout = b''
stderr = b''
+ try:
+ parse_shebang.normexe(cmd[0])
+ except parse_shebang.ExecutableNotFoundError as e:
+ return e.to_output()
+
for run_cmd in partition(cmd, varargs, **kwargs):
proc_retcode, proc_out, proc_err = cmd_output(
*run_cmd, encoding=None, retcode=None
| PCRE passes even if ggrep is not installed on OS X
I was scratching my head because some hooks were passing even though they should clearly have failed.
When running `run --all-files` with the `--verbose` flag, I noticed why this was happening:
````
[do_not_commit] Block if "DO NOT COMMIT" is found..................................................................Passed
hookid: do_not_commit
Executable `ggrep` not found
````
It seems like the correct behaviour would be to fail and warn user to install `ggrep`?
For the record, I installed `ggrep` as follow: `brew install homebrew/dupes/grep`, and the hooks now fail as expected. | pre-commit/pre-commit | diff --git a/tests/repository_test.py b/tests/repository_test.py
index 79400ae..f61ee88 100644
--- a/tests/repository_test.py
+++ b/tests/repository_test.py
@@ -12,11 +12,13 @@ import pkg_resources
import pytest
from pre_commit import five
+from pre_commit import parse_shebang
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
from pre_commit.clientlib.validate_config import validate_config_extra
from pre_commit.jsonschema_extensions import apply_defaults
from pre_commit.languages import helpers
from pre_commit.languages import node
+from pre_commit.languages import pcre
from pre_commit.languages import python
from pre_commit.languages import ruby
from pre_commit.repository import Repository
@@ -187,6 +189,25 @@ def test_missing_executable(tempdir_factory, store):
)
[email protected]
+def test_missing_pcre_support(tempdir_factory, store):
+ orig_find_executable = parse_shebang.find_executable
+
+ def no_grep(exe, **kwargs):
+ if exe == pcre.GREP:
+ return None
+ else:
+ return orig_find_executable(exe, **kwargs)
+
+ with mock.patch.object(parse_shebang, 'find_executable', no_grep):
+ _test_hook_repo(
+ tempdir_factory, store, 'pcre_hooks_repo',
+ 'regex-with-quotes', ['/dev/null'],
+ 'Executable `{}` not found'.format(pcre.GREP).encode('UTF-8'),
+ expected_return_code=1,
+ )
+
+
@pytest.mark.integration
def test_run_a_script_hook(tempdir_factory, store):
_test_hook_repo(
diff --git a/tests/util_test.py b/tests/util_test.py
index e9c7500..ba2b4a8 100644
--- a/tests/util_test.py
+++ b/tests/util_test.py
@@ -6,6 +6,7 @@ import random
import pytest
from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output
from pre_commit.util import cwd
from pre_commit.util import memoize_by_cwd
from pre_commit.util import tmpdir
@@ -81,3 +82,9 @@ def test_tmpdir():
with tmpdir() as tempdir:
assert os.path.exists(tempdir)
assert not os.path.exists(tempdir)
+
+
+def test_cmd_output_exe_not_found():
+ ret, out, _ = cmd_output('i-dont-exist', retcode=None)
+ assert ret == 1
+ assert out == 'Executable `i-dont-exist` not found'
diff --git a/tests/xargs_test.py b/tests/xargs_test.py
index cb27f62..529eb19 100644
--- a/tests/xargs_test.py
+++ b/tests/xargs_test.py
@@ -64,6 +64,11 @@ def test_xargs_negate():
assert ret == 1
+def test_xargs_negate_command_not_found():
+ ret, _, _ = xargs.xargs(('cmd-not-found',), ('1',), negate=True)
+ assert ret != 0
+
+
def test_xargs_retcode_normal():
ret, _, _ = xargs.xargs(exit_cmd, ('0',), _max_length=max_length)
assert ret == 0
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 4
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aspy.yaml==1.3.0
attrs==25.3.0
cached-property==2.0.1
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==5.2.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
-e git+https://github.com/pre-commit/pre-commit.git@0e2c3c1ff9b2f870ca00f2cc45c515101b6f96fe#egg=pre_commit
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
PyYAML==6.0.2
referencing==0.36.2
rpds-py==0.24.0
tomli==2.2.1
typing_extensions==4.13.0
virtualenv==20.29.3
| name: pre-commit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aspy-yaml==1.3.0
- attrs==25.3.0
- cached-property==2.0.1
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==5.2.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pyyaml==6.0.2
- referencing==0.36.2
- rpds-py==0.24.0
- setuptools==18.4
- tomli==2.2.1
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/pre-commit
| [
"tests/util_test.py::test_cmd_output_exe_not_found",
"tests/xargs_test.py::test_xargs_negate_command_not_found"
] | [
"tests/repository_test.py::test_python_hook",
"tests/repository_test.py::test_python_hook_args_with_spaces",
"tests/repository_test.py::test_python_hook_weird_setup_cfg",
"tests/repository_test.py::test_switch_language_versions_doesnt_clobber",
"tests/repository_test.py::test_versioned_python_hook",
"tests/repository_test.py::test_run_a_node_hook",
"tests/repository_test.py::test_run_versioned_node_hook",
"tests/repository_test.py::test_run_a_ruby_hook",
"tests/repository_test.py::test_run_versioned_ruby_hook",
"tests/repository_test.py::test_system_hook_with_spaces",
"tests/repository_test.py::test_missing_executable",
"tests/repository_test.py::test_missing_pcre_support",
"tests/repository_test.py::test_run_a_script_hook",
"tests/repository_test.py::test_run_hook_with_spaced_args",
"tests/repository_test.py::test_run_hook_with_curly_braced_arguments",
"tests/repository_test.py::test_pcre_hook_no_match",
"tests/repository_test.py::test_pcre_hook_matching",
"tests/repository_test.py::test_pcre_hook_case_insensitive_option",
"tests/repository_test.py::test_pcre_many_files",
"tests/repository_test.py::test_cwd_of_hook",
"tests/repository_test.py::test_lots_of_files",
"tests/repository_test.py::test_languages",
"tests/repository_test.py::test_additional_dependencies",
"tests/repository_test.py::test_additional_python_dependencies_installed",
"tests/repository_test.py::test_additional_dependencies_roll_forward",
"tests/repository_test.py::test_additional_ruby_dependencies_installed",
"tests/repository_test.py::test_additional_node_dependencies_installed",
"tests/repository_test.py::test_reinstall",
"tests/repository_test.py::test_control_c_control_c_on_install",
"tests/repository_test.py::test_really_long_file_paths",
"tests/repository_test.py::test_config_overrides_repo_specifics",
"tests/repository_test.py::test_tags_on_repositories",
"tests/repository_test.py::test_hook_id_not_present",
"tests/repository_test.py::test_too_new_version",
"tests/repository_test.py::test_versions_ok[0.1.0]",
"tests/repository_test.py::test_versions_ok[0.9.3]"
] | [
"tests/repository_test.py::test_repo_url",
"tests/repository_test.py::test_sha",
"tests/repository_test.py::test_local_repository",
"tests/util_test.py::test_memoized_by_cwd_returns_same_twice_in_a_row",
"tests/util_test.py::test_memoized_by_cwd_returns_different_for_different_args",
"tests/util_test.py::test_memoized_by_cwd_changes_with_different_cwd",
"tests/util_test.py::test_clean_on_failure_noop",
"tests/util_test.py::test_clean_path_on_failure_does_nothing_when_not_raising",
"tests/util_test.py::test_clean_path_on_failure_cleans_for_normal_exception",
"tests/util_test.py::test_clean_path_on_failure_cleans_for_system_exit",
"tests/util_test.py::test_tmpdir",
"tests/xargs_test.py::test_partition_trivial",
"tests/xargs_test.py::test_partition_simple",
"tests/xargs_test.py::test_partition_limits",
"tests/xargs_test.py::test_argument_too_long",
"tests/xargs_test.py::test_xargs_smoke",
"tests/xargs_test.py::test_xargs_negate",
"tests/xargs_test.py::test_xargs_retcode_normal"
] | [] | MIT License | 887 | 913 | [
"pre_commit/languages/pcre.py",
"pre_commit/parse_shebang.py",
"pre_commit/util.py",
"pre_commit/xargs.py"
] |
|
joblib__joblib-444 | aab21b654e853616b31c6e50255b2dcf47af5818 | 2016-12-07 09:55:10 | 4650f03703b853c50672dabb64bef170b55a12f7 | lesteve: For the record, an excerpt of the output from the snippet in #420 (on this PR):
```
---------------------------------------------------------------------------
Sub-process traceback:
---------------------------------------------------------------------------
ZeroDivisionError Wed Dec 7 15:16:32 2016
PID: 22133 Python 3.5.2: /home/lesteve/miniconda3/bin/python
...........................................................................
/home/lesteve/dev/joblib/joblib/parallel.py in __call__(self=<joblib.parallel.BatchedCalls object>)
126 def __init__(self, iterator_slice):
127 self.items = list(iterator_slice)
128 self._size = len(self.items)
129
130 def __call__(self):
--> 131 return [func(*args, **kwargs) for func, args, kwargs in self.items]
self.items = [(<function exception_on_even>, (2,), {})]
132
133 def __len__(self):
134 return self._size
135
...........................................................................
/home/lesteve/dev/joblib/joblib/parallel.py in <listcomp>(.0=<list_iterator object>)
126 def __init__(self, iterator_slice):
127 self.items = list(iterator_slice)
128 self._size = len(self.items)
129
130 def __call__(self):
--> 131 return [func(*args, **kwargs) for func, args, kwargs in self.items]
func = <function exception_on_even>
args = (2,)
kwargs = {}
132
133 def __len__(self):
134 return self._size
135
...........................................................................
/tmp/test_module.py in exception_on_even(x=2)
1 def exception_on_even(x):
2 if x % 2 == 0:
----> 3 1/0
4 else:
5 return x
ZeroDivisionError: division by zero
___________________________________________________________________________
```
lesteve: OK I am quite happy with the tests I added and I don't think there was any strong reason for the padding, so I am going to merge this one.
@GaelVaroquaux @ogrisel shout if you see any issue with this and I'll revert it. | diff --git a/joblib/format_stack.py b/joblib/format_stack.py
index 3f3d106..4be93c1 100644
--- a/joblib/format_stack.py
+++ b/joblib/format_stack.py
@@ -135,15 +135,10 @@ def _fixed_getframes(etb, context=1, tb_offset=0):
aux = traceback.extract_tb(etb)
assert len(records) == len(aux)
for i, (file, lnum, _, _) in enumerate(aux):
- maybeStart = lnum - 1 - context // 2
- start = max(maybeStart, 0)
+ maybe_start = lnum - 1 - context // 2
+ start = max(maybe_start, 0)
end = start + context
lines = linecache.getlines(file)[start:end]
- # pad with empty lines if necessary
- if maybeStart < 0:
- lines = (['\n'] * -maybeStart) + lines
- if len(lines) < context:
- lines += ['\n'] * (context - len(lines))
buf = list(records[i])
buf[LNUM_POS] = lnum
buf[INDEX_POS] = lnum - 1 - start
@@ -400,15 +395,10 @@ def format_outer_frames(context=5, stack_start=None, stack_end=None,
if (os.path.basename(filename) in ('iplib.py', 'py3compat.py')
and func_name in ('execfile', 'safe_execfile', 'runcode')):
break
- maybeStart = line_no - 1 - context // 2
- start = max(maybeStart, 0)
+ maybe_start = line_no - 1 - context // 2
+ start = max(maybe_start, 0)
end = start + context
lines = linecache.getlines(filename)[start:end]
- # pad with empty lines if necessary
- if maybeStart < 0:
- lines = (['\n'] * -maybeStart) + lines
- if len(lines) < context:
- lines += ['\n'] * (context - len(lines))
buf = list(records[i])
buf[LNUM_POS] = line_no
buf[INDEX_POS] = line_no - 1 - start
| Wrong line pointed to in subprocess traceback
test.py
```py
from test_module import exception_on_even
from joblib import Parallel, delayed
if __name__ == '__main__':
Parallel(n_jobs=2)(delayed(exception_on_even)(x) for x in [1, 2, 3])
```
test_module.py
```py
def exception_on_even(x):
if x % 2 == 0:
1/0
else:
return x
```
Excerpt from the Traceback when running `ipython test.py`:
```
---------------------------------------------------------------------------
Sub-process traceback:
---------------------------------------------------------------------------
ZeroDivisionError Wed Nov 2 09:52:46 2016
PID: 7267 Python 3.5.2: /home/lesteve/miniconda3/bin/python
...........................................................................
/home/lesteve/dev/joblib/joblib/parallel.py in __call__(self=<joblib.parallel.BatchedCalls object>)
126 def __init__(self, iterator_slice):
127 self.items = list(iterator_slice)
128 self._size = len(self.items)
129
130 def __call__(self):
--> 131 return [func(*args, **kwargs) for func, args, kwargs in self.items]
self.items = [(<function exception_on_even>, (2,), {})]
132
133 def __len__(self):
134 return self._size
135
...........................................................................
/home/lesteve/dev/joblib/joblib/parallel.py in <listcomp>(.0=<list_iterator object>)
126 def __init__(self, iterator_slice):
127 self.items = list(iterator_slice)
128 self._size = len(self.items)
129
130 def __call__(self):
--> 131 return [func(*args, **kwargs) for func, args, kwargs in self.items]
func = <function exception_on_even>
args = (2,)
kwargs = {}
132
133 def __len__(self):
134 return self._size
135
...........................................................................
/tmp/test_module.py in exception_on_even(x=2)
1
2
----> 3
4 def exception_on_even(x):
5 if x % 2 == 0:
6 1/0
7 else:
8 return x
9
10
ZeroDivisionError: division by zero
```
The arrow in the last frame points to line 3 whereas it should point to line 6. | joblib/joblib | diff --git a/joblib/test/test_format_stack.py b/joblib/test/test_format_stack.py
index 32fc8f5..baa8076 100644
--- a/joblib/test/test_format_stack.py
+++ b/joblib/test/test_format_stack.py
@@ -6,6 +6,8 @@ Unit tests for the stack formatting utilities
# Copyright (c) 2010 Gael Varoquaux
# License: BSD Style, 3 clauses.
+import imp
+import os
import re
import sys
@@ -75,6 +77,38 @@ def test_format_records():
re.MULTILINE)
+def test_format_records_file_with_less_lines_than_context(tmpdir):
+ # See https://github.com/joblib/joblib/issues/420
+ filename = os.path.join(tmpdir.strpath, 'small_file.py')
+ code_lines = ['def func():', ' 1/0']
+ code = '\n'.join(code_lines)
+ open(filename, 'w').write(code)
+
+ small_file = imp.load_source('small_file', filename)
+ try:
+ small_file.func()
+ except ZeroDivisionError:
+ etb = sys.exc_info()[2]
+
+ records = _fixed_getframes(etb, context=10)
+ # Check that if context is bigger than the number of lines in
+ # the file you do not get padding
+ frame, tb_filename, line, func_name, context, _ = records[-1]
+ assert [l.rstrip() for l in context] == code_lines
+
+ formatted_records = format_records(records)
+ # 2 lines for header in the traceback: lines of ...... +
+ # filename with function
+ len_header = 2
+ nb_lines_formatted_records = len(formatted_records[1].splitlines())
+ assert (nb_lines_formatted_records == len_header + len(code_lines))
+ # Check exception stack
+ arrow_regex = r'^-+>\s+\d+\s+'
+ assert re.search(arrow_regex + r'1/0',
+ formatted_records[1],
+ re.MULTILINE)
+
+
@with_numpy
def test_format_exc_with_compiled_code():
# Trying to tokenize compiled C code raise SyntaxError.
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"numpy>=1.6.1",
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/joblib/joblib.git@aab21b654e853616b31c6e50255b2dcf47af5818#egg=joblib
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: joblib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- numpy==2.0.2
- pytest-cov==6.0.0
prefix: /opt/conda/envs/joblib
| [
"joblib/test/test_format_stack.py::test_format_records_file_with_less_lines_than_context"
] | [] | [
"joblib/test/test_format_stack.py::test_safe_repr",
"joblib/test/test_format_stack.py::test_format_records",
"joblib/test/test_format_stack.py::test_format_exc_with_compiled_code"
] | [] | BSD 3-Clause "New" or "Revised" License | 891 | 531 | [
"joblib/format_stack.py"
] |
repoze__repoze.sendmail-38 | af9b7db732bf7ee0edb956c05b5c7b7818e31d64 | 2016-12-07 17:28:30 | af9b7db732bf7ee0edb956c05b5c7b7818e31d64 | mmerickel: I'll try in the next half hour here to see if I can reproduce the old error and then upgrade to see if this patch fixes it. Thanks. | diff --git a/repoze/sendmail/delivery.py b/repoze/sendmail/delivery.py
index 59115d7..28b7cf4 100644
--- a/repoze/sendmail/delivery.py
+++ b/repoze/sendmail/delivery.py
@@ -102,6 +102,7 @@ class MailDataManager(object):
if self.transaction is None:
raise ValueError("Not in a transaction")
self.state = final_state
+ self.tpc_phase = 0
def commit(self, trans):
if self.transaction is None:
| MailDataManager breaks the transaction manager upon abort
I have some code sending emails and for legitimate reasons the email is rejected by the remote server. I want to log the exception but instead I see the `ValueError('TPC in progress')` a couple times in the output, squashing the error.
It's actually causing the transaction manager to be in such a broken state that the next transaction.manager.begin() after the failed abort, causes another exception. Below is the crap fix to prove the diagnosis, but I'm not positive it's the correct fix. I downgraded to 4.1 and things work as expected.
From what I can tell this behavior appeared in https://github.com/repoze/repoze.sendmail/commit/06ad882dc4f4a220fba2583e67076e9f3dcf3359 when the manager was rewritten.
On a related side note, aborting in the second phase of a two-phase commit is a Really Bad Thing(tm). Since sending email isn't transactional it almost makes more sense to either send the email in an earlier phase or even as an `afterCommitHook`.
``` diff
diff --git a/repoze/sendmail/delivery.py b/repoze/sendmail/delivery.py
index 59115d7..6b56add 100644
--- a/repoze/sendmail/delivery.py
+++ b/repoze/sendmail/delivery.py
@@ -115,8 +115,8 @@ class MailDataManager(object):
raise ValueError("Not in a transaction")
if self.transaction is not trans:
raise ValueError("In a different transaction")
- if self.tpc_phase != 0:
- raise ValueError("TPC in progress")
+ #if self.tpc_phase != 0:
+ # raise ValueError("TPC in progress")
if self.onAbort:
self.onAbort()
```
| repoze/repoze.sendmail | diff --git a/repoze/sendmail/tests/test_delivery.py b/repoze/sendmail/tests/test_delivery.py
index 9776943..fe5ef6f 100644
--- a/repoze/sendmail/tests/test_delivery.py
+++ b/repoze/sendmail/tests/test_delivery.py
@@ -79,6 +79,7 @@ class TestMailDataManager(unittest.TestCase):
mdm.join_transaction(txn)
mdm._finish(2)
self.assertEqual(mdm.state, 2)
+ self.assertEqual(mdm.tpc_phase, 0)
def test_commit_wo_transaction(self):
mdm = self._makeOne(object)
@@ -251,6 +252,7 @@ class TestMailDataManager(unittest.TestCase):
mdm.tpc_finish(txn)
self.assertEqual(_called, [(1, 2)])
self.assertEqual(mdm.state, MailDataManagerState.TPC_FINISHED)
+ self.assertEqual(mdm.tpc_phase, 0)
def test_tpc_abort_wo_transaction(self):
mdm = self._makeOne()
@@ -287,6 +289,7 @@ class TestMailDataManager(unittest.TestCase):
mdm.tpc_phase = 1
mdm.tpc_abort(txn)
self.assertEqual(mdm.state, MailDataManagerState.TPC_ABORTED)
+ self.assertEqual(mdm.tpc_phase, 0)
def test_tpc_abort_voted_ok(self):
from ..delivery import MailDataManagerState
@@ -296,6 +299,7 @@ class TestMailDataManager(unittest.TestCase):
mdm.tpc_phase = 2
mdm.tpc_abort(txn)
self.assertEqual(mdm.state, MailDataManagerState.TPC_ABORTED)
+ self.assertEqual(mdm.tpc_phase, 0)
class TestAbstractMailDelivery(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 4.2 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "setuptools zope.interface>=3.6.0 transaction",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/repoze/repoze.sendmail.git@af9b7db732bf7ee0edb956c05b5c7b7818e31d64#egg=repoze.sendmail
tomli==1.2.3
transaction==3.0.1
typing_extensions==4.1.1
zipp==3.6.0
zope.interface @ file:///tmp/build/80754af9/zope.interface_1625036152722/work
| name: repoze.sendmail
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- transaction=3.0.1=py36h06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- zope=1.0=py36_1
- zope.interface=5.4.0=py36h7f8727e_0
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/repoze.sendmail
| [
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_begun_ok",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_voted_ok",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_finish_ok"
] | [] | [
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test__finish_w_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test__finish_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_abort_w_TPC",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_abort_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_abort_w_onAbort",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_abort_w_same_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_abort_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_class_conforms_to_IDataManager",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_commit_w_TPC",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_commit_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_commit_w_same_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_commit_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_ctor",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_instance_conforms_to_IDataManager",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_join_transaction_conflict",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_join_transaction_duplicated",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_join_transaction_explicit",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_join_transaction_implicit",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_savepoint_w_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_savepoint_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_sortKey",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_already_finished",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_not_already_tpc",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_abort_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_begin_already_tpc",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_begin_ok",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_begin_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_begin_w_subtransaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_begin_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_finish_not_already_tpc",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_finish_not_voted",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_finish_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_finish_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_vote_not_already_tpc",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_vote_ok",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_vote_w_foreign_transaction",
"repoze/sendmail/tests/test_delivery.py::TestMailDataManager::test_tpc_vote_wo_transaction",
"repoze/sendmail/tests/test_delivery.py::TestAbstractMailDelivery::test_send_w_bad_message",
"repoze/sendmail/tests/test_delivery.py::TestAbstractMailDelivery::test_send_w_bare_message",
"repoze/sendmail/tests/test_delivery.py::TestAbstractMailDelivery::test_send_w_populated_message",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_alternate_transaction_manager",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_class_conforms_to_IMailDelivery",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_ctor",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_instance_conforms_to_IMailDelivery",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_send",
"repoze/sendmail/tests/test_delivery.py::TestDirectMailDelivery::test_send_returns_messageId",
"repoze/sendmail/tests/test_delivery.py::TestQueuedMailDelivery::test_class_conforms_to_IMailDelivery",
"repoze/sendmail/tests/test_delivery.py::TestQueuedMailDelivery::test_ctor",
"repoze/sendmail/tests/test_delivery.py::TestQueuedMailDelivery::test_instance_conforms_to_IMailDelivery",
"repoze/sendmail/tests/test_delivery.py::TestQueuedMailDelivery::test_send",
"repoze/sendmail/tests/test_delivery.py::TestQueuedMailDeliveryWithMaildir::test_send_w_non_ASCII_addrs"
] | [] | null | 893 | 134 | [
"repoze/sendmail/delivery.py"
] |
praekeltfoundation__pydexec-8 | ee5bed66d25dea943735cd5d5c05947d7623fecf | 2016-12-12 14:42:13 | ee5bed66d25dea943735cd5d5c05947d7623fecf | diff --git a/pydexec/command.py b/pydexec/command.py
index b2ba787..85478c7 100644
--- a/pydexec/command.py
+++ b/pydexec/command.py
@@ -12,6 +12,7 @@ class Command(object):
self._args = []
self._user = None
self._env = dict(os.environ)
+ self._workdir = os.getcwd()
def args(self, *args):
""" Add a list of extra arguments to the command. """
@@ -90,22 +91,37 @@ class Command(object):
self._user = User.from_spec(user)
return self
+ def workdir(self, directory):
+ """
+ Change the current working directory to the given directory path before
+ executing the command. Note that, unlike the WORKDIR Dockerfile
+ directive, this will not cause the specified directory to be created.
+ """
+ self._workdir = directory
+ return self
+
def run(self):
cmd = [self._program] + self._args
- kwargs = {'env': self._env}
+ kwargs = {
+ 'env': self._env,
+ 'preexec_fn': self._preexec_fn,
+ }
if self._user is not None:
env = self._env.copy()
env['HOME'] = self._user.home
- kwargs = {
- 'env': env,
- 'preexec_fn': self._user.set_user
- }
+ kwargs['env'] = env
retcode = Popen(cmd, **kwargs).wait()
if retcode:
raise CalledProcessError(retcode, cmd)
+ def _preexec_fn(self):
+ if self._user is not None:
+ self._user.set_user()
+
+ os.chdir(self._workdir)
+
def exec_(self):
"""
Exec the process, replacing the current process with the command.
@@ -116,4 +132,6 @@ class Command(object):
self._user.set_user()
self._env['HOME'] = self._user.home
+ os.chdir(self._workdir)
+
os.execvpe(self._program, cmd, self._env)
| Change working directory | praekeltfoundation/pydexec | diff --git a/pydexec/tests/test_command.py b/pydexec/tests/test_command.py
index 31b4f15..e72decc 100644
--- a/pydexec/tests/test_command.py
+++ b/pydexec/tests/test_command.py
@@ -1,12 +1,14 @@
# -*- coding: utf-8 -*-
+import multiprocessing
import os
-from multiprocessing import Process
+import sys
+import traceback
from subprocess import CalledProcessError
import pytest
from testtools import ExpectedException
from testtools.assertions import assert_that
-from testtools.matchers import Equals
+from testtools.matchers import Equals, Not
from pydexec.command import Command
from pydexec.tests.helpers import captured_lines
@@ -25,11 +27,42 @@ def run_cmd(cmd):
return cmd.run()
+class ExceptionProcess(multiprocessing.Process):
+ """
+ Multiprocessing Process that can be queried for an exception that occurred
+ in the child process.
+ http://stackoverflow.com/a/33599967
+ """
+ def __init__(self, *args, **kwargs):
+ multiprocessing.Process.__init__(self, *args, **kwargs)
+ self._pconn, self._cconn = multiprocessing.Pipe()
+ self._exception = None
+
+ def run(self):
+ try:
+ multiprocessing.Process.run(self)
+ self._cconn.send(None)
+ except Exception as e:
+ tb = traceback.format_exc()
+ self._cconn.send((e, tb))
+
+ @property
+ def exception(self):
+ if self._pconn.poll():
+ self._exception = self._pconn.recv()
+ return self._exception
+
+
def exec_cmd(cmd):
# Run the command in a separate process so that it can be exec-ed
- p = Process(target=cmd.exec_)
+ p = ExceptionProcess(target=cmd.exec_)
p.start()
p.join()
+ if p.exception:
+ error, tb = p.exception
+ print(tb)
+ raise error
+
if p.exitcode:
# Simulate a CalledProcessError to simplify tests
raise CalledProcessError(p.exitcode, [cmd._program] + cmd._args)
@@ -333,3 +366,90 @@ class TestCommand(object):
'option "--home" for program "/bin/sh"'):
Command('/bin/sh').opt_from_env('--home', 'DOESNOTEXIST',
required=True)
+
+ def test_workdir_changes_directory(self, capfd, tmpdir, runner):
+ """
+ When a directory is specified as the 'workdir' for a command, the
+ command's subprocess should be executed with the current working
+ directory as the specified directory.
+ """
+ cwd = os.getcwd()
+
+ runner(Command('/bin/pwd').workdir(str(tmpdir)))
+
+ out_lines, _ = captured_lines(capfd)
+ child_cwd = out_lines.pop(0)
+ assert_that(child_cwd, Equals(str(tmpdir)))
+
+ # Assert only the working directory of the child process has changed
+ assert_that(child_cwd, Not(Equals(cwd)))
+ assert_that(cwd, Equals(os.getcwd()))
+
+ def test_workdir_inherited(self, capfd, runner):
+ """
+ When a command is run its child process should inherit the current
+ working directory.
+ """
+ cwd = os.getcwd()
+
+ runner(Command('/bin/pwd'))
+
+ out_lines, _ = captured_lines(capfd)
+ assert_that(out_lines.pop(0), Equals(cwd))
+
+ def test_workdir_set_at_command_creation(self, capfd, tmpdir, runner):
+ """
+ When a command is run its child process should inherit the current
+ working directory at the time the Command object is initialised and
+ changes to the parent process's current working directory should have
+ no effect on the command.
+ """
+ old_cwd = os.getcwd()
+ new_cwd = str(tmpdir)
+
+ # Command created before chdir
+ cmd = Command('/bin/pwd')
+
+ # Change parent process's current working directory
+ os.chdir(new_cwd)
+ assert_that(os.getcwd(), Equals(new_cwd))
+
+ runner(cmd)
+ out_lines, _ = captured_lines(capfd)
+ assert_that(out_lines.pop(0), Equals(old_cwd))
+
+ @pytest.mark.skipif(sys.version_info >= (3, 0),
+ reason='only for Python < 3')
+ def test_workdir_does_not_exist(self, capfd, runner):
+ """
+ When the command is run and the specified workdir does not exist, an
+ error is raised.
+ """
+ with ExpectedException(
+ OSError,
+ r"\[Errno 2\] No such file or directory: 'DOESNOTEXIST'"):
+ runner(Command('/bin/pwd').workdir('DOESNOTEXIST'))
+
+ @pytest.mark.skipif(sys.version_info[0] < 3,
+ reason='requires Python 3')
+ def test_workdir_does_not_exist_exec(self, capfd, runner):
+ """
+ When the command is run and the specified workdir does not exist, an
+ error is raised.
+ """
+ with ExpectedException(
+ FileNotFoundError, # noqa: F821
+ r"\[Errno 2\] No such file or directory: 'DOESNOTEXIST'"):
+ exec_cmd(Command('/bin/pwd').workdir('DOESNOTEXIST'))
+
+ @pytest.mark.skipif(sys.version_info < (3, 3),
+ reason='requires Python 3.3')
+ def test_workdir_does_not_exist_run(self, capfd):
+ """
+ When the command is run and the specified workdir does not exist, an
+ error is raised.
+ """
+ from subprocess import SubprocessError
+ with ExpectedException(
+ SubprocessError, r'Exception occurred in preexec_fn\.'):
+ run_cmd(Command('/bin/pwd').workdir('DOESNOTEXIST'))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
-e git+https://github.com/praekeltfoundation/pydexec.git@ee5bed66d25dea943735cd5d5c05947d7623fecf#egg=pydexec
pyflakes==3.3.1
pytest==8.3.5
pytest-cov==6.0.0
pytest-cover==3.0.0
pytest-coverage==0.0
testtools==2.7.2
tomli==2.2.1
| name: pydexec
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-cover==3.0.0
- pytest-coverage==0.0
- testtools==2.7.2
- tomli==2.2.1
prefix: /opt/conda/envs/pydexec
| [
"pydexec/tests/test_command.py::TestCommand::test_workdir_changes_directory[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_set_at_command_creation[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_does_not_exist_exec[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_changes_directory[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_set_at_command_creation[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_does_not_exist_exec[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_does_not_exist_run"
] | [] | [
"pydexec/tests/test_command.py::TestCommand::test_stdout[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_stderr[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_output_unicode[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_error[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_preserves_environment[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_switch_user[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_switch_user_preserves_environment[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env_remove[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env_clear[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_not_present[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_no_remove[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_default[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_not_present[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_no_remove[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_default[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_inherited[run_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_stdout[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_stderr[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_output_unicode[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_error[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_preserves_environment[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env_remove[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_env_clear[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_not_present[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_no_remove[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_default[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_not_present[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_no_remove[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_default[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_workdir_inherited[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_arg_from_env_required",
"pydexec/tests/test_command.py::TestCommand::test_opt_from_env_required"
] | [
"pydexec/tests/test_command.py::TestCommand::test_switch_user[exec_cmd]",
"pydexec/tests/test_command.py::TestCommand::test_switch_user_preserves_environment[exec_cmd]"
] | BSD 3-Clause "New" or "Revised" License | 899 | 507 | [
"pydexec/command.py"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.